diff --git a/.github/labeler.yml b/.github/labeler.yml new file mode 100644 index 0000000000..adee762bfc --- /dev/null +++ b/.github/labeler.yml @@ -0,0 +1,6 @@ +# Make sure any dependency changes are benchmarked (only changes to the locks +# make a material difference - changes to the Conda YAML files are not +# benchmarked). +benchmark_this: +- changed-files: + - any-glob-to-any-file: 'requirements/locks/*.lock' diff --git a/.github/workflows/benchmarks_run.yml b/.github/workflows/benchmarks_run.yml index 5bc2fba6ec..69d753e4cc 100644 --- a/.github/workflows/benchmarks_run.yml +++ b/.github/workflows/benchmarks_run.yml @@ -42,11 +42,11 @@ jobs: - name: Install ASV & Nox run: | - pip install asv nox + pip install "asv!=0.6.2" nox - name: Cache environment directories id: cache-env-dir - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | .nox @@ -56,7 +56,7 @@ jobs: - name: Cache test data directory id: cache-test-data - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ${{ env.IRIS_TEST_DATA_PATH }} diff --git a/.github/workflows/ci-citation.yml b/.github/workflows/ci-citation.yml new file mode 100644 index 0000000000..99cced758b --- /dev/null +++ b/.github/workflows/ci-citation.yml @@ -0,0 +1,30 @@ +name: ci-citation + +on: + pull_request: + paths: + - "CITATION.cff" + + push: + paths: + - "CITATION.cff" + + workflow_dispatch: + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + validate: + name: "validate" + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: "check CITATION.cff" + uses: citation-file-format/cffconvert-github-action@4cf11baa70a673bfdf9dad0acc7ee33b3f4b6084 + with: + args: "--validate" diff --git a/.github/workflows/ci-manifest.yml b/.github/workflows/ci-manifest.yml index abe77be606..0868811ac6 100644 --- a/.github/workflows/ci-manifest.yml +++ b/.github/workflows/ci-manifest.yml @@ -23,4 +23,4 @@ concurrency: jobs: manifest: 
name: "check-manifest" - uses: scitools/workflows/.github/workflows/ci-manifest.yml@2023.12.1 + uses: scitools/workflows/.github/workflows/ci-manifest.yml@2024.02.0 diff --git a/.github/workflows/ci-tests.yml b/.github/workflows/ci-tests.yml index 2c2a083050..5f1b2397ef 100644 --- a/.github/workflows/ci-tests.yml +++ b/.github/workflows/ci-tests.yml @@ -140,5 +140,5 @@ jobs: nox --session ${{ matrix.session }} -- --verbose ${{ matrix.coverage }} - name: Upload coverage report - uses: codecov/codecov-action@v3 + uses: codecov/codecov-action@v4 if: ${{ matrix.coverage }} diff --git a/.github/workflows/labeler.yml b/.github/workflows/labeler.yml new file mode 100644 index 0000000000..7914ec2531 --- /dev/null +++ b/.github/workflows/labeler.yml @@ -0,0 +1,15 @@ +# Reference +# - https://github.com/actions/labeler + +name: "Pull Request Labeler" +on: +- pull_request_target + +jobs: + labeler: + permissions: + contents: read + pull-requests: write + runs-on: ubuntu-latest + steps: + - uses: actions/labeler@v5 \ No newline at end of file diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml index d1753a7b1b..082aed2cb1 100644 --- a/.github/workflows/refresh-lockfiles.yml +++ b/.github/workflows/refresh-lockfiles.yml @@ -14,5 +14,5 @@ on: jobs: refresh_lockfiles: - uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2023.12.1 + uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2024.02.0 secrets: inherit diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d4fc3eedd9..ac83b6178b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -29,7 +29,7 @@ repos: - id: no-commit-to-branch - repo: https://github.com/astral-sh/ruff-pre-commit - rev: "v0.1.8" + rev: "v0.2.1" hooks: - id: ruff types: [file, python] @@ -45,7 +45,7 @@ repos: additional_dependencies: [tomli] - repo: https://github.com/PyCQA/flake8 - rev: 6.1.0 + rev: 7.0.0 hooks: - id: flake8 types: [file, python] diff --git 
a/.readthedocs.yml b/.readthedocs.yml index af1a8f6303..d82bd513ca 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -1,9 +1,9 @@ version: 2 build: - os: ubuntu-20.04 + os: "ubuntu-22.04" tools: - python: mambaforge-4.10 + python: "mambaforge-22.9" jobs: post_checkout: # The SciTools/iris repository is shallow i.e., has a .git/shallow, diff --git a/.ruff.toml b/.ruff.toml index 9451023469..5d78ecdb57 100644 --- a/.ruff.toml +++ b/.ruff.toml @@ -11,7 +11,6 @@ lint.ignore = [ # pycodestyle (E, W) # https://docs.astral.sh/ruff/rules/#pycodestyle-e-w "E", - "W", # mccabe (C90) # https://docs.astral.sh/ruff/rules/#mccabe-c90 @@ -26,22 +25,10 @@ lint.ignore = [ # (D-1) Permanent "D105", # Missing docstring in magic method # (D-2) Temporary, to be removed when we are more compliant. Rare cases mmove to (1). - "D417", # Missing argument descriptions in the docstring "D101", # Missing docstring in public class "D102", # Missing docstring in public method - "D104", # Missing docstring in public package - "D106", # Missing docstring in public nested class - # (D-3) Temporary, before an initial review, either fix ocurrenvces or move to (2). - "D100", # Missing docstring in public module + # (D-3) Temporary, before an initial review, either fix ocurrences or move to (2). "D103", # Missing docstring in public function - "D200", # One-line docstring should fit on one line - "D202", # No blank lines allowed after function docstring - "D205", # 1 blank line required between summary line and description - "D401", # First line of docstring should be in imperative mood: ... 
- "D404", # First word of the docstring should not be "This" - "D405", # Section name should be properly capitalized - "D406", # Section name should end with a newline - "D407", # Missing dashed underline after section # pyupgrade (UP) # https://docs.astral.sh/ruff/rules/#pyupgrade-up @@ -71,10 +58,6 @@ lint.ignore = [ # https://docs.astral.sh/ruff/rules/#flake8-builtins-a "A", - # flake8-copyright (CPY) - # https://docs.astral.sh/ruff/rules/#flake8-copyright-cpy - "CPY", - # flake8-comprehensions (C4) # https://docs.astral.sh/ruff/rules/#flake8-comprehensions-c4 "C4", @@ -91,10 +74,6 @@ lint.ignore = [ # https://docs.astral.sh/ruff/rules/#flake8-future-annotations-fa "FA", - # flake8-implicit-str-concat (ISC) - # https://docs.astral.sh/ruff/rules/#flake8-implicit-str-concat-isc - "ISC", - # flake8-logging-format (G) # https://docs.astral.sh/ruff/rules/#flake8-logging-format-g "G", @@ -187,10 +166,6 @@ lint.ignore = [ # https://docs.astral.sh/ruff/rules/#flynt-fly "FLY", - # Airflow (AIR) - # https://docs.astral.sh/ruff/rules/#airflow-air - "AIR", - # Perflint (PERF) # https://docs.astral.sh/ruff/rules/#perflint-perf "PERF", diff --git a/CITATION.cff b/CITATION.cff new file mode 100644 index 0000000000..c3fcdd26d5 --- /dev/null +++ b/CITATION.cff @@ -0,0 +1,23 @@ +cff-version: 1.2.0 +message: "If Iris played an important part in your research then please add us to your reference list by using the references below." 
+title: "Iris" +keywords: + - "cf-metadata" + - "data-analysis" + - "earth-science" + - "grib" + - "netcdf" + - "meteorology" + - "oceanography" + - "space-weather" + - "ugrid" + - "visualisation" +authors: + - name: "Iris contributors" +abstract: "A powerful, format-agnostic, and community-driven Python package for analysing and visualising Earth science data" +license: "BSD-3-Clause" +license-url: "https://spdx.org/licenses/BSD-3-Clause.html" +doi: "10.5281/zenodo.595182" +url: "http://scitools.org.uk/" +repository-code: "https://github.com/SciTools/iris" +type: "software" diff --git a/MANIFEST.in b/MANIFEST.in index 28eaf30baa..e594303d8f 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -18,6 +18,9 @@ exclude .ruff.toml exclude CHANGES exclude CODE_OF_CONDUCT.md exclude codecov.yml +include COPYING +include COPYING.LESSER +include CITATION.cff include LICENSE exclude Makefile exclude noxfile.py diff --git a/benchmarks/asv_delegated_conda.py b/benchmarks/asv_delegated_conda.py index b0bdd6b64b..c8070b063a 100644 --- a/benchmarks/asv_delegated_conda.py +++ b/benchmarks/asv_delegated_conda.py @@ -2,8 +2,9 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""ASV plug-in providing an alternative :class:`asv.plugins.conda.Conda` -subclass that manages the Conda environment via custom user scripts. +"""ASV plug-in providing an alternative :class:`asv.plugins.conda.Conda` subclass. + +Manages the Conda environment via custom user scripts. """ @@ -42,7 +43,9 @@ def __init__( requirements: dict, tagged_env_vars: dict, ) -> None: - """Parameters + """__init__. 
+ + Parameters ---------- conf : Config instance diff --git a/benchmarks/benchmarks/__init__.py b/benchmarks/benchmarks/__init__.py index 14edb2eda7..87a77fa5a4 100644 --- a/benchmarks/benchmarks/__init__.py +++ b/benchmarks/benchmarks/__init__.py @@ -10,7 +10,7 @@ def disable_repeat_between_setup(benchmark_object): - """Decorator for benchmarks where object persistence would be inappropriate. + """Benchmarks where object persistence would be inappropriate (decorator). E.g: * Benchmarking data realisation @@ -36,7 +36,9 @@ def disable_repeat_between_setup(benchmark_object): class TrackAddedMemoryAllocation: - """Context manager which measures by how much process resident memory grew, + """Measures by how much process resident memory grew, during execution. + + Context manager which measures by how much process resident memory grew, during execution of its enclosed code block. Obviously limited as to what it actually measures : Relies on the current @@ -84,7 +86,7 @@ def addedmem_mb(self): @staticmethod def decorator(decorated_func): - """Decorates this benchmark to track growth in resident memory during execution. + """Benchmark to track growth in resident memory during execution. Intended for use on ASV ``track_`` benchmarks. Applies the :class:`TrackAddedMemoryAllocation` context manager to the benchmark @@ -105,7 +107,9 @@ def _wrapper(*args, **kwargs): def on_demand_benchmark(benchmark_object): - """Decorator. Disables these benchmark(s) unless ON_DEMAND_BENCHARKS env var is set. + """Disables these benchmark(s) unless ON_DEMAND_BENCHARKS env var is set. + + This is a decorator. For benchmarks that, for whatever reason, should not be run by default. E.g: diff --git a/benchmarks/benchmarks/aux_factory.py b/benchmarks/benchmarks/aux_factory.py index 6f71e47086..2da93351ee 100644 --- a/benchmarks/benchmarks/aux_factory.py +++ b/benchmarks/benchmarks/aux_factory.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. 
# See LICENSE in the root of the repository for full licensing details. -"""AuxFactory benchmark tests. - -""" +"""AuxFactory benchmark tests.""" import numpy as np @@ -18,7 +16,9 @@ class FactoryCommon: # * make class an ABC # * remove NotImplementedError # * combine setup_common into setup - """A base class running a generalised suite of benchmarks for any factory. + """Run a generalised suite of benchmarks for any factory. + + A base class running a generalised suite of benchmarks for any factory. Factory to be specified in a subclass. ASV will run the benchmarks within this class for any subclasses. diff --git a/benchmarks/benchmarks/coords.py b/benchmarks/benchmarks/coords.py index f90ed1fd31..d1f7631e00 100644 --- a/benchmarks/benchmarks/coords.py +++ b/benchmarks/benchmarks/coords.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Coord benchmark tests. - -""" +"""Coord benchmark tests.""" import numpy as np @@ -25,7 +23,9 @@ class CoordCommon: # * make class an ABC # * remove NotImplementedError # * combine setup_common into setup - """A base class running a generalised suite of benchmarks for any coord. + """Run a generalised suite of benchmarks for any coord. + + A base class running a generalised suite of benchmarks for any coord. Coord to be specified in a subclass. ASV will run the benchmarks within this class for any subclasses. diff --git a/benchmarks/benchmarks/cperf/equality.py b/benchmarks/benchmarks/cperf/equality.py index a25cf99128..16f8c10aab 100644 --- a/benchmarks/benchmarks/cperf/equality.py +++ b/benchmarks/benchmarks/cperf/equality.py @@ -2,15 +2,17 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Equality benchmarks for the CPerf scheme of the UK Met Office's NG-VAT project. 
-""" +"""Equality benchmarks for the CPerf scheme of the UK Met Office's NG-VAT project.""" from .. import on_demand_benchmark from . import SingleDiagnosticMixin class EqualityMixin(SingleDiagnosticMixin): - r"""Uses :class:`SingleDiagnosticMixin` as the realistic case will be comparing + r"""Use :class:`SingleDiagnosticMixin` as the realistic case. + + Uses :class:`SingleDiagnosticMixin` as the realistic case will be comparing :class:`~iris.cube.Cube`\\ s that have been loaded from file. + """ # Cut down the parent parameters. @@ -24,9 +26,7 @@ def setup(self, file_type, three_d=False, three_times=False): @on_demand_benchmark class CubeEquality(EqualityMixin): - r"""Benchmark time and memory costs of comparing LFRic and UM - :class:`~iris.cube.Cube`\\ s. - """ + r"""Benchmark time & memory costs of comparing LFRic & UM :class:`~iris.cube.Cube`\\ s.""" def _comparison(self): _ = self.cube == self.other_cube diff --git a/benchmarks/benchmarks/cperf/load.py b/benchmarks/benchmarks/cperf/load.py index 2d3c0b5c6b..cafc4631c0 100644 --- a/benchmarks/benchmarks/cperf/load.py +++ b/benchmarks/benchmarks/cperf/load.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""File loading benchmarks for the CPerf scheme of the UK Met Office's NG-VAT project. -""" +"""File loading benchmarks for the CPerf scheme of the UK Met Office's NG-VAT project.""" from .. import on_demand_benchmark from . import SingleDiagnosticMixin @@ -11,7 +10,8 @@ @on_demand_benchmark class SingleDiagnosticLoad(SingleDiagnosticMixin): def time_load(self, _, __, ___): - """The 'real world comparison' + """Perform a 'real world comparison'. + * UM coords are always realised (DimCoords). * LFRic coords are not realised by default (MeshCoords). 
@@ -26,7 +26,7 @@ def time_load(self, _, __, ___): assert coord.has_lazy_bounds() == expecting_lazy_coords def time_load_w_realised_coords(self, _, __, ___): - """A valuable extra comparison where both UM and LFRic coords are realised.""" + """Valuable extra comparison where both UM and LFRic coords are realised.""" cube = self.load() for coord_name in "longitude", "latitude": coord = cube.coord(coord_name) diff --git a/benchmarks/benchmarks/cperf/save.py b/benchmarks/benchmarks/cperf/save.py index 528f878265..2d60f920c4 100644 --- a/benchmarks/benchmarks/cperf/save.py +++ b/benchmarks/benchmarks/cperf/save.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""File saving benchmarks for the CPerf scheme of the UK Met Office's NG-VAT project. -""" +"""File saving benchmarks for the CPerf scheme of the UK Met Office's NG-VAT project.""" from iris import save @@ -15,6 +14,7 @@ @on_demand_benchmark class NetcdfSave: """Benchmark time and memory costs of saving ~large-ish data cubes to netcdf. + Parametrised by file type. """ diff --git a/benchmarks/benchmarks/cube.py b/benchmarks/benchmarks/cube.py index ef42e03077..4548d4c28d 100644 --- a/benchmarks/benchmarks/cube.py +++ b/benchmarks/benchmarks/cube.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Cube benchmark tests. - -""" +"""Cube benchmark tests.""" import numpy as np @@ -30,7 +28,9 @@ class ComponentCommon: # * make class an ABC # * remove NotImplementedError # * combine setup_common into setup - """A base class running a generalised suite of benchmarks for cubes that + """Run a generalised suite of benchmarks for cubes. + + A base class running a generalised suite of benchmarks for cubes that include a specified component (e.g. Coord, CellMeasure etc.). 
Component to be specified in a subclass. @@ -45,7 +45,7 @@ def setup(self): raise NotImplementedError def create(self): - """Generic cube creation. + """Create a cube (generic). cube_kwargs allow dynamic inclusion of different components; specified in subclasses. diff --git a/benchmarks/benchmarks/experimental/__init__.py b/benchmarks/benchmarks/experimental/__init__.py index d1f34cdb15..ce727a7286 100644 --- a/benchmarks/benchmarks/experimental/__init__.py +++ b/benchmarks/benchmarks/experimental/__init__.py @@ -2,6 +2,4 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Benchmark tests for the experimental module. - -""" +"""Benchmark tests for the experimental module.""" diff --git a/benchmarks/benchmarks/experimental/ugrid/__init__.py b/benchmarks/benchmarks/experimental/ugrid/__init__.py index 322fe9acc0..c2335990aa 100644 --- a/benchmarks/benchmarks/experimental/ugrid/__init__.py +++ b/benchmarks/benchmarks/experimental/ugrid/__init__.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Benchmark tests for the experimental.ugrid module. - -""" +"""Benchmark tests for the experimental.ugrid module.""" from copy import deepcopy @@ -17,7 +15,9 @@ class UGridCommon: - """A base class running a generalised suite of benchmarks for any ugrid object. + """Run a generalised suite of benchmarks for any ugrid object. + + A base class running a generalised suite of benchmarks for any ugrid object. Object to be specified in a subclass. ASV will run the benchmarks within this class for any subclasses. 
diff --git a/benchmarks/benchmarks/experimental/ugrid/regions_combine.py b/benchmarks/benchmarks/experimental/ugrid/regions_combine.py index a4df9aa8c7..10711d0349 100644 --- a/benchmarks/benchmarks/experimental/ugrid/regions_combine.py +++ b/benchmarks/benchmarks/experimental/ugrid/regions_combine.py @@ -2,14 +2,18 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Benchmarks stages of operation of the function +"""Benchmarks stages of operation. + +Benchmarks stages of operation of the function :func:`iris.experimental.ugrid.utils.recombine_submeshes`. Where possible benchmarks should be parameterised for two sizes of input data: - * minimal: enables detection of regressions in parts of the run-time that do - NOT scale with data size. - * large: large enough to exclusively detect regressions in parts of the - run-time that scale with data size. + +* minimal: enables detection of regressions in parts of the run-time that do + NOT scale with data size. + +* large: large enough to exclusively detect regressions in parts of the + run-time that scale with data size. """ import os @@ -68,7 +72,6 @@ def _make_region_cubes(self, full_mesh_cube): def setup_cache(self): """Cache all the necessary source data on disk.""" - # Control dask, to minimise memory usage + allow largest data. self.fix_dask_settings() @@ -89,7 +92,7 @@ def setup_cache(self): ) def setup(self, n_cubesphere, imaginary_data=True, create_result_cube=True): - """The combine-tests "standard" setup operation. + """Combine-tests "standard" setup operation. Load the source cubes (full-mesh + region) from disk. These are specific to the cubesize parameter. @@ -106,7 +109,6 @@ def setup(self, n_cubesphere, imaginary_data=True, create_result_cube=True): NOTE: various test classes override + extend this. 
""" - # Load source cubes (full-mesh and regions) with PARSE_UGRID_ON_LOAD.context(): self.full_mesh_cube = load_cube( @@ -143,7 +145,6 @@ def fix_dask_settings(self): which is optimised for space saving so we can test largest data. """ - import dask.config as dcfg # Use single-threaded, to avoid process-switching costs and minimise memory usage. @@ -196,10 +197,13 @@ def track_addedmem_compute_data(self, n_cubesphere): class CombineRegionsSaveData(MixinCombineRegions): - """Test saving *only*, having replaced the input cube data with 'imaginary' + """Test saving *only*. + + Test saving *only*, having replaced the input cube data with 'imaginary' array data, so that input data is not loaded from disk during the save operation. + """ def time_save(self, n_cubesphere): @@ -222,6 +226,7 @@ def track_filesize_saved(self, n_cubesphere): class CombineRegionsFileStreamedCalc(MixinCombineRegions): """Test the whole cost of file-to-file streaming. + Uses the combined cube which is based on lazy data loading from the region cubes on disk. """ diff --git a/benchmarks/benchmarks/generate_data/stock.py b/benchmarks/benchmarks/generate_data/stock.py index 058eac01b1..17f3b23f92 100644 --- a/benchmarks/benchmarks/generate_data/stock.py +++ b/benchmarks/benchmarks/generate_data/stock.py @@ -49,14 +49,13 @@ def _external(func_name_, temp_file_dir, **kwargs_): def create_file__xios_2d_face_half_levels( temp_file_dir, dataset_name, n_faces=866, n_times=1 ): - """Wrapper for :meth:`iris.tests.stock.netcdf.create_file__xios_2d_face_half_levels`. + """Create file wrapper for :meth:`iris.tests.stock.netcdf.create_file__xios_2d_face_half_levels`. Have taken control of temp_file_dir todo: is create_file__xios_2d_face_half_levels still appropriate now we can properly save Mesh Cubes? 
""" - return _create_file__xios_common( func_name="create_file__xios_2d_face_half_levels", dataset_name=dataset_name, @@ -68,14 +67,13 @@ def create_file__xios_2d_face_half_levels( def create_file__xios_3d_face_half_levels( temp_file_dir, dataset_name, n_faces=866, n_times=1, n_levels=38 ): - """Wrapper for :meth:`iris.tests.stock.netcdf.create_file__xios_3d_face_half_levels`. + """Create file wrapper for :meth:`iris.tests.stock.netcdf.create_file__xios_3d_face_half_levels`. Have taken control of temp_file_dir todo: is create_file__xios_3d_face_half_levels still appropriate now we can properly save Mesh Cubes? """ - return _create_file__xios_common( func_name="create_file__xios_3d_face_half_levels", dataset_name=dataset_name, @@ -86,7 +84,7 @@ def create_file__xios_3d_face_half_levels( def sample_mesh(n_nodes=None, n_faces=None, n_edges=None, lazy_values=False): - """Wrapper for :meth:iris.tests.stock.mesh.sample_mesh`.""" + """Sample mesh wrapper for :meth:iris.tests.stock.mesh.sample_mesh`.""" def _external(*args, **kwargs): from iris.experimental.ugrid import save_mesh @@ -114,7 +112,7 @@ def _external(*args, **kwargs): def sample_meshcoord(sample_mesh_kwargs=None, location="face", axis="x"): - """Wrapper for :meth:`iris.tests.stock.mesh.sample_meshcoord`. + """Sample meshcoord wrapper for :meth:`iris.tests.stock.mesh.sample_meshcoord`. Parameters deviate from the original as cannot pass a :class:`iris.experimental.ugrid.Mesh to the separate Python instance - must diff --git a/benchmarks/benchmarks/generate_data/ugrid.py b/benchmarks/benchmarks/generate_data/ugrid.py index 8cca53c907..713e5dc7df 100644 --- a/benchmarks/benchmarks/generate_data/ugrid.py +++ b/benchmarks/benchmarks/generate_data/ugrid.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Scripts for generating supporting data for UGRID-related benchmarking. 
-""" +"""Scripts for generating supporting data for UGRID-related benchmarking.""" from iris import load_cube as iris_loadcube from iris.experimental.ugrid import PARSE_UGRID_ON_LOAD @@ -15,7 +14,9 @@ def generate_cube_like_2d_cubesphere(n_cube: int, with_mesh: bool, output_path: str): - """Construct and save to file an LFRIc cubesphere-like cube for a given + """Construct and save to file an LFRIc cubesphere-like cube. + + Construct and save to file an LFRIc cubesphere-like cube for a given cubesphere size, *or* a simpler structured (UM-like) cube of equivalent size. @@ -23,9 +24,11 @@ def generate_cube_like_2d_cubesphere(n_cube: int, with_mesh: bool, output_path: Instead, it is to be called via benchmarks.remote_data_generation, so that it can use up-to-date facilities, independent of the ASV controlled environment which contains the "Iris commit under test". + This means: - * it must be completely self-contained : i.e. it includes all its - own imports, and saves results to an output file. + + * it must be completely self-contained : i.e. it includes all its + own imports, and saves results to an output file. """ from iris import save @@ -55,7 +58,9 @@ def generate_cube_like_2d_cubesphere(n_cube: int, with_mesh: bool, output_path: def make_cube_like_2d_cubesphere(n_cube: int, with_mesh: bool): - """Generate an LFRIc cubesphere-like cube for a given cubesphere size, + """Generate an LFRIc cubesphere-like cube. + + Generate an LFRIc cubesphere-like cube for a given cubesphere size, *or* a simpler structured (UM-like) cube of equivalent size. All the cube data, coords and mesh content are LAZY, and produced without @@ -106,8 +111,9 @@ def make_cube_like_umfield(xy_dims): Note: probably a bit over-simplified, as there is no time coord, but that is probably equally true of our LFRic-style synthetic data. - Args: - * xy_dims (2-tuple): + Parameters + ---------- + xy_dims : 2-tuple Set the horizontal dimensions = n-lats, n-lons. 
""" @@ -156,12 +162,13 @@ def _external(xy_dims_, save_path_): def make_cubesphere_testfile(c_size, n_levels=0, n_times=1): - """Build a C cubesphere testfile in a given directory, with a standard naming. + """Build a C cubesphere testfile in a given directory. + + Build a C cubesphere testfile in a given directory, with a standard naming. If n_levels > 0 specified: 3d file with the specified number of levels. Return the file path. - todo: is create_file__xios... still appropriate now we can properly save - Mesh Cubes? + TODO: is create_file__xios... still appropriate now we can properly save Mesh Cubes? """ n_faces = 6 * c_size * c_size diff --git a/benchmarks/benchmarks/import_iris.py b/benchmarks/benchmarks/import_iris.py index fbae82fee6..566ffca78b 100644 --- a/benchmarks/benchmarks/import_iris.py +++ b/benchmarks/benchmarks/import_iris.py @@ -2,6 +2,9 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. + +"""import iris benchmarking.""" + from importlib import import_module, reload ################ @@ -28,8 +31,7 @@ class Iris: @staticmethod def _import(module_name, reset_colormaps=False): - """Have experimented with adding sleep() commands into the imported - modules. + """Have experimented with adding sleep() commands into the imported modules. The results reveal: diff --git a/benchmarks/benchmarks/iterate.py b/benchmarks/benchmarks/iterate.py index 800911f21a..9353cf42ee 100644 --- a/benchmarks/benchmarks/iterate.py +++ b/benchmarks/benchmarks/iterate.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Iterate benchmark tests. 
- -""" +"""Iterate benchmark tests.""" import numpy as np from iris import coords, cube, iterate diff --git a/benchmarks/benchmarks/metadata_manager_factory.py b/benchmarks/benchmarks/metadata_manager_factory.py index 8e4de9949b..01a2b661b8 100644 --- a/benchmarks/benchmarks/metadata_manager_factory.py +++ b/benchmarks/benchmarks/metadata_manager_factory.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""metadata_manager_factory benchmark tests. - -""" +"""metadata_manager_factory benchmark tests.""" from iris.common import ( AncillaryVariableMetadata, diff --git a/benchmarks/benchmarks/mixin.py b/benchmarks/benchmarks/mixin.py index de5127253f..90fb017b12 100644 --- a/benchmarks/benchmarks/mixin.py +++ b/benchmarks/benchmarks/mixin.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Mixin benchmark tests. - -""" +"""Mixin benchmark tests.""" import numpy as np diff --git a/benchmarks/benchmarks/plot.py b/benchmarks/benchmarks/plot.py index b5bc064e84..9b008ec41c 100644 --- a/benchmarks/benchmarks/plot.py +++ b/benchmarks/benchmarks/plot.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Plot benchmark tests. - -""" +"""Plot benchmark tests.""" import matplotlib as mpl import numpy as np diff --git a/benchmarks/benchmarks/regridding.py b/benchmarks/benchmarks/regridding.py index 8e1c5e33df..b311c94717 100644 --- a/benchmarks/benchmarks/regridding.py +++ b/benchmarks/benchmarks/regridding.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Regridding benchmark test. 
- -""" +"""Regridding benchmark test.""" # import iris tests first so that some things can be initialised before # importing anything else diff --git a/benchmarks/benchmarks/sperf/combine_regions.py b/benchmarks/benchmarks/sperf/combine_regions.py index 1012ccd932..7d677ed74f 100644 --- a/benchmarks/benchmarks/sperf/combine_regions.py +++ b/benchmarks/benchmarks/sperf/combine_regions.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Region combine benchmarks for the SPerf scheme of the UK Met Office's NG-VAT project. -""" +"""Region combine benchmarks for the SPerf scheme of the UK Met Office's NG-VAT project.""" import os.path from dask import array as da @@ -64,7 +63,6 @@ def _make_region_cubes(self, full_mesh_cube): def setup_cache(self): """Cache all the necessary source data on disk.""" - # Control dask, to minimise memory usage + allow largest data. self.fix_dask_settings() @@ -85,7 +83,7 @@ def setup_cache(self): ) def setup(self, n_cubesphere, imaginary_data=True, create_result_cube=True): - """The combine-tests "standard" setup operation. + """Combine-tests "standard" setup operation. Load the source cubes (full-mesh + region) from disk. These are specific to the cubesize parameter. @@ -102,7 +100,6 @@ def setup(self, n_cubesphere, imaginary_data=True, create_result_cube=True): NOTE: various test classes override + extend this. """ - # Load source cubes (full-mesh and regions) with PARSE_UGRID_ON_LOAD.context(): self.full_mesh_cube = load_cube( @@ -142,7 +139,6 @@ def fix_dask_settings(self): which is optimised for space saving so we can test largest data. """ - import dask.config as dcfg # Use single-threaded, to avoid process-switching costs and minimise memory usage. 
@@ -199,7 +195,9 @@ def track_addedmem_compute_data(self, n_cubesphere): @on_demand_benchmark class SaveData(Mixin): - """Test saving *only*, having replaced the input cube data with 'imaginary' + """Test saving *only*. + + Test saving *only*, having replaced the input cube data with 'imaginary' array data, so that input data is not loaded from disk during the save operation. @@ -221,8 +219,10 @@ def track_filesize_saved(self, n_cubesphere): @on_demand_benchmark class FileStreamedCalc(Mixin): """Test the whole cost of file-to-file streaming. + Uses the combined cube which is based on lazy data loading from the region cubes on disk. + """ def setup(self, n_cubesphere, imaginary_data=False, create_result_cube=True): diff --git a/benchmarks/benchmarks/sperf/equality.py b/benchmarks/benchmarks/sperf/equality.py index 813cfad6bf..339687a22c 100644 --- a/benchmarks/benchmarks/sperf/equality.py +++ b/benchmarks/benchmarks/sperf/equality.py @@ -2,16 +2,17 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Equality benchmarks for the SPerf scheme of the UK Met Office's NG-VAT project. -""" +"""Equality benchmarks for the SPerf scheme of the UK Met Office's NG-VAT project.""" from .. import on_demand_benchmark from . import FileMixin @on_demand_benchmark class CubeEquality(FileMixin): - r"""Benchmark time and memory costs of comparing :class:`~iris.cube.Cube`\\ s - with attached :class:`~iris.experimental.ugrid.mesh.Mesh`\\ es. + r"""Benchmark time and memory costs. + + Benchmark time and memory costs of comparing :class:`~iris.cube.Cube`\\ s + with attached :class:`~iris.experimental.ugrid.mesh.Mesh`\\ es. Uses :class:`FileMixin` as the realistic case will be comparing :class:`~iris.cube.Cube`\\ s that have been loaded from file. 
diff --git a/benchmarks/benchmarks/sperf/load.py b/benchmarks/benchmarks/sperf/load.py index d3e9ea7ac9..f3c5ef1136 100644 --- a/benchmarks/benchmarks/sperf/load.py +++ b/benchmarks/benchmarks/sperf/load.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""File loading benchmarks for the SPerf scheme of the UK Met Office's NG-VAT project. -""" +"""File loading benchmarks for the SPerf scheme of the UK Met Office's NG-VAT project.""" from .. import on_demand_benchmark from . import FileMixin diff --git a/benchmarks/benchmarks/sperf/save.py b/benchmarks/benchmarks/sperf/save.py index 2999e81227..3fb8133659 100644 --- a/benchmarks/benchmarks/sperf/save.py +++ b/benchmarks/benchmarks/sperf/save.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""File saving benchmarks for the SPerf scheme of the UK Met Office's NG-VAT project. -""" +"""File saving benchmarks for the SPerf scheme of the UK Met Office's NG-VAT project.""" import os.path from iris import save diff --git a/benchmarks/benchmarks/stats.py b/benchmarks/benchmarks/stats.py new file mode 100644 index 0000000000..0530431900 --- /dev/null +++ b/benchmarks/benchmarks/stats.py @@ -0,0 +1,38 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. +"""Stats benchmark tests.""" + +import iris +from iris.analysis.stats import pearsonr +import iris.tests + + +class PearsonR: + def setup(self): + cube_temp = iris.load_cube( + iris.tests.get_data_path( + ("NetCDF", "global", "xyt", "SMALL_total_column_co2.nc") + ) + ) + + # Make data non-lazy. 
+ cube_temp.data + + self.cube_a = cube_temp[:6] + self.cube_b = cube_temp[20:26] + self.cube_b.replace_coord(self.cube_a.coord("time")) + for name in ["latitude", "longitude"]: + self.cube_b.coord(name).guess_bounds() + self.weights = iris.analysis.cartography.area_weights(self.cube_b) + + def time_real(self): + pearsonr(self.cube_a, self.cube_b, weights=self.weights) + + def time_lazy(self): + for cube in self.cube_a, self.cube_b: + cube.data = cube.lazy_data() + + result = pearsonr(self.cube_a, self.cube_b, weights=self.weights) + result.data diff --git a/benchmarks/benchmarks/trajectory.py b/benchmarks/benchmarks/trajectory.py index 4214ed3f6e..ec2958b6a8 100644 --- a/benchmarks/benchmarks/trajectory.py +++ b/benchmarks/benchmarks/trajectory.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Trajectory benchmark test. - -""" +"""Trajectory benchmark test.""" # import iris tests first so that some things can be initialised before # importing anything else diff --git a/benchmarks/bm_runner.py b/benchmarks/bm_runner.py index 8b45031fca..10dc5f469a 100644 --- a/benchmarks/bm_runner.py +++ b/benchmarks/bm_runner.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Argparse conveniences for executing common types of benchmark runs. 
-""" +"""Argparse conveniences for executing common types of benchmark runs.""" from abc import ABC, abstractmethod import argparse @@ -68,7 +67,6 @@ def _check_requirements(package: str) -> None: def _prep_data_gen_env() -> None: """Create/access a separate, unchanging environment for generating test data.""" - python_version = "3.11" data_gen_var = "DATA_GEN_PYTHON" if data_gen_var in environ: @@ -257,8 +255,8 @@ def _gh_create_reports(commit_sha: str, results_full: str, results_shifts: str) * commit {commit_sha} ({pr_tag}).

- Please review the report below and - take corrective/congratulatory action as appropriate + Please review the report below and + take corrective/congratulatory action as appropriate :slightly_smiling_face:

""" @@ -334,7 +332,7 @@ def add_asv_arguments(self) -> None: @staticmethod @abstractmethod def func(args: argparse.Namespace): - """The function to return when the subparser is parsed. + """Return when the subparser is parsed. `func` is then called, performing the user's selected sub-command. diff --git a/docs/gallery_code/general/plot_SOI_filtering.py b/docs/gallery_code/general/plot_SOI_filtering.py index 5082162068..4b256c894c 100644 --- a/docs/gallery_code/general/plot_SOI_filtering.py +++ b/docs/gallery_code/general/plot_SOI_filtering.py @@ -1,4 +1,5 @@ -"""Applying a Filter to a Time-Series +""" +Applying a Filter to a Time-Series ================================== This example demonstrates low pass filtering a time-series by applying a @@ -17,7 +18,7 @@ Trenberth K. E. (1984) Signal Versus Noise in the Southern Oscillation. Monthly Weather Review, Vol 112, pp 326-332 -""" # noqa: D400 +""" # noqa: D205, D212, D400 import matplotlib.pyplot as plt import numpy as np @@ -29,12 +30,11 @@ def low_pass_weights(window, cutoff): """Calculate weights for a low pass Lanczos filter. - Args: - - window: int + Parameters + ---------- + window : int The length of the filter window. - - cutoff: float + cutoff : float The cutoff frequency in inverse time steps. """ diff --git a/docs/gallery_code/general/plot_anomaly_log_colouring.py b/docs/gallery_code/general/plot_anomaly_log_colouring.py index 98751b0dbf..cd11161041 100644 --- a/docs/gallery_code/general/plot_anomaly_log_colouring.py +++ b/docs/gallery_code/general/plot_anomaly_log_colouring.py @@ -1,4 +1,5 @@ -"""Colouring Anomaly Data With Logarithmic Scaling +""" +Colouring Anomaly Data With Logarithmic Scaling =============================================== In this example, we need to plot anomaly data where the values have a @@ -14,15 +15,17 @@ the matplotlib Norm class :obj:`matplotlib.colors.SymLogNorm`. We use this to make a cell-filled pseudocolor plot with a colorbar. 
-NOTE: By "pseudocolour", we mean that each data point is drawn as a "cell" -region on the plot, coloured according to its data value. -This is provided in Iris by the functions :meth:`iris.plot.pcolor` and -:meth:`iris.plot.pcolormesh`, which call the underlying matplotlib -functions of the same names (i.e., :obj:`matplotlib.pyplot.pcolor` -and :obj:`matplotlib.pyplot.pcolormesh`). -See also: https://en.wikipedia.org/wiki/False_color#Pseudocolor. +.. note:: -""" # noqa: D400 + By "pseudocolour", we mean that each data point is drawn as a "cell" + region on the plot, coloured according to its data value. + This is provided in Iris by the functions :meth:`iris.plot.pcolor` and + :meth:`iris.plot.pcolormesh`, which call the underlying matplotlib + functions of the same names (i.e., :obj:`matplotlib.pyplot.pcolor` + and :obj:`matplotlib.pyplot.pcolormesh`). + See also: https://en.wikipedia.org/wiki/False_color#Pseudocolor. + +""" # noqa: D205, D212, D400 import cartopy.crs as ccrs import matplotlib.colors as mcols diff --git a/docs/gallery_code/general/plot_coriolis.py b/docs/gallery_code/general/plot_coriolis.py index 3d5aa853e3..905108abfd 100644 --- a/docs/gallery_code/general/plot_coriolis.py +++ b/docs/gallery_code/general/plot_coriolis.py @@ -1,11 +1,12 @@ -"""Deriving the Coriolis Frequency Over the Globe +""" +Deriving the Coriolis Frequency Over the Globe ============================================== This code computes the Coriolis frequency and stores it in a cube with associated metadata. It then plots the Coriolis frequency on an orthographic projection. 
-""" # noqa: D400 +""" # noqa: D205, D212, D400 import cartopy.crs as ccrs import matplotlib.pyplot as plt diff --git a/docs/gallery_code/general/plot_cross_section.py b/docs/gallery_code/general/plot_cross_section.py index 8eac3955b1..8e5bee85ed 100644 --- a/docs/gallery_code/general/plot_cross_section.py +++ b/docs/gallery_code/general/plot_cross_section.py @@ -1,10 +1,11 @@ -"""Cross Section Plots +""" +Cross Section Plots =================== This example demonstrates contour plots of a cross-sectioned multi-dimensional cube which features a hybrid height vertical coordinate system. -""" # noqa: D400 +""" # noqa: D205, D212, D400 import matplotlib.pyplot as plt diff --git a/docs/gallery_code/general/plot_custom_aggregation.py b/docs/gallery_code/general/plot_custom_aggregation.py index 521d0069b9..540f785ed6 100644 --- a/docs/gallery_code/general/plot_custom_aggregation.py +++ b/docs/gallery_code/general/plot_custom_aggregation.py @@ -1,4 +1,5 @@ -"""Calculating a Custom Statistic +""" +Calculating a Custom Statistic ============================== This example shows how to define and use a custom @@ -11,7 +12,7 @@ over North America, and we want to calculate in how many years these exceed a certain temperature over a spell of 5 years or more. -""" # noqa: D400 +""" # noqa: D205, D212, D400 import matplotlib.pyplot as plt import numpy as np @@ -27,25 +28,24 @@ # Note: in order to meet the requirements of iris.analysis.Aggregator, it must # do the calculation over an arbitrary (given) data axis. def count_spells(data, threshold, axis, spell_length): - """Function to calculate the number of points in a sequence where the value + """Calculate the number of points in a sequence. + + Function to calculate the number of points in a sequence where the value has exceeded a threshold value for at least a certain number of timepoints. Generalised to operate on multiple time sequences arranged on a specific axis of a multidimensional array. 
- Args: - - * data (array): + Parameters + ---------- + data : array raw data to be compared with value threshold. - - * threshold (float): + threshold : float threshold point for 'significant' datapoints. - - * axis (int): + axis : int number of the array dimension mapping the time sequences. (Can also be negative, e.g. '-1' means last dimension) - - * spell_length (int): + spell_length : int number of consecutive times at which value > threshold to "count". """ diff --git a/docs/gallery_code/general/plot_custom_file_loading.py b/docs/gallery_code/general/plot_custom_file_loading.py index 53781ba044..06de887614 100644 --- a/docs/gallery_code/general/plot_custom_file_loading.py +++ b/docs/gallery_code/general/plot_custom_file_loading.py @@ -1,5 +1,6 @@ -"""Loading a Cube From a Custom File Format -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +""" +Loading a Cube From a Custom File Format +======================================== This example shows how a custom text file can be loaded using the standard Iris load mechanism. @@ -8,25 +9,25 @@ ` for the file format. To create a format specification we need to define the following: -* format_name - Some text that describes the format specification we are +* **format_name** - Some text that describes the format specification we are creating -* file_element - FileElement object describing the element which identifies +* **file_element** - FileElement object describing the element which identifies this FormatSpecification. Possible values are: - ``iris.io.format_picker.MagicNumber(n, o)`` - The n bytes from the file at offset o. + * ``iris.io.format_picker.MagicNumber(n, o)`` + The n bytes from the file at offset o. - ``iris.io.format_picker.FileExtension()`` - The file's extension. + * ``iris.io.format_picker.FileExtension()`` + The file extension. - ``iris.io.format_picker.LeadingLine()`` - The first line of the file. + * ``iris.io.format_picker.LeadingLine()`` + The first line of the file. 
-* file_element_value - The value that the file_element should take if a file +* **file_element_value** - The value that the file_element should take if a file matches this FormatSpecification -* handler (optional) - A generator function that will be called when the file +* **handler** (optional) - A generator function that will be called when the file specification has been identified. This function is provided by the user and provides the means to parse the whole file. If no handler function is provided, then identification is still possible without any handling. @@ -40,7 +41,7 @@ The handler function must be defined as generator which yields each cube as they are produced. -* priority (optional) - Integer giving a priority for considering this +* **priority** (optional) - Integer giving a priority for considering this specification where higher priority means sooner consideration In the following example, the function :func:`load_NAME_III` has been defined @@ -52,7 +53,7 @@ function which automatically invokes the ``FormatSpecification`` we defined. The cube returned from the load function is then used to produce a plot. -""" # noqa: D400 +""" # noqa: D205, D212, D400 import datetime @@ -89,11 +90,12 @@ def load_NAME_III(filename): - """Loads the Met Office's NAME III grid output files returning headers, column + """Load the Met Office's NAME III grid output files. + + Loads the Met Office's NAME III grid output files returning headers, column definitions and data arrays as 3 separate lists. """ - # Loading a file gives a generator of lines which can be progressed using # the next() function. This will come in handy as we wish to progress # through the file line by line. 
@@ -178,8 +180,7 @@ def load_NAME_III(filename): def NAME_to_cube(filenames, callback): - """Returns a generator of cubes given a list of filenames and a callback.""" - + """Return a generator of cubes given a list of filenames and a callback.""" for filename in filenames: header, column_headings, data_arrays = load_NAME_III(filename) diff --git a/docs/gallery_code/general/plot_global_map.py b/docs/gallery_code/general/plot_global_map.py index 9634fc1458..60ac200a43 100644 --- a/docs/gallery_code/general/plot_global_map.py +++ b/docs/gallery_code/general/plot_global_map.py @@ -1,10 +1,11 @@ -"""Quickplot of a 2D Cube on a Map +""" +Quickplot of a 2D Cube on a Map =============================== This example demonstrates a contour plot of global air temperature. The plot title and the labels for the axes are automatically derived from the metadata. -""" # noqa: D400 +""" # noqa: D205, D212, D400 import cartopy.crs as ccrs import matplotlib.pyplot as plt diff --git a/docs/gallery_code/general/plot_inset.py b/docs/gallery_code/general/plot_inset.py index 7b7e04c7d3..5edd375743 100644 --- a/docs/gallery_code/general/plot_inset.py +++ b/docs/gallery_code/general/plot_inset.py @@ -1,11 +1,12 @@ -"""Test Data Showing Inset Plots +""" +Test Data Showing Inset Plots ============================= This example demonstrates the use of a single 3D data cube with time, latitude and longitude dimensions to plot a temperature series for a single latitude coordinate, with an inset plot of the data region. 
-""" # noqa: D400 +""" # noqa: D205, D212, D400 import cartopy.crs as ccrs import matplotlib.pyplot as plt diff --git a/docs/gallery_code/general/plot_lineplot_with_legend.py b/docs/gallery_code/general/plot_lineplot_with_legend.py index 981e9694ec..d1b3acd912 100644 --- a/docs/gallery_code/general/plot_lineplot_with_legend.py +++ b/docs/gallery_code/general/plot_lineplot_with_legend.py @@ -1,7 +1,8 @@ -"""Multi-Line Temperature Profile Plot -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +""" +Multi-Line Temperature Profile Plot +=================================== -""" # noqa: D400 +""" # noqa: D205, D212, D400 import matplotlib.pyplot as plt diff --git a/docs/gallery_code/general/plot_polar_stereo.py b/docs/gallery_code/general/plot_polar_stereo.py index 0cb4e533fa..99abbd0ae0 100644 --- a/docs/gallery_code/general/plot_polar_stereo.py +++ b/docs/gallery_code/general/plot_polar_stereo.py @@ -1,10 +1,11 @@ -"""Example of a Polar Stereographic Plot +""" +Example of a Polar Stereographic Plot ===================================== Demonstrates plotting data that are defined on a polar stereographic projection. -""" # noqa: D400 +""" # noqa: D205, D212, D400 import matplotlib.pyplot as plt diff --git a/docs/gallery_code/general/plot_polynomial_fit.py b/docs/gallery_code/general/plot_polynomial_fit.py index 951f17209e..37cc4e283b 100644 --- a/docs/gallery_code/general/plot_polynomial_fit.py +++ b/docs/gallery_code/general/plot_polynomial_fit.py @@ -1,11 +1,12 @@ -"""Fitting a Polynomial +""" +Fitting a Polynomial ==================== This example demonstrates computing a polynomial fit to 1D data from an Iris cube, adding the fit to the cube's metadata, and plotting both the 1D data and the fit. 
-""" # noqa: D400 +""" # noqa: D205, D212, D400 import matplotlib.pyplot as plt import numpy as np diff --git a/docs/gallery_code/general/plot_projections_and_annotations.py b/docs/gallery_code/general/plot_projections_and_annotations.py index 1a4701837a..6e8ba5a5af 100644 --- a/docs/gallery_code/general/plot_projections_and_annotations.py +++ b/docs/gallery_code/general/plot_projections_and_annotations.py @@ -1,4 +1,5 @@ -"""Plotting in Different Projections +""" +Plotting in Different Projections ================================= This example shows how to overlay data and graphics in different projections, @@ -11,7 +12,7 @@ We plot these over a specified region, in two different map projections. -""" # noqa: D400 +""" # noqa: D205, D212, D400 import cartopy.crs as ccrs import matplotlib.pyplot as plt diff --git a/docs/gallery_code/general/plot_rotated_pole_mapping.py b/docs/gallery_code/general/plot_rotated_pole_mapping.py index 3674e89e28..60b187ee56 100644 --- a/docs/gallery_code/general/plot_rotated_pole_mapping.py +++ b/docs/gallery_code/general/plot_rotated_pole_mapping.py @@ -1,15 +1,16 @@ -"""Rotated Pole Mapping +""" +Rotated Pole Mapping ===================== This example uses several visualisation methods to achieve an array of differing images, including: - * Visualisation of point based data - * Contouring of point based data - * Block plot of contiguous bounded data - * Non native projection and a Natural Earth shaded relief image underlay +* Visualisation of point based data +* Contouring of point based data +* Block plot of contiguous bounded data +* Non native projection and a Natural Earth shaded relief image underlay -""" # noqa: D400 +""" # noqa: D205, D212, D400 import cartopy.crs as ccrs import matplotlib.pyplot as plt diff --git a/docs/gallery_code/general/plot_zonal_means.py b/docs/gallery_code/general/plot_zonal_means.py index 47a7127d06..d4ec1eb1fc 100644 --- a/docs/gallery_code/general/plot_zonal_means.py +++ 
b/docs/gallery_code/general/plot_zonal_means.py @@ -1,7 +1,11 @@ -"""Zonal Mean Diagram of Air Temperature +""" +Zonal Mean Diagram of Air Temperature ===================================== -This example demonstrates aligning a linear plot and a cartographic plot using Matplotlib. -""" # noqa: D400 + +This example demonstrates aligning a linear plot and a cartographic plot +using Matplotlib. + +""" # noqa: D205, D212, D400 import cartopy.crs as ccrs import matplotlib.pyplot as plt diff --git a/docs/gallery_code/meteorology/plot_COP_1d.py b/docs/gallery_code/meteorology/plot_COP_1d.py index d5ef2a9990..84addd140a 100644 --- a/docs/gallery_code/meteorology/plot_COP_1d.py +++ b/docs/gallery_code/meteorology/plot_COP_1d.py @@ -1,4 +1,5 @@ -"""Global Average Annual Temperature Plot +""" +Global Average Annual Temperature Plot ====================================== Produces a time-series plot of North American temperature forecasts for 2 @@ -25,7 +26,7 @@ Further details on the aggregation functionality being used in this example can be found in :ref:`cube-statistics`. -""" # noqa: D400 +""" # noqa: D205, D212, D400 import matplotlib.pyplot as plt import numpy as np diff --git a/docs/gallery_code/meteorology/plot_COP_maps.py b/docs/gallery_code/meteorology/plot_COP_maps.py index eda8a3a53a..1c5e865a8f 100644 --- a/docs/gallery_code/meteorology/plot_COP_maps.py +++ b/docs/gallery_code/meteorology/plot_COP_maps.py @@ -1,4 +1,5 @@ -"""Global Average Annual Temperature Maps +""" +Global Average Annual Temperature Maps ====================================== Produces maps of global temperature forecasts from the A1B and E1 scenarios. @@ -18,7 +19,7 @@ Analyses, and Scenarios. Eos Trans. AGU, Vol 90, No. 21, doi:10.1029/2009EO210001. -""" # noqa: D400 +""" # noqa: D205, D212, D400 import os.path @@ -31,10 +32,7 @@ def cop_metadata_callback(cube, field, filename): - """A function which adds an "Experiment" coordinate which comes from the - filename. 
- """
-
+    """Add an "Experiment" coordinate which comes from the filename."""
     # Extract the experiment name (such as A1B or E1) from the filename (in
     # this case it is just the start of the file name, before the first ".").
     fname = os.path.basename(filename)  # filename without path.
diff --git a/docs/gallery_code/meteorology/plot_TEC.py b/docs/gallery_code/meteorology/plot_TEC.py
index cb642af588..e6269eaf9b 100644
--- a/docs/gallery_code/meteorology/plot_TEC.py
+++ b/docs/gallery_code/meteorology/plot_TEC.py
@@ -1,4 +1,5 @@
-"""Ionosphere Space Weather
+"""
+Ionosphere Space Weather
 ========================
 
 This space weather example plots a filled contour of rotated pole point
@@ -8,7 +9,7 @@
 The plot exhibits an interesting outline effect due to excluding data
 values below a certain threshold.
 
-"""  # noqa: D400
+"""  # noqa: D205, D212, D400
 
 import matplotlib.pyplot as plt
 import numpy.ma as ma
diff --git a/docs/gallery_code/meteorology/plot_deriving_phenomena.py b/docs/gallery_code/meteorology/plot_deriving_phenomena.py
index ef78d2f1c9..81a05be9b9 100644
--- a/docs/gallery_code/meteorology/plot_deriving_phenomena.py
+++ b/docs/gallery_code/meteorology/plot_deriving_phenomena.py
@@ -1,4 +1,5 @@
-"""Deriving Exner Pressure and Air Temperature
+"""
+Deriving Exner Pressure and Air Temperature
 ===========================================
 
 This example shows some processing of cubes in order to derive further related
@@ -7,7 +8,7 @@
 specific humidity.  Finally, the two new cubes are presented side-by-side in a
 plot.
 
-"""  # noqa: D400
+"""  # noqa: D205, D212, D400
 
 import matplotlib.pyplot as plt
 import matplotlib.ticker
@@ -20,7 +21,9 @@
 
 
 def limit_colorbar_ticks(contour_object):
-    """Takes a contour object which has an associated colorbar and limits the
+    """Limit colorbar number of ticks.
+
+    Take a contour object which has an associated colorbar and limits the
     number of ticks on the colorbar to 4.
""" diff --git a/docs/gallery_code/meteorology/plot_hovmoller.py b/docs/gallery_code/meteorology/plot_hovmoller.py index 6c1f1a800a..829b370d78 100644 --- a/docs/gallery_code/meteorology/plot_hovmoller.py +++ b/docs/gallery_code/meteorology/plot_hovmoller.py @@ -1,4 +1,5 @@ -"""Hovmoller Diagram of Monthly Surface Temperature +""" +Hovmoller Diagram of Monthly Surface Temperature ================================================ This example demonstrates the creation of a Hovmoller diagram with fine control @@ -6,7 +7,7 @@ and has been pre-processed to calculate the monthly mean sea surface temperature. -""" # noqa: D400 +""" # noqa: D205, D212, D400 import matplotlib.dates as mdates import matplotlib.pyplot as plt diff --git a/docs/gallery_code/meteorology/plot_lagged_ensemble.py b/docs/gallery_code/meteorology/plot_lagged_ensemble.py index a8887238d4..7c34572136 100644 --- a/docs/gallery_code/meteorology/plot_lagged_ensemble.py +++ b/docs/gallery_code/meteorology/plot_lagged_ensemble.py @@ -1,4 +1,5 @@ -"""Seasonal Ensemble Model Plots +""" +Seasonal Ensemble Model Plots ============================= This example demonstrates the loading of a lagged ensemble dataset from the @@ -15,7 +16,7 @@ better approach would be to take the climatological mean, calibrated to the model, from each ensemble member. -""" # noqa: D400 +""" # noqa: D205, D212, D400 import matplotlib.pyplot as plt import matplotlib.ticker @@ -26,7 +27,9 @@ def realization_metadata(cube, field, fname): - """A function which modifies the cube's metadata to add a "realization" + """Modify the cube's metadata to add a "realization" coordinate. + + A function which modifies the cube's metadata to add a "realization" (ensemble member) coordinate from the filename if one doesn't already exist in the cube. 
diff --git a/docs/gallery_code/meteorology/plot_wind_barbs.py b/docs/gallery_code/meteorology/plot_wind_barbs.py index 9745a40db2..f11c9a7b50 100644 --- a/docs/gallery_code/meteorology/plot_wind_barbs.py +++ b/docs/gallery_code/meteorology/plot_wind_barbs.py @@ -1,4 +1,5 @@ -"""Plotting Wind Direction Using Barbs +""" +Plotting Wind Direction Using Barbs =================================== This example demonstrates using barbs to plot wind speed contours and wind @@ -8,7 +9,8 @@ The magnitude of the wind in the original data is low and so doesn't illustrate the full range of barbs. The wind is scaled to simulate a storm that better illustrates the range of barbs that are available. -""" # noqa: D400 + +""" # noqa: D205, D212, D400 import matplotlib.pyplot as plt diff --git a/docs/gallery_code/meteorology/plot_wind_speed.py b/docs/gallery_code/meteorology/plot_wind_speed.py index dde87824fd..5310ad937d 100644 --- a/docs/gallery_code/meteorology/plot_wind_speed.py +++ b/docs/gallery_code/meteorology/plot_wind_speed.py @@ -1,4 +1,5 @@ -"""Plotting Wind Direction Using Quiver +""" +Plotting Wind Direction Using Quiver ==================================== This example demonstrates using quiver to plot wind speed contours and wind @@ -8,7 +9,7 @@ For the second plot, the data used for the arrows is normalised to produce arrows with a uniform size on the plot. 
-""" # noqa: D400 +""" # noqa: D205, D212, D400 import cartopy.feature as cfeat import matplotlib.pyplot as plt diff --git a/docs/gallery_code/oceanography/plot_atlantic_profiles.py b/docs/gallery_code/oceanography/plot_atlantic_profiles.py index 736ddbe7fb..a43fb7f8cb 100644 --- a/docs/gallery_code/oceanography/plot_atlantic_profiles.py +++ b/docs/gallery_code/oceanography/plot_atlantic_profiles.py @@ -1,4 +1,5 @@ -"""Oceanographic Profiles and T-S Diagrams +""" +Oceanographic Profiles and T-S Diagrams ======================================= This example demonstrates how to plot vertical profiles of different @@ -13,7 +14,7 @@ presence of the attribute positive=down on the depth coordinate. This means depth values intuitively increase downward on the y-axis. -""" # noqa: D400 +""" # noqa: D205, D212, D400 import matplotlib.pyplot as plt diff --git a/docs/gallery_code/oceanography/plot_load_nemo.py b/docs/gallery_code/oceanography/plot_load_nemo.py index 77c95e2353..36ff363a15 100644 --- a/docs/gallery_code/oceanography/plot_load_nemo.py +++ b/docs/gallery_code/oceanography/plot_load_nemo.py @@ -1,11 +1,13 @@ -"""Load a Time Series of Data From the NEMO Model +""" +Load a Time Series of Data From the NEMO Model ============================================== This example demonstrates how to load multiple files containing data output by the NEMO model and combine them into a time series in a single cube. The different time dimensions in these files can prevent Iris from concatenating them without the intervention shown here. 
-""" # noqa: D400 + +""" # noqa: D205, D212, D400 import matplotlib.pyplot as plt diff --git a/docs/gallery_code/oceanography/plot_orca_projection.py b/docs/gallery_code/oceanography/plot_orca_projection.py index 33e3ecac46..bb68056cb3 100644 --- a/docs/gallery_code/oceanography/plot_orca_projection.py +++ b/docs/gallery_code/oceanography/plot_orca_projection.py @@ -1,4 +1,5 @@ -"""Tri-Polar Grid Projected Plotting +""" +Tri-Polar Grid Projected Plotting ================================= This example demonstrates cell plots of data on the semi-structured ORCA2 model @@ -9,7 +10,7 @@ Second four pcolormesh plots are created from this projected dataset, using different projections for the output image. -""" # noqa: D400 +""" # noqa: D205, D212, D400 import cartopy.crs as ccrs import matplotlib.pyplot as plt diff --git a/docs/gallery_tests/__init__.py b/docs/gallery_tests/__init__.py index 091e997248..9468138e04 100644 --- a/docs/gallery_tests/__init__.py +++ b/docs/gallery_tests/__init__.py @@ -2,3 +2,5 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. + +"""Gallery Tests.""" diff --git a/docs/gallery_tests/conftest.py b/docs/gallery_tests/conftest.py index b1b83b7f42..564a2892a2 100644 --- a/docs/gallery_tests/conftest.py +++ b/docs/gallery_tests/conftest.py @@ -18,7 +18,7 @@ @pytest.fixture def image_setup_teardown(): - """Setup and teardown fixture. + """Perform setup and teardown fixture. Ensures all figures are closed before and after test to prevent one test polluting another if it fails with a figure unclosed. @@ -31,7 +31,9 @@ def image_setup_teardown(): @pytest.fixture def import_patches(monkeypatch): - """Replace plt.show() with a function that does nothing, also add all the + """Replace plt.show() with a function that does nothing, also add to sys.path. + + Replace plt.show() with a function that does nothing, also add all the gallery examples to sys.path. 
""" @@ -50,7 +52,9 @@ def no_show(): @pytest.fixture def iris_future_defaults(): - """Create a fixture which resets all the iris.FUTURE settings to the defaults, + """Create a fixture which resets all the iris.FUTURE settings to the defaults. + + Create a fixture which resets all the iris.FUTURE settings to the defaults, as otherwise changes made in one test can affect subsequent ones. """ diff --git a/docs/gallery_tests/test_gallery_examples.py b/docs/gallery_tests/test_gallery_examples.py index 93f361a62a..39e8fe0507 100644 --- a/docs/gallery_tests/test_gallery_examples.py +++ b/docs/gallery_tests/test_gallery_examples.py @@ -3,6 +3,8 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. +"""Test all the gallery examples.""" + import importlib import matplotlib.pyplot as plt @@ -15,8 +17,7 @@ def gallery_examples(): - """Generator to yield all current gallery examples.""" - + """Entry point for generator to yield all current gallery examples.""" for example_file in GALLERY_DIR.glob("*/plot*.py"): yield example_file.stem @@ -30,7 +31,6 @@ def test_plot_example( iris_future_defaults, ): """Test that all figures from example code match KGO.""" - module = importlib.import_module(example) # Run example. diff --git a/docs/src/community/iris_xarray.rst b/docs/src/community/iris_xarray.rst index 9d795fcd9e..71585d8b9f 100644 --- a/docs/src/community/iris_xarray.rst +++ b/docs/src/community/iris_xarray.rst @@ -38,9 +38,28 @@ There are multiple ways to convert between Iris and Xarray objects. feasible to save a NetCDF file using one package then load that file using the other package. This will be lossy in places, as both Iris and Xarray are opinionated on how certain NetCDF concepts relate to their data models. -* The Iris development team are exploring an improved 'bridge' between the two - packages. Follow the conversation on GitHub: `iris#4994`_. 
This project is - expressly intended to be as lossless as possible. +* `ncdata `_ is a package which + the Iris development team have developed to manage netcdf data, which can act as an + improved 'bridge' between Iris and Xarray : + +Ncdata can convert Iris cubes to an Xarray dataset, or vice versa, with minimal +overhead and as lossless as possible. + +For example : + +.. code-block:: python + + from ncdata.iris_xarray import cubes_from_xarray, cubes_to_xarray + cubes = cubes_from_xarray(dataset) + xrds = cubes_to_xarray(cubes) + +Ncdata avoids the feature limitations previously mentioned regarding Xarray's +:meth:`~xarray.DataArray.to_iris` and :meth:`~xarray.DataArray.from_iris`, +because it doesn't replicate any logic of either Xarray or Iris. +Instead, it uses the netcdf file interfaces of both to exchange data +"as if" via a netcdf file. So, these conversions *behave* just like exchanging data +via a file, but are far more efficient because they can transfer data without copying +arrays or fetching lazy data. Regridding ---------- @@ -98,7 +117,7 @@ Iris :class:`~iris.cube.Cube`\ s, although an ambition for the future. NetCDF File Control ------------------- -(More info: :term:`NetCDF Format`) +(More info: :ref:`netcdf_io`) Unlike Iris, Xarray generally provides full control of major file structures, i.e. dimensions + variables, including their order in the file. It mostly @@ -107,15 +126,41 @@ However, attribute handling is not so complete: like Iris, it interprets and modifies some recognised aspects, and can add some extra attributes not in the input. -.. todo: - More detail on dates and fill values (@pp-mo suggestion). - -Handling of dates and fill values have some special problems here. 
- -Ultimately, nearly everything wanted in a particular desired result file can -be achieved in Xarray, via provided override mechanisms (`loading keywords`_ +Whereas Iris is primarily designed to handle netCDF data encoded according to +`CF Conventions `_ , this is not so important to Xarray, +which therefore may make it harder to correctly manage this type of data. +While Xarray CF support is not complete, it may improve, and obviously +:ref:`cfxarray` may be relevant here. +There is also relevant documentation +`at this page `_. + +In some particular aspects, CF data is not loaded well (or at all), and in many cases +output is not fully CF compliant (as-per `the cf checker `_). + +* xarray has it's own interpretation of coordinates, which is different from the CF-based + approach in Iris, and means that the use of the "coordinates" attribute in output is + often not CF compliant. +* dates are converted to datetime-like objects internally. There are special features + providing `support for non-standard calendars `_, + however date units may not always be saved correctly. +* CF-style coordinate bounds variables are not fully understood. The CF approach + where bounds variables do not usually define their units or standard_names can cause + problems. Certain files containing bounds variables with more than 2 bounds (e.g. + unstructured data) may not load at all. +* missing points are always represented as NaNs, as-per Pandas usage. + (See :ref:`xarray_missing_data` ). + This means that fill values are not preserved, and that masked integer data is + converted to floats. + The netCDF default fill-values are not supported, so that variables with no + "_FillValue" attribute will have missing points equal to the fill-value + in place of NaNs. By default, output variables generally have ``_FillValue = NaN``. 
+ +Ultimately, however, nearly everything wanted in a particular desired result file +**can** be achieved in Xarray, via provided override mechanisms (`loading keywords`_ and the '`encoding`_' dictionaries). +.. _xarray_missing_data: + Missing Data ------------ Xarray uses :data:`numpy.nan` to represent missing values and this will support diff --git a/docs/src/conf.py b/docs/src/conf.py index dab4c9052d..89133d0e1b 100644 --- a/docs/src/conf.py +++ b/docs/src/conf.py @@ -17,6 +17,8 @@ # serve to show the default. # ---------------------------------------------------------------------------- +"""sphinx config.""" + import datetime from importlib.metadata import version as get_version import ntpath @@ -191,11 +193,20 @@ def _dotv(version): # sphinx.ext.todo configuration ----------------------------------------------- # See https://www.sphinx-doc.org/en/master/usage/extensions/todo.html -todo_include_todos = True - -# api generation configuration -autodoc_member_order = "alphabetical" -autodoc_default_flags = ["show-inheritance"] +todo_include_todos = False +todo_emit_warnings = False + +# sphinx.ext.autodoc configuration -------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#confval-autodoc_default_options +autodoc_default_options = { + "members": True, + "member-order": "alphabetical", + "undoc-members": True, + "private-members": False, + "special-members": False, + "inherited-members": True, + "show-inheritance": True, +} # https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#confval-autodoc_typehints autodoc_typehints = "none" @@ -234,6 +245,7 @@ def _dotv(version): intersphinx_mapping = { "cartopy": ("https://scitools.org.uk/cartopy/docs/latest/", None), "dask": ("https://docs.dask.org/en/stable/", None), + "iris-esmf-regrid": ("https://iris-esmf-regrid.readthedocs.io/en/stable/", None), "matplotlib": ("https://matplotlib.org/stable/", None), "numpy": ("https://numpy.org/doc/stable/", 
None), "python": ("https://docs.python.org/3/", None), @@ -289,8 +301,9 @@ def _dotv(version): html_theme_options = { "footer_start": ["copyright", "sphinx-version"], "footer_end": ["custom_footer"], - "collapse_navigation": True, "navigation_depth": 3, + "navigation_with_keys": False, + "show_toc_level": 2, "show_prev_next": True, "navbar_align": "content", # removes the search box from the top bar @@ -318,7 +331,6 @@ def _dotv(version): }, ], "use_edit_page_button": True, - "show_toc_level": 1, # Omit `theme-switcher` from navbar_end below to disable it # Info: https://pydata-sphinx-theme.readthedocs.io/en/stable/user_guide/light-dark.html#configure-default-theme-mode # "navbar_end": ["navbar-icon-links"], @@ -395,6 +407,7 @@ def _dotv(version): "https://biggus.readthedocs.io/", "https://stickler-ci.com/", "https://twitter.com/scitools_iris", + "https://stackoverflow.com/questions/tagged/python-iris", ] # list of sources to exclude from the build. diff --git a/docs/src/developers_guide/contributing_ci_tests.rst b/docs/src/developers_guide/contributing_ci_tests.rst index 21bcd7fca4..542178c2ff 100644 --- a/docs/src/developers_guide/contributing_ci_tests.rst +++ b/docs/src/developers_guide/contributing_ci_tests.rst @@ -1,7 +1,7 @@ -.. _developer_testing_ci: - .. include:: ../common_links.inc +.. _developer_testing_ci: + Continuous Integration (CI) Testing =================================== @@ -118,7 +118,34 @@ See the `pre-commit.ci dashboard`_ for details of recent past and active Iris jo Append to the ``ignore-words-list`` option any **valid words** that are considered **not** a typo and should **not** be corrected by `codespell`_. +ruff +---- +As of **Iris 3.8** `ruff`_ has been adopted to ensure our codebase is using best +practice. `ruff`_ is configured in the `Iris`_ GitHub repository using +`.pre-commit-config.yaml`_. 
+ +You can install and run `ruff`_ in your development **iris-dev** conda environment +via:: + + conda activate iris-dev + pip install ruff + cd iris + ruff . + +.. note:: + + The `ruff`_ ``pre-commit`` hook checks for compliance of the whole codebase. + This hook is configured in the ``[tool.ruff]`` section + of the ``pyproject.toml`` file. + + Edit the ``.ruff.toml`` file to include any *temporary* rules to be ignored. Edit the ``pyproject.toml`` to include any *permanent* rules to be ignored. We + aim to be fully `ruff`_ compliant as possible. + +For more information on how to use `ruff`_ please see the `ruff documentation`_. + + .. _.pre-commit-config.yaml: https://github.com/SciTools/iris/blob/main/.pre-commit-config.yaml .. _pre-commit.ci dashboard: https://results.pre-commit.ci/repo/github/5312648 .. _CLA Assistant: https://github.com/cla-assistant/cla-assistant .. |SciTools Contributor's License Agreement (CLA)| replace:: **SciTools Contributor's License Agreement (CLA)** +.. _ruff documentation: https://docs.astral.sh/ruff/tutorial/ diff --git a/docs/src/developers_guide/documenting/__init__.py b/docs/src/developers_guide/documenting/__init__.py index e69de29bb2..6e031999e7 100644 --- a/docs/src/developers_guide/documenting/__init__.py +++ b/docs/src/developers_guide/documenting/__init__.py @@ -0,0 +1 @@ +# noqa: D104 diff --git a/docs/src/developers_guide/documenting/docstrings_attribute.py b/docs/src/developers_guide/documenting/docstrings_attribute.py index 9485ca5af7..9b85ecb201 100644 --- a/docs/src/developers_guide/documenting/docstrings_attribute.py +++ b/docs/src/developers_guide/documenting/docstrings_attribute.py @@ -1,3 +1,6 @@ +"""docstring attribute example.""" + + class ExampleClass: """Class Summary.""" @@ -6,15 +9,16 @@ def __init__(self, arg1, arg2): Description section text. - Args: - - * arg1 (int): + Parameters + ---------- + arg1 : int First argument description. - * arg2 (float): + arg2 : float Second argument description. 
- Returns: - Boolean. + Returns + ------- + bool """ self.a = arg1 @@ -26,8 +30,9 @@ def __init__(self, arg1, arg2): def square(self): """*(read-only)* Purpose section description. - Returns: - int. + Returns + ------- + int """ return self.a * self.a diff --git a/docs/src/developers_guide/documenting/docstrings_sample_routine.py b/docs/src/developers_guide/documenting/docstrings_sample_routine.py index 65a312c027..4c26bc3569 100644 --- a/docs/src/developers_guide/documenting/docstrings_sample_routine.py +++ b/docs/src/developers_guide/documenting/docstrings_sample_routine.py @@ -1,24 +1,26 @@ +"""docstring routine example.""" + + def sample_routine(arg1, arg2, kwarg1="foo", kwarg2=None): """Purpose section text goes here. Description section longer text goes here. - Args: - - * arg1 (numpy.ndarray): + Parameters + ---------- + arg1 : numpy.ndarray First argument description. - * arg2 (numpy.ndarray): + arg2 : numpy.ndarray Second argument description. - - Kwargs: - - * kwarg1 (string): + kwarg1: str, optional The first keyword argument. This argument description can be multi-lined. - * kwarg2 (Boolean or None): + kwarg2 : bool, optional The second keyword argument. - Returns: + Returns + ------- + numpy.ndarray numpy.ndarray of arg1 * arg2 """ diff --git a/docs/src/further_topics/filtering_warnings.rst b/docs/src/further_topics/filtering_warnings.rst index c71cae433a..f39e6153f2 100644 --- a/docs/src/further_topics/filtering_warnings.rst +++ b/docs/src/further_topics/filtering_warnings.rst @@ -16,7 +16,7 @@ you to ``ignore`` Warnings which you do not find helpful. import iris import iris.coord_systems - import iris.exceptions + import iris.warnings # Hack to ensure doctests actually see Warnings that are raised, and that # they have a relative path (so a test pass is not machine-dependent). @@ -47,9 +47,9 @@ Warnings: >>> my_operation() ... 
- iris/coord_systems.py:434: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. - warnings.warn(wmsg, category=iris.exceptions.IrisUserWarning) - iris/coord_systems.py:772: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. + iris/coord_systems.py:442: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. + warnings.warn(wmsg, category=iris.warnings.IrisUserWarning) + iris/coord_systems.py:768: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. warnings.warn( Warnings can be suppressed using the Python warnings filter with the ``ignore`` @@ -110,8 +110,8 @@ You can target specific Warning messages, e.g. ... warnings.filterwarnings("ignore", message="Discarding false_easting") ... my_operation() ... - iris/coord_systems.py:434: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. - warnings.warn(wmsg, category=iris.exceptions.IrisUserWarning) + iris/coord_systems.py:442: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. + warnings.warn(wmsg, category=iris.warnings.IrisUserWarning) :: @@ -125,18 +125,16 @@ Or you can target Warnings raised by specific lines of specific modules, e.g. .. doctest:: filtering_warnings >>> with warnings.catch_warnings(): - ... warnings.filterwarnings("ignore", module="iris.coord_systems", lineno=449) + ... warnings.filterwarnings("ignore", module="iris.coord_systems", lineno=442) ... my_operation() ... 
- iris/coord_systems.py:434: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. - warnings.warn(wmsg, category=iris.exceptions.IrisUserWarning) - iris/coord_systems.py:772: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. + iris/coord_systems.py:768: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. warnings.warn( :: - python -W ignore:::iris.coord_systems:453 - export PYTHONWARNINGS=ignore:::iris.coord_systems:453 + python -W ignore:::iris.coord_systems:442 + export PYTHONWARNINGS=ignore:::iris.coord_systems:442 Warnings from a Common Source ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -178,7 +176,7 @@ Warnings of a Common Type code you are calling.** The below example will ``ignore`` any -:class:`~iris.exceptions.IrisDefaultingWarning` that gets raised by *any* +:class:`~iris.warnings.IrisDefaultingWarning` that gets raised by *any* module during execution: .. doctest:: filtering_warnings @@ -186,25 +184,25 @@ module during execution: >>> with warnings.catch_warnings(): ... warnings.filterwarnings( ... "ignore", - ... category=iris.exceptions.IrisDefaultingWarning + ... category=iris.warnings.IrisDefaultingWarning ... ) ... my_operation() ... - iris/coord_systems.py:434: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. - warnings.warn(wmsg, category=iris.exceptions.IrisUserWarning) + iris/coord_systems.py:442: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. 
+ warnings.warn(wmsg, category=iris.warnings.IrisUserWarning) ---- -Using :class:`~iris.exceptions.IrisUserWarning` in the filter will ``ignore`` -both Warnings, since :class:`~iris.exceptions.IrisDefaultingWarning` subclasses -:class:`~iris.exceptions.IrisUserWarning` : +Using :class:`~iris.warnings.IrisUserWarning` in the filter will ``ignore`` +both Warnings, since :class:`~iris.warnings.IrisDefaultingWarning` subclasses +:class:`~iris.warnings.IrisUserWarning` : .. doctest:: filtering_warnings >>> with warnings.catch_warnings(): ... warnings.filterwarnings( ... "ignore", - ... category=iris.exceptions.IrisUserWarning + ... category=iris.warnings.IrisUserWarning ... ) ... my_operation() @@ -222,10 +220,10 @@ There are several built-in Python warning categories that can be used here (:class:`DeprecationWarning` being a popular example, see :external+python:mod:`warnings` for more). Since Iris has so many different warnings that might be raised, Iris subclasses -:class:`UserWarning` to :class:`~iris.exceptions.IrisUserWarning`, which itself +:class:`UserWarning` to :class:`~iris.warnings.IrisUserWarning`, which itself has **many** specialised subclasses. These subclasses exist to give you more granularity in your warning filtering; you can see the full list by -searching the :mod:`iris.exceptions` page for ``warning`` . +viewing the :mod:`iris.warnings` module. .. attention:: diff --git a/docs/src/further_topics/index.rst b/docs/src/further_topics/index.rst index 016d9f80ea..73ce3d55e7 100644 --- a/docs/src/further_topics/index.rst +++ b/docs/src/further_topics/index.rst @@ -17,4 +17,5 @@ Extra information on specific technical issues. 
missing_data_handling netcdf_io dask_best_practices/index - ugrid/index \ No newline at end of file + ugrid/index + which_regridder_to_use \ No newline at end of file diff --git a/docs/src/further_topics/netcdf_io.rst b/docs/src/further_topics/netcdf_io.rst index e151b2b7c1..bae32ebcae 100644 --- a/docs/src/further_topics/netcdf_io.rst +++ b/docs/src/further_topics/netcdf_io.rst @@ -134,7 +134,49 @@ Deferred Saving TBC -Guess Axis ------------ - -TBC +Guessing Coordinate Axes +------------------------ + +Iris will attempt to add an ``axis`` attribute when saving any coordinate +variable in a NetCDF file. E.g: + +:: + + float longitude(longitude) ; + longitude:axis = "X" ; + +This is achieved by calling :func:`iris.util.guess_coord_axis` on each +coordinate being saved. + +Disabling Axis-Guessing +^^^^^^^^^^^^^^^^^^^^^^^ + +For some coordinates, :func:`~iris.util.guess_coord_axis` will derive an +axis that is not appropriate. If you have such a coordinate, you can disable +axis-guessing by setting the coordinate's +:attr:`~iris.coords.Coord.ignore_axis` property to ``True``. + +One example (from https://github.com/SciTools/iris/issues/5003) is a +coordinate describing pressure thresholds, measured in hecto-pascals. +Iris interprets pressure units as indicating a Z-dimension coordinate, since +pressure is most commonly used to describe altitude/depth. But a +**pressure threshold** coordinate is instead describing alternate +**scenarios** - not a spatial dimension at all - and it is therefore +inappropriate to assign an axis to it. + +Worked example: + +.. doctest:: + + >>> from iris.coords import DimCoord + >>> from iris.util import guess_coord_axis + >>> my_coord = DimCoord( + ... points=[1000, 1010, 1020], + ... long_name="pressure_threshold", + ... units="hPa", + ... 
) + >>> print(guess_coord_axis(my_coord)) + Z + >>> my_coord.ignore_axis = True + >>> print(guess_coord_axis(my_coord)) + None diff --git a/docs/src/further_topics/which_regridder_to_use.rst b/docs/src/further_topics/which_regridder_to_use.rst new file mode 100644 index 0000000000..5d7d7fdba1 --- /dev/null +++ b/docs/src/further_topics/which_regridder_to_use.rst @@ -0,0 +1,422 @@ +.. include:: ../common_links.inc + +.. _which_regridder_to_use: + +====================== +Which Regridder to Use +====================== + +This section compares all the regridding schemes which exist in `Iris`_, and +externally in `iris-esmf-regrid`_ with a view to helping you to choose the right +regridder for your workflow. The choice of regridder +is usually limited by the kind of data you are going from and to, but there are +also factors of performance and numerical accuracy to consider. This section +provides a reference for how each of the regridders differ with respect to +these factors, beginning with a set of short tables going into their differences +in brief and ending in a more in depth look at how these differences might +play out in different contexts. + +For an introduction on using regridders, see the :ref:`user guide`. + +Regridder Comparison +==================== + +We will highlight here some of the properties of each regridder in a table of +the following form: + ++-----------------+-----------------------------------------------------------+ +| **API** | Link to API documentation. | ++-----------------+-----------------------------------------------------------+ +| **Method** | The type of algorithm used to calculate the result. | +| | See section on `comparing methods`_. | ++-----------------+-----------------------------------------------------------+ +| **Source Grid** | The type of **coordinates** required on the ``src`` cube. 
| ++-----------------+-----------------------------------------------------------+ +| **Target Grid** | The type of **coordinates** required on the ``tgt`` cube. | ++-----------------+-----------------------------------------------------------+ +| **Coordinate | The type of **coordinate system** required on the | +| System** | ``src``/``tgt`` cube coordinates. | ++-----------------+-----------------------------------------------------------+ +| **Lazy | If the result is calculated lazily. See | +| Regridding** | :doc:`real and lazy data `.| ++-----------------+-----------------------------------------------------------+ +| **Weights | See `regridder performance`_. | +| Caching** | | ++-----------------+-----------------------------------------------------------+ +| **Notes** | Additional details. | ++-----------------+-----------------------------------------------------------+ + +AreaWeighted +------------ + ++-----------------+--------------------------------------------------------+ +| **API** | :class:`~iris.analysis.AreaWeighted` | ++-----------------+--------------------------------------------------------+ +| **Method** | Conservative | ++-----------------+--------------------------------------------------------+ +| **Source Grid** | Pair of 1D lat/lon coordinates, must have bounds. | ++-----------------+--------------------------------------------------------+ +| **Target Grid** | Pair of 1D lat/lon coordinates, must have bounds. | ++-----------------+--------------------------------------------------------+ +| **Coordinate | Must be equal on ``src`` and ``tgt``, may be ``None``. 
| +| System** | | ++-----------------+--------------------------------------------------------+ +| **Lazy | ``True`` | +| Regridding** | | ++-----------------+--------------------------------------------------------+ +| **Weights | ``True`` | +| Caching** | | ++-----------------+--------------------------------------------------------+ +| **Notes** | Supports masked data with ``mdtol`` argument. | +| | See `area conservation`_. | ++-----------------+--------------------------------------------------------+ + +Linear +------ + ++-----------------+----------------------------------------------------------------+ +| **API** | :class:`~iris.analysis.Linear` | ++-----------------+----------------------------------------------------------------+ +| **Method** | Linear | ++-----------------+----------------------------------------------------------------+ +| **Source Grid** | Pair of 1D lat/lon coordinates. | ++-----------------+----------------------------------------------------------------+ +| **Target Grid** | Pair of 1D lat/lon coordinates. | ++-----------------+----------------------------------------------------------------+ +| **Coordinate | May be present on both ``src`` and ``tgt`` or both be ``None``.| +| System** | May be different. | ++-----------------+----------------------------------------------------------------+ +| **Lazy | ``True`` | +| Regridding** | | ++-----------------+----------------------------------------------------------------+ +| **Weights | ``False`` | +| Caching** | | ++-----------------+----------------------------------------------------------------+ +| **Notes** | Supports extrapolation outside source data bounds. 
| ++-----------------+----------------------------------------------------------------+ + +Nearest +------- + ++-----------------+----------------------------------------------------------------+ +| **API** | :class:`~iris.analysis.Nearest` | ++-----------------+----------------------------------------------------------------+ +| **Method** | Nearest (destination to source) | ++-----------------+----------------------------------------------------------------+ +| **Source Grid** | Pair of 1D lat/lon coordinates. | ++-----------------+----------------------------------------------------------------+ +| **Target Grid** | Pair of 1D lat/lon coordinates. | ++-----------------+----------------------------------------------------------------+ +| **Coordinate | May be present on both ``src`` and ``tgt`` or both be ``None``.| +| System** | May be different. | ++-----------------+----------------------------------------------------------------+ +| **Lazy | ``True`` | +| Regridding** | | ++-----------------+----------------------------------------------------------------+ +| **Weights | ``False`` | +| Caching** | | ++-----------------+----------------------------------------------------------------+ + +UnstructuredNearest +------------------- + ++-----------------+----------------------------------------------------+ +| **API** | :class:`~iris.analysis.UnstructuredNearest` | ++-----------------+----------------------------------------------------+ +| **Method** | Nearest (destination to source) | ++-----------------+----------------------------------------------------+ +| **Source Grid** | Pair of lat/lon coordinates with any dimensionality| +| | (e.g., 1D or 2D). Must be associated to the same | +| | axes on the source cube. | ++-----------------+----------------------------------------------------+ +| **Target Grid** | Pair of 1D lat/lon coordinates. 
| ++-----------------+----------------------------------------------------+ +| **Coordinate | Must be equal on ``src`` and ``tgt``, may be | +| System** | ``None``. | ++-----------------+----------------------------------------------------+ +| **Lazy | ``False`` | +| Regridding** | | ++-----------------+----------------------------------------------------+ +| **Weights | ``False`` | +| Caching** | | ++-----------------+----------------------------------------------------+ + +PointInCell +----------- + ++-----------------+----------------------------------------------------+ +| **API** | :class:`~iris.analysis.PointInCell` | ++-----------------+----------------------------------------------------+ +| **Method** | Point in cell | ++-----------------+----------------------------------------------------+ +| **Source Grid** | Pair of lat/lon coordinates with any dimensionality| +| | (e.g., 1D or 2D). Must be associated to the same | +| | axes on the source cube. | ++-----------------+----------------------------------------------------+ +| **Target Grid** | Pair of 1D lat/lon coordinates, must have bounds. | ++-----------------+----------------------------------------------------+ +| **Coordinate | Must be equal on ``srs`` and ``tgt``, may be | +| System** | ``None``. 
| ++-----------------+----------------------------------------------------+ +| **Lazy | ``False`` | +| Regridding** | | ++-----------------+----------------------------------------------------+ +| **Weights | ``True`` | +| Caching** | | ++-----------------+----------------------------------------------------+ + +External Regridders +=================== + +ESMFAreaWeighted +---------------- + ++-----------------+-------------------------------------------------------------------------+ +| **API** | :class:`~iris-esmf-regrid:esmf_regrid.schemes.ESMFAreaWeighted` | ++-----------------+-------------------------------------------------------------------------+ +| **Method** | Conservative | ++-----------------+-------------------------------------------------------------------------+ +| **Source Grid** | May be either: | +| | | +| | - A pair of 1D x/y coordinates on different axes. Must have bounds. | +| | - A pair of 2D x/y coordinates on the same axes. Must have bounds. | +| | - An unstructured mesh located on cell faces. | ++-----------------+-------------------------------------------------------------------------+ +| **Target Grid** | Any of the above. May be a different type to ``src`` grid. | ++-----------------+-------------------------------------------------------------------------+ +| **Coordinate | ``src`` and ``tgt`` grid may have any coordinate system or ``None``. 
| +| System** | | ++-----------------+-------------------------------------------------------------------------+ +| **Lazy | ``True`` | +| Regridding** | | ++-----------------+-------------------------------------------------------------------------+ +| **Weights | ``True`` | +| Caching** | | ++-----------------+-------------------------------------------------------------------------+ +| **Notes** | Supports masked data with ``mdtol`` argument (see `area conservation`_).| +| | Differs numerically to :class:`~iris.analysis.AreaWeighted` due to | +| | representing edges as great circle arcs rather than lines of | +| | latitude/longitude. This causes less difference at higher resolutions. | +| | This can be mitigated somewhat by using the | +| | ``src_resolution`` / ``tgt_resolution`` arguments. | ++-----------------+-------------------------------------------------------------------------+ + +ESMFBilinear +------------ + ++-----------------+---------------------------------------------------------------------+ +| **API** | :class:`~iris-esmf-regrid:esmf_regrid.schemes.ESMFBilinear` | ++-----------------+---------------------------------------------------------------------+ +| **Method** | Linear | ++-----------------+---------------------------------------------------------------------+ +| **Source Grid** | May be either: | +| | | +| | - A pair of 1D x/y coordinates on different axes. | +| | - A pair of 2D x/y coordinates on the same axes. | +| | - An unstructured mesh located on cell faces. | ++-----------------+---------------------------------------------------------------------+ +| **Target Grid** | Any of the above. May be a different type to ``src`` grid. 
| ++-----------------+---------------------------------------------------------------------+ +| **Coordinate | ``src`` and ``tgt`` grid may have any coordinate system or ``None``.| +| System** | | ++-----------------+---------------------------------------------------------------------+ +| **Lazy | ``True`` | +| Regridding** | | ++-----------------+---------------------------------------------------------------------+ +| **Weights | ``True`` | +| Caching** | | ++-----------------+---------------------------------------------------------------------+ + +ESMFNearest +----------- + ++-----------------+---------------------------------------------------------------------+ +| **API** | :class:`~iris-esmf-regrid:esmf_regrid.schemes.ESMFNearest` | ++-----------------+---------------------------------------------------------------------+ +| **Method** | Nearest (destination to source) | ++-----------------+---------------------------------------------------------------------+ +| **Source Grid** | May be either: | +| | | +| | - A pair of 1D x/y coordinates on different axes. | +| | - A pair of 2D x/y coordinates on the same axes. | +| | - An unstructured mesh located on cell faces | ++-----------------+---------------------------------------------------------------------+ +| **Target Grid** | Any of the above. May be a different type to ``src`` grid. | ++-----------------+---------------------------------------------------------------------+ +| **Coordinate | ``src`` and ``tgt`` grid may have any coordinate system or ``None``.| +| System** | | ++-----------------+---------------------------------------------------------------------+ +| **Lazy | ``True`` | +| Regridding** | | ++-----------------+---------------------------------------------------------------------+ +| **Weights | ``True`` | +| Caching** | | ++-----------------+---------------------------------------------------------------------+ + +.. 
_comparing methods:
+
+Comparing Methods
+=================
+
+The various regridding algorithms are implementations of the following
+methods. While there may be slight differences in the way each regridder
+implements a given method, each regridder broadly follows the principles
+of that method. We give here a very brief overview of what situations
+each method is best suited to, followed by a more detailed discussion.
+
+Conservative
+------------
+
+Good for representing the *entirety* of the underlying data.
+Designed for data represented by cell faces. A fuller description of
+what it means to be *conservative* can be found in the section on
+`area conservation`_.
+
+Linear
+------
+
+Good for approximating data represented at *precise points* in space and in
+cases where it is desirable for the resulting data to be smooth. For more
+detail, see the section on `regridder smoothness`_.
+
+Nearest
+-------
+
+Tends to be the fastest regridding method. Ensures each resulting data value
+represents a data value in the source. Good in cases where averaging is
+inappropriate, e.g., for discontinuous data.
+
+Point in cell
+-------------
+
+Similarly to the conservative method, represents the entirety of the underlying
+data. Works well with data whose source is an unstructured series of points.
+
+.. _numerical accuracy:
+
+Numerical Accuracy
+==================
+
+An important thing to understand when regridding is that no regridding method
+is perfect. That is to say, you will tend to lose information when you regrid
+so that if you were to regrid from a source grid to a target and then back onto
+the original source, you will usually end up with slightly different data.
+Furthermore, statistical properties such as min, max and standard deviation are
+not guaranteed to be preserved. While regridding is inherently imperfect, there
+are some properties which can be better preserved by choosing the appropriate
+regridding method. These include:
+
+.. 
_area conservation: + +Global Area Weighted Average +---------------------------- +Area weighted regridding schemes such as :class:`~iris.analysis.AreaWeighted` and +:class:`~iris-esmf-regrid:esmf_regrid.schemes.ESMFAreaWeighted` +use *conservative* regridding schemes. The property which these regridders +*conserve* is the global area weighted average of the data (or equivalently, +the area weighted sum). More precisely, this means that:: + + When regridding from a source cube to a target cube defined + over the same area (e.g., the entire globe), assuming there + are no masked data points, the area weighted average + (weighted by the area covered by each data point) of the + source cube ought to be equal (within minor tolerances) + to the area weighted average of the result. + +This property will be particularly important to consider if you are intending to +calculate global properties such as average temperature or total rainfall over a +given area. It may be less important if you are only interested in local behaviour, +e.g., temperature at particular locations. + +When there are masked points in your data, the same global conservative properties +no longer strictly hold. This is because the area which the unmasked points in the +source cover is no longer the same as the area covered by unmasked points in the +target. With the keyword argument ``mdtol=0`` this means that there will be an area +around the source mask which will be masked in the result and therefore unaccounted +for in the area weighted average calculation. Conversely, with the keyword argument +``mdtol=1`` there will be an unmasked area in the result that is masked in the source. +This may be particularly important if you are intending to calculate properties +which depend on area, e.g., calculating the total global rainfall based on data in units +of ``kg m-2`` as an area weighted sum. 
With ``mdtol=0`` this will consistently +underestimate this total and with ``mdtol=1`` will consistently overestimate. This can +be somewhat mitigated with a choice of ``mdtol=0.5``, but you should still be aware of +potential inaccuracies. It should be noted that this choice of ``mdtol`` is highly +context dependent and there will likely be occasions where a choice of ``mdtol=0`` or +``mdtol=1`` is more suitable. The important thing is to *know your data, know what* +*you're doing with your data and know how your regridder fits in this process*. + +.. todo:: + + add worked example + +.. _regridder smoothness: + +Data Gradient/Smoothness +------------------------ +Alternatively, rather than conserving global properties, it may be more important to +approximate each individual point of data as accurately as possible. In this case, it +may be more appropriate to use a *linear* regridder such as :class:`~iris.analysis.Linear` +or :class:`~iris-esmf-regrid:esmf_regrid.schemes.ESMFBilinear`. + +The linear method calculates each target point as the weighted average of the four +surrounding source points. This average is weighted according to how close this target +point is to the surrounding points. Notably, the value assigned to a target point varies +*continuously* with its position (as opposed to nearest neighbour regridding). + +Such regridders work best when the data in question can be considered +as a collection of measurements made at *points on a smoothly varying field*. The +difference in behaviour between linear and conservative regridders can be seen most +clearly when there is a large difference between the source and target grid resolution. + +Suppose you were regridding from a high resolution to a low resolution, if you were +regridding using a *conservative* method, each result point would be the average of many +source points. On the other hand, if you were using a *linear* method then the result +would only be the average of the 4 nearest source points. 
This means that, while +*conservative* methods will give you a better idea of the *totality* of the source data, +*linear* methods will give you a better idea of the source data at a *particular point*. + +Conversely, suppose you were regridding from a low resolution to a high resolution. For +other regridding methods (conservative and nearest), most of the target points covered by +a given source point would have the same value and there would be a steep difference between +target points near the cell boundary. For linear regridding however, the resulting data +will vary smoothly. + +.. todo:: + + add worked example + +Consistency +----------- +As noted above, each regridding method has its own unique effect on the data. While this can +be manageable when contained within the context of a particular workflow, you should take care +not to compare data which has been regridded with different regridding methods as the artefacts +of that regridding method may dominate the underlying differences. + +.. todo:: + + add worked example + +It should also be noted that some implementations of the *same method* (e.g., +:class:`~iris.analysis.Nearest` and :class:`~iris.analysis.UnstructuredNearest`) may +differ slightly and so may yield slightly different results when applied to equivalent +data. However this difference will be significantly less than the difference between +regridders based on different methods. + +.. _regridder performance: + +Performance +----------- +Regridding can be an expensive operation, but there are ways to work with regridders to +mitigate this cost. For most regridders, the regridding process can be broken down into +two steps: + +- *Preparing* the regridder by comparing the source and target grids and generating weights. +- *Performing* the regridding by applying those weights to the source data. + +Generally, the *prepare* step is the more expensive of the two. It is better to avoid +repeating this step unnecessarily. 
This can be done by *reusing* a regridder, as described +in the :ref:`user guide `. + +.. todo:: + + add benchmarks - note the iris and iris-esmf-regrid version diff --git a/docs/src/userguide/citation.rst b/docs/src/userguide/citation.rst index 7169ca3072..d0496f4876 100644 --- a/docs/src/userguide/citation.rst +++ b/docs/src/userguide/citation.rst @@ -5,53 +5,16 @@ Citing Iris =========== If Iris played an important part in your research then please add us to your -reference list by using one of the recommendations below. +reference list by using the recommendations below. -************ -BibTeX Entry -************ +Iris can be cited directly from the `GitHub repository `_ +, for more information including where to find the citation on the repo please +see the `GitHub documentation`_. -For example:: +The Iris citation does not contain the version of the software used. We +recommend that you use the version number of the release you used, and the +commit hash if you checked out a unreleased version of Iris. This will allow +others to reproduce the environment that you worked in. You can see what a +citation should look like for a particular version of Iris, on the `GitHub documentation`_. - @manual{Iris, - author = {{Met Office}}, - title = {Iris: A powerful, format-agnostic, and community-driven Python package for analysing and visualising Earth science data}, - edition = {v3.7}, - year = {2010 - 2023}, - address = {Exeter, Devon}, - url = {https://github.com/SciTools/iris}, - doi = {10.5281/zenodo.8305232} - } - - -******************* -Downloaded Software -******************* - -Suggested format:: - - ProductName. Version. ReleaseDate. Publisher. Location. DOIorURL. DownloadDate. - -For example:: - - Iris. v3.7. 31-Aug-2023. Met Office. UK. https://doi.org/10.5281/zenodo.8305232 22-12-2022 - - -******************** -Checked Out Software -******************** - -Suggested format:: - - ProductName. Publisher. URL. CheckoutDate. 
RepositorySpecificCheckoutInformation. - -For example:: - - Iris. Met Office. https://github.com/SciTools/iris.git 31-08-2023 - -.. _How to cite and describe software: https://software.ac.uk/how-cite-software - - -Reference: [Jackson]_. - -.. [Jackson] Jackson, M. 2012. `How to cite and describe software`_. Accessed 06-03-2013. +.. _GitHub documentation: https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-citation-files/ diff --git a/docs/src/userguide/interpolation_and_regridding.rst b/docs/src/userguide/interpolation_and_regridding.rst index cba7d778d5..571c43bf0e 100644 --- a/docs/src/userguide/interpolation_and_regridding.rst +++ b/docs/src/userguide/interpolation_and_regridding.rst @@ -25,13 +25,15 @@ available in Iris: The following are the regridding schemes that are currently available in Iris: * linear regridding (:class:`iris.analysis.Linear`), -* nearest-neighbour regridding (:class:`iris.analysis.Nearest`), and +* nearest-neighbour regridding (:class:`iris.analysis.Nearest` and :class:`iris.analysis.UnstructuredNearest`), +* point in cell regridding (:class:`iris.analysis.PointInCell`) and * area-weighted regridding (:class:`iris.analysis.AreaWeighted`, first-order conservative). The linear, nearest-neighbor, and area-weighted regridding schemes support lazy regridding, i.e. if the source cube has lazy data, the resulting cube will also have lazy data. See :doc:`real_and_lazy_data` for an introduction to lazy data. +See :doc:`../further_topics/which_regridder_to_use` for a more in depth overview of the different regridders. .. 
_interpolation: diff --git a/docs/src/userguide/plotting_examples/1d_quickplot_simple.py b/docs/src/userguide/plotting_examples/1d_quickplot_simple.py index 725ff69f11..58d0918dcb 100644 --- a/docs/src/userguide/plotting_examples/1d_quickplot_simple.py +++ b/docs/src/userguide/plotting_examples/1d_quickplot_simple.py @@ -1,3 +1,5 @@ +"""Simple 1D plot using iris.quickplot.plot().""" + import matplotlib.pyplot as plt import iris diff --git a/docs/src/userguide/plotting_examples/1d_simple.py b/docs/src/userguide/plotting_examples/1d_simple.py index 249412f44f..4511a0fbe1 100644 --- a/docs/src/userguide/plotting_examples/1d_simple.py +++ b/docs/src/userguide/plotting_examples/1d_simple.py @@ -1,3 +1,5 @@ +"""Simple 1D plot using iris.plot.plot().""" + import matplotlib.pyplot as plt import iris diff --git a/docs/src/userguide/plotting_examples/1d_with_legend.py b/docs/src/userguide/plotting_examples/1d_with_legend.py index 6b29fc9e76..b325657766 100644 --- a/docs/src/userguide/plotting_examples/1d_with_legend.py +++ b/docs/src/userguide/plotting_examples/1d_with_legend.py @@ -1,3 +1,5 @@ +"""Simple 1D plot using iris.plot.plot() with a legend.""" + import matplotlib.pyplot as plt import iris diff --git a/docs/src/userguide/plotting_examples/brewer.py b/docs/src/userguide/plotting_examples/brewer.py index 905296279d..e42ad57cc0 100644 --- a/docs/src/userguide/plotting_examples/brewer.py +++ b/docs/src/userguide/plotting_examples/brewer.py @@ -1,3 +1,5 @@ +"""Plot a chart of all Brewer colour schemes.""" + import matplotlib.pyplot as plt import numpy as np diff --git a/docs/src/userguide/plotting_examples/cube_blockplot.py b/docs/src/userguide/plotting_examples/cube_blockplot.py index 50a2d1f4d7..1f4d3985a3 100644 --- a/docs/src/userguide/plotting_examples/cube_blockplot.py +++ b/docs/src/userguide/plotting_examples/cube_blockplot.py @@ -1,3 +1,5 @@ +"""Cube block plot using using iris.plot.pcolormesh().""" + import matplotlib.pyplot as plt import iris diff --git 
a/docs/src/userguide/plotting_examples/cube_brewer_cite_contourf.py b/docs/src/userguide/plotting_examples/cube_brewer_cite_contourf.py index 6e3996660c..4e28510e43 100644 --- a/docs/src/userguide/plotting_examples/cube_brewer_cite_contourf.py +++ b/docs/src/userguide/plotting_examples/cube_brewer_cite_contourf.py @@ -1,3 +1,5 @@ +"""Adding a citation for a plot using iris.plot.citation().""" + import matplotlib.pyplot as plt import iris diff --git a/docs/src/userguide/plotting_examples/cube_brewer_contourf.py b/docs/src/userguide/plotting_examples/cube_brewer_contourf.py index d562443f07..94692c924c 100644 --- a/docs/src/userguide/plotting_examples/cube_brewer_contourf.py +++ b/docs/src/userguide/plotting_examples/cube_brewer_contourf.py @@ -1,3 +1,5 @@ +"""Plot a cube with a Brewer colour palette using iris.quickplot.contourf().""" + import matplotlib.cm as mpl_cm import matplotlib.pyplot as plt diff --git a/docs/src/userguide/plotting_examples/cube_contour.py b/docs/src/userguide/plotting_examples/cube_contour.py index 7a9fe6ef09..0d8c1e02aa 100644 --- a/docs/src/userguide/plotting_examples/cube_contour.py +++ b/docs/src/userguide/plotting_examples/cube_contour.py @@ -1,3 +1,8 @@ +"""Simple contour plot of a cube. + +Can use iris.plot.contour() or iris.quickplot.contour(). + +""" import matplotlib.pyplot as plt import iris diff --git a/docs/src/userguide/plotting_examples/cube_contourf.py b/docs/src/userguide/plotting_examples/cube_contourf.py index 5989e42c71..531dd45d25 100644 --- a/docs/src/userguide/plotting_examples/cube_contourf.py +++ b/docs/src/userguide/plotting_examples/cube_contourf.py @@ -1,3 +1,8 @@ +"""Simple filled contour plot of a cube. + +Can use iris.plot.contourf() or iris.quickplot.contourf(). 
+ +""" import matplotlib.pyplot as plt import iris diff --git a/docs/src/userguide/plotting_examples/masking_brazil_plot.py b/docs/src/userguide/plotting_examples/masking_brazil_plot.py new file mode 100644 index 0000000000..3dc521d451 --- /dev/null +++ b/docs/src/userguide/plotting_examples/masking_brazil_plot.py @@ -0,0 +1,24 @@ +"""Global cube masked to Brazil and plotted with quickplot.""" +import cartopy.io.shapereader as shpreader +import matplotlib.pyplot as plt + +import iris +import iris.quickplot as qplt +from iris.util import mask_cube_from_shapefile + +country_shp_reader = shpreader.Reader( + shpreader.natural_earth( + resolution="110m", category="cultural", name="admin_0_countries" + ) +) +brazil_shp = [ + country.geometry + for country in country_shp_reader.records() + if "Brazil" in country.attributes["NAME_LONG"] +][0] + +cube = iris.load_cube(iris.sample_data_path("air_temp.pp")) +brazil_cube = mask_cube_from_shapefile(cube, brazil_shp) + +qplt.pcolormesh(brazil_cube) +plt.show() diff --git a/docs/src/userguide/regridding_plots/interpolate_column.py b/docs/src/userguide/regridding_plots/interpolate_column.py index ec1f37a3b2..681af0c998 100644 --- a/docs/src/userguide/regridding_plots/interpolate_column.py +++ b/docs/src/userguide/regridding_plots/interpolate_column.py @@ -1,3 +1,5 @@ +"""Interpolate using iris.analysis.Linear().""" + import matplotlib.pyplot as plt import numpy as np diff --git a/docs/src/userguide/regridding_plots/regridded_to_global.py b/docs/src/userguide/regridding_plots/regridded_to_global.py index 5ce6513ef0..8e43f1471a 100644 --- a/docs/src/userguide/regridding_plots/regridded_to_global.py +++ b/docs/src/userguide/regridding_plots/regridded_to_global.py @@ -1,3 +1,5 @@ +"""Interpolate using iris.analysis.Linear().""" + import matplotlib.pyplot as plt import iris diff --git a/docs/src/userguide/regridding_plots/regridded_to_global_area_weighted.py b/docs/src/userguide/regridding_plots/regridded_to_global_area_weighted.py 
index f53e624e03..6c906ba87b 100644 --- a/docs/src/userguide/regridding_plots/regridded_to_global_area_weighted.py +++ b/docs/src/userguide/regridding_plots/regridded_to_global_area_weighted.py @@ -1,3 +1,5 @@ +"""Regrid using iris.analysis.AreaWeighted.""" + import matplotlib.colors import matplotlib.pyplot as plt import numpy as np diff --git a/docs/src/userguide/regridding_plots/regridded_to_rotated.py b/docs/src/userguide/regridding_plots/regridded_to_rotated.py index cb54a016cb..31afdb7da1 100644 --- a/docs/src/userguide/regridding_plots/regridded_to_rotated.py +++ b/docs/src/userguide/regridding_plots/regridded_to_rotated.py @@ -1,3 +1,5 @@ +"""Rotated pole.""" + import matplotlib.pyplot as plt import iris diff --git a/docs/src/userguide/regridding_plots/regridding_plot.py b/docs/src/userguide/regridding_plots/regridding_plot.py index c559e0e3e7..ed45822a51 100644 --- a/docs/src/userguide/regridding_plots/regridding_plot.py +++ b/docs/src/userguide/regridding_plots/regridding_plot.py @@ -1,3 +1,5 @@ +"""Plot regridded data.""" + import matplotlib.pyplot as plt import iris diff --git a/docs/src/userguide/subsetting_a_cube.rst b/docs/src/userguide/subsetting_a_cube.rst index 019982ad6d..27a223042e 100644 --- a/docs/src/userguide/subsetting_a_cube.rst +++ b/docs/src/userguide/subsetting_a_cube.rst @@ -5,7 +5,8 @@ Subsetting a Cube ================= The :doc:`loading_iris_cubes` section of the user guide showed how to load data into multidimensional Iris cubes. -However it is often necessary to reduce the dimensionality of a cube down to something more appropriate and/or manageable. +However it is often necessary to reduce the dimensionality of a cube down to something more appropriate and/or manageable, +or only examine and analyse a subset of data in a dimension. Iris provides several ways of reducing both the amount of data and/or the number of dimensions in your cube depending on the circumstance. 
In all cases **the subset of a valid cube is itself a valid cube**. @@ -329,6 +330,36 @@ on bounds can be done in the following way:: The above example constrains to cells where either the upper or lower bound occur after 1st January 2008. +Cube Masking +-------------- + +.. _masking-from-shapefile: + +Masking from a shapefile +^^^^^^^^^^^^^^^^^^^^^^^^ + +Often we want to perform some kind of analysis over a complex geographical feature - only over land points or sea points, +or over a continent, a country, a river watershed or administrative region. These geographical features can often be described by shapefiles. +Shapefiles are a file format first developed for GIS software in the 1990s, and now `Natural Earth`_ maintain a large freely usable database of shapefiles of many geographical and political divisions, +accessible via cartopy. Users may also provide their own custom shapefiles. + +These shapefiles can be used to mask an iris cube, so that any data outside the bounds of the shapefile is hidden from further analysis or plotting. + +First, we load the correct shapefile from NaturalEarth via the `Cartopy`_ instructions. Here we get one for Brazil. +The `.geometry` attribute of the records in the reader contains the shapely polygon we're interested in - once we have those we just need to provide them to +the :class:`iris.util.mask_cube_from_shapefile` function. Once plotted, we can see that only our area of interest remains in the data. + + +.. plot:: userguide/plotting_examples/masking_brazil_plot.py + :include-source: + +We can see that the dimensions of the cube haven't changed - the plot is still global. But only the data over Brazil is plotted - the rest is masked. + +.. note:: + While Iris will try to dynamically adjust the shapefile to mask cubes of different projections, it can struggle with rotated pole projections and cubes with Meridians not at 0°. + Converting your Cube's coordinate system may help if you get a fully masked cube from this function. 
+ + Cube Iteration -------------- It is not possible to directly iterate over an Iris cube. That is, you cannot use code such as @@ -440,3 +471,7 @@ Similarly, Iris cubes have indexing capability:: # Get the second element of the first dimension and all of the second dimension # in reverse, by steps of two. print(cube[1, ::-2]) + + +.. _Cartopy: https://scitools.org.uk/cartopy/docs/latest/tutorials/using_the_shapereader.html#id1 +.. _Natural Earth: https://www.naturalearthdata.com/ diff --git a/docs/src/whatsnew/3.8.rst b/docs/src/whatsnew/3.8.rst new file mode 100644 index 0000000000..7a1c66d14b --- /dev/null +++ b/docs/src/whatsnew/3.8.rst @@ -0,0 +1,292 @@ +.. include:: ../common_links.inc + +v3.8 (21 Feb 2024) [release candidate] +************************************** + +This document explains the changes made to Iris for this release +(:doc:`View all changes `.) + + +.. dropdown:: v3.8 Release Highlights + :color: primary + :icon: info + :animate: fade-in + :open: + + The highlights for this major/minor release of Iris include: + + * We have significantly improved :class:`~iris.analysis.AreaWeighted` + regridding performance, and added improved regridding documentation (see + :ref:`which_regridder_to_use`). + + * We have improved :class:`~iris.cube.Cube` + :attr:`~iris.cube.Cube.attributes` handling to better preserve local and + global attribute metadata. + + * We have implemented the + :data:`iris.fileformats.netcdf.loader.CHUNK_CONTROL` context manager to + offer greater control to NetCDF chunking (see :ref:`netcdf_io`). + + * We have added functionality to mask cubes using shapefiles via + :func:`iris.util.mask_cube_from_shapefile` (see + :ref:`masking-from-shapefile`). + + * We have added :attr:`~iris.coords.Coord.ignore_axis` to allow for + preventing :func:`~iris.util.guess_coord_axis` acting on desired + coordinates. + + * We have begun adding improvements to Iris' warnings, to prevent warning + duplication. 
+ + And finally, get in touch with us on :issue:`GitHub` if you have + any issues or feature requests for improving Iris. Enjoy! + + +📢 Announcements +================ + +#. `@lbdreyer`_ relicensed Iris from LGPL-3 to BSD-3. (:pull:`5577`) + +#. `@HGWright`_, `@bjlittle`_ and `@trexfeathers`_ (reviewers) added a + CITATION.cff file to Iris and updated the :ref:`citation documentation ` + , to help users cite Iris in their work. (:pull:`5483`) + + +✨ Features +=========== +#. `@pp-mo`_, `@lbdreyer`_ and `@trexfeathers`_ improved + :class:`~iris.cube.Cube` :attr:`~iris.cube.Cube.attributes` handling to + better preserve the distinction between dataset-level and variable-level + attributes, allowing file-Cube-file round-tripping of NetCDF attributes. See + :class:`~iris.cube.CubeAttrsDict`, NetCDF + :func:`~iris.fileformats.netcdf.saver.save` and :data:`~iris.Future` for more. + (:pull:`5152`, `split attributes project`_) + +#. `@rcomer`_ rewrote :func:`~iris.util.broadcast_to_shape` so it now handles + lazy data. (:pull:`5307`) + +#. `@trexfeathers`_ and `@HGWright`_ (reviewer) sub-categorised all Iris' + :class:`UserWarning`\s for richer filtering. The full index of + sub-categories can be seen here: :mod:`iris.warnings` . (:pull:`5498`, + :pull:`5760`) + +#. `@trexfeathers`_ added the :class:`~iris.coord_systems.ObliqueMercator` + and :class:`~iris.coord_systems.RotatedMercator` coordinate systems, + complete with NetCDF loading and saving. (:pull:`5548`) + +#. `@trexfeathers`_ added the ``use_year_at_season_start`` parameter to + :func:`iris.coord_categorisation.add_season_year`. When + ``use_year_at_season_start==True``: seasons spanning the year boundary (e.g. + Winter - December to February) will be assigned to the preceding year (e.g. + the year of December) instead of the following year (the default behaviour). + (:pull:`5573`) + +#. 
`@HGWright`_ added :attr:`~iris.coords.Coord.ignore_axis` to allow manual + intervention preventing :func:`~iris.util.guess_coord_axis` from acting on a + coordinate. `@trexfeathers`_ documented this. (:pull:`5551`, :pull:`5744`) + +#. `@pp-mo`_, `@trexfeathers`_ and `@ESadek-MO`_ added more control over + NetCDF chunking with the use of the :data:`iris.fileformats.netcdf.loader.CHUNK_CONTROL` + context manager. (:pull:`5588`) + +#. `@acchamber`_ and `@trexfeathers`_ (reviewer) added + :func:`iris.util.mask_cube_from_shapefile`. This builds on the original work + of `@ckmo`_, `@david-bentley`_, `@jmendesmetoffice`_, `@evyve`_ and + `@pelson`_ for the UK Met Office **ASCEND** library. See + :ref:`masking-from-shapefile` for documentation. (:pull:`5470`) + +#. `@trexfeathers`_ updated to the latest CF Standard Names Table v84 + (19 January 2024). (:pull:`5761`) + + +🐛 Bugs Fixed +============= + +#. `@scottrobinson02`_ fixed the output units when dividing a coordinate by a + cube. (:issue:`5305`, :pull:`5331`) + +#. `@ESadek-MO`_ has updated :mod:`iris.tests.graphics.idiff` to stop duplicated file names + preventing acceptance. (:issue:`5098`, :pull:`5482`) + +#. `@acchamber`_ and `@rcomer`_ modified 2D plots so that time axes and their + ticks have more sensible default labels. (:issue:`5426`, :pull:`5561`) + +#. `@rcomer`_ and `@trexfeathers`_ (reviewer) added handling for realization + coordinates when saving pp files (:issue:`4747`, :pull:`5568`) + +#. `@ESadek-MO`_ has updated + :mod:`iris.fileformats._nc_load_rules.helpers` to lessen warning duplication. + (:issue:`5536`, :pull:`5685`) + +#. `@bjlittle`_ fixed coordinate construction in the NetCDF loading pipeline to + ensure that bounds have the same units as the associated points. + (:issue:`1801`, :pull:`5746`) + + +💣 Incompatible Changes +======================= + +#. 
`@bouweandela`_ and `@trexfeathers`_ (reviewer) updated :class:`~iris.cube.Cube` + comparison so equality is now possible between cubes with data containing a + :obj:`numpy.nan`. e.g. ``Cube([np.nan, 1.0]) == Cube([np.nan, 1.0])`` will now + evaluate to :obj:`True`, while previously this would have been :obj:`False`. (:pull:`5713`) + + +🚀 Performance Enhancements +=========================== + +#. `@stephenworsley`_ improved the speed of :class:`~iris.analysis.AreaWeighted` + regridding. (:pull:`5543`) + +#. `@bouweandela`_ made :func:`iris.util.array_equal` faster when comparing + lazy data from file. This will also speed up coordinate comparison. + (:pull:`5610`) + +#. `@bouweandela`_ changed :func:`iris.coords.Coord.cell` so it does not realize + all coordinate data and only loads a single cell instead. (:pull:`5693`) + +#. `@rcomer`_ and `@trexfeathers`_ (reviewer) modified + :func:`~iris.analysis.stats.pearsonr` so it preserves lazy data in all cases + and also runs a little faster. (:pull:`5638`) + +#. `@bouweandela`_ made comparing coordinates and arrays to themselves faster. (:pull:`5691`) + +#. `@bouweandela`_ and `@trexfeathers`_ (reviewer) made comparing cubes to + themselves faster. (:pull:`5713`) + + +🔥 Deprecations +=============== + +#. N/A + + +🔗 Dependencies +=============== + +#. `@bjlittle`_ enforced the minimum pin of ``numpy>1.21`` in accordance with the `NEP29 Drop Schedule`_. + (:pull:`5525`) + +#. `@bjlittle`_ enforced the minimum pin of ``numpy>1.22`` in accordance with the `NEP29 Drop Schedule`_. + (:pull:`5668`) + +#. `@bjlittle`_ updated ``ubuntu`` and ``mambaforge`` to the latest versions for ``readthedocs`` + (:pull:`5702`) + + +📚 Documentation +================ + +#. `@trexfeathers`_ documented the intended use of warnings filtering with + Iris. See :ref:`filtering-warnings`. (:pull:`5509`) + +#. 
`@rcomer`_ updated the + :ref:`sphx_glr_generated_gallery_meteorology_plot_COP_maps.py` to show how + a colourbar may steal space from multiple axes. (:pull:`5537`) + +#. `@tkknight`_ improved the top navgation bar alignment and amount of + links shown. Also improved how the warning banner is implemented. + (:pull:`5505` and :pull:`5508`) + +#. `@tkknight`_ removed broken git links. (:pull:`5569`) + +#. `@ESadek-MO`_ added a phrasebook for synonymous terms used in similar + packages. (:pull:`5564`) + +#. `@ESadek-MO`_ and `@trexfeathers`_ created a technical paper for NetCDF + saving and loading, :ref:`netcdf_io` with a section on chunking, and placeholders + for further topics. (:pull:`5588`) + +#. `@bouweandela`_ updated all hyperlinks to https. (:pull:`5621`) + +#. `@ESadek-MO`_ created an index page for :ref:`further_topics_index`, and + relocated all 'Technical Papers' into + :ref:`further_topics_index`. (:pull:`5602`) + +#. `@trexfeathers`_ made drop-down icons visible to show which pages link to + 'sub-pages'. (:pull:`5684`) + +#. `@trexfeathers`_ improved the documentation of acceptable + :class:`~iris.cube.Cube` standard names in + :func:`iris.analysis.calculus.curl`. (:pull:`5680`) + +#. `@tkknight`_ added ruff documentation in the :ref:`developer_testing_ci` of the + :ref:`developers_guide`. (:pull:`5701`) + +#. `@tkknight`_ configured the API documentation to show 2 levels + for the ToC (Table of Contents) for each page. (:pull:`5714`) + + +💼 Internal +=========== + +#. `@trexfeathers`_ and `@ESadek-MO`_ (reviewer) performed a suite of fixes and + improvements for benchmarking, primarily to get + :ref:`on demand pull request benchmarking ` + working properly. (Main pull request: :pull:`5437`, more detail: + :pull:`5430`, :pull:`5431`, :pull:`5432`, :pull:`5434`, :pull:`5436`) + +#. `@trexfeathers`_ set a number of memory benchmarks to be on-demand, as they + were vulnerable to false positives in CI runs. (:pull:`5481`) + +#. 
`@acchamber`_ and `@ESadek-MO`_ resolved several deprecation to reduce + number of warnings raised during tests. + (:pull:`5493`, :pull:`5511`) + +#. `@trexfeathers`_ replaced all uses of the ``logging.WARNING`` level, in + favour of using Python warnings, following team agreement. (:pull:`5488`) + +#. `@trexfeathers`_ adapted benchmarking to work with ASV ``>=v0.6`` by no + longer using the ``--strict`` argument. (:pull:`5496`) + +#. `@fazledyn-or`_ replaced ``NotImplementedError`` with ``NotImplemented`` as + a proper method call. (:pull:`5544`) + +#. `@bjlittle`_ corrected various comment spelling mistakes detected by + `codespell`_. (:pull:`5546`) + +#. `@rcomer`_ reduced the size of the conda environment used for testing. + (:pull:`5606`) + +#. `@trexfeathers`_ and `@pp-mo`_ improved how the conda-forge feedstock + release candidate branch is managed, via: + :doc:`../developers_guide/release_do_nothing`. + (:pull:`5515`) + +#. `@bjlittle`_ adopted and configured the `ruff`_ linter. (:pull:`5623`) + +#. `@bjlittle`_ configured the ``line-length = 88`` for `black`_, `isort`_ + and `ruff`_. (:pull:`5632`) + +#. `@bjlittle`_ replaced `isort`_ with `ruff`_. (:pull:`5633`) + +#. `@bjlittle`_ replaced `black`_ with `ruff`_. (:pull:`5634`) + +#. `@tkknight`_ and `@bjlittle`_ (reviewer) updated codebase to be compliant with + almost all of the rules for `ruff pydocstyle`_. + (https://github.com/SciTools/iris/issues/5625#issuecomment-1859159734) + +#. `@tkknight`_ and `@bjlittle`_ (reviewer) updated codebase to ensure docstrings + that are not covered by the ruff checks, are consistent with numpydocstyle. + (:issue:`4721`) + +.. comment + Whatsnew author names (@github name) in alphabetical order. Note that, + core dev names are automatically included by the common_links.inc: + +.. _@scottrobinson02: https://github.com/scottrobinson02 +.. _@acchamber: https://github.com/acchamber +.. _@fazledyn-or: https://github.com/fazledyn-or +.. _@ckmo: https://github.com/ckmo +.. 
_@david-bentley: https://github.com/david-bentley +.. _@jmendesmetoffice: https://github.com/jmendesmetoffice +.. _@evyve: https://github.com/evyve + + +.. comment + Whatsnew resources in alphabetical order: + +.. _NEP29 Drop Schedule: https://numpy.org/neps/nep-0029-deprecation_policy.html#drop-schedule +.. _codespell: https://github.com/codespell-project/codespell +.. _split attributes project: https://github.com/orgs/SciTools/projects/5?pane=info +.. _ruff pydocstyle: https://docs.astral.sh/ruff/rules/#pydocstyle-d \ No newline at end of file diff --git a/docs/src/whatsnew/index.rst b/docs/src/whatsnew/index.rst index c556f82761..23cd022f52 100644 --- a/docs/src/whatsnew/index.rst +++ b/docs/src/whatsnew/index.rst @@ -12,6 +12,7 @@ What's New in Iris :hidden: latest.rst + 3.8.rst 3.7.rst 3.6.rst 3.5.rst diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 13e3b9e4ea..ab5d18d3eb 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -24,60 +24,19 @@ This document explains the changes made to Iris for this release 📢 Announcements ================ -#. `@lbdreyer`_ relicensed Iris from LGPL-3 to BSD-3. (:pull: `5577`) +#. N/A ✨ Features =========== -#. `@pp-mo`_, `@lbdreyer`_ and `@trexfeathers`_ improved - :class:`~iris.cube.Cube` :attr:`~iris.cube.Cube.attributes` handling to - better preserve the distinction between dataset-level and variable-level - attributes, allowing file-Cube-file round-tripping of NetCDF attributes. See - :class:`~iris.cube.CubeAttrsDict`, NetCDF - :func:`~iris.fileformats.netcdf.saver.save` and :data:`~iris.Future` for more. - (:pull:`5152`, `split attributes project`_) - -#. `@rcomer`_ rewrote :func:`~iris.util.broadcast_to_shape` so it now handles - lazy data. (:pull:`5307`) - -#. `@trexfeathers`_ and `@HGWright`_ (reviewer) sub-categorised all Iris' - :class:`UserWarning`\s for richer filtering. The full index of - sub-categories can be seen here: :mod:`iris.exceptions` . 
(:pull:`5498`) - -#. `@trexfeathers`_ added the :class:`~iris.coord_systems.ObliqueMercator` - and :class:`~iris.coord_systems.RotatedMercator` coordinate systems, - complete with NetCDF loading and saving. (:pull:`5548`) - -#. `@trexfeathers`_ added the ``use_year_at_season_start`` parameter to - :func:`iris.coord_categorisation.add_season_year`. When - ``use_year_at_season_start==True``: seasons spanning the year boundary (e.g. - Winter - December to February) will be assigned to the preceding year (e.g. - the year of December) instead of the following year (the default behaviour). - (:pull:`5573`) -#. `@HGWright`_ added :attr:`~iris.coords.Coord.ignore_axis` to allow manual - intervention preventing :func:`~iris.util.guess_coord_axis` from acting on a - coordinate. (:pull:`5551`) - -#. `@pp-mo`_, `@trexfeathers`_ and `@ESadek-MO`_ added more control over - NetCDF chunking with the use of the :data:`iris.fileformats.netcdf.loader.CHUNK_CONTROL` - context manager. (:pull:`5588`) +#. N/A 🐛 Bugs Fixed ============= -#. `@scottrobinson02`_ fixed the output units when dividing a coordinate by a - cube. (:issue:`5305`, :pull:`5331`) - -#. `@ESadek-MO`_ has updated :mod:`iris.tests.graphics.idiff` to stop duplicated file names - preventing acceptance. (:issue:`5098`, :pull:`5482`) - -#. `@acchamber`_ and `@rcomer`_ modified 2D plots so that time axes and their - ticks have more sensible default labels. (:issue:`5426`, :pull:`5561`) - -#. `@rcomer`_ and `@trexfeathers`_ (reviewer) added handling for realization - coordinates when saving pp files (:issue:`4747`, :pull:`5568`) +#. N/A 💣 Incompatible Changes @@ -89,12 +48,7 @@ This document explains the changes made to Iris for this release 🚀 Performance Enhancements =========================== -#. `@stephenworsley`_ improved the speed of :class:`~iris.analysis.AreaWeighted` - regridding. (:pull:`5543`) - -#. `@bouweandela`_ made :func:`iris.util.array_equal` faster when comparing - lazy data from file. 
This will also speed up coordinate comparison. - (:pull:`5610`) +#. N/A 🔥 Deprecations @@ -106,94 +60,33 @@ This document explains the changes made to Iris for this release 🔗 Dependencies =============== -#. `@bjlittle`_ enforced the minimum pin of ``numpy>1.21`` in accordance with the `NEP29 Drop Schedule`_. - (:pull:`5525`) +#. N/A 📚 Documentation ================ -#. `@trexfeathers`_ documented the intended use of warnings filtering with - Iris. See :ref:`filtering-warnings`. (:pull:`5509`) - -#. `@rcomer`_ updated the - :ref:`sphx_glr_generated_gallery_meteorology_plot_COP_maps.py` to show how - a colourbar may steal space from multiple axes. (:pull:`5537`) - -#. `@tkknight`_ improved the top navgation bar alignment and amount of - links shown. Also improved how the warning banner is implemented. - (:pull:`5505` and :pull:`5508`) - -#. `@tkknight`_ removed broken git links. (:pull:`5569`) - -#. `@ESadek-MO`_ added a phrasebook for synonymous terms used in similar - packages. (:pull:`5564`) - -#. `@ESadek-MO`_ and `@trexfeathers`_ created a technical paper for NetCDF - saving and loading, :ref:`netcdf_io` with a section on chunking, and placeholders - for further topics. (:pull:`5588`) - -#. `@bouweandela`_ updated all hyperlinks to https. (:pull:`5621`) +#. N/A 💼 Internal =========== -#. `@trexfeathers`_ and `@ESadek-MO`_ (reviewer) performed a suite of fixes and - improvements for benchmarking, primarily to get - :ref:`on demand pull request benchmarking ` - working properly. (Main pull request: :pull:`5437`, more detail: - :pull:`5430`, :pull:`5431`, :pull:`5432`, :pull:`5434`, :pull:`5436`) - -#. `@trexfeathers`_ set a number of memory benchmarks to be on-demand, as they - were vulnerable to false positives in CI runs. (:pull:`5481`) - -#. `@acchamber`_ and `@ESadek-MO`_ resolved several deprecation to reduce - number of warnings raised during tests. - (:pull:`5493`, :pull:`5511`) - -#. 
`@trexfeathers`_ replaced all uses of the ``logging.WARNING`` level, in - favour of using Python warnings, following team agreement. (:pull:`5488`) - -#. `@trexfeathers`_ adapted benchmarking to work with ASV ``>=v0.6`` by no - longer using the ``--strict`` argument. (:pull:`5496`) - -#. `@fazledyn-or`_ replaced ``NotImplementedError`` with ``NotImplemented`` as - a proper method call. (:pull:`5544`) - -#. `@bjlittle`_ corrected various comment spelling mistakes detected by - `codespell`_. (:pull:`5546`) - -#. `@rcomer`_ reduced the size of the conda environment used for testing. - (:pull:`5606`) - -#. `@trexfeathers`_ and `@pp-mo`_ improved how the conda-forge feedstock - release candidate branch is managed, via: - :doc:`../developers_guide/release_do_nothing`. - (:pull:`5515`) - -#. `@bjlittle`_ adopted and configured the `ruff`_ linter. (:pull:`5623`) - -#. `@bjlittle`_ configured the ``line-length = 88`` for `black`_, `isort`_ - and `ruff`_. (:pull:`5632`) - -#. `@bjlittle`_ replaced `isort`_ with `ruff`_. (:pull:`5633`) - -#. `@bjlittle`_ replaced `black`_ with `ruff`_. (:pull:`5634`) +#. `@trexfeathers`_ used the `Pull Request Labeler GitHub action`_ to add the + ``benchmark_this`` label (:ref:`more info `) to + pull requests that modify ``requirements/locks/*.lock`` files - ensuring + that we know whether dependency changes will affect performance. + (:pull:`5763`) .. comment Whatsnew author names (@github name) in alphabetical order. Note that, core dev names are automatically included by the common_links.inc: -.. _@scottrobinson02: https://github.com/scottrobinson02 -.. _@acchamber: https://github.com/acchamber -.. _@fazledyn-or: https://github.com/fazledyn-or + .. comment Whatsnew resources in alphabetical order: -.. _NEP29 Drop Schedule: https://numpy.org/neps/nep-0029-deprecation_policy.html#drop-schedule -.. _codespell: https://github.com/codespell-project/codespell -.. 
_split attributes project: https://github.com/orgs/SciTools/projects/5?pane=info +.. _Pull Request Labeler GitHub action: https://github.com/actions/labeler diff --git a/etc/cf-standard-name-table.xml b/etc/cf-standard-name-table.xml index 6e3c014849..ef05fde69a 100644 --- a/etc/cf-standard-name-table.xml +++ b/etc/cf-standard-name-table.xml @@ -1,7 +1,7 @@ - 82 - 2023-07-06T13:17:07Z + 84 + 2024-01-19T15:55:10Z Centre for Environmental Data Analysis support@ceda.ac.uk @@ -2239,6 +2239,13 @@ "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. + + m + + + The height in the atmosphere, L, that buoyant production or destruction of turbulent energy balances the shear production of turbulent kinetic energy: L = -u*3 / (kB0), where u* is the wind frictional velocity, k is the von Karman constant, and B0 is the atmospheric surface buoyancy flux. If the buoyancy flux is destabilizing, L is negative. + + 1 @@ -2715,6 +2722,13 @@ "Vegetation" means any plants e.g. trees, shrubs, grass. "Litter" is dead plant material in or above the soil. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. + + kg s-1 + + + The amount of total carbon mass transported in the river channels from land into the ocean. This quantity can be provided at a certain location within the river network and floodplain (over land) or at the river mouth (over ocean) where the river enters the ocean. "River" refers to water in the fluvial system (stream and floodplain). + + m2 @@ -3261,6 +3275,27 @@ "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. 
"Ambient_aerosol" means that the aerosol is measured or modelled at the ambient state of pressure, temperature and relative humidity that exists in its immediate environment. "Ambient aerosol particles" are aerosol particles that have taken up ambient water through hygroscopic growth. The extent of hygroscopic growth depends on the relative humidity and the composition of the particles. To specify the relative humidity and temperature at which the quantity described by the standard name applies, provide scalar coordinate variables with standard names of "relative_humidity" and "air_temperature". + + degree_C + + + This variable quantifies the temperature difference between the skin temperature (sea_surface_skin_temperature) and the subskin temperature (sea_surface_subskin_temperature) due to the turbulent and radiative heat fluxes at the air-sea interface. This difference is commonly referred to as the “cool skin effect” as the solar radiation absorbed within the very thin thermal subskin layer is typically negligible compared to ocean surface heat loss from the combined sensible, latent, and net longwave radiation heat fluxes. + + + + degree_C + + + This variable quantifies the temperature difference between the top (sea_surface_subskin_temperature) and bottom (sea_surface_foundation_temperature) of the diurnal warm layer. This diurnal warm layer, caused by absorption of solar radiation in the absence of strong mixing, together with a cool skin effect, account for the total temperature difference between the sea_surface_skin_temperature and the sea_surface_foundation_temperature. The cool skin effect is associated with the turbulent and infrared radiative heat loss at the air-sea interface. Freshwater fluxes may also affect this variable (sea_surface_subskin_temperature_minus_sea_surface_foundation_temperature). 
+ + + + degree_C + + + This variable quantifies the temperature difference between the top of the diurnal warm layer (sea_surface_subskin_temperature) and the in-situ measured sea surface temperature at depth (sea_surface_temperature). A diurnal warm layer can develop in the top few meters of the ocean through the absorption of solar radiation, if surface mixing is sufficiently weak. + + K @@ -3737,6 +3772,13 @@ The quantity with standard name drainage_amount_through_base_of_soil_model is the amount of water that drains through the bottom of a soil column extending from the surface to a specified depth. "Drainage" is the process of removal of excess water from soil by gravitational flow. "Amount" means mass per unit area. A vertical coordinate variable or scalar coordinate with standard name "depth" should be used to specify the depth to which the soil column extends. + + kg m-2 + + + “Drainage” is the process of removal of excess water from soil by gravitational flow. "Amount" means mass per unit area. The vertical drainage amount in soil is the amount of water that drains through the bottom of a soil column extending from the surface to a specified depth. + + 1 @@ -3926,6 +3968,13 @@ A velocity is a vector quantity. "Eastward" indicates a vector component which is positive when directed eastward (negative westward). The velocity at the sea floor is that adjacent to the ocean bottom, which would be the deepest grid cell in an ocean model and within the benthic boundary layer for measurements. + + m s-1 + + + A velocity is a vector quantity. "Eastward" indicates a vector component which is positive when directed eastward (negative westward).The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. 
+ + m s-1 @@ -3968,13 +4017,6 @@ "Eastward" indicates a vector component which is positive when directed eastward (negative westward). Wind is defined as a two-dimensional (horizontal) air velocity vector, with no vertical component. (Vertical motion in the atmosphere has the standard name upward_air_velocity.) - - s-1 - 45 - - "Eastward" indicates a vector component which is positive when directed eastward (negative westward). Wind is defined as a two-dimensional (horizontal) air velocity vector, with no vertical component. (Vertical motion in the atmosphere has the standard name upward_air_velocity.) Wind shear is the derivative of wind with respect to height. - - m @@ -4612,11 +4654,11 @@ A period is an interval of time, or the time-period of an oscillation. - + W m-2 hfcorr - Flux correction is also called "flux adjustment". A positive flux correction is downward i.e. added to the ocean. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. + A positive flux adjustment is downward i.e. added to the ocean. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. @@ -4850,6 +4892,13 @@ The phrase "integral_wrt_X_of_Y" means int Y dX. To specify the limits of the integral the data variable should have an axis for X and associated coordinate bounds. If no axis for X is associated with the data variable, or no coordinate bounds are specified, it is assumed that the integral is calculated over the entire vertical extent of the medium, e.g, if the medium is air the integral is assumed to be calculated over the full depth of the atmosphere. "wrt" means with respect to. "tendency_of_X" means derivative of X with respect to time. 
Depth is the vertical distance below the surface. 'sea_water_alkalinity_expressed_as_mole_equivalent' is the total alkalinity equivalent concentration (including carbonate, nitrogen, silicate, and borate components). The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. + + kg m-1 s-1 + + + Eastward vertically-integrated moisture flux per unit length in latitude. "Eastward" indicates a vector component which is positive when directed eastward (negative westward). Height is the vertical distance above the surface. Wind is defined as a two-dimensional (horizontal) air velocity vector, with no vertical component. (Vertical motion in the atmosphere has the standard name "upward_air_velocity"). The phrase "product_of_X_and_Y" means X*Y. The abbreviation "wrt" means "with respect to". The phrase "integral_wrt_X_of_Y" means int Y dX. To specify the limits of the integral the data variable should have an axis for X and associated coordinate bounds. If no axis for X is associated with the data variable, or no coordinate bounds are specified, it is assumed that the integral is calculated over the entire vertical extent of the medium, e.g, if the medium is air the integral is assumed to be calculated over the full depth of the atmosphere. "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". + + m2 s-1 @@ -4857,6 +4906,13 @@ The phrase "integral_wrt_X_of_Y" means int Y dX. To specify the limits of the integral the data variable should have an axis for X and associated coordinate bounds. 
If no axis for X is associated with the data variable, or no coordinate bounds are specified, it is assumed that the integral is calculated over the entire vertical extent of the medium, e.g, if the medium is air the integral is assumed to be calculated over the full depth of the atmosphere. The phrase "wrt" means "with respect to". Height is the vertical distance above the surface. The phrase "product_of_X_and_Y" means X*Y. Wind is defined as a two-dimensional (horizontal) air velocity vector, with no vertical component. (Vertical motion in the atmosphere has the standard name "upward_air_velocity".) "Eastward" indicates a vector component which is positive when directed eastward (negative westward). Specific humidity is the mass fraction of water vapor in (moist) air. + + kg m-1 s-1 + + + Northward vertically-integrated moisture flux per unit length in longitude. "Northward" indicates a vector component which is positive when directed northward (negative southward). Height is the vertical distance above the surface. Wind is defined as a two-dimensional (horizontal) air velocity vector, with no vertical component. (Vertical motion in the atmosphere has the standard name "upward_air_velocity"). The phrase "product_of_X_and_Y" means X*Y. The abbreviation "wrt" means "with respect to". The phrase "integral_wrt_X_of_Y" means int Y dX. To specify the limits of the integral the data variable should have an axis for X and associated coordinate bounds. If no axis for X is associated with the data variable, or no coordinate bounds are specified, it is assumed that the integral is calculated over the entire vertical extent of the medium, e.g, if the medium is air the integral is assumed to be calculated over the full depth of the atmosphere. "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. 
A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". + + m2 s-1 @@ -7724,7 +7780,7 @@ kg - "Land ice not displacing sea water" means land ice that would not alter sea level if the ice were converted to water and added to the ocean. It excludes ice shelves (and any other sort of floating ice) and it excludes a fraction of grounded ice-sheet mass equivalent to the mass of any sea water it displaces. It includes glaciers and a portion of grounded ice-sheet mass exceeding the mass of any sea water displaced. The quantity with standard name land_ice_mass_not_displacing_sea_water is the total mass integrated over an area of land ice. The geographical extent of the ice over which the mass was calculated should be described by providing bounds on the horizontal coordinate variable or scalar with the standard name of "region" supplied according to section 6.1.1 of the CF convention. + "Land ice not displacing sea water" means land ice that would alter sea level if the ice were converted to water and added to the ocean. It excludes ice shelves (and any other sort of floating ice) and it excludes a fraction of grounded ice-sheet mass equivalent to the mass of any sea water it displaces. It includes glaciers and a portion of grounded ice-sheet mass exceeding the mass of any sea water displaced. The quantity with standard name land_ice_mass_not_displacing_sea_water is the total mass integrated over an area of land ice. The geographical extent of the ice over which the mass was calculated should be described by providing bounds on the horizontal coordinate variable or scalar with the standard name of "region" supplied according to section 6.1.1 of the CF convention. "Land ice not displacing sea water" is sometimes referred to as "ice above flotation" or "ice above floatation". 
@@ -8301,6 +8357,13 @@ Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for alpha_pinene is C10H16. The IUPAC name for alpha-pinene is (1S,5S)-2,6,6-trimethylbicyclo[3.1.1]hept-2-ene. + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". Aluminium means aluminium in all chemical forms, commonly referred to as "total aluminium". "Sea floor sediment" is sediment deposited at the sea bed. + + kg m-3 @@ -8329,6 +8392,13 @@ Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. Aromatic compounds in organic chemistry are compounds that contain at least one benzene ring of six carbon atoms joined by alternating single and double covalent bonds. The simplest aromatic compound is benzene itself. In standard names "aromatic_compounds" is the term used to describe the group of aromatic chemical species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. Standard names exist for some individual aromatic species, e.g. benzene and xylene. 
+ + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". Arsenic means arsenic in all chemical forms, commonly referred to as "total arsenic". "Sea floor sediment" is sediment deposited at the sea bed. + + kg m-3 @@ -8441,6 +8511,13 @@ Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for butane is C4H10. Butane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". Cadmium means cadmium in all chemical forms, commonly referred to as "total cadmium". "Sea floor sediment" is sediment deposited at the sea bed. + + kg m-3 @@ -8567,6 +8644,13 @@ Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for chlorine nitrate is ClONO2. + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. 
A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". Chlorophylls are the green pigments found in most plants, algae and cyanobacteria; their presence is essential for photosynthesis to take place. There are several different forms of chlorophyll that occur naturally. All contain a chlorin ring (chemical formula C20H16N4) which gives the green pigment and a side chain whose structure varies. The naturally occurring forms of chlorophyll contain between 35 and 55 carbon atoms. Chlorophyll-a is the most commonly occurring form of natural chlorophyll. The chemical formula of chlorophyll-a is C55H72O5N4Mg. "Sea floor sediment" is sediment deposited at the sea bed. + + kg m-3 @@ -8630,6 +8714,13 @@ Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. Chlorophylls are the green pigments found in most plants, algae and cyanobacteria; their presence is essential for photosynthesis to take place. There are several different forms of chlorophyll that occur naturally. All contain a chlorin ring (chemical formula C20H16N4) which gives the green pigment and a side chain whose structure varies. The naturally occurring forms of chlorophyll contain between 35 and 55 carbon atoms. + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". Chromium means chromium in all chemical forms, commonly referred to as "total chromium". "Sea floor sediment" is sediment deposited at the sea bed. 
+ + kg m-3 @@ -8651,6 +8742,13 @@ Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. "Ambient_aerosol" means that the aerosol is measured or modelled at the ambient state of pressure, temperature and relative humidity that exists in its immediate environment. "Ambient aerosol particles" are aerosol particles that have taken up ambient water through hygroscopic growth. The extent of hygroscopic growth depends on the relative humidity and the composition of the particles. Coarse mode aerosol particles have a diameter of more than 1 micrometer. + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical or biological species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as "nox_expressed_as_nitrogen". Cobalt means cobalt in all chemical forms, commonly referred to as "total cobalt". "Sea floor sediment" is sediment deposited at the sea bed. + + kg m-3 @@ -8658,6 +8756,13 @@ Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. Condensed water means liquid and ice. + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". Copper means copper in all chemical forms, commonly referred to as "total copper". 
"Sea floor sediment" is sediment deposited at the sea bed. + + kg m-3 @@ -9001,6 +9106,13 @@ 'Mass concentration' means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical or biological species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. 'Inorganic nitrogen' describes a family of chemical species which, in an ocean model, usually includes nitrite, nitrate and ammonium which act as nitrogen nutrients. 'Inorganic nitrogen' is the term used in standard names for all species belonging to the family that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". Iron means iron in all chemical forms, commonly referred to as "total iron". "Sea floor sediment" is sediment deposited at the sea bed. + + kg m-3 @@ -9008,6 +9120,13 @@ "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical or biological species denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The chemical formula for isoprene is CH2=C(CH3)CH=CH2. The IUPAC name for isoprene is 2-methylbuta-1,3-diene. Isoprene is a member of the group of hydrocarbons known as terpenes. 
There are standard names for the terpene group as well as for some of the individual species. + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". Lead means lead in all chemical forms, commonly referred to as "total lead". "Sea floor sediment" is sediment deposited at the sea bed. + + kg m-3 @@ -9022,6 +9141,13 @@ Mass concentration means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The mass concentration of liquid water takes into account all cloud droplets and liquid precipitation regardless of drop size or fall speed. + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". Lithium means lithium in all chemical forms, commonly referred to as "total lithium". "Sea floor sediment" is sediment deposited at the sea bed. + + kg m-3 @@ -9029,6 +9155,13 @@ "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The chemical formula of lutein is C40H56O2. 
+ + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". Manganese means manganese in all chemical forms, commonly referred to as "total manganese". "Sea floor sediment" is sediment deposited at the sea bed. + + kg m-3 @@ -9036,6 +9169,13 @@ Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. Aerosol particles take up ambient water (a process known as hygroscopic growth) depending on the relative humidity and the composition of the particles. "Dry aerosol particles" means aerosol particles without any water uptake. + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". Mercury means mercury in all chemical forms, commonly referred to as "total mercury". "Sea floor sediment" is sediment deposited at the sea bed. + + kg m-3 @@ -9113,6 +9253,13 @@ Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical or biological species denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". 
The phrase "expressed_as" is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. Chlorophylls are the green pigments found in most plants, algae and cyanobacteria; their presence is essential for photosynthesis to take place. There are several different forms of chlorophyll that occur naturally. All contain a chlorin ring (chemical formula C20H16N4) which gives the green pigment and a side chain whose structure varies. The naturally occurring forms of chlorophyll contain between 35 and 55 carbon atoms. Nanophytoplankton are phytoplankton between 2 and 20 micrometers in size. Phytoplankton are algae that grow where there is sufficient light to support photosynthesis. + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". Nickel means nickel in all chemical forms, commonly referred to as "total nickel". "Sea floor sediment" is sediment deposited at the sea bed. + + kg m-3 @@ -9148,6 +9295,13 @@ Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for nitrogen dioxide is NO2. + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. 
A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". Total nitrogen means nitrogen in all chemical forms. "Sea floor sediment" is sediment deposited at the sea bed. + + kg m-3 @@ -9218,6 +9372,13 @@ "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. Aerosol particles take up ambient water (a process known as hygroscopic growth) depending on the relative humidity and the composition of the particles. "Dry aerosol particles" means aerosol particles without any water uptake. "Pm2p5 aerosol" means atmospheric particulate compounds with an aerodynamic diameter of less than or equal to 2.5 micrometers. Chemically, "organic carbon aerosol" refers to the carbonaceous fraction of particulate matter contained in any of the vast number of compounds where carbon is chemically combined with hydrogen and other elements like O, S, N, P, Cl, etc. In measurements of carbonaceous aerosols, organic carbon samples may also include some inorganic carbon compounds, whose mass is neglected and assumed to be distributed between the elemental and organic carbon components of the aerosol particles. Reference: Petzold, A., Ogren, J. A., Fiebig, M., Laj, P., Li, S.-M., Baltensperger, U., Holzer-Popp, T., Kinne, S., Pappalardo, G., Sugimoto, N., Wehrli, C., Wiedensohler, A., and Zhang, X.-Y.: Recommendations for reporting "black carbon" measurements, Atmos. Chem. Phys., 13, 8365–8379, https://doi.org/10.5194/acp-13-8365-2013, 2013. 
+ + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". Organic carbon describes a family of chemical species and is the term used in standard names for all species belonging to the family that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. "Sea floor sediment" is sediment deposited at the sea bed. + + kg m-3 @@ -9295,6 +9456,13 @@ Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. It means the ratio of the mass of X to the mass of Y (including X). A chemical species denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". Petroleum hydrocarbons are compounds containing just carbon and hydrogen originating from the fossil fuel crude oil. + + kg m-3 + + + Concentration of phaeopigment per unit volume of the water body, where the filtration size or collection method is unspecified (equivalent term in the NERC P01 Parameter Usage Vocabulary may be found at http://vocab.nerc.ac.uk/collection/P01/current/). "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". 
Phaeopigments are a group of non-photosynthetic pigments that are the degradation product of algal chlorophyll pigments. Phaeopigments contain phaeophytin, which fluoresces in response to excitation light, and phaeophorbide, which is colorless and does not fluoresce (source: https://academic.oup.com/plankt/article/24/11/1221/1505482). Phaeopigment concentration commonly increases during the development phase of marine phytoplankton blooms, and declines in the post bloom stage (source: https://www.sciencedirect.com/science/article/pii/0967063793901018). "Sea floor sediment" is sediment deposited at the sea bed. + + kg m-3 @@ -9316,6 +9484,13 @@ Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". Phosphorus means phosphorus in all chemical forms, commonly referred to as "total phosphorus". "Sea floor sediment" is sediment deposited at the sea bed. + + kg m-3 @@ -9456,6 +9631,13 @@ Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. 
A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". Silver means silver in all chemical forms, commonly referred to as "total silver". "Sea floor sediment" is sediment deposited at the sea bed. + + kg m-3 @@ -9498,6 +9680,13 @@ "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical or biological species denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The chemical formula for toluene is C6H5CH3. Toluene has the same structure as benzene, except that one of the hydrogen atoms is replaced by a methyl group. The IUPAC name for toluene is methylbenzene. + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". Vanadium means vanadium in all chemical forms, commonly referred to as "total vanadium". "Sea floor sediment" is sediment deposited at the sea bed. + + kg m-3 @@ -9540,6 +9729,13 @@ "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The chemical formula of zeaxanthin is C40H56O2. The equivalent term in the NERC P01 Parameter Usage Vocabulary may be found at http://vocab.nerc.ac.uk/collection/P01/current/ZEAXXXXX/2/. + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. 
A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". Zinc means zinc in all chemical forms, commonly referred to as "total zinc". "Sea floor sediment" is sediment deposited at the sea bed. + + kg m-3 @@ -11626,6 +11822,13 @@ Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction "mole_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical or biological species denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". In ocean biogeochemistry models, an "abiotic analogue" is used to simulate the effect on a modelled variable when biological effects on ocean carbon concentration and alkalinity are ignored. "Dissolved inorganic carbon" describes a family of chemical species in solution, including carbon dioxide, carbonic acid and the carbonate and bicarbonate anions. "Dissolved inorganic carbon" is the term used in standard names for all species belonging to the family that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. + + mol m-3 + + + "Mole concentration" means number of moles per unit volume, also called "molarity", and is used in the construction "mole_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical or biological species denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". "Dissolved inorganic carbon" describes a family of chemical species in solution, including carbon dioxide, carbonic acid and the carbonate and bicarbonate anions. 
"Dissolved inorganic carbon" is the term used in standard names for all species belonging to the family that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. "Sea floor sediment" is sediment deposited at the sea bed. "Water" means water in all phases. + + mol m-3 @@ -11710,6 +11913,13 @@ The sum of dissolved organic carbon-13 component concentrations. "Mole concentration" means number of moles per unit volume, also called "molarity", and is used in the construction "mole_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical or biological species denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". "Organic carbon" describes a family of chemical species and is the term used in standard names for all species belonging to the family that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. "C" means the element carbon and "13C" is the stable isotope "carbon-13", having six protons and seven neutrons. + + mol m-3 + + + "Mole concentration" means number of moles per unit volume, also called "molarity", and is used in the construction "mole_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical or biological species denoted by X may be described by a single term such as "nitrogen' or a phrase such as "nox_expressed_as_nitrogen". 
"Organic carbon" describes a family of chemical species and is the term used in standard names for all species belonging to the family that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. "Sea floor sediment" is sediment deposited at the sea bed. "Water" means water in all phases. + + mol m-3 @@ -12641,6 +12851,13 @@ Mole fraction is used in the construction mole_fraction_of_X_in_Y, where X is a material constituent of Y. + + 1 + + + "Mole fraction" is used in the construction "mole_fraction_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "in_dry_air" means that the quantity is calculated as the total number of particles of X divided by the number of dry air particles, i.e. the effect of water vapor is excluded. The chemical formula for carbon dioxide is CO2. + + 1 @@ -12648,6 +12865,13 @@ Mole fraction is used in the construction mole_fraction_of_X_in_Y, where X is a material constituent of Y. + + 1 + + + "Mole fraction" is used in the construction "mole_fraction_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "in_dry_air" means that the quantity is calculated as the total number of particles of X divided by the number of dry air particles, i.e. the effect of water vapor is excluded. The chemical formula of carbon monoxide is CO. + + 1 @@ -12718,6 +12942,13 @@ "Mole fraction" is used in the construction "mole_fraction_of_X_in_Y", where X is a material constituent of Y. 
A chemical or biological species denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The chemical formula for CFC12 is CF2Cl2. The IUPAC name for CFC12 is dichloro(difluoro)methane. + + 1 + + + "Mole fraction" is used in the construction "mole_fraction_of_X_in_Y", where X is a material constituent of Y. A chemical or biological species denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The chemical formula for CFC13 is CF3Cl. The IUPAC name for CFC13 is chloro(trifluoro)methane. + + 1 @@ -12907,6 +13138,20 @@ "Mole fraction" is used in the construction "mole_fraction_of_X_in_Y", where X is a material constituent of Y. A chemical or biological species denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The chemical formula for hcfc124 is C2HClF4. The IUPAC name for hcfc124 is 1-chloro-1,2,2,2-tetrafluoroethane. + + 1 + + + "Mole fraction" is used in the construction "mole_fraction_of_X_in_Y", where X is a material constituent of Y. A chemical or biological species denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The chemical formula for HCFC132b is CH2ClCClF2. The IUPAC name for HCFC132b is 1,2-dichloro-1,1-difluoroethane. + + + + 1 + + + "Mole fraction" is used in the construction "mole_fraction_of_X_in_Y", where X is a material constituent of Y. A chemical or biological species denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The chemical formula for HCFC133a is CH2ClCF3. The IUPAC name for HCFC133a is 2-chloro-1,1,1-trifluoroethane. + + 1 @@ -13117,6 +13362,13 @@ Mole fraction is used in the construction mole_fraction_of_X_in_Y, where X is a material constituent of Y. 
+ + 1 + + + "Mole fraction" is used in the construction "mole_fraction_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "in_dry_air" means that the quantity is calculated as the number of particles of X divided by the number of dry air particles, i.e. the effect of water vapor is excluded. The chemical formula for methane is CH4. Methane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. + + 1 @@ -13222,6 +13474,13 @@ Mole fraction is used in the construction mole_fraction_of_X_in_Y, where X is a material constituent of Y. The chemical formula of nitrous oxide is N2O. + + 1 + + + "Mole fraction" is used in the construction "mole_fraction_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "in_dry_air" means that the quantity is calculated as the number of particles of X divided by the number of dry air particles, i.e. the effect of water vapor is excluded. The chemical formula for nitrous oxide is N2O. + + 1 @@ -13383,6 +13642,20 @@ "Mole ratio" is used in the construction "mole_ratio_of_X_to_Y_in_medium", where X and Y are both material constituents of the medium. "Medium" can take any of the values given in the "medium" section of the standard name Guidelines document. The phrase "ratio_of_X_to_Y" means X/Y. The chemical formula for the nitrate anion is NO3-. The chemical formula of the phosphate anion is PO4 with a charge of minus three. + + mol kg-1 + + + The construction "moles_of_X_per_unit_mass_in_Y" is also called "molality" of X in Y, where X is a material constituent of Y. 
A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The equivalent term in the NERC P01 Parameter Usage Vocabulary may be found at http://vocab.nerc.ac.uk/collection/P01/current/ATPXZZDZ/2/. + + + + mol kg-1 + + + The construction "moles_of_X_per_unit_mass_in_Y" is also called "molality" of X in Y, where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The chemical formula of ammonium is NH4. The equivalent term in the NERC P01 Parameter Usage Vocabulary may be found at http://vocab.nerc.ac.uk/collection/P01/current/MDMAP004/3/. + + mol kg-1 @@ -13397,6 +13670,49 @@ The construction "moles_of_X_per_unit_mass_in_Y" is also called "molality" of X in Y, where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". "Dissolved inorganic carbon" describes a family of chemical species in solution, including carbon dioxide, carbonic acid and the carbonate and bicarbonate anions. "Dissolved inorganic carbon" is the term used in standard names for all species belonging to the family that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. + + mol kg-1 + + + The construction "moles_of_X_per_unit_mass_in_Y" is also called "molality" of X in Y, where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". 
"Dissolved nitrogen" means the sum of all nitrogen in solution: inorganic nitrogen (nitrite, nitrate and ammonium) plus nitrogen in carbon compounds. + + + + mol kg-1 + + + The construction "moles_of_X_per_unit_mass_in_Y" is also called "molality" of X in Y, where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". "Organic carbon" describes a family of chemical species and is the term used in standard names for all species belonging to the family that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The equivalent term in the NERC P01 Parameter Usage Vocabulary may be found at http://vocab.nerc.ac.uk/collection/P01/current/CORGZZKG/1/. + + + + mol kg-1 + + + The construction "moles_of_X_per_unit_mass_in_Y" is also called "molality" of X in Y, where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". "Dissolved organic nitrogen" describes the nitrogen held in carbon compounds in solution. These are mostly generated by plankton excretion and decay. The equivalent term in the NERC P01 Parameter Usage Vocabulary may be found at http://vocab.nerc.ac.uk/collection/P01/current/MDMAP008/3/. + + + + mol kg-1 + + + The construction "moles_of_X_per_unit_mass_in_Y" is also called "molality" of X in Y, where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen”. 
"Organic phosphorus" means phosphorus in carbon compounds. The equivalent term in the NERC P01 Parameter Usage Vocabulary may be found at http://vocab.nerc.ac.uk/collection/P01/current/ORGPMSZZ/4/. + + + + mol kg-1 + + + The construction "moles_of_X_per_unit_mass_in_Y" is also called "molality" of X in Y, where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". “Phosphorus” means phosphorus in all chemical forms, commonly referred to as "total phosphorus". The equivalent term in the NERC P01 Parameter Usage Vocabulary may be found at +http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. + + + + mol kg-1 + + + The construction "moles_of_X_per_unit_mass_in_Y" is also called "molality" of X in Y, where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The chemical formula of guanosine triphosphate is C10H16N5O14P3. + + mol kg-1 @@ -13453,6 +13769,48 @@ The construction "moles_of_X_per_unit_mass_in_Y" is also called "molality" of X in Y, where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". Particulate means suspended solids of all sizes. Particulate inorganic carbon is carbon bound in molecules ionically that may be liberated from the particles as carbon dioxide by acidification. + + mol kg-1 + + + The construction "moles_of_X_per_unit_mass_in_Y" is also called "molality" of X in Y, where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". 
The phrase "expressed_as" is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. Particulate means suspended solids of all sizes. The equivalent term in the NERC P01 Parameter Usage Vocabulary may be found at http://vocab.nerc.ac.uk/collection/P01/current/MDMAP011/4/. + + + + mol kg-1 + + + The construction "moles_of_X_per_unit_mass_in_Y" is also called "molality" of X in Y, where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "expressed_as" is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. Particulate means suspended solids of all sizes. The equivalent term in the NERC P01 Parameter Usage Vocabulary may be found at http://vocab.nerc.ac.uk/collection/P01/current/MDMAP013/4/. + + + + mol kg-1 + + + The construction "moles_of_X_per_unit_mass_in_Y" is also called "molality" of X in Y, where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "expressed_as" is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. Particulate means suspended solids of all sizes. Phosphorus means phosphorus in all chemical forms, commonly referred to as "total phosphorus". 
The equivalent term in the NERC P01 Parameter Usage Vocabulary may be found at http://vocab.nerc.ac.uk/collection/P01/current/TPHSVLPT/5/. + + + + mol kg-1 + + + The construction "moles_of_X_per_unit_mass_in_Y" is also called "molality" of X in Y, where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "expressed_as" is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. Particulate means suspended solids of all sizes. + + + + mol kg-1 + + + The construction "moles_of_X_per_unit_mass_in_Y" is also called "molality" of X in Y, where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "expressed_as" is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. Particulate means suspended solids of all sizes. + + + + mol kg-1 + + + The construction "moles_of_X_per_unit_mass_in_Y" is also called "molality" of X in Y, where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "expressed_as" is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. 
Particulate means suspended solids of all sizes. Phosphorus means phosphorus in all chemical forms, commonly referred to as "total phosphorus". The equivalent term in the NERC P01 Parameter Usage Vocabulary may be found at http://vocab.nerc.ac.uk/collection/P01/current/OPHSVLPT/6/. + + mol kg-1 @@ -13467,6 +13825,13 @@ moles_of_X_per_unit_mass_inY is also called "molality" of X in Y, where X is a material constituent of Y. + + mol kg-1 + + + The construction "moles_of_X_per_unit_mass_in_Y" is also called "molality" of X in Y, where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The chemical formula of sulfur hexafluoride is SF6. + + 1 @@ -13775,6 +14140,13 @@ In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. "Vegetation" means any living plants e.g. trees, shrubs, grass. "Litter" is dead plant material in or above the soil. + + kg s-1 + + + The amount of total nitrogen mass transported in the river channels from land into the ocean. This quantity can be provided at a certain location within the river network and floodplain (over land) or at the river mouth (over ocean) where the river enters the ocean. "River" refers to water in the fluvial system (stream and floodplain). + + m @@ -14069,6 +14441,13 @@ A velocity is a vector quantity. "Northward" indicates a vector component which is positive when directed northward (negative southward). The velocity at the sea floor is that adjacent to the ocean bottom, which would be the deepest grid cell in an ocean model and within the benthic boundary layer for measurements. + + m s-1 + + + A velocity is a vector quantity. "Northward" indicates a vector component which is positive when directed northward (negative southward). 
The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. + + m s-1 @@ -14125,13 +14504,6 @@ "Northward" indicates a vector component which is positive when directed northward (negative southward). Wind is defined as a two-dimensional (horizontal) air velocity vector, with no vertical component. (Vertical motion in the atmosphere has the standard name upward_air_velocity.) - - s-1 - 46 - - "Northward" indicates a vector component which is positive when directed northward (negative southward). Wind is defined as a two-dimensional (horizontal) air velocity vector, with no vertical component. (Vertical motion in the atmosphere has the standard name upward_air_velocity.) Wind shear is the derivative of wind with respect to height. - - kg m-2 @@ -14608,6 +14980,13 @@ Montgomery potential is defined as M = ap + gz, where a = specific volume, p = pressure, g = gravity, and z=depth. It represents an exact streamfunction on specific volume anomaly surfaces. + + m + + + The depth in the ocean, L, that buoyant production or destruction of turbulent energy balances the turbulent kinetic energy: L = -u*3 / (kB0), where u* is the oceanic surface frictional velocity, k is the von Karman constant, and B0 is the oceanic surface buoyancy flux. If the buoyancy flux is destabilizing, L is negative. + + s-1 @@ -14965,6 +15344,13 @@ Global average sea level change is due to change in volume of the water in the ocean, caused by mass and/or density change, or to change in the volume of the ocean basins, caused by tectonics etc. It is sometimes called "eustatic", which is a term that also has other definitions. It differs from the change in the global average sea surface height relative to the centre of the Earth by the global average vertical movement of the ocean floor. Zero sea level change is an arbitrary level. 
Phase is the initial angle of a wave modelled by a sinusoidal function. A coordinate variable of harmonic_period should be used to specify the period of the sinusoidal wave. Because global average sea level change quantifies the change in volume of the world ocean, it is not calculated necessarily by considering local changes in mean sea level. + + kg s-1 + + + The amount of total phosphorus mass transported in the river channels from land into the ocean. This quantity can be provided at a certain location within the river network and floodplain (over land) or at the river mouth (over ocean) where the river enters the ocean. "River" refers to water in the fluvial system (stream and floodplain). Phosphorus means phosphorus in all chemical forms, commonly referred to as "total phosphorus". + + s-1 @@ -19571,6 +19957,13 @@ sea_water_alkalinity_expressed_as_mole_equivalent is the total alkalinity equivalent concentration (including carbonate, nitrogen, silicate, and borate components). In ocean biogeochemistry models, a "natural analogue" is used to simulate the effect on a modelled variable of imposing preindustrial atmospheric carbon dioxide concentrations, even when the model as a whole may be subjected to varying forcings. + + mol kg-1 + + + The standard name sea_water_alkalinity_per_unit_mass_expressed_as_mole_equivalent is the total alkalinity equivalent concentration (including carbonate, nitrogen, silicate, and borate components) expressed as the number of moles of alkalinity per unit mass of seawater. The phrase "expressed_as" is used in the construction "A_expressed_as_B", where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. The equivalent term in the NERC P01 Parameter Usage Vocabulary may be found at http://vocab.nerc.ac.uk/collection/P01/current/MDMAP014/1/. 
+ + K @@ -20047,6 +20440,13 @@ The magnitude of an acoustic signal emitted by the instrument toward a reflecting surface and received again by the instrument. + + kg s-1 + + + The amount of silicate mass transported in the river channels from land into the ocean. This quantity can be provided at a certain location within the river network and floodplain (over land) or at the river mouth (over ocean) where the river enters the ocean. "River" refers to water in the fluvial system (stream and floodplain). + + 1 @@ -20215,6 +20615,13 @@ Hydraulic conductivity is the constant k in Darcy's Law q=-k grad h for fluid flow q (volume transport per unit area i.e. velocity) through a porous medium, where h is the hydraulic head (pressure expressed as an equivalent depth of water). + + kg m-2 + + + "Content" indicates a quantity per unit area. The "soil content" of a quantity refers to the vertical integral from the surface down to the bottom of the soil model. For the content between specified levels in the soil, standard names including "content_of_soil_layer" are used. + + kg m-2 @@ -20964,6 +21371,20 @@ The surface called "surface" means the lower boundary of the atmosphere.The brightness temperature of a body is the temperature of a black body which radiates the same power per unit solid angle per unit area. + + m2 s-3 + + + A variable quantifying net density gains or losses in air parcel buoyancy based on turbulent heat and moisture fluxes, represented by virtual temperature flux, at the air-sea interface. Positive values indicate a buoyancy flux out of the ocean (into the air) that will destabilize the atmosphere. + + + + m2 s-3 + + + A variable quantifying net density gains or losses in water parcel buoyancy based on thermal (net surface heat flux) and haline (precipitation minus evaporation) forcings at the air-sea interface. A positive value indicates a buoyancy flux into the ocean that will stabilize (i.e., stratify) the surface ocean layer. 
+ + Pa @@ -31550,16 +31971,16 @@ s-1 - + 45 The quantity with standard name upward_derivative_of_eastward_wind is the derivative of the eastward component of wind with respect to height. The phrase "component_derivative_of_X" means derivative of X with respect to distance in the component direction, which may be "northward", "southward", "eastward", "westward", "upward", "downward", "x" or "y". The last two indicate derivatives along the axes of the grid, in the case where they are not true longitude and latitude. A positive value indicates that X is increasing with distance along the positive direction of the axis. Wind is defined as a two-dimensional (horizontal) air velocity vector, with no vertical component. (Vertical motion in the atmosphere has the standard name "upward_air_velocity"). s-1 - + 46 - The quantity with standard name upward_derivative_of_northward_wind is the derivative of the northward component of wind speed with respect to height. The phrase "component_derivative_of_X" means derivative of X with respect to distance in the component direction, which may be "northward", "southward", "eastward", "westward", "upward", "downward", "x" or "y". The last two indicate derivatives along the axes of the grid, in the case where they are not true longitude and latitude. A positive value indicates that X is increasing with distance along the positive direction of the axis. Wind is defined as a two-dimensional (horizontal) air velocity vector, with no vertical component. (Vertical motion in the atmosphere has the standard name "upward_air_velocity"). + The quantity with standard name upward_derivative_of_northward_wind is the derivative of the northward component of wind with respect to height. The phrase "component_derivative_of_X" means derivative of X with respect to distance in the component direction, which may be "northward", "southward", "eastward", "westward", "upward", "downward", "x" or "y". 
The last two indicate derivatives along the axes of the grid, in the case where they are not true longitude and latitude. A positive value indicates that X is increasing with distance along the positive direction of the axis. Wind is defined as a two-dimensional (horizontal) air velocity vector, with no vertical component. (Vertical motion in the atmosphere has the standard name "upward_air_velocity"). @@ -31940,11 +32361,11 @@ The visibility is the distance at which something can be seen. - + m-1 - The volume scattering/absorption/attenuation coefficient is the fractional change of radiative flux per unit path length due to the stated process. Coefficients with canonical units of m2 s-1 i.e. multiplied by density have standard names with "specific_" instead of "volume_". The scattering/absorption/attenuation coefficient is assumed to be an integral over all wavelengths unless a coordinate of "radiation_wavelength" or "radiation_frequency" is included to specify the wavelength. "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. "Dried_aerosol" means that the aerosol sample has been dried from the ambient state, but that the dry state (relative humidity less than 40 per cent) has not necessarily been reached. To specify the relative humidity at which the sample was measured, provide a scalar coordinate variable with the standard name of "relative_humidity". The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. + The volume scattering/absorption/attenuation coefficient is the fractional change of radiative flux per unit path length due to the stated process. Coefficients with canonical units of m2 s-1 i.e. multiplied by density have standard names with "specific_" instead of "volume_". 
The scattering/absorption/attenuation coefficient is assumed to be an integral over all wavelengths unless a coordinate of "radiation_wavelength" or "radiation_frequency" is included to specify the wavelength. Radiative flux is the sum of shortwave and longwave radiative fluxes. "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. "Dried_aerosol" means that the aerosol sample has been dried from the ambient state, but that the dry state (relative humidity less than 40 per cent) has not necessarily been reached. To specify the relative humidity at which the sample was measured, provide a scalar coordinate variable with the standard name of "relative_humidity". The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. @@ -31961,18 +32382,18 @@ The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. Radiative flux is the sum of shortwave and longwave radiative fluxes. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. The volume scattering/absorption/attenuation coefficient is the fractional change of radiative flux per unit path length due to the stated process. Coefficients with canonical units of m2 s-1 i.e. multiplied by density have standard names with specific_ instead of volume_. The scattering/absorption/attenuation coefficient is assumed to be an integral over all wavelengths, unless a coordinate of radiation_wavelength is included to specify the wavelength. - + m-1 sr-1 Attenuation is the sum of absorption and scattering. Attenuation is sometimes called "extinction". 
The attenuated backwards scattering function includes the effects of two-way attenuation by the medium between a radar source and receiver. The volume scattering function is the fraction of incident radiative flux scattered into unit solid angle per unit path length. Backwards scattering refers to the sum of scattering into all backward angles i.e. scattering_angle exceeding pi/2 radians. A scattering_angle should not be specified with this quantity. - + m-1 sr-1 - Attenuation is the sum of absorption and scattering. Attenuation is sometimes called "extinction". The attenuated backwards scattering function includes the effects of two-way attenuation by the medium between a radar source and receiver. The volume scattering function is the fraction of incident radiative flux scattered into unit solid angle per unit path length. Backwards scattering refers to the sum of scattering into all backward angles i.e. scattering_angle exceeding pi/2 radians. A scattering_angle should not be specified with this quantity. A phrase "assuming_condition" indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. + Attenuation is the sum of absorption and scattering. Attenuation is sometimes called "extinction". The attenuated backwards scattering coefficient includes the effects of two-way attenuation by the medium between a radar source and receiver. The volume scattering coefficient is the fraction of incident radiative flux scattered into unit solid angle per unit path length. Backwards scattering refers to the sum of scattering into all backward angles i.e. scattering_angle exceeding pi/2 radians. A scattering_angle should not be specified with this quantity. 
The scattering coefficient is assumed to be an integral over all wavelengths unless a coordinate of "radiation_wavelength" or "radiation_frequency" is included to specify the wavelength. Coefficients with canonical units of m2 s-1, i.e. multiplied by density, have standard names with "specific_" instead of "volume_". Radiative flux is the sum of shortwave and longwave radiative fluxes. A phrase "assuming_condition" indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. @@ -31982,18 +32403,18 @@ Downwelling radiation is radiation from above. It does not mean "net downward". The sign convention is that "upwelling" is positive upwards and "downwelling" is positive downwards. Radiative flux is the sum of shortwave and longwave radiative fluxes. When thought of as being incident on a surface, a radiative flux is sometimes called "irradiance". In addition, it is identical with the quantity measured by a cosine-collector light-meter and sometimes called "vector irradiance". In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. The volume scattering/absorption/attenuation coefficient is the fractional change of radiative flux per unit path length due to the stated process. Coefficients with canonical units of m2 s-1 i.e. multiplied by density have standard names with specific_ instead of volume_. The scattering/absorption/attenuation coefficient is assumed to be an integral over all wavelengths, unless a coordinate of radiation_wavelength is included to specify the wavelength. Attenuation is the sum of absorption and scattering. Attenuation is sometimes called "extinction". 
Also called "diffuse" attenuation, the attenuation of downwelling radiative flux refers to the decrease with decreasing height or increasing depth of the downwelling component of radiative flux, regardless of incident direction. - - m-1 + + m-1 sr-1 - The volume scattering/absorption/attenuation coefficient is the fractional change of radiative flux per unit path length due to the stated process. Coefficients with canonical units of m2 s-1 i.e. multiplied by density have standard names with specific_ instead of volume_. Backwards scattering refers to the sum of scattering into all backward angles i.e. scattering_angle exceeds pi/2 radians. A scattering_angle should not be specified with this quantity. The scattering/absorption/attenuation coefficient is assumed to be an integral over all wavelengths, unless a coordinate of radiation_wavelength is included to specify the wavelength. "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. "Dried_aerosol" means that the aerosol sample has been dried from the ambient state, but that the dry state (relative humidity less than 40 per cent) has not necessarily been reached. To specify the relative humidity at which the sample was measured, provide a scalar coordinate variable with the standard name of "relative_humidity". The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. + Volume backwards scattering coefficient by ranging instrument is the fraction of radiative flux, per unit path length and per unit solid angle, scattered at 180 degrees angle respect to the incident radiation and obtained through ranging techniques like lidar and radar. Backwards scattering coefficient is assumed to be related to the same wavelength of incident radiation. 
"Ambient_aerosol" means that the aerosol is measured or modelled at the ambient state of pressure, temperature and relative humidity that exists in its immediate environment. "Ambient aerosol particles" are aerosol particles that have taken up ambient water through hygroscopic growth. The extent of hygroscopic growth depends on the relative humidity and the composition of the particles. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. - - m-1 sr-1 + + m-1 - Volume backwards scattering coefficient by ranging instrument is the fraction of radiative flux, per unit path length and per unit solid angle, scattered at 180 degrees angle respect to the incident radiation and obtained through ranging techniques like lidar and radar. Backwards scattering coefficient is assumed to be related to the same wavelength of incident radiation. "Ambient_aerosol" means that the aerosol is measured or modelled at the ambient state of pressure, temperature and relative humidity that exists in its immediate environment. "Ambient aerosol particles" are aerosol particles that have taken up ambient water through hygroscopic growth. The extent of hygroscopic growth depends on the relative humidity and the composition of the particles. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. + The volume scattering/absorption/attenuation coefficient is the fractional change of radiative flux per unit path length due to the stated process. Coefficients with canonical units of m2 s-1 i.e. multiplied by density have standard names with specific_ instead of volume_. Backwards scattering refers to the sum of scattering into all backward angles i.e. scattering_angle exceeds pi/2 radians. 
A scattering_angle should not be specified with this quantity. The scattering/absorption/attenuation coefficient is assumed to be an integral over all wavelengths, unless a coordinate of radiation_wavelength is included to specify the wavelength. Radiative flux is the sum of shortwave and longwave radiative fluxes. "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. "Dried_aerosol" means that the aerosol sample has been dried from the ambient state, but that the dry state (relative humidity less than 40 per cent) has not necessarily been reached. To specify the relative humidity at which the sample was measured, provide a scalar coordinate variable with the standard name of "relative_humidity". The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. @@ -32087,6 +32508,13 @@ "Volume fraction" is used in the construction "volume_fraction_of_X_in_Y", where X is a material constituent of Y. It is evaluated as the volume of X divided by the volume of Y (including X). It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. The phrase "frozen_water" means ice. + + + + + "Volume fraction" is used in the construction "volume_fraction_of_X_in_Y", where X is a material constituent of Y. It is evaluated as the volume of X divided by the volume of Y (including X). It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. "Sea floor sediment" is sediment deposited at the sea bed. "Water" means water in all phases. + + 1 @@ -32108,6 +32536,13 @@ "Volume fraction" is used in the construction "volume_fraction_of_X_in_Y", where X is a material constituent of Y. It is evaluated as the volume of X divided by the volume of Y (including X). 
It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. + + 1 + + + "Volume fraction" is used in the construction volume_fraction_of_X_in_Y, where X is a material constituent of Y. It is evaluated as the volume of X divided by the volume of Y (including X). It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. The volume_fraction_of_water_in_soil_at_saturation is the volume fraction at which a soil has reached it's maximum water holding capacity. + + 1 @@ -32192,18 +32627,18 @@ Water means water in all phases. "Evapotranspiration" means all water vapor fluxes into the atmosphere from the surface: liquid evaporation, sublimation and transpiration. Evaporation is the conversion of liquid or solid into vapor. Transpiration is the process by which liquid water in plant stomata is transferred as water vapor into the atmosphere. (The conversion of solid alone into vapor is called "sublimation".) In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. Unless indicated in the cell_methods attribute, a quantity is assumed to apply to the whole area of each horizontal grid box. - + kg m-2 s-1 - - "Water" means water in all phases. Flux correction is also called "flux adjustment". A positive flux correction is downward i.e. added to the ocean. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. + wfo + "Water" means water in all phases. The water flux into sea water is the freshwater entering as a result of precipitation, evaporation, river inflow, sea ice effects and water flux relaxation and correction (if applied). In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. - + kg m-2 s-1 - wfo - "Water" means water in all phases. 
The water flux into sea water is the freshwater entering as a result of precipitation, evaporation, river inflow, sea ice effects and water flux relaxation and correction (if applied). In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. + + "Water" means water in all phases. Flux correction is also called "flux adjustment". A positive flux correction is downward i.e. added to the ocean. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. @@ -32551,14 +32986,6 @@ isotropic_shortwave_radiance_in_air - - water_evapotranspiration_flux - - - - drainage_amount_through_base_of_soil_model - - mole_fraction_of_ozone_in_air @@ -32587,6 +33014,50 @@ surface_drag_coefficient_in_air + + sea_surface_swell_wave_period + + + + sea_surface_wind_wave_period + + + + mass_fraction_of_convective_cloud_condensed_water_in_air + + + + mass_fraction_of_ozone_in_air + + + + wave_frequency + + + + northward_eliassen_palm_flux_in_air + + + + northward_heat_flux_in_air_due_to_eddy_advection + + + + upward_eliassen_palm_flux_in_air + + + + upward_eastward_momentum_flux_in_air_due_to_nonorographic_eastward_gravity_waves + + + + upward_eastward_momentum_flux_in_air_due_to_nonorographic_westward_gravity_waves + + + + upward_eastward_momentum_flux_in_air_due_to_orographic_gravity_waves + + water_flux_into_sea_water @@ -32627,64 +33098,196 @@ surface_upward_sensible_heat_flux + + atmosphere_moles_of_carbon_monoxide + + + + atmosphere_moles_of_methane + + + + atmosphere_moles_of_methyl_bromide + + + + atmosphere_moles_of_methyl_chloride + + + + atmosphere_moles_of_molecular_hydrogen + + + + atmosphere_moles_of_nitrous_oxide + + mass_concentration_of_suspended_matter_in_sea_water - - universal_thermal_comfort_index + + mole_concentration_of_mesozooplankton_expressed_as_nitrogen_in_sea_water - - sea_surface_swell_wave_period + + 
mole_concentration_of_microzooplankton_expressed_as_nitrogen_in_sea_water - - sea_surface_wind_wave_period + + mole_concentration_of_organic_detritus_expressed_as_nitrogen_in_sea_water + + + + mole_concentration_of_organic_detritus_expressed_as_silicon_in_sea_water + + + + tendency_of_atmosphere_moles_of_methyl_bromide + + + + tendency_of_atmosphere_moles_of_methyl_chloride + + + + tendency_of_atmosphere_moles_of_molecular_hydrogen + + + + tendency_of_atmosphere_moles_of_nitrous_oxide + + + + tendency_of_middle_atmosphere_moles_of_carbon_monoxide + + + + tendency_of_middle_atmosphere_moles_of_methane + + + + tendency_of_middle_atmosphere_moles_of_methyl_bromide + + + + tendency_of_middle_atmosphere_moles_of_methyl_chloride + + + + tendency_of_middle_atmosphere_moles_of_molecular_hydrogen + + + + tendency_of_troposphere_moles_of_carbon_monoxide + + + + tendency_of_troposphere_moles_of_methane + + + + tendency_of_troposphere_moles_of_methyl_bromide + + + + tendency_of_troposphere_moles_of_methyl_chloride + + + + tendency_of_troposphere_moles_of_molecular_hydrogen atmosphere_net_upward_convective_mass_flux - - tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_deep_convection + + eastward_water_vapor_flux_in_air - - tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_shallow_convection + + kinetic_energy_dissipation_in_atmosphere_boundary_layer - - tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_turbulence + + lwe_stratiform_snowfall_rate - - wave_frequency + + lwe_thickness_of_stratiform_snowfall_amount - - northward_eliassen_palm_flux_in_air + + northward_water_vapor_flux_in_air - - northward_heat_flux_in_air_due_to_eddy_advection + + stratiform_rainfall_amount - - upward_eliassen_palm_flux_in_air + + stratiform_rainfall_flux - - upward_eastward_momentum_flux_in_air_due_to_nonorographic_eastward_gravity_waves + + stratiform_rainfall_rate - - 
upward_eastward_momentum_flux_in_air_due_to_nonorographic_westward_gravity_waves + + stratiform_snowfall_amount - - upward_eastward_momentum_flux_in_air_due_to_orographic_gravity_waves + + stratiform_snowfall_flux + + + + thickness_of_stratiform_rainfall_amount + + + + thickness_of_stratiform_snowfall_amount + + + + atmosphere_mass_content_of_cloud_condensed_water + + + + atmosphere_mass_content_of_cloud_ice + + + + atmosphere_mass_content_of_convective_cloud_condensed_water + + + + atmosphere_mass_content_of_water_vapor + + + + surface_downward_mole_flux_of_carbon_dioxide + + + + surface_upward_mole_flux_of_carbon_dioxide + + + + atmosphere_mass_content_of_sulfate + + + + atmosphere_mass_content_of_sulfate + + + + change_over_time_in_atmosphere_mass_content_of_water_due_to_advection + + + + change_over_time_in_atmosphere_mass_content_of_water_due_to_advection @@ -32739,240 +33342,96 @@ tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_convection - - tendency_of_middle_atmosphere_moles_of_carbon_monoxide - - - - tendency_of_middle_atmosphere_moles_of_methane - - - - tendency_of_middle_atmosphere_moles_of_methyl_bromide - - - - tendency_of_middle_atmosphere_moles_of_methyl_chloride - - - - tendency_of_middle_atmosphere_moles_of_molecular_hydrogen - - - - tendency_of_troposphere_moles_of_carbon_monoxide - - - - tendency_of_troposphere_moles_of_methane - - - - tendency_of_troposphere_moles_of_methyl_bromide - - - - tendency_of_troposphere_moles_of_methyl_chloride - - - - tendency_of_troposphere_moles_of_molecular_hydrogen - - - - mass_fraction_of_convective_cloud_condensed_water_in_air - - - - mass_fraction_of_ozone_in_air - - - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_due_to_emission - - - - tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_emission - - - - 
tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_residential_and_commercial_combustion - - - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_waste_treatment_and_disposal - - - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_savanna_and_grassland_fires - - - - tendency_of_atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles_due_to_emission - - - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_land_transport - - - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_agricultural_waste_burning - - - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_energy_production_and_distribution - - - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_maritime_transport - - - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_net_chemical_production_and_emission - - - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_net_chemical_production_and_emission - - - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_forest_fires - - - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_industrial_processes_and_combustion - - - - sea_surface_swell_wave_significant_height - - - - sea_surface_wind_wave_significant_height - - - - 
sea_surface_wave_significant_height - - - - mass_content_of_water_in_soil_layer - - - - mass_content_of_water_in_soil - - - - sea_surface_swell_wave_to_direction - - - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_gravitational_settling - - - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_gravitational_settling - - - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_turbulent_deposition - - - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_turbulent_deposition - - - - tendency_of_atmosphere_moles_of_nitric_acid_trihydrate_ambient_aerosol_particles + + tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_deep_convection - - eastward_water_vapor_flux_in_air + + tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_shallow_convection - - kinetic_energy_dissipation_in_atmosphere_boundary_layer + + tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_turbulence - - lwe_stratiform_snowfall_rate + + equivalent_thickness_at_stp_of_atmosphere_ozone_content - - lwe_thickness_of_stratiform_snowfall_amount + + sea_water_x_velocity - - northward_water_vapor_flux_in_air + + sea_water_y_velocity - - stratiform_rainfall_amount + + x_wind - - stratiform_rainfall_flux + + y_wind - - stratiform_rainfall_rate + + tendency_of_atmosphere_mass_content_of_water_vapor_due_to_advection - - stratiform_snowfall_amount + + land_ice_surface_specific_mass_balance_rate - - stratiform_snowfall_flux + + land_ice_lwe_surface_specific_mass_balance_rate - - thickness_of_stratiform_rainfall_amount + + isotropic_radiance_per_unit_wavelength_in_air - - thickness_of_stratiform_snowfall_amount + + isotropic_radiance_per_unit_wavelength_in_air - - atmosphere_mass_content_of_cloud_condensed_water + + omnidirectional_spherical_irradiance_per_unit_wavelength_in_sea_water - 
- atmosphere_mass_content_of_cloud_ice + + mass_concentration_of_chlorophyll_in_sea_water - - atmosphere_mass_content_of_convective_cloud_condensed_water + + mass_concentration_of_chlorophyll_in_sea_water - - atmosphere_mass_content_of_water_vapor + + atmosphere_convective_available_potential_energy - - surface_downward_mole_flux_of_carbon_dioxide + + atmosphere_convective_available_potential_energy - - surface_upward_mole_flux_of_carbon_dioxide + + gross_primary_productivity_of_biomass_expressed_as_carbon - - atmosphere_mass_content_of_sulfate + + net_primary_productivity_of_biomass_expressed_as_carbon - - atmosphere_mass_content_of_sulfate + + net_primary_productivity_of_biomass_expressed_as_carbon_accumulated_in_leaves - - change_over_time_in_atmosphere_mass_content_of_water_due_to_advection + + net_primary_productivity_of_biomass_expressed_as_carbon_accumulated_in_roots - - change_over_time_in_atmosphere_mass_content_of_water_due_to_advection + + net_primary_productivity_of_biomass_expressed_as_carbon_accumulated_in_wood @@ -33283,840 +33742,868 @@ tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_dry_deposition - - x_wind + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_gravitational_settling - - y_wind + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_gravitational_settling - - land_ice_surface_specific_mass_balance_rate + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_turbulent_deposition - - land_ice_lwe_surface_specific_mass_balance_rate + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_turbulent_deposition - - isotropic_radiance_per_unit_wavelength_in_air + + tendency_of_atmosphere_moles_of_nitric_acid_trihydrate_ambient_aerosol_particles - - isotropic_radiance_per_unit_wavelength_in_air + + 
tendency_of_atmosphere_moles_of_sulfate_dry_aerosol_particles - - omnidirectional_spherical_irradiance_per_unit_wavelength_in_sea_water + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_due_to_emission - - mass_concentration_of_chlorophyll_in_sea_water + + tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_emission - - mass_concentration_of_chlorophyll_in_sea_water + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_residential_and_commercial_combustion - - tendency_of_atmosphere_moles_of_sulfate_dry_aerosol_particles + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_waste_treatment_and_disposal - - tendency_of_atmosphere_moles_of_methyl_bromide + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_savanna_and_grassland_fires - - tendency_of_atmosphere_moles_of_methyl_chloride + + tendency_of_atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles_due_to_emission - - tendency_of_atmosphere_moles_of_molecular_hydrogen + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_land_transport - - tendency_of_atmosphere_moles_of_nitrous_oxide + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_agricultural_waste_burning - - tendency_of_atmosphere_mass_content_of_water_vapor_due_to_advection + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_energy_production_and_distribution - - atmosphere_moles_of_carbon_monoxide + + 
tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_maritime_transport - - sea_surface_wind_wave_to_direction + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_net_chemical_production_and_emission - - sea_surface_wave_mean_period + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_net_chemical_production_and_emission - - equivalent_thickness_at_stp_of_atmosphere_ozone_content + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_forest_fires - - atmosphere_moles_of_methane + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_industrial_processes_and_combustion - - atmosphere_moles_of_methyl_bromide + + sea_surface_swell_wave_significant_height - - atmosphere_moles_of_methyl_chloride + + sea_surface_wind_wave_significant_height - - atmosphere_moles_of_molecular_hydrogen + + sea_surface_wave_significant_height - - atmosphere_moles_of_nitrous_oxide + + mass_content_of_water_in_soil_layer - - sea_water_x_velocity + + mass_content_of_water_in_soil - - sea_water_y_velocity + + sea_surface_swell_wave_to_direction - - integral_wrt_time_of_air_temperature_deficit + + sea_surface_wind_wave_to_direction - - integral_wrt_time_of_air_temperature_excess + + sea_surface_wave_mean_period - - integral_wrt_time_of_surface_downward_latent_heat_flux + + sea_surface_wind_wave_mean_period - - integral_wrt_time_of_surface_downward_sensible_heat_flux + + sea_surface_swell_wave_mean_period - - atmosphere_convective_available_potential_energy + + ocean_mixed_layer_thickness_defined_by_vertical_tracer_diffusivity_deficit - - atmosphere_convective_available_potential_energy + + atmosphere_mass_content_of_sea_salt_dry_aerosol_particles - - 
gross_primary_productivity_of_biomass_expressed_as_carbon + + atmosphere_mass_content_of_sea_salt_dry_aerosol_particles - - net_primary_productivity_of_biomass_expressed_as_carbon + + atmosphere_optical_thickness_due_to_sea_salt_ambient_aerosol_particles - - net_primary_productivity_of_biomass_expressed_as_carbon_accumulated_in_leaves + + atmosphere_optical_thickness_due_to_sea_salt_ambient_aerosol_particles - - net_primary_productivity_of_biomass_expressed_as_carbon_accumulated_in_roots + + mass_concentration_of_sea_salt_dry_aerosol_particles_in_air - - net_primary_productivity_of_biomass_expressed_as_carbon_accumulated_in_wood + + mass_concentration_of_sea_salt_dry_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_pm2p5_sea_salt_dry_aerosol_particles_due_to_dry_deposition + + mass_fraction_of_sea_salt_dry_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_pm2p5_sea_salt_dry_aerosol_particles_due_to_emission + + mass_fraction_of_sea_salt_dry_aerosol_particles_in_air - - sea_surface_wind_wave_mean_period + + tendency_of_atmosphere_mass_content_of_pm10_sea_salt_dry_aerosol_particles_due_to_dry_deposition - - sea_surface_swell_wave_mean_period + + tendency_of_atmosphere_mass_content_of_pm10_sea_salt_dry_aerosol_particles_due_to_emission - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_emission + + tendency_of_atmosphere_mass_content_of_pm10_sea_salt_dry_aerosol_particles_due_to_wet_deposition - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_emission + + tendency_of_atmosphere_mass_content_of_pm2p5_sea_salt_dry_aerosol_particles_due_to_wet_deposition - - sea_surface_height_above_geoid + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_dry_deposition - - sea_surface_height_above_geoid + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_dry_deposition - - sea_floor_depth_below_geoid + + 
tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_gravitational_settling - - air_pressure_at_mean_sea_level + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_gravitational_settling - - lagrangian_tendency_of_air_pressure + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_turbulent_deposition - - lagrangian_tendency_of_air_pressure + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_turbulent_deposition - - mass_concentration_of_elemental_carbon_dry_aerosol_particles_in_air + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_wet_deposition - - atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_wet_deposition - - mass_fraction_of_elemental_carbon_dry_aerosol_particles_in_air + + atmosphere_optical_thickness_due_to_pm1_ambient_aerosol_particles - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_dry_deposition + + mass_concentration_of_pm1_ambient_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission + + mass_fraction_of_pm1_ambient_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_energy_production_and_distribution + + mass_fraction_of_pm1_ambient_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_forest_fires + + atmosphere_optical_thickness_due_to_pm2p5_ambient_aerosol_particles - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_industrial_processes_and_combustion + + mass_concentration_of_pm2p5_ambient_aerosol_particles_in_air - - 
tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_land_transport + + mass_fraction_of_pm2p5_ambient_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_maritime_transport + + mass_fraction_of_pm2p5_ambient_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_residential_and_commercial_combustion + + atmosphere_optical_thickness_due_to_pm10_ambient_aerosol_particles - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_savanna_and_grassland_fires + + mass_concentration_of_pm10_ambient_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_waste_treatment_and_disposal + + mass_fraction_of_pm10_ambient_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_gravitational_settling + + mass_fraction_of_pm10_ambient_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_turbulent_deposition + + tendency_of_atmosphere_mass_content_of_pm2p5_sea_salt_dry_aerosol_particles_due_to_dry_deposition - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_wet_deposition + + tendency_of_atmosphere_mass_content_of_pm2p5_sea_salt_dry_aerosol_particles_due_to_emission - - tendency_of_mass_concentration_of_elemental_carbon_dry_aerosol_particles_in_air_due_to_emission_from_aviation + + sea_floor_depth_below_mean_sea_level - - integral_wrt_time_of_surface_net_downward_longwave_flux + + sea_surface_height_above_mean_sea_level - - integral_wrt_time_of_surface_net_downward_shortwave_flux + + sea_surface_height_above_mean_sea_level - - integral_wrt_time_of_toa_net_downward_shortwave_flux + + 
surface_geostrophic_eastward_sea_water_velocity_assuming_mean_sea_level_for_geoid - - integral_wrt_time_of_toa_outgoing_longwave_flux + + surface_geostrophic_eastward_sea_water_velocity_assuming_mean_sea_level_for_geoid - - northward_ocean_freshwater_transport_due_to_parameterized_eddy_advection + + surface_geostrophic_northward_sea_water_velocity_assuming_mean_sea_level_for_geoid - - northward_ocean_salt_transport_due_to_parameterized_eddy_advection + + surface_geostrophic_northward_sea_water_velocity_assuming_mean_sea_level_for_geoid - - ocean_heat_x_transport_due_to_parameterized_eddy_advection + + surface_geostrophic_sea_water_x_velocity_assuming_mean_sea_level_for_geoid - - ocean_heat_y_transport_due_to_parameterized_eddy_advection + + surface_geostrophic_sea_water_y_velocity_assuming_mean_sea_level_for_geoid - - ocean_mass_x_transport_due_to_advection_and_parameterized_eddy_advection + + tendency_of_sea_surface_height_above_mean_sea_level - - ocean_mass_y_transport_due_to_advection_and_parameterized_eddy_advection + + surface_geostrophic_northward_sea_water_velocity - - ocean_meridional_overturning_mass_streamfunction_due_to_parameterized_eddy_advection + + surface_geostrophic_eastward_sea_water_velocity - - ocean_y_overturning_mass_streamfunction_due_to_parameterized_eddy_advection + + tendency_of_atmosphere_mass_content_of_nitrogen_compounds_expressed_as_nitrogen_due_to_dry_deposition - - tendency_of_sea_water_salinity_due_to_parameterized_eddy_advection + + tendency_of_atmosphere_mass_content_of_nitrogen_compounds_expressed_as_nitrogen_due_to_deposition - - tendency_of_sea_water_temperature_due_to_parameterized_eddy_advection + + atmosphere_absorption_optical_thickness_due_to_sea_salt_ambient_aerosol_particles - - northward_sea_water_velocity_due_to_parameterized_mesoscale_eddies + + atmosphere_absorption_optical_thickness_due_to_sea_salt_ambient_aerosol_particles - - eastward_sea_water_velocity_due_to_parameterized_mesoscale_eddies + + 
tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_emission - - sea_water_x_velocity_due_to_parameterized_mesoscale_eddies + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_emission - - sea_water_y_velocity_due_to_parameterized_mesoscale_eddies + + sea_surface_height_above_geoid - - upward_sea_water_velocity_due_to_parameterized_mesoscale_eddies + + sea_surface_height_above_geoid - - ocean_tracer_biharmonic_diffusivity_due_to_parameterized_mesoscale_eddy_advection + + sea_floor_depth_below_geoid - - ocean_tracer_laplacian_diffusivity_due_to_parameterized_mesoscale_eddy_advection + + air_pressure_at_mean_sea_level - - tendency_of_ocean_eddy_kinetic_energy_content_due_to_parameterized_eddy_advection + + lagrangian_tendency_of_air_pressure - - mole_concentration_of_mesozooplankton_expressed_as_nitrogen_in_sea_water + + lagrangian_tendency_of_air_pressure - - mole_concentration_of_microzooplankton_expressed_as_nitrogen_in_sea_water + + mass_concentration_of_elemental_carbon_dry_aerosol_particles_in_air - - mole_concentration_of_organic_detritus_expressed_as_nitrogen_in_sea_water + + atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles - - mole_concentration_of_organic_detritus_expressed_as_silicon_in_sea_water + + mass_fraction_of_elemental_carbon_dry_aerosol_particles_in_air - - northward_ocean_heat_transport_due_to_parameterized_eddy_advection + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_dry_deposition - - integral_wrt_depth_of_sea_water_practical_salinity + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission - - integral_wrt_depth_of_sea_water_temperature + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_energy_production_and_distribution - - integral_wrt_depth_of_sea_water_temperature + + 
tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_forest_fires - - integral_wrt_depth_of_sea_water_temperature + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_industrial_processes_and_combustion - - integral_wrt_depth_of_sea_water_temperature + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_land_transport - - integral_wrt_height_of_product_of_eastward_wind_and_specific_humidity + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_maritime_transport - - integral_wrt_height_of_product_of_northward_wind_and_specific_humidity + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_residential_and_commercial_combustion - - water_flux_into_sea_water_from_rivers + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_savanna_and_grassland_fires - - toa_outgoing_shortwave_flux_assuming_clear_sky_and_no_aerosol + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_waste_treatment_and_disposal - - wood_debris_mass_content_of_carbon + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_gravitational_settling - - stratiform_graupel_flux + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_turbulent_deposition - - water_volume_transport_into_sea_water_from_rivers + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_wet_deposition - - surface_water_evaporation_flux + + tendency_of_mass_concentration_of_elemental_carbon_dry_aerosol_particles_in_air_due_to_emission_from_aviation - - northward_transformed_eulerian_mean_air_velocity + + integral_wrt_time_of_air_temperature_deficit - - 
ocean_mixed_layer_thickness_defined_by_vertical_tracer_diffusivity_deficit + + integral_wrt_time_of_air_temperature_excess - - atmosphere_mass_content_of_sea_salt_dry_aerosol_particles + + integral_wrt_time_of_surface_downward_latent_heat_flux - - atmosphere_mass_content_of_sea_salt_dry_aerosol_particles + + integral_wrt_time_of_surface_downward_sensible_heat_flux - - atmosphere_optical_thickness_due_to_sea_salt_ambient_aerosol_particles + + integral_wrt_time_of_surface_net_downward_longwave_flux - - atmosphere_optical_thickness_due_to_sea_salt_ambient_aerosol_particles + + integral_wrt_time_of_surface_net_downward_shortwave_flux - - mass_concentration_of_sea_salt_dry_aerosol_particles_in_air + + integral_wrt_time_of_toa_net_downward_shortwave_flux - - mass_concentration_of_sea_salt_dry_aerosol_particles_in_air + + integral_wrt_time_of_toa_outgoing_longwave_flux - - mass_fraction_of_sea_salt_dry_aerosol_particles_in_air + + northward_ocean_freshwater_transport_due_to_parameterized_eddy_advection - - mass_fraction_of_sea_salt_dry_aerosol_particles_in_air + + northward_ocean_salt_transport_due_to_parameterized_eddy_advection - - tendency_of_atmosphere_mass_content_of_pm10_sea_salt_dry_aerosol_particles_due_to_dry_deposition + + ocean_heat_x_transport_due_to_parameterized_eddy_advection - - tendency_of_atmosphere_mass_content_of_pm10_sea_salt_dry_aerosol_particles_due_to_emission + + ocean_heat_y_transport_due_to_parameterized_eddy_advection - - tendency_of_atmosphere_mass_content_of_pm10_sea_salt_dry_aerosol_particles_due_to_wet_deposition + + ocean_mass_x_transport_due_to_advection_and_parameterized_eddy_advection - - tendency_of_atmosphere_mass_content_of_pm2p5_sea_salt_dry_aerosol_particles_due_to_wet_deposition + + ocean_mass_y_transport_due_to_advection_and_parameterized_eddy_advection - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_dry_deposition + + 
ocean_meridional_overturning_mass_streamfunction_due_to_parameterized_eddy_advection - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_dry_deposition + + ocean_y_overturning_mass_streamfunction_due_to_parameterized_eddy_advection - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_gravitational_settling + + tendency_of_sea_water_salinity_due_to_parameterized_eddy_advection - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_gravitational_settling + + tendency_of_sea_water_temperature_due_to_parameterized_eddy_advection - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_turbulent_deposition + + northward_sea_water_velocity_due_to_parameterized_mesoscale_eddies - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_turbulent_deposition + + eastward_sea_water_velocity_due_to_parameterized_mesoscale_eddies - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_wet_deposition + + sea_water_x_velocity_due_to_parameterized_mesoscale_eddies - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_wet_deposition + + sea_water_y_velocity_due_to_parameterized_mesoscale_eddies - - atmosphere_optical_thickness_due_to_pm1_ambient_aerosol_particles + + upward_sea_water_velocity_due_to_parameterized_mesoscale_eddies - - mass_concentration_of_pm1_ambient_aerosol_particles_in_air + + ocean_tracer_biharmonic_diffusivity_due_to_parameterized_mesoscale_eddy_advection - - mass_fraction_of_pm1_ambient_aerosol_particles_in_air + + ocean_tracer_laplacian_diffusivity_due_to_parameterized_mesoscale_eddy_advection - - mass_fraction_of_pm1_ambient_aerosol_particles_in_air + + tendency_of_ocean_eddy_kinetic_energy_content_due_to_parameterized_eddy_advection - - atmosphere_optical_thickness_due_to_pm2p5_ambient_aerosol_particles + + northward_ocean_heat_transport_due_to_parameterized_eddy_advection - - 
mass_concentration_of_pm2p5_ambient_aerosol_particles_in_air + + mole_concentration_of_dissolved_inorganic_13C_in_sea_water - - mass_fraction_of_pm2p5_ambient_aerosol_particles_in_air + + surface_downward_mass_flux_of_13C_dioxide_abiotic_analogue_expressed_as_13C - - mass_fraction_of_pm2p5_ambient_aerosol_particles_in_air + + surface_downward_mass_flux_of_14C_dioxide_abiotic_analogue_expressed_as_carbon - - atmosphere_optical_thickness_due_to_pm10_ambient_aerosol_particles + + mole_concentration_of_dissolved_inorganic_14C_in_sea_water - - mass_concentration_of_pm10_ambient_aerosol_particles_in_air + + stem_mass_content_of_carbon - - mass_fraction_of_pm10_ambient_aerosol_particles_in_air + + subsurface_litter_mass_content_of_carbon - - mass_fraction_of_pm10_ambient_aerosol_particles_in_air + + mass_flux_of_carbon_into_litter_from_vegetation - - sea_floor_depth_below_mean_sea_level + + litter_mass_content_of_carbon - - sea_surface_height_above_mean_sea_level + + surface_litter_mass_content_of_carbon - - sea_surface_height_above_mean_sea_level + + eastward_transformed_eulerian_mean_air_velocity - - surface_geostrophic_eastward_sea_water_velocity_assuming_mean_sea_level_for_geoid + + northward_transformed_eulerian_mean_air_velocity - - surface_geostrophic_eastward_sea_water_velocity_assuming_mean_sea_level_for_geoid + + surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_heterotrophic_respiration - - surface_geostrophic_northward_sea_water_velocity_assuming_mean_sea_level_for_geoid + + surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_respiration_in_soil - - surface_geostrophic_northward_sea_water_velocity_assuming_mean_sea_level_for_geoid + + surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_plant_respiration - - surface_geostrophic_sea_water_x_velocity_assuming_mean_sea_level_for_geoid + + surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_plant_respiration_for_biomass_growth - - 
surface_geostrophic_sea_water_y_velocity_assuming_mean_sea_level_for_geoid + + surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_plant_respiration_for_biomass_maintenance - - tendency_of_sea_surface_height_above_mean_sea_level + + carbon_mass_content_of_forestry_and_agricultural_products - - surface_geostrophic_northward_sea_water_velocity + + carbon_mass_content_of_forestry_and_agricultural_products - - surface_geostrophic_eastward_sea_water_velocity + + leaf_mass_content_of_carbon - - tendency_of_atmosphere_mass_content_of_nitrogen_compounds_expressed_as_nitrogen_due_to_dry_deposition + + medium_soil_pool_mass_content_of_carbon - - tendency_of_atmosphere_mass_content_of_nitrogen_compounds_expressed_as_nitrogen_due_to_deposition + + fast_soil_pool_mass_content_of_carbon - - atmosphere_absorption_optical_thickness_due_to_sea_salt_ambient_aerosol_particles + + miscellaneous_living_matter_mass_content_of_carbon - - atmosphere_absorption_optical_thickness_due_to_sea_salt_ambient_aerosol_particles + + root_mass_content_of_carbon - - mole_concentration_of_dissolved_inorganic_13C_in_sea_water + + slow_soil_pool_mass_content_of_carbon - - surface_downward_mass_flux_of_13C_dioxide_abiotic_analogue_expressed_as_13C + + soil_mass_content_of_carbon - - surface_downward_mass_flux_of_14C_dioxide_abiotic_analogue_expressed_as_carbon + + volume_scattering_coefficient_of_radiative_flux_in_air_due_to_dried_aerosol_particles - - mole_concentration_of_dissolved_inorganic_14C_in_sea_water + + volume_scattering_coefficient_of_radiative_flux_in_air_due_to_ambient_aerosol_particles - - stem_mass_content_of_carbon + + integral_wrt_depth_of_sea_water_practical_salinity - - subsurface_litter_mass_content_of_carbon + + integral_wrt_depth_of_sea_water_temperature - - mass_flux_of_carbon_into_litter_from_vegetation + + integral_wrt_depth_of_sea_water_temperature - - platform_id + + integral_wrt_depth_of_sea_water_temperature - - atmosphere_moles_of_halon1301 + + 
integral_wrt_depth_of_sea_water_temperature - - tendency_of_atmosphere_moles_of_halon1301 + + integral_wrt_height_of_product_of_eastward_wind_and_specific_humidity - - atmosphere_moles_of_halon1211 + + integral_wrt_height_of_product_of_northward_wind_and_specific_humidity - - tendency_of_atmosphere_moles_of_halon1211 + + water_flux_into_sea_water_from_rivers - - atmosphere_moles_of_halon1202 + + toa_outgoing_shortwave_flux_assuming_clear_sky_and_no_aerosol - - tendency_of_atmosphere_moles_of_halon1202 + + wood_debris_mass_content_of_carbon - - atmosphere_moles_of_cfc12 + + stratiform_graupel_flux - - atmosphere_mass_content_of_cloud_liquid_water + + water_volume_transport_into_sea_water_from_rivers - - effective_radius_of_cloud_liquid_water_particles + + surface_water_evaporation_flux - - effective_radius_of_convective_cloud_liquid_water_particles + + sea_ice_temperature_expressed_as_heat_content - - effective_radius_of_convective_cloud_liquid_water_particles_at_convective_liquid_water_cloud_top + + sea_ice_temperature_expressed_as_heat_content - - effective_radius_of_stratiform_cloud_liquid_water_particles + + sea_water_potential_temperature_expressed_as_heat_content - - effective_radius_of_stratiform_cloud_liquid_water_particles_at_stratiform_liquid_water_cloud_top + + sea_water_potential_temperature_expressed_as_heat_content - - magnitude_of_sea_ice_displacement + + incoming_water_volume_transport_along_river_channel - - number_concentration_of_convective_cloud_liquid_water_particles_at_convective_liquid_water_cloud_top + + surface_upwelling_longwave_flux_in_air - - number_concentration_of_stratiform_cloud_liquid_water_particles_at_stratiform_liquid_water_cloud_top + + surface_upwelling_radiance_per_unit_wavelength_in_air - - air_equivalent_potential_temperature + + surface_upwelling_radiance_per_unit_wavelength_in_air_emerging_from_sea_water - - mass_content_of_cloud_liquid_water_in_atmosphere_layer + + 
surface_upwelling_radiance_per_unit_wavelength_in_air_reflected_by_sea_water - - air_pseudo_equivalent_temperature + + surface_upwelling_radiance_per_unit_wavelength_in_sea_water - - air_equivalent_temperature + + surface_upwelling_radiative_flux_per_unit_wavelength_in_air - - effective_radius_of_cloud_liquid_water_particles_at_liquid_water_cloud_top + + surface_upwelling_radiative_flux_per_unit_wavelength_in_sea_water - - atmosphere_mass_content_of_convective_cloud_liquid_water + + surface_upwelling_shortwave_flux_in_air - - mole_concentration_of_phytoplankton_expressed_as_nitrogen_in_sea_water + + surface_upwelling_shortwave_flux_in_air_assuming_clear_sky_and_no_aerosol - - tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_riming_from_cloud_liquid_water + + upwelling_radiance_per_unit_wavelength_in_air - - tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_heterogeneous_nucleation_from_cloud_liquid_water + + upwelling_radiative_flux_per_unit_wavelength_in_air - - tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_melting_to_cloud_liquid_water + + upwelling_radiative_flux_per_unit_wavelength_in_sea_water - - air_pseudo_equivalent_potential_temperature + + upwelling_shortwave_flux_in_air_assuming_clear_sky_and_no_aerosol - - growth_limitation_of_diazotrophic_phytoplankton_due_to_solar_irradiance + + surface_upwelling_longwave_flux_in_air_assuming_clear_sky - - iron_growth_limitation_of_diazotrophic_phytoplankton + + surface_upwelling_shortwave_flux_in_air_assuming_clear_sky - - mass_concentration_of_diazotrophic_phytoplankton_expressed_as_chlorophyll_in_sea_water + + downwelling_photon_flux_per_unit_wavelength_in_sea_water - - mole_concentration_of_diazotrophic_phytoplankton_expressed_as_carbon_in_sea_water + + downwelling_photon_radiance_per_unit_wavelength_in_sea_water - - net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_diazotrophic_phytoplankton + + 
downwelling_photon_spherical_irradiance_per_unit_wavelength_in_sea_water - - net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_diazotrophic_phytoplankton + + downwelling_radiance_per_unit_wavelength_in_air - - nitrogen_growth_limitation_of_diazotrophic_phytoplankton + + downwelling_radiance_per_unit_wavelength_in_sea_water - - tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_net_primary_production_by_diazotrophic_phytoplankton + + downwelling_radiative_flux_per_unit_wavelength_in_air - - mass_fraction_of_liquid_precipitation_in_air + + downwelling_radiative_flux_per_unit_wavelength_in_sea_water - - mass_fraction_of_liquid_precipitation_in_air + + downwelling_shortwave_flux_in_air_assuming_clear_sky_and_no_aerosol - - area_type + + downwelling_spherical_irradiance_per_unit_wavelength_in_sea_water - - area_type + + integral_wrt_time_of_surface_downwelling_longwave_flux_in_air - - atmosphere_upward_absolute_vorticity + + integral_wrt_time_of_surface_downwelling_shortwave_flux_in_air - - atmosphere_upward_relative_vorticity + + surface_downwelling_longwave_flux_in_air - - surface_snow_density + + surface_downwelling_photon_flux_per_unit_wavelength_in_sea_water - - tendency_of_atmosphere_mass_content_of_water_vapor_due_to_sublimation_of_surface_snow_and_ice + + surface_downwelling_photon_radiance_per_unit_wavelength_in_sea_water - - mass_fraction_of_mercury_dry_aerosol_particles_in_air + + surface_downwelling_photon_spherical_irradiance_per_unit_wavelength_in_sea_water - - tendency_of_atmosphere_mass_content_of_mercury_dry_aerosol_particles_due_to_emission + + surface_downwelling_radiance_per_unit_wavelength_in_sea_water + + + + surface_downwelling_radiative_flux_per_unit_wavelength_in_air + + + + surface_downwelling_radiative_flux_per_unit_wavelength_in_sea_water + + + + surface_downwelling_shortwave_flux_in_air + + + + surface_downwelling_shortwave_flux_in_air_assuming_clear_sky + + + + 
surface_downwelling_shortwave_flux_in_air_assuming_clear_sky_and_no_aerosol + + + + surface_downwelling_spherical_irradiance_per_unit_wavelength_in_sea_water + + + + magnitude_of_sea_ice_displacement @@ -34155,20 +34642,36 @@ stratiform_precipitation_flux - - tendency_of_air_temperature_due_to_stratiform_precipitation + + tendency_of_air_temperature_due_to_stratiform_precipitation + + + + tendency_of_specific_humidity_due_to_stratiform_precipitation + + + + platform_roll + + + + platform_pitch + + + + platform_yaw - - tendency_of_specific_humidity_due_to_stratiform_precipitation + + platform_id - - tendency_of_atmosphere_moles_of_methane + + platform_name - - mole_fraction_of_noy_expressed_as_nitrogen_in_air + + water_vapor_partial_pressure_in_air @@ -34187,6 +34690,14 @@ tendency_of_mole_concentration_of_dissolved_inorganic_silicon_in_sea_water_due_to_biological_processes + + mole_concentration_of_diatoms_expressed_as_nitrogen_in_sea_water + + + + net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_calcareous_phytoplankton + + net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_diatoms @@ -34203,16 +34714,12 @@ net_primary_mole_productivity_of_biomass_expressed_as_carbon_due_to_nitrate_utilization - - platform_roll - - - - platform_pitch + + mole_concentration_of_phytoplankton_expressed_as_nitrogen_in_sea_water - - platform_yaw + + tendency_of_atmosphere_mass_content_of_nitrogen_compounds_expressed_as_nitrogen_due_to_wet_deposition @@ -34243,112 +34750,116 @@ rate_of_hydroxyl_radical_destruction_due_to_reaction_with_nmvoc - - tendency_of_sea_water_conservative_temperature_expressed_as_heat_content_due_to_parameterized_dianeutral_mixing + + tendency_of_atmosphere_moles_of_methane - - tendency_of_sea_water_potential_temperature_expressed_as_heat_content_due_to_parameterized_dianeutral_mixing + + mole_fraction_of_noy_expressed_as_nitrogen_in_air - - 
tendency_of_atmosphere_mass_content_of_nitrogen_compounds_expressed_as_nitrogen_due_to_wet_deposition + + mole_fraction_of_dichlorine_peroxide_in_air - - eastward_transformed_eulerian_mean_air_velocity + + mole_fraction_of_methylglyoxal_in_air - - surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_heterotrophic_respiration + + atmosphere_moles_of_carbon_tetrachloride - - surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_respiration_in_soil + + floating_ice_shelf_area_fraction - - surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_plant_respiration + + stratiform_cloud_area_fraction - - surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_plant_respiration_for_biomass_growth + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_wet_deposition - - surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_plant_respiration_for_biomass_maintenance + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_wet_deposition - - carbon_mass_content_of_forestry_and_agricultural_products + + mass_fraction_of_mercury_dry_aerosol_particles_in_air - - carbon_mass_content_of_forestry_and_agricultural_products + + tendency_of_atmosphere_mass_content_of_mercury_dry_aerosol_particles_due_to_emission - - leaf_mass_content_of_carbon + + carbon_mass_flux_into_litter_and_soil_due_to_anthropogenic_land_use_or_land_cover_change - - medium_soil_pool_mass_content_of_carbon + + product_of_eastward_wind_and_lagrangian_tendency_of_air_pressure - - fast_soil_pool_mass_content_of_carbon + + product_of_northward_wind_and_lagrangian_tendency_of_air_pressure - - miscellaneous_living_matter_mass_content_of_carbon + + backscattering_ratio_in_air - - root_mass_content_of_carbon + + histogram_of_backscattering_ratio_in_air_over_height_above_reference_ellipsoid - - slow_soil_pool_mass_content_of_carbon + + 
effective_radius_of_convective_cloud_ice_particles - - soil_mass_content_of_carbon + + effective_radius_of_convective_cloud_rain_particles - - volume_scattering_coefficient_of_radiative_flux_in_air_due_to_dried_aerosol_particles + + effective_radius_of_convective_cloud_snow_particles - - volume_scattering_coefficient_of_radiative_flux_in_air_due_to_ambient_aerosol_particles + + effective_radius_of_stratiform_cloud_graupel_particles - - mole_fraction_of_dichlorine_peroxide_in_air + + effective_radius_of_stratiform_cloud_ice_particles - - mole_fraction_of_methylglyoxal_in_air + + effective_radius_of_stratiform_cloud_rain_particles - - atmosphere_moles_of_carbon_tetrachloride + + mass_concentration_of_biomass_burning_dry_aerosol_particles_in_air - - floating_ice_shelf_area_fraction + + diameter_of_ambient_aerosol_particles - - carbon_mass_flux_into_litter_and_soil_due_to_anthropogenic_land_use_or_land_cover_change + + electrical_mobility_diameter_of_ambient_aerosol_particles - - product_of_eastward_wind_and_lagrangian_tendency_of_air_pressure + + lagrangian_tendency_of_atmosphere_sigma_coordinate - - product_of_northward_wind_and_lagrangian_tendency_of_air_pressure + + lagrangian_tendency_of_atmosphere_sigma_coordinate + + + + tendency_of_atmosphere_number_content_of_aerosol_particles_due_to_turbulent_deposition @@ -34383,160 +34894,88 @@ atmosphere_moles_of_halon2402 - - upwelling_shortwave_flux_in_air_assuming_clear_sky_and_no_aerosol - - - - surface_upwelling_longwave_flux_in_air_assuming_clear_sky - - - - surface_upwelling_shortwave_flux_in_air_assuming_clear_sky - - - - downwelling_photon_flux_per_unit_wavelength_in_sea_water - - - - downwelling_photon_radiance_per_unit_wavelength_in_sea_water - - - - downwelling_photon_spherical_irradiance_per_unit_wavelength_in_sea_water - - - - downwelling_radiance_per_unit_wavelength_in_air - - - - downwelling_radiance_per_unit_wavelength_in_sea_water - - - - downwelling_radiative_flux_per_unit_wavelength_in_air - - - - 
downwelling_radiative_flux_per_unit_wavelength_in_sea_water - - - - downwelling_shortwave_flux_in_air_assuming_clear_sky_and_no_aerosol - - - - downwelling_spherical_irradiance_per_unit_wavelength_in_sea_water - - - - integral_wrt_time_of_surface_downwelling_longwave_flux_in_air - - - - integral_wrt_time_of_surface_downwelling_shortwave_flux_in_air - - - - surface_downwelling_longwave_flux_in_air - - - - surface_downwelling_photon_flux_per_unit_wavelength_in_sea_water - - - - surface_downwelling_photon_radiance_per_unit_wavelength_in_sea_water - - - - surface_downwelling_photon_spherical_irradiance_per_unit_wavelength_in_sea_water - - - - surface_downwelling_radiance_per_unit_wavelength_in_sea_water + + tendency_of_atmosphere_moles_of_halon2402 - - surface_downwelling_radiative_flux_per_unit_wavelength_in_air + + atmosphere_moles_of_halon1301 - - surface_downwelling_radiative_flux_per_unit_wavelength_in_sea_water + + tendency_of_atmosphere_moles_of_halon1301 - - surface_downwelling_shortwave_flux_in_air + + atmosphere_moles_of_halon1211 - - surface_downwelling_shortwave_flux_in_air_assuming_clear_sky + + tendency_of_atmosphere_moles_of_halon1211 - - surface_downwelling_shortwave_flux_in_air_assuming_clear_sky_and_no_aerosol + + atmosphere_moles_of_halon1202 - - surface_downwelling_spherical_irradiance_per_unit_wavelength_in_sea_water + + tendency_of_atmosphere_moles_of_halon1202 - - sea_ice_temperature_expressed_as_heat_content + + atmosphere_moles_of_cfc12 - - sea_ice_temperature_expressed_as_heat_content + + tendency_of_atmosphere_moles_of_cfc12 - - sea_water_potential_temperature_expressed_as_heat_content + + atmosphere_moles_of_cfc115 - - sea_water_potential_temperature_expressed_as_heat_content + + tendency_of_atmosphere_moles_of_cfc115 - - incoming_water_volume_transport_along_river_channel + + atmosphere_moles_of_cfc114 - - surface_upwelling_longwave_flux_in_air + + tendency_of_atmosphere_moles_of_cfc114 - - 
surface_upwelling_radiance_per_unit_wavelength_in_air + + atmosphere_moles_of_cfc113 - - surface_upwelling_radiance_per_unit_wavelength_in_air_emerging_from_sea_water + + tendency_of_atmosphere_moles_of_cfc113 - - surface_upwelling_radiance_per_unit_wavelength_in_air_reflected_by_sea_water + + atmosphere_moles_of_cfc11 - - stratiform_cloud_area_fraction + + moles_of_cfc11_per_unit_mass_in_sea_water - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_wet_deposition + + tendency_of_atmosphere_moles_of_cfc11 - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_wet_deposition + + effective_radius_of_stratiform_cloud_snow_particles - - litter_mass_content_of_carbon + + tendency_of_sea_water_conservative_temperature_expressed_as_heat_content_due_to_parameterized_dianeutral_mixing - - surface_litter_mass_content_of_carbon + + tendency_of_sea_water_potential_temperature_expressed_as_heat_content_due_to_parameterized_dianeutral_mixing @@ -34603,197 +35042,237 @@ sea_water_velocity_from_direction - - integral_wrt_time_of_surface_downward_northward_stress + + atmosphere_mass_content_of_cloud_liquid_water - - integral_wrt_time_of_surface_downward_eastward_stress + + effective_radius_of_cloud_liquid_water_particles - - temperature_in_surface_snow + + effective_radius_of_convective_cloud_liquid_water_particles - - thermal_energy_content_of_surface_snow + + effective_radius_of_convective_cloud_liquid_water_particles_at_convective_liquid_water_cloud_top - - surface_snow_thickness + + effective_radius_of_stratiform_cloud_liquid_water_particles - - liquid_water_content_of_surface_snow + + effective_radius_of_stratiform_cloud_liquid_water_particles_at_stratiform_liquid_water_cloud_top - - soot_content_of_surface_snow + + number_concentration_of_convective_cloud_liquid_water_particles_at_convective_liquid_water_cloud_top - - backscattering_ratio_in_air + + 
number_concentration_of_stratiform_cloud_liquid_water_particles_at_stratiform_liquid_water_cloud_top - - histogram_of_backscattering_ratio_in_air_over_height_above_reference_ellipsoid + + air_equivalent_potential_temperature - - effective_radius_of_convective_cloud_ice_particles + + mass_content_of_cloud_liquid_water_in_atmosphere_layer - - effective_radius_of_convective_cloud_rain_particles + + air_pseudo_equivalent_temperature - - effective_radius_of_convective_cloud_snow_particles + + air_equivalent_temperature - - effective_radius_of_stratiform_cloud_graupel_particles + + effective_radius_of_cloud_liquid_water_particles_at_liquid_water_cloud_top - - effective_radius_of_stratiform_cloud_ice_particles + + atmosphere_mass_content_of_convective_cloud_liquid_water - - effective_radius_of_stratiform_cloud_rain_particles + + tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_riming_from_cloud_liquid_water - - mass_concentration_of_biomass_burning_dry_aerosol_particles_in_air + + tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_heterogeneous_nucleation_from_cloud_liquid_water - - diameter_of_ambient_aerosol_particles + + tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_melting_to_cloud_liquid_water - - electrical_mobility_diameter_of_ambient_aerosol_particles + + air_pseudo_equivalent_potential_temperature - - lagrangian_tendency_of_atmosphere_sigma_coordinate + + growth_limitation_of_diazotrophic_phytoplankton_due_to_solar_irradiance - - lagrangian_tendency_of_atmosphere_sigma_coordinate + + iron_growth_limitation_of_diazotrophic_phytoplankton - - tendency_of_atmosphere_number_content_of_aerosol_particles_due_to_turbulent_deposition + + mass_concentration_of_diazotrophic_phytoplankton_expressed_as_chlorophyll_in_sea_water - - biological_taxon_lsid + + mole_concentration_of_diazotrophic_phytoplankton_expressed_as_carbon_in_sea_water - - temperature_in_ground + + 
net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_diazotrophic_phytoplankton - - tendency_of_atmosphere_moles_of_halon2402 + + net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_diazotrophic_phytoplankton - - tendency_of_atmosphere_moles_of_cfc12 + + nitrogen_growth_limitation_of_diazotrophic_phytoplankton - - atmosphere_moles_of_cfc115 + + tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_net_primary_production_by_diazotrophic_phytoplankton - - tendency_of_atmosphere_moles_of_cfc115 + + mass_fraction_of_liquid_precipitation_in_air - - atmosphere_moles_of_cfc114 + + mass_fraction_of_liquid_precipitation_in_air - - tendency_of_atmosphere_moles_of_cfc114 + + area_type - - atmosphere_moles_of_cfc113 + + area_type - - tendency_of_atmosphere_moles_of_cfc113 + + upward_derivative_of_eastward_wind - - atmosphere_moles_of_cfc11 + + upward_derivative_of_northward_wind - - moles_of_cfc11_per_unit_mass_in_sea_water + + atmosphere_upward_absolute_vorticity - - tendency_of_atmosphere_moles_of_cfc11 + + atmosphere_upward_relative_vorticity - - effective_radius_of_stratiform_cloud_snow_particles + + surface_snow_density - - water_vapor_partial_pressure_in_air + + tendency_of_atmosphere_mass_content_of_water_vapor_due_to_sublimation_of_surface_snow_and_ice - - platform_name + + integral_wrt_time_of_surface_downward_northward_stress - - surface_upwelling_radiance_per_unit_wavelength_in_sea_water + + integral_wrt_time_of_surface_downward_eastward_stress - - surface_upwelling_radiative_flux_per_unit_wavelength_in_air + + temperature_in_surface_snow - - surface_upwelling_radiative_flux_per_unit_wavelength_in_sea_water + + thermal_energy_content_of_surface_snow - - surface_upwelling_shortwave_flux_in_air + + surface_snow_thickness - - surface_upwelling_shortwave_flux_in_air_assuming_clear_sky_and_no_aerosol + + liquid_water_content_of_surface_snow - - upwelling_radiance_per_unit_wavelength_in_air + + 
soot_content_of_surface_snow - - upwelling_radiative_flux_per_unit_wavelength_in_air + + biological_taxon_lsid - - upwelling_radiative_flux_per_unit_wavelength_in_sea_water + + temperature_in_ground - - mole_concentration_of_diatoms_expressed_as_nitrogen_in_sea_water + + water_evapotranspiration_flux - - net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_calcareous_phytoplankton + + drainage_amount_through_base_of_soil_model moles_of_particulate_inorganic_carbon_per_unit_mass_in_sea_water + + + drainage_amount_through_base_of_soil_model + + + + universal_thermal_comfort_index + + + + water_flux_into_sea_water_due_to_flux_adjustment + + + + heat_flux_into_sea_water_due_to_flux_adjustment + + + + upward_derivative_of_eastward_wind + + + + upward_derivative_of_northward_wind + + + + volume_backwards_scattering_coefficient_of_radiative_flux_in_air_due_to_dried_aerosol_particles + + + + volume_attenuated_backwards_scattering_coefficient_of_radiative_flux_in_air_assuming_no_aerosol_or_cloud + + + + volume_attenuated_backwards_scattering_coefficient_of_radiative_flux_in_air + + + + volume_absorption_coefficient_of_radiative_flux_in_air_due_to_dried_aerosol_particles + diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py index 828a507fff..f0421e9662 100644 --- a/lib/iris/__init__.py +++ b/lib/iris/__init__.py @@ -72,9 +72,9 @@ 1. Function signature must be: ``(cube, field, filename)``. 2. Modifies the given cube inplace, unless a new cube is - returned by the function. + returned by the function. 3. If the cube is to be rejected the callback must raise - an :class:`iris.exceptions.IgnoreCubeException`. + an :class:`iris.exceptions.IgnoreCubeException`. For example:: @@ -141,7 +141,7 @@ class Future(threading.local): """Run-time configuration controller.""" def __init__(self, datum_support=False, pandas_ndim=False, save_split_attrs=False): - r"""Container for run-time options controls. + """Container for run-time options controls. 
To adjust the values simply update the relevant attribute from within your code. For example:: @@ -156,7 +156,7 @@ def __init__(self, datum_support=False, pandas_ndim=False, save_split_attrs=Fals ---------- datum_support : bool, default=False Opts in to loading coordinate system datum information from NetCDF - files into :class:`~iris.coord_systems.CoordSystem`\\ s, wherever + files into :class:`~iris.coord_systems.CoordSystem`, wherever this information is present. pandas_ndim : bool, default=False See :func:`iris.pandas.as_data_frame` for details - opts in to the diff --git a/lib/iris/_concatenate.py b/lib/iris/_concatenate.py index 049222953f..7011df0924 100644 --- a/lib/iris/_concatenate.py +++ b/lib/iris/_concatenate.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Automatic concatenation of multiple cubes over one or more existing dimensions. - -""" +"""Automatic concatenation of multiple cubes over one or more existing dimensions.""" from collections import defaultdict, namedtuple import warnings @@ -16,6 +14,7 @@ import iris.cube import iris.exceptions from iris.util import array_equal, guess_coord_axis +import iris.warnings # # TODO: @@ -36,16 +35,15 @@ class _CoordAndDims(namedtuple("CoordAndDims", ["coord", "dims"])): - """Container for a coordinate and the associated data dimension(s) - spanned over a :class:`iris.cube.Cube`. - - Args: + """Container for a coordinate and the associated data dimension(s). - * coord: - A :class:`iris.coords.DimCoord` or :class:`iris.coords.AuxCoord` - coordinate instance. + Container for a coordinate and the associated data dimension(s) + spanned over a :class:`iris.cube.Cube`. - * dims: + Parameters + ---------- + coord : :class:`iris.coords.DimCoord` or :class:`iris.coords.AuxCoord` + dims : tuple A tuple of the data dimension(s) spanned by the coordinate. 
""" @@ -59,25 +57,20 @@ class _CoordMetaData( ["defn", "dims", "points_dtype", "bounds_dtype", "kwargs"], ) ): - """Container for the metadata that defines a dimension or auxiliary - coordinate. - - Args: + """Container for the metadata that defines a dimension or auxiliary coordinate. - * defn: + Parameters + ---------- + defn : :class:`iris.common.CoordMetadata` The :class:`iris.common.CoordMetadata` metadata that represents a coordinate. - - * dims: + dims : The dimension(s) associated with the coordinate. - - * points_dtype: + points_dtype : :class:`np.dtype` The points data :class:`np.dtype` of an associated coordinate. - - * bounds_dtype: + bounds_dtype : :class:`np.dtype` The bounds data :class:`np.dtype` of an associated coordinate. - - * kwargs: + **kwargs : dict, optional A dictionary of key/value pairs required to define a coordinate. """ @@ -85,16 +78,15 @@ class _CoordMetaData( def __new__(mcs, coord, dims): """Create a new :class:`_CoordMetaData` instance. - Args: - - * coord: - The :class:`iris.coord.DimCoord` or :class:`iris.coord.AuxCoord`. - - * dims: + Parameters + ---------- + coord : :class:`iris.coord.DimCoord` or :class:`iris.coord.AuxCoord` + dims : The dimension(s) associated with the coordinate. - Returns: - The new class instance. + Returns + ------- + The new class instance. """ defn = coord.metadata @@ -160,20 +152,17 @@ def name(self): class _DerivedCoordAndDims( namedtuple("DerivedCoordAndDims", ["coord", "dims", "aux_factory"]) ): - """Container for a derived coordinate, the associated AuxCoordFactory, and the - associated data dimension(s) spanned over a :class:`iris.cube.Cube`. + """Container for a derived coordinate and dimensions(s). - Args: - - * coord: - A :class:`iris.coords.DimCoord` or :class:`iris.coords.AuxCoord` - coordinate instance. + Container for a derived coordinate, the associated AuxCoordFactory, and the + associated data dimension(s) spanned over a :class:`iris.cube.Cube`. 
- * dims: + Parameters + ---------- + coord : :class:`iris.coord.DimCoord` or :class:`iris.coord.AuxCoord` + dims: tuple A tuple of the data dimension(s) spanned by the coordinate. - - * aux_factory: - A :class:`iris.aux_factory.AuxCoordFactory` instance. + aux_factory : :class:`iris.aux_factory.AuxCoordFactory` """ @@ -190,16 +179,14 @@ def __eq__(self, other): class _OtherMetaData(namedtuple("OtherMetaData", ["defn", "dims"])): - """Container for the metadata that defines a cell measure or ancillary - variable. - - Args: + """Container for the metadata that defines a cell measure or ancillary variable. - * defn: + Parameters + ---------- + defn : :class:`iris.coords._DMDefn` or :class:`iris.coords._CellMeasureDefn` The :class:`iris.coords._DMDefn` or :class:`iris.coords._CellMeasureDefn` metadata that represents a coordinate. - - * dims: + dims : The dimension(s) associated with the coordinate. """ @@ -207,17 +194,15 @@ class _OtherMetaData(namedtuple("OtherMetaData", ["defn", "dims"])): def __new__(cls, ancil, dims): """Create a new :class:`_OtherMetaData` instance. - Args: - - * ancil: - The :class:`iris.coord.CellMeasure` or - :class:`iris.coord.AncillaryVariable`. - - * dims: + Parameters + ---------- + ancil : :class:`iris.coord.CellMeasure` or :class:`iris.coord.AncillaryVariable`. + dims : The dimension(s) associated with ancil. - Returns: - The new class instance. + Returns + ------- + The new class instance. """ defn = ancil.metadata @@ -247,15 +232,16 @@ def name(self): class _SkeletonCube(namedtuple("SkeletonCube", ["signature", "data"])): - """Basis of a source-cube, containing the associated coordinate metadata, - coordinates and cube data payload. + """Basis of a source-cube. - Args: + Basis of a source-cube, containing the associated coordinate metadata, + coordinates and cube data payload. - * signature: + Parameters + ---------- + signature : :class:`_CoordSignature` The :class:`_CoordSignature` of an associated source-cube. 
- - * data: + data : The data payload of an associated :class:`iris.cube.Cube` source-cube. """ @@ -266,12 +252,11 @@ class _SkeletonCube(namedtuple("SkeletonCube", ["signature", "data"])): class _Extent(namedtuple("Extent", ["min", "max"])): """Container representing the limits of a one-dimensional extent/range. - Args: - - * min: + Parameters + ---------- + min : The minimum value of the extent. - - * max: + max : The maximum value of the extent. """ @@ -280,15 +265,14 @@ class _Extent(namedtuple("Extent", ["min", "max"])): class _CoordExtent(namedtuple("CoordExtent", ["points", "bounds"])): - """Container representing the points and bounds extent of a one dimensional - coordinate. - - Args: + """Container representing the points and bounds extent of a one dimensional coordinate. - * points: + Parameters + ---------- + points : :class:`_Extent` The :class:`_Extent` of the coordinate point values. - * bounds: + bounds : A list containing the :class:`_Extent` of the coordinate lower bound and the upper bound. Defaults to None if no associated bounds exist for the coordinate. @@ -308,34 +292,27 @@ def concatenate( ): """Concatenate the provided cubes over common existing dimensions. - Args: - - * cubes: + Parameters + ---------- + cubes : iterable of :class:`iris.cube.Cube` An iterable containing one or more :class:`iris.cube.Cube` instances to be concatenated together. - - Kwargs: - - * error_on_mismatch: + error_on_mismatch: bool, default=False If True, raise an informative :class:`~iris.exceptions.ContatenateError` if registration fails. - - * check_aux_coords + check_aux_coords : bool, default=True Checks if the points and bounds of auxiliary coordinates of the cubes match. This check is not applied to auxiliary coordinates that span the dimension the concatenation is occurring along. Defaults to True. - - * check_cell_measures + check_cell_measures : bool, default=True Checks if the data of cell measures of the cubes match. 
This check is not applied to cell measures that span the dimension the concatenation is occurring along. Defaults to True. - - * check_ancils + check_ancils : bool, default=True Checks if the data of ancillary variables of the cubes match. This check is not applied to ancillary variables that span the dimension the concatenation is occurring along. Defaults to True. - - * check_derived_coords + check_derived_coords : bool, default=True Checks if the points and bounds of derived coordinates of the cubes match. This check is not applied to derived coordinates that span the dimension the concatenation is occurring along. Note that differences @@ -344,9 +321,10 @@ def concatenate( derive the coordinates can be ignored with `check_aux_coords`. Defaults to True. - Returns: - A :class:`iris.cube.CubeList` of concatenated :class:`iris.cube.Cube` - instances. + Returns + ------- + :class:`iris.cube.CubeList` + A :class:`iris.cube.CubeList` of concatenated :class:`iris.cube.Cube` instances. """ proto_cubes_by_name = defaultdict(list) @@ -400,18 +378,19 @@ def _none_sort(item): class _CubeSignature: - """Template for identifying a specific type of :class:`iris.cube.Cube` based + """Template for identifying a specific type of :class:`iris.cube.Cube`. + + Template for identifying a specific type of :class:`iris.cube.Cube` based on its metadata, coordinates and cell_measures. """ def __init__(self, cube): - """Represents the cube metadata and associated coordinate metadata that - allows suitable cubes for concatenation to be identified. + """Represent the cube metadata and associated coordinate metadata. - Args: - - * cube: + Parameters + ---------- + cube : :class:`iris.cube.Cube` The :class:`iris.cube.Cube` source-cube. 
""" @@ -498,23 +477,25 @@ def name_key_func(factory): self.derived_coords_and_dims.append(coord_and_dims) def _coordinate_differences(self, other, attr, reason="metadata"): - """Determine the names of the coordinates that differ between `self` and - `other` for a coordinate attribute on a _CubeSignature. + """Determine the names of the coordinates that differ. - Args: + Determine the names of the coordinates that differ between `self` and + `other` for a coordinate attribute on a _CubeSignature. - * other (_CubeSignature): + Parameters + ---------- + other : _CubeSignature The _CubeSignature to compare against. - - * attr (string): + attr : str The _CubeSignature attribute within which differences exist between `self` and `other`. - - * reason (string): + reason : str, default="metadata" The reason to give for mismatch (function is normally, but not always, testing metadata) - Returns: + Returns + ------- + tuple Tuple of a descriptive error message and the names of attributes that differ between `self` and `other`. @@ -550,24 +531,25 @@ def match(self, other, error_on_mismatch): This is the first step to determine if two "cubes" (either a real Cube or a ProtoCube) can be concatenated, by considering: - - data dimensions - - dimensions metadata - - aux coords metadata - - scalar coords - - attributes - - dtype - Args: + * data dimensions + * aux coords metadata + * scalar coords + * attributes + * dtype - * other (_CubeSignature): + Parameters + ---------- + other : _CubeSignature The _CubeSignature to compare against. - - * error_on_mismatch (bool): + error_on_mismatch : bool If True, raise a :class:`~iris.exceptions.MergeException` with a detailed explanation if the two do not match. - Returns: - Boolean. True if and only if this _CubeSignature matches the other. + Returns + ------- + bool + True if and only if this _CubeSignature matches the other. 
""" msg_template = "{}{} differ: {} != {}" @@ -624,19 +606,18 @@ def match(self, other, error_on_mismatch): class _CoordSignature: - """Template for identifying a specific type of :class:`iris.cube.Cube` based - on its coordinates. - - """ + """Template for identifying a specific type of :class:`iris.cube.Cube` based on its coordinates.""" def __init__(self, cube_signature): - """Represents the coordinate metadata required to identify suitable + """Represent the coordinate metadata. + + Represent the coordinate metadata required to identify suitable non-overlapping :class:`iris.cube.Cube` source-cubes for concatenation over a common single dimension. - Args: - - * cube_signature: + Parameters + ---------- + cube_signature : :class:`_CubeSignature` The :class:`_CubeSignature` that defines the source-cube. """ @@ -658,7 +639,9 @@ def __init__(self, cube_signature): def _cmp(coord, other): """Compare the coordinates for concatenation compatibility. - Returns: + Returns + ------- + bool tuple A boolean tuple pair of whether the coordinates are compatible, and whether they represent a candidate axis of concatenation. @@ -680,20 +663,20 @@ def _cmp(coord, other): return result, candidate_axis def candidate_axis(self, other): - """Determine the candidate axis of concatenation with the - given coordinate signature. + """Determine the candidate axis of concatenation with the given coordinate signature. If a candidate axis is found, then the coordinate signatures are compatible. - Args: - - * other: - The :class:`_CoordSignature` + Parameters + ---------- + other : :class:`_CoordSignature` - Returns: - None if no single candidate axis exists, otherwise - the candidate axis of concatenation. + Returns + ------- + result : + None if no single candidate axis exists, otherwise the candidate + axis of concatenation. 
""" result = False @@ -745,18 +728,14 @@ def _calculate_extents(self): class _ProtoCube: - """Framework for concatenating multiple source-cubes over one - common dimension. - - """ + """Framework for concatenating multiple source-cubes over one common dimension.""" def __init__(self, cube): - """Create a new _ProtoCube from the given cube and record the cube - as a source-cube. + """Create a new _ProtoCube from the given cube and record the cube as a source-cube. - Args: - - * cube: + Parameters + ---------- + cube : Source :class:`iris.cube.Cube` of the :class:`_ProtoCube`. """ @@ -781,14 +760,17 @@ def __init__(self, cube): @property def axis(self): """Return the nominated dimension of concatenation.""" - return self._axis def concatenate(self): - """Concatenates all the source-cubes registered with the + """Concatenates all the source-cubes registered with the :class:`_ProtoCube`. + + Concatenates all the source-cubes registered with the :class:`_ProtoCube` over the nominated common dimension. - Returns: + Returns + ------- + :class:`iris.cube.Cube` The concatenated :class:`iris.cube.Cube`. """ @@ -860,41 +842,35 @@ def register( check_ancils=False, check_derived_coords=False, ): - """Determine whether the given source-cube is suitable for concatenation - with this :class:`_ProtoCube`. + """Determine if the given source-cube is suitable for concatenation. - Args: + Determine if the given source-cube is suitable for concatenation + with this :class:`_ProtoCube`. - * cube: + Parameters + ---------- + cube : :class:`iris.cube.Cube` The :class:`iris.cube.Cube` source-cube candidate for concatenation. - - Kwargs: - - * axis: + axis : optional Seed the dimension of concatenation for the :class:`_ProtoCube` rather than rely on negotiation with source-cubes. - - * error_on_mismatch: + error_on_mismatch : bool, default=False If True, raise an informative error if registration fails. 
- - * check_aux_coords + check_aux_coords : bool, default=False Checks if the points and bounds of auxiliary coordinates of the cubes match. This check is not applied to auxiliary coordinates that span the dimension the concatenation is occurring along. Defaults to False. - - * check_cell_measures + check_cell_measures : bool, default=False Checks if the data of cell measures of the cubes match. This check is not applied to cell measures that span the dimension the concatenation is occurring along. Defaults to False. - - * check_ancils + check_ancils : bool, default=False Checks if the data of ancillary variables of the cubes match. This check is not applied to ancillary variables that span the dimension the concatenation is occurring along. Defaults to False. - - * check_derived_coords + check_derived_coords : bool, default=False Checks if the points and bounds of derived coordinates of the cubes match. This check is not applied to derived coordinates that span the dimension the concatenation is occurring along. Note that @@ -903,8 +879,9 @@ def register( coordinates used to derive the coordinates can be ignored with `check_aux_coords`. Defaults to False. - Returns: - Boolean. + Returns + ------- + bool """ # Verify and assert the nominated axis. @@ -935,7 +912,7 @@ def register( raise iris.exceptions.ConcatenateError([msg]) elif not match: msg = f"Found cubes with overlap on concatenate axis {candidate_axis}, skipping concatenation for these cubes" - warnings.warn(msg, category=iris.exceptions.IrisUserWarning) + warnings.warn(msg, category=iris.warnings.IrisUserWarning) # Check for compatible AuxCoords. if match: @@ -1018,16 +995,15 @@ def register( return match def _add_skeleton(self, coord_signature, data): - """Create and add the source-cube skeleton to the - :class:`_ProtoCube`. - - Args: + """Create and add the source-cube skeleton to the :class:`_ProtoCube`. 
- * coord_signature: + Parameters + ---------- + coord_signature : :`_CoordSignature` The :class:`_CoordSignature` of the associated given source-cube. - * data: + data : :class:`iris.cube.Cube` The data payload of an associated :class:`iris.cube.Cube` source-cube. @@ -1036,11 +1012,14 @@ def _add_skeleton(self, coord_signature, data): self._skeletons.append(skeleton) def _build_aux_coordinates(self): - """Generate the auxiliary coordinates with associated dimension(s) + """Generate the auxiliary coordinates with associated dimension(s) mapping. + + Generate the auxiliary coordinates with associated dimension(s) mapping for the new concatenated cube. - Returns: - A list of auxiliary coordinates and dimension(s) tuple pairs. + Returns + ------- + A list of auxiliary coordinates and dimension(s) tuple pairs. """ # Setup convenience hooks. @@ -1095,8 +1074,9 @@ def _build_aux_coordinates(self): def _build_scalar_coordinates(self): """Generate the scalar coordinates for the new concatenated cube. - Returns: - A list of scalar coordinates. + Returns + ------- + A list of scalar coordinates. """ scalar_coords = [] @@ -1106,11 +1086,14 @@ def _build_scalar_coordinates(self): return scalar_coords def _build_cell_measures(self): - """Generate the cell measures with associated dimension(s) + """Generate the cell measures with associated dimension(s) mapping. + + Generate the cell measures with associated dimension(s) mapping for the new concatenated cube. - Returns: - A list of cell measures and dimension(s) tuple pairs. + Returns + ------- + A list of cell measures and dimension(s) tuple pairs. """ # Setup convenience hooks. @@ -1143,11 +1126,14 @@ def _build_cell_measures(self): return cell_measures_and_dims def _build_ancillary_variables(self): - """Generate the ancillary variables with associated dimension(s) + """Generate the ancillary variables with associated dimension(s) mapping. 
+ + Generate the ancillary variables with associated dimension(s) mapping for the new concatenated cube. - Returns: - A list of ancillary variables and dimension(s) tuple pairs. + Returns + ------- + A list of ancillary variables and dimension(s) tuple pairs. """ # Setup convenience hooks. @@ -1184,21 +1170,20 @@ def _build_aux_factories( ): """Generate the aux factories for the new concatenated cube. - Args: - - * dim_coords_and_dims: + Parameters + ---------- + dim_coords_and_dims : A list of dimension coordinate and dimension tuple pairs from the concatenated cube. - - * aux_coords_and_dims: + aux_coords_and_dims : A list of auxiliary coordinates and dimension(s) tuple pairs from the concatenated cube. - - * scalar_coords: + scalar_coords : A list of scalar coordinates from the concatenated cube. - Returns: - A list of :class:`iris.aux_factory.AuxCoordFactory`. + Returns + ------- + list of :class:`iris.aux_factory.AuxCoordFactory` """ # Setup convenience hooks. @@ -1253,8 +1238,9 @@ def _build_aux_factories( def _build_data(self): """Generate the data payload for the new concatenated cube. - Returns: - The concatenated :class:`iris.cube.Cube` data payload. + Returns + ------- + The concatenated :class:`iris.cube.Cube` data payload. """ skeletons = self._skeletons @@ -1265,11 +1251,14 @@ def _build_data(self): return data def _build_dim_coordinates(self): - """Generate the dimension coordinates with associated dimension + """Generate the dimension coordinates. + + Generate the dimension coordinates with associated dimension mapping for the new concatenated cube. - Return: - A list of dimension coordinate and dimension tuple pairs. + Returns + ------- + A list of dimension coordinate and dimension tuple pairs. """ # Setup convenience hooks. 
@@ -1313,21 +1302,23 @@ def _build_dim_coordinates(self):
         return dim_coords_and_dims
 
     def _sequence(self, extent, axis):
-        """Determine whether the given extent can be sequenced along with
+        """Determine whether the extent can be sequenced.
+
+        Determine whether the given extent can be sequenced along with
         all the extents of the source-cubes already registered with
         this :class:`_ProtoCube` into non-overlapping segments for the
         given axis.
 
-        Args:
-
-        * extent:
+        Parameters
+        ----------
+        extent : :class:`_CoordExtent`
             The :class:`_CoordExtent` of the candidate source-cube.
-
-        * axis:
+        axis :
             The candidate axis of concatenation.
 
-        Returns:
-            Boolean.
+        Returns
+        -------
+        bool
 
         """
         result = True
diff --git a/lib/iris/_constraints.py b/lib/iris/_constraints.py
index 1d81cd603b..4c993885a8 100644
--- a/lib/iris/_constraints.py
+++ b/lib/iris/_constraints.py
@@ -40,7 +40,7 @@ def __init__(self, name=None, cube_func=None, coord_values=None, **kwargs):
         coord_values : dict or None, optional
             If a dict, it must map coordinate name to the condition on the
             associated coordinate.
-        **kwargs :
+        **kwargs : dict, optional
            The remaining keyword arguments are converted to coordinate
            constraints. The name of the argument gives the name of a
            coordinate, and the value of the argument is the condition to meet
diff --git a/lib/iris/_data_manager.py b/lib/iris/_data_manager.py
index 601066630c..15dfbd0030 100644
--- a/lib/iris/_data_manager.py
+++ b/lib/iris/_data_manager.py
@@ -2,9 +2,7 @@
 #
 # This file is part of Iris and is released under the BSD license.
 # See LICENSE in the root of the repository for full licensing details.
-"""Management of common state and behaviour for cube and coordinate data.
-
-"""
+"""Management of common state and behaviour for cube and coordinate data."""
 
 import copy
 
@@ -20,9 +18,9 @@ class DataManager:
     def __init__(self, data):
         """Create a data manager for the specified data.
- Args: - - * data: + Parameters + ---------- + data : The :class:`~numpy.ndarray` or :class:`~numpy.ma.core.MaskedArray` real data, or :class:`~dask.array.core.Array` lazy data to be managed. @@ -39,10 +37,7 @@ def __init__(self, data): self._assert_axioms() def __copy__(self): - """Forbid :class:`~iris._data_manager.DataManager` instance - shallow-copy support. - - """ + """Forbid :class:`~iris._data_manager.DataManager` instance shallow-copy support.""" name = type(self).__name__ emsg = ( "Shallow-copy of {!r} is not permitted. Use " @@ -51,19 +46,20 @@ def __copy__(self): raise copy.Error(emsg.format(name, name)) def __deepcopy__(self, memo): - """Allow :class:`~iris._data_manager.DataManager` instance - deepcopy support. + """Allow :class:`~iris._data_manager.DataManager` instance deepcopy support. - Args: - - * memo: - :class:`copy` memo dictionary. + Parameters + ---------- + memo : :func:`copy` + :func:`copy` memo dictionary. """ return self._deepcopy(memo) def __eq__(self, other): """Perform :class:`~iris._data_manager.DataManager` instance equality. + + Perform :class:`~iris._data_manager.DataManager` instance equality. Note that, this is explicitly not a lazy operation and will load any lazy payload to determine the equality result. @@ -71,14 +67,15 @@ def __eq__(self, other): the realised_dtype, the dtype of the payload, the fill-value and the payload content. - Args: - - * other: + Parameters + ---------- + other : :class:`~iris._data_manager.DataManager` The :class:`~iris._data_manager.DataManager` instance to compare with. - Returns: - Boolean. + Returns + ------- + bool """ from iris.util import array_equal @@ -96,17 +93,20 @@ def __eq__(self, other): def __ne__(self, other): """Perform :class:`~iris._data_manager.DataManager` instance inequality. + + Perform :class:`~iris._data_manager.DataManager` instance inequality. Note that, this is explicitly not a lazy operation and will load any lazy payload to determine the inequality result. 
- Args: - - * other: + Parameters + ---------- + other : :class:`~iris._data_manager.DataManager` The :class:`~iris._data_manager.DataManager` instance to compare with. - Returns: - Boolean. + Returns + ------- + bool """ result = self.__eq__(other) @@ -117,7 +117,7 @@ def __ne__(self, other): return result def __repr__(self): - """Returns an string representation of the instance.""" + """Return an string representation of the instance.""" fmt = "{cls}({data!r})" result = fmt.format(data=self.core_data(), cls=type(self).__name__) @@ -133,22 +133,19 @@ def _assert_axioms(self): assert state, emsg.format("" if is_lazy else "no ", "" if is_real else "no ") def _deepcopy(self, memo, data=None): - """Perform a deepcopy of the :class:`~iris._data_manager.DataManager` - instance. - - Args: - - * memo: - :class:`copy` memo dictionary. - - Kwargs: + """Perform a deepcopy of the :class:`~iris._data_manager.DataManager` instance. - * data: + Parameters + ---------- + memo : :func:`copy` + :func:`copy` memo dictionary. + data : optional Replacement data to substitute the currently managed data with. - Returns: - :class:`~iris._data_manager.DataManager` instance. + Returns + ------- + :class:`~iris._data_manager.DataManager` instance. """ try: @@ -176,8 +173,9 @@ def _deepcopy(self, memo, data=None): def data(self): """Returns the real data. Any lazy data being managed will be realised. - Returns: - :class:`~numpy.ndarray` or :class:`numpy.ma.core.MaskedArray`. + Returns + ------- + :class:`~numpy.ndarray` or :class:`numpy.ma.core.MaskedArray`. """ if self.has_lazy_data(): @@ -205,15 +203,17 @@ def data(self): @data.setter def data(self, data): - """Replaces the currently managed data with the specified data, which must + """Replace the currently managed data with the specified data. + + Replace the currently managed data with the specified data, which must be of an equivalent shape. 
Note that, the only shape promotion permitted is for 0-dimensional scalar data to be replaced with a single item 1-dimensional data. - Args: - - * data: + Parameters + ---------- + data : The :class:`~numpy.ndarray` or :class:`~numpy.ma.core.MaskedArray` real data, or :class:`~dask.array.core.Array` lazy data to be managed. @@ -269,28 +269,31 @@ def shape(self): return self.core_data().shape def copy(self, data=None): - """Returns a deep copy of this :class:`~iris._data_manager.DataManager` - instance. + """Return a deep copy of this :class:`~iris._data_manager.DataManager` instance. - Kwargs: - - * data: + Parameters + ---------- + data : optional Replace the data of the copy with this data. - Returns: - A copy :class:`~iris._data_manager.DataManager` instance. + Returns + ------- + A copy :class:`~iris._data_manager.DataManager` instance. """ memo = {} return self._deepcopy(memo, data=data) def core_data(self): - """If real data is being managed, then return the :class:`~numpy.ndarray` + """Provide real data or lazy data. + + If real data is being managed, then return the :class:`~numpy.ndarray` or :class:`numpy.ma.core.MaskedArray`. Otherwise, return the lazy :class:`~dask.array.core.Array`. - Returns: - The real or lazy data. + Returns + ------- + The real or lazy data. """ if self.has_lazy_data(): @@ -303,8 +306,9 @@ def core_data(self): def has_lazy_data(self): """Determine whether lazy data is being managed. - Returns: - Boolean. + Returns + ------- + bool """ return self._lazy_array is not None @@ -315,8 +319,9 @@ def lazy_data(self): If only real data is being managed, then return a lazy representation of that real data. - Returns: - :class:`~dask.array.core.Array` + Returns + ------- + :class:`~dask.array.core.Array` .. note:: This method will never realise any lazy data. 
diff --git a/lib/iris/_deprecation.py b/lib/iris/_deprecation.py
index 711e4081cd..b771883a71 100644
--- a/lib/iris/_deprecation.py
+++ b/lib/iris/_deprecation.py
@@ -2,9 +2,7 @@
 #
 # This file is part of Iris and is released under the BSD license.
 # See LICENSE in the root of the repository for full licensing details.
-"""Utilities for producing runtime deprecation messages.
-
-"""
+"""Utilities for producing runtime deprecation messages."""
 
 import warnings
diff --git a/lib/iris/_lazy_data.py b/lib/iris/_lazy_data.py
index 5cfa08a7de..36c0825ad8 100644
--- a/lib/iris/_lazy_data.py
+++ b/lib/iris/_lazy_data.py
@@ -42,7 +42,9 @@ def is_lazy_data(data):
 
 
 def is_lazy_masked_data(data):
-    """Return True if the argument is both an Iris 'lazy' data array and the
+    """Determine whether managed data is lazy and masked.
+
+    Return True if the argument is both an Iris 'lazy' data array and the
     underlying array is of masked type. Otherwise return False.
 
     """
@@ -58,47 +60,50 @@ def _optimum_chunksize_internals(
     dims_fixed=None,
     dask_array_chunksize=dask.config.get("array.chunk-size"),
 ):
-    """Reduce or increase an initial chunk shape to get close to a chosen ideal
+    """Reduce or increase an initial chunk shape.
+
+    Reduce or increase an initial chunk shape to get close to a chosen ideal
     size, while prioritising the splitting of the earlier (outer) dimensions
     and keeping intact the later (inner) ones.
 
-    Args:
-
-    * chunks (tuple of int):
+    Parameters
+    ----------
+    chunks : tuple of int
         Pre-existing chunk shape of the target data.
-    * shape (tuple of int):
+    shape : tuple of int
        The full array shape of the target data.
-    * limit (int):
+    limit : int, optional
        The 'ideal' target chunk size, in bytes. Default from
        :mod:`dask.config`.
-    * dtype (np.dtype):
+    dtype : np.dtype
        Numpy dtype of target data.
-    * dims_fixed (list of bool):
+    dims_fixed : list of bool, optional
        If set, a list of values equal in length to 'chunks' or 'shape'.
'True' values indicate a dimension that can not be changed, i.e. that element of the result must equal the corresponding value in 'chunks' or data.shape. - Returns: - * chunk (tuple of int): + Returns + ------- + tuple of int The proposed shape of one full chunk. - .. note:: - The purpose of this is very similar to - :func:`dask.array.core.normalize_chunks`, when called as - `(chunks='auto', shape, dtype=dtype, previous_chunks=chunks, ...)`. - Except, the operation here is optimised specifically for a 'c-like' - dimension order, i.e. outer dimensions first, as for netcdf variables. - So if, in future, this policy can be implemented in dask, then we would - prefer to replace this function with a call to that one. - Accordingly, the arguments roughly match 'normalize_chunks', except - that we don't support the alternative argument forms of that routine. - The return value, however, is a single 'full chunk', rather than a - complete chunking scheme : so an equivalent code usage could be - "chunks = [c[0] for c in normalise_chunks('auto', ...)]". + Notes + ----- + The purpose of this is very similar to + :func:`dask.array.core.normalize_chunks`, when called as + `(chunks='auto', shape, dtype=dtype, previous_chunks=chunks, ...)`. + Except, the operation here is optimised specifically for a 'c-like' + dimension order, i.e. outer dimensions first, as for netcdf variables. + So if, in future, this policy can be implemented in dask, then we would + prefer to replace this function with a call to that one. + Accordingly, the arguments roughly match 'normalize_chunks', except + that we don't support the alternative argument forms of that routine. + The return value, however, is a single 'full chunk', rather than a + complete chunking scheme : so an equivalent code usage could be + "chunks = [c[0] for c in normalise_chunks('auto', ...)]". """ - # Set the chunksize limit. if limit is None: # Fetch the default 'optimal' chunksize from the dask config. 
@@ -220,40 +225,37 @@ def as_lazy_data( ): """Convert the input array `data` to a :class:`dask.array.Array`. - Args: - - * data (array-like): + Parameters + ---------- + data : array-like An indexable object with 'shape', 'dtype' and 'ndim' properties. This will be converted to a :class:`dask.array.Array`. - - Kwargs: - - * chunks (list of int): + chunks : list of int, optional If present, a source chunk shape, e.g. for a chunked netcdf variable. - - * asarray (bool): + asarray : bool, default=False If True, then chunks will be converted to instances of `ndarray`. Set to False (default) to pass passed chunks through unchanged. - - * dims_fixed (list of bool): + dims_fixed : list of bool, optional If set, a list of values equal in length to 'chunks' or data.ndim. 'True' values indicate a dimension which can not be changed, i.e. the result for that index must equal the value in 'chunks' or data.shape. - - * dask_chunking (bool): + dask_chunking : bool, default=False If True, Iris chunking optimisation will be bypassed, and dask's default chunking will be used instead. Including a value for chunks while dask_chunking is set to True will result in a failure. - Returns: + Returns + ------- + :class:`dask.array.Array` The input array converted to a :class:`dask.array.Array`. - .. note:: - The result chunk size is a multiple of 'chunks', if given, up to the - dask default chunksize, i.e. `dask.config.get('array.chunk-size'), - or the full data shape if that is smaller. - If 'chunks' is not given, the result has chunks of the full data shape, - but reduced by a factor if that exceeds the dask default chunksize. + Notes + ----- + The result chunk size is a multiple of 'chunks', if given, up to the + dask default chunksize, i.e. `dask.config.get('array.chunk-size')`, + or the full data shape if that is smaller. + If 'chunks' is not given, the result has chunks of the full data shape, + but reduced by a factor if that exceeds the dask default chunksize. 
""" if dask_chunking: @@ -326,18 +328,21 @@ def _co_realise_lazy_arrays(arrays): def as_concrete_data(data): """Return the actual content of a lazy array, as a numpy array. + + Return the actual content of a lazy array, as a numpy array. If the input data is a NumPy `ndarray` or masked array, return it unchanged. If the input data is lazy, return the realised result. - Args: - - * data: + Parameters + ---------- + data : A dask array, NumPy `ndarray` or masked array - Returns: - A NumPy `ndarray` or masked array. + Returns + ------- + NumPy `ndarray` or masked array. """ if is_lazy_data(data): @@ -352,13 +357,14 @@ def multidim_lazy_stack(stack): This is needed because :meth:`dask.array.Array.stack` only accepts a 1-dimensional list. - Args: - - * stack: + Parameters + ---------- + stack : An ndarray of :class:`dask.array.Array`. - Returns: - The input array converted to a lazy :class:`dask.array.Array`. + Returns + ------- + The input array converted to a lazy :class:`dask.array.Array`. """ if stack.ndim == 0: @@ -380,12 +386,14 @@ def co_realise_cubes(*cubes): However, lazy calculations and data fetches can be shared between the computations, improving performance. - Args: - - * cubes (list of :class:`~iris.cube.Cube`): + Parameters + ---------- + cubes : list of :class:`~iris.cube.Cube` Arguments, each of which is a cube to be realised. - For example:: + Examples + -------- + :: # Form stats. a_std = cube_a.collapsed(['x', 'y'], iris.analysis.STD_DEV) @@ -398,9 +406,9 @@ def co_realise_cubes(*cubes): co_realise_cubes(a_std, b_std, ab_mean_diff, std_err) - .. Note:: + .. note:: - Cubes with non-lazy data may also be passed, with no ill effect. + Cubes with non-lazy data may also be passed, with no ill effect. 
""" results = _co_realise_lazy_arrays([cube.core_data() for cube in cubes]) @@ -414,13 +422,15 @@ def lazy_elementwise(lazy_array, elementwise_op): Elementwise means that it performs a independent calculation at each point of the input, producing a result array of the same shape. - Args: - - * lazy_array: + Parameters + ---------- + lazy_array : The lazy array object to operate on. - * elementwise_op: + elementwise_op : The elementwise operation, a function operating on numpy arrays. + Notes + ----- .. note: A single-point "dummy" call is made to the operation function, to @@ -445,15 +455,15 @@ def map_complete_blocks(src, func, dims, out_sizes): Complete means that the data is not chunked along the chosen dimensions. - Args: - - * src (:class:`~iris.cube.Cube` or array-like): + Parameters + ---------- + src : :class:`~iris.cube.Cube` or array-like Source cube that function is applied to. - * func: + func : Function to apply. - * dims (tuple of int): + dims : tuple of int Dimensions that cannot be chunked. - * out_sizes (tuple of int): + out_sizes : tuple of int Output size of dimensions that cannot be chunked. """ diff --git a/lib/iris/_merge.py b/lib/iris/_merge.py index 19848ff244..85012c0ef8 100644 --- a/lib/iris/_merge.py +++ b/lib/iris/_merge.py @@ -5,7 +5,7 @@ """Automatic collation of cubes into higher-dimensional cubes. Typically the cube merge process is handled by -:method:`iris.cube.CubeList.merge`. +:meth:`iris.cube.CubeList.merge`. """ @@ -36,21 +36,18 @@ class _Template(namedtuple("Template", ["dims", "points", "bounds", "kwargs"])): """Common framework from which to build a dimension or auxiliary coordinate. - Args: - - * dims: + Parameters + ---------- + dims : Tuple of the associated :class:`iris.cube.Cube` data dimension/s spanned by this coordinate template. - - * points: + points : A :mod:`numpy` array representing the coordinate point data. No points data is represented by None. 
- - * bounds: + bounds : A :mod:`numpy` array representing the coordinate bounds data. No bounds data is represented by None. - - * kwargs: + kwargs : A dictionary of key/value pairs required to create a coordinate. """ @@ -63,17 +60,15 @@ class _CoordMetaData( ): """Bespoke metadata required to build a dimension or auxiliary coordinate. - Args: - - * points_dtype: + Parameters + ---------- + points_dtype : The points data :class:`numpy.dtype` of an associated coordinate. None otherwise. - - * bounds_dtype: + bounds_dtype : The bounds data :class:`numpy.dtype` of an associated coordinate. None otherwise. - - * kwargs: + kwargs : A dictionary of key/value pairs required to create a coordinate. """ @@ -82,16 +77,17 @@ class _CoordMetaData( class _CoordAndDims(namedtuple("CoordAndDims", ["coord", "dims"])): - """Container for a coordinate and the associated data dimension/s - spanned over a :class:`iris.cube.Cube`. + """Container for a coordinate and the associated data dimension/s. - Args: + Container for a coordinate and the associated data dimension/s + spanned over a :class:`iris.cube.Cube`. - * coord: + Parameters + ---------- + coord : A :class:`iris.coords.DimCoord` or :class:`iris.coords.AuxCoord` coordinate instance. - - * dims: + dims : A tuple of the data dimension/s spanned by the coordinate. """ @@ -102,24 +98,24 @@ class _CoordAndDims(namedtuple("CoordAndDims", ["coord", "dims"])): class _ScalarCoordPayload( namedtuple("ScalarCoordPayload", ["defns", "values", "metadata"]) ): - """Container for all scalar coordinate data and metadata represented + """Container for the payload. + + Container for all scalar coordinate data and metadata represented within a :class:`iris.cube.Cube`. All scalar coordinate related data is sorted into ascending order of the associated coordinate definition. - Args: - - * defns: + Parameters + ---------- + defns : A list of scalar coordinate metadata :class:`iris.common.CoordMetadata` belonging to a :class:`iris.cube.Cube`. 
- - * values: + values : A list of scalar coordinate values belonging to a :class:`iris.cube.Cube`. Each scalar coordinate value is typically an :class:`iris.coords.Cell`. - - * metadata: + metadata : A list of :class:`_CoordMetaData` instances belonging to a :class:`iris.cube.Cube`. @@ -131,18 +127,19 @@ class _ScalarCoordPayload( class _VectorCoordPayload( namedtuple("VectorCoordPayload", ["dim_coords_and_dims", "aux_coords_and_dims"]) ): - """Container for all vector coordinate data and metadata represented - within a :class:`iris.cube.Cube`. + """Container for the payload. - Args: + Container for all vector coordinate data and metadata represented + within a :class:`iris.cube.Cube`. - * dim_coords_and_dims: + Parameters + ---------- + dim_coords_and_dims : A list of :class:`_CoordAndDim` instances containing non-scalar (i.e. multi-valued) :class:`iris.coords.DimCoord` instances and the associated data dimension spanned by them for a :class:`iris.cube.Cube`. - - * aux_coords_and_dims: + aux_coords_and_dims : A list of :class:`_CoordAndDim` instances containing non-scalar (i.e. multi-valued) :class:`iris.coords.DimCoord` and/or :class:`iris.coords.AuxCoord` instances and the associated data @@ -154,22 +151,22 @@ class _VectorCoordPayload( class _CoordPayload(namedtuple("CoordPayload", ["scalar", "vector", "factory_defns"])): - """Container for all the scalar and vector coordinate data and + """Container for the payload. + + Container for all the scalar and vector coordinate data and metadata, and auxiliary coordinate factories represented within a :class:`iris.cube.Cube`. All scalar coordinate and factory related data is sorted into ascending order of the associated coordinate definition. - Args: - - * scalar: + Parameters + ---------- + scalar : A :class:`_ScalarCoordPayload` instance. - - * vector: + vector : A :class:`_VectorCoordPayload` instance. - - * factory_defns: + factory_defns : A list of :class:`_FactoryDefn` instances. 
""" @@ -178,7 +175,6 @@ class _CoordPayload(namedtuple("CoordPayload", ["scalar", "vector", "factory_def def as_signature(self): """Construct and return a :class:`_CoordSignature` from the payload.""" - return _CoordSignature( self.scalar.defns, self.vector.dim_coords_and_dims, @@ -212,20 +208,23 @@ def _coords_msgs(msgs, coord_group, defns_a, defns_b): ) def match_signature(self, signature, error_on_mismatch): - """Return whether this _CoordPayload matches the corresponding - aspects of a _CoordSignature. + """Check if _CoordPayload matches the corresponding aspects of a _CoordSignature. - Args: + Return whether this _CoordPayload matches the corresponding + aspects of a _CoordSignature. - * signature (_CoordSignature): + Parameters + ---------- + signature : _CoordSignature The _CoordSignature to compare against. - - * error_on_mismatch (bool): + error_on_mismatch : bool If True, raise an Exception with detailed explanation. - Returns: - Boolean. True if and only if this _CoordPayload matches - the corresponding aspects `other`. + Returns + ------- + bool + True if and only if this _CoordPayload matches + the corresponding aspects `other`. """ @@ -284,28 +283,27 @@ class _CoordSignature( ], ) ): - """Criterion for identifying a specific type of :class:`iris.cube.Cube` + """Criterion for identifying a specific type of :class:`iris.cube.Cube`. + + Criterion for identifying a specific type of :class:`iris.cube.Cube` based on its scalar and vector coordinate data and metadata, and auxiliary coordinate factories. - Args: - - * scalar_defns: + Parameters + ---------- + scalar_defns : A list of scalar coordinate definitions sorted into ascending order. - - * vector_dim_coords_and_dims: + vector_dim_coords_and_dims : A list of :class:`_CoordAndDim` instances containing non-scalar (i.e. multi-valued) :class:`iris.coords.DimCoord` instances and the associated data dimension spanned by them for a :class:`iris.cube.Cube`. 
- - * vector_aux_coords_and_dims: + vector_aux_coords_and_dims : A list of :class:`_CoordAndDim` instances containing non-scalar (i.e. multi-valued) :class:`iris.coords.DimCoord` and/or :class:`iris.coords.AuxCoord` instances and the associated data dimension/s spanned by them for a :class:`iris.cube.Cube`. - - * factory_defns: + factory_defns : A list of :class:`_FactoryDefn` instances. """ @@ -325,24 +323,19 @@ class _CubeSignature( ], ) ): - """Criterion for identifying a specific type of :class:`iris.cube.Cube` - based on its metadata. + """Criterion for identifying specific type of :class:`iris.cube.Cube` based on its metadata. - Args: - - * defn: + Parameters + ---------- + defn : A cube definition tuple. - - * data_shape: + data_shape : The data payload shape of a :class:`iris.cube.Cube`. - - * data_type: + data_type : The data payload :class:`numpy.dtype` of a :class:`iris.cube.Cube`. - - * cell_measures_and_dims: + cell_measures_and_dims : A list of cell_measures and dims for the cube. - - * ancillary_variables_and_dims: + ancillary_variables_and_dims : A list of ancillary variables and dims for the cube. """ @@ -410,23 +403,25 @@ def match(self, other, error_on_mismatch): This is the first step to determine if two "cubes" (either a real Cube or a ProtoCube) can be merged, by considering: - - standard_name, long_name, var_name - - units - - attributes - - cell_methods - - shape, dtype - Args: + * standard_name, long_name, var_name + * units + * attributes + * cell_methods + * shape, dtype - * other (_CubeSignature): + Parameters + ---------- + other : _CubeSignature The _CubeSignature to compare against. - - * error_on_mismatch (bool): + error_on_mismatch : bool If True, raise a :class:`~iris.exceptions.MergeException` with a detailed explanation if the two do not match. - Returns: - Boolean. True if and only if this _CubeSignature matches `other`. + Returns + ------- + bool + True if and only if this _CubeSignature matches `other`. 
""" msgs = self._defn_msgs(other.defn) @@ -451,18 +446,19 @@ def match(self, other, error_on_mismatch): class _Skeleton(namedtuple("Skeleton", ["scalar_values", "data"])): - """Basis of a source-cube, containing the associated scalar coordinate values - and data payload of a :class:`iris.cube.Cube`. + """Basis of a source-cube. - Args: + Basis of a source-cube, containing the associated scalar coordinate values + and data payload of a :class:`iris.cube.Cube`. - * scalar_values: + Parameters + ---------- + scalar_values : A list of scalar coordinate values belonging to a :class:`iris.cube.Cube` sorted into ascending order of the associated coordinate definition. Each scalar coordinate value is typically an :class:`iris.coords.Cell`. - - * data: + data : The data payload of a :class:`iris.cube.Cube`. """ @@ -473,12 +469,11 @@ class _Skeleton(namedtuple("Skeleton", ["scalar_values", "data"])): class _FactoryDefn(namedtuple("_FactoryDefn", ["class_", "dependency_defns"])): """The information required to identify and rebuild a single AuxCoordFactory. - Args: - - * class_: + Parameters + ---------- + class_ : The class of the AuxCoordFactory. - - * dependency_defns: + dependency_defns : A list of pairs, where each pair contains a dependency key and its corresponding coordinate definition. Sorted on dependency key. @@ -488,16 +483,17 @@ class _FactoryDefn(namedtuple("_FactoryDefn", ["class_", "dependency_defns"])): class _Relation(namedtuple("Relation", ["separable", "inseparable"])): - """Categorisation of the candidate dimensions belonging to a + """Categorisation of the candidate dimensions. + + Categorisation of the candidate dimensions belonging to a :class:`ProtoCube` into separable 'independent' dimensions, and inseparable dependent dimensions. - Args: - - * separable: + Parameters + ---------- + separable : A set of independent candidate dimension names. - - * inseparable: + inseparable : A set of dependent candidate dimension names. 
""" @@ -509,23 +505,28 @@ class _Relation(namedtuple("Relation", ["separable", "inseparable"])): def _is_combination(name): - """Determine whether the candidate dimension is an 'invented' combination - of candidate dimensions. + """Determine whether the candidate dimension is an 'invented' combination. - Args: + Determine whether the candidate dimension is an 'invented' combination + of candidate dimensions. - * name: + Parameters + ---------- + name : The candidate dimension. - Returns: - Boolean. + Returns + ------- + bool """ return _COMBINATION_JOIN in str(name) def build_indexes(positions): - r"""Construct a mapping for each candidate dimension that maps for each + r"""Construct a mapping for each candidate dimension. + + Construct a mapping for each candidate dimension that maps for each of its scalar values the set of values for each of the other candidate dimensions. @@ -557,14 +558,15 @@ def build_indexes(positions): 200: 'a': set([1]) 'b': set([10]) 300: 'a': set([2]) 'b': set([20]) - Args: - - * positions: + Parameters + ---------- + positions : A list containing a dictionary of candidate dimension key to scalar value pairs for each source-cube. - Returns: - The cross-reference dictionary for each candidate dimension. + Returns + ------- + The cross-reference dictionary for each candidate dimension. """ names = positions[0].keys() @@ -595,20 +597,19 @@ def _separable_pair(name, index): A candidate dimension X and Y are separable if each scalar value of X maps to the same set of scalar values of Y. - Args: - - * name1: + Parameters + ---------- + name1 : The first candidate dimension to be compared. - - * name2: + name2 : The second candidate dimension to be compared. - - * index: + index : The cross-reference dictionary for the first candidate dimension. - Returns: - Boolean. 
+ Returns + ------- + bool """ items = iter(index.values()) @@ -618,22 +619,25 @@ def _separable_pair(name, index): def _separable(name, indexes): - """Determine the candidate dimensions that are separable and + """Determine the candidate dimensions that are separable and inseparable. + + Determine the candidate dimensions that are separable and inseparable relative to the provided candidate dimension. A candidate dimension X and Y are separable if each scalar value of X maps to the same set of scalar values of Y. - Args: - - * name: + Parameters + ---------- + name : The candidate dimension that requires its separable and inseparable relationship to be determined. - - * indexes: + indexes : The cross-reference dictionary for each candidate dimension. - Returns: + Returns + ------- + tuple A tuple containing the set of separable and inseparable candidate dimensions. @@ -652,7 +656,9 @@ def _separable(name, indexes): def derive_relation_matrix(indexes): - """Construct a mapping for each candidate dimension that specifies + """Construct a mapping for each candidate dimension. + + Construct a mapping for each candidate dimension that specifies which of the other candidate dimensions are separable or inseparable. A candidate dimension X and Y are separable if each scalar value of @@ -676,13 +682,14 @@ def derive_relation_matrix(indexes): 'c': Relation(separable=set([]), inseparable=set(['a', 'b'])) 'b': Relation(separable=set([]), inseparable=set(['a', 'c'])) - Args: - - * indexes: + Parameters + ---------- + indexes : The cross-reference dictionary for each candidate dimension. - Returns: - The relation dictionary for each candidate dimension. + Returns + ------- + The relation dictionary for each candidate dimension. """ # TODO: This takes twice as long as it could do because it doesn't @@ -698,13 +705,14 @@ def derive_groups(relation_matrix): If candidate dimension A is inseparable for B and C, and B is inseparable from D, and E is inseparable from F. 
Then the groups are ABCD and EF. - Args: - - * relation_matrix: + Parameters + ---------- + relation_matrix : The relation dictionary for each candidate dimension. - Returns: - A list of all related (chained) inseparable candidate dimensions. + Returns + ------- + A list of all related (chained) inseparable candidate dimensions. """ names = set(relation_matrix) @@ -730,17 +738,16 @@ def derive_groups(relation_matrix): def _derive_separable_group(relation_matrix, group): """Determine which candidate dimensions in the group are separable. - Args: - - * relation_matrix: + Parameters + ---------- + relation_matrix : The relation dictionary for each candidate dimension. - - * group: + group : A set of related (chained) inseparable candidate dimensions. - Returns: - The set of candidate dimensions within the group that are - separable. + Returns + ------- + The set of candidate dimensions within the group that are separable. """ result = set() @@ -753,33 +760,31 @@ def _derive_separable_group(relation_matrix, group): def _is_dependent(dependent, independent, positions, function_mapping=None): - """Determine whether there exists a one-to-one functional relationship + """Determine whether there exists a one-to-one functional relationship. + + Determine whether there exists a one-to-one functional relationship between the independent candidate dimension/s and the dependent candidate dimension. - Args: - - * dependent: + Parameters + ---------- + dependent : A candidate dimension that requires to be functionally dependent on all the independent candidate dimensions. - - * independent: + independent : A list of candidate dimension/s that require to act as the independent variables in a functional relationship. - - * positions: + positions : A list containing a dictionary of candidate dimension key to scalar value pairs for each source-cube. 
- - Kwargs: - - * function_mapping: + function_mapping : optional A dictionary that enumerates a valid functional relationship between the dependent candidate dimension and the independent candidate dimension/s. - Returns: - Boolean. + Returns + ------- + bool """ valid = True @@ -802,23 +807,25 @@ def _is_dependent(dependent, independent, positions, function_mapping=None): def _derive_consistent_groups(relation_matrix, separable_group): - """Determine the largest combinations of candidate dimensions within the + """Determine the largest combinations of candidate dimensions. + + Determine the largest combinations of candidate dimensions within the separable group that are self consistently separable from one another. If the candidate dimension A is separable from the candidate dimensions B and C. Then the candidate dimension group ABC is a separable consistent group if B is separable from A and C, and C is separable from A and B. - Args: - - * relation_matrix: + Parameters + ---------- + relation_matrix : The relation dictionary for each candidate dimension. - - * separable_group: + separable_group : The set of candidate dimensions that are separable. - Returns: - A list of candidate dimension groups that are consistently separable. + Returns + ------- + A list of candidate dimension groups that are consistently separable. """ result = [] @@ -850,7 +857,9 @@ def _derive_consistent_groups(relation_matrix, separable_group): def _build_separable_group( space, group, separable_consistent_groups, positions, function_matrix ): - """Update the space with the first separable consistent group that + """Update the space with the first separable consistent group. + + Update the space with the first separable consistent group that satisfies a valid functional relationship with all other candidate dimensions in the group. @@ -861,28 +870,25 @@ def _build_separable_group( and D, and "C: None" means that this candidate dimension is independent. 
- Args: - - * space: + Parameters + ---------- + space : A dictionary defining for each candidate dimension its dependency on any other candidate dimensions within the space. - - * group: + group : A set of related (chained) inseparable candidate dimensions. - - * separable_consistent_groups: + separable_consistent_groups: A list of candidate dimension groups that are consistently separable. - - * positions: + positions : A list containing a dictionary of candidate dimension key to scalar value pairs for each source-cube. - - * function_matrix: + function_matrix : The function mapping dictionary for each candidate dimension that participates in a functional relationship. - Returns: - Boolean. + Returns + ------- + bool """ valid = False @@ -914,7 +920,9 @@ def _build_separable_group( def _build_inseparable_group(space, group, positions, function_matrix): - """Update the space with the first valid scalar functional relationship + """Update the space with the first valid scalar functional relationship. + + Update the space with the first valid scalar functional relationship between a candidate dimension within the group and all other candidate dimensions. @@ -929,25 +937,23 @@ def _build_inseparable_group(space, group, positions, function_matrix): and all others in the group, as the group is considered inseparable in this context. - Args: - - * space: + Parameters + ---------- + space : A dictionary defining for each candidate dimension its dependency on any other candidate dimensions within the space. - - * group: + group : A set of related (chained) inseparable candidate dimensions. - - * positions: + positions : A list containing a dictionary of candidate dimension key to scalar value pairs for each source-cube. - - * function_matrix: + function_matrix : The function mapping dictionary for each candidate dimension that participates in a functional relationship. - Returns: - Boolean. 
+ Returns + ------- + bool """ scalar = False @@ -981,7 +987,9 @@ def _build_inseparable_group(space, group, positions, function_matrix): def _build_combination_group(space, group, positions, function_matrix): - """Update the space with the new combined or invented dimension + """Update the space with the new combined or invented dimension. + + Update the space with the new combined or invented dimension that each member of this inseparable group depends on. As no functional relationship between members of the group can be @@ -989,25 +997,23 @@ def _build_combination_group(space, group, positions, function_matrix): coordinate associated with it. Rather, it is simply an enumeration of the group members for each of the positions (source-cubes). - Args: - - * space: + Parameters + ---------- + space : A dictionary defining for each candidate dimension its dependency on any other candidate dimensions within the space. - - * group: + group : A set of related (chained) inseparable candidate dimensions. - - * positions: + positions : A list containing a dictionary of candidate dimension key to scalar value pairs for each source-cube. - - * function_matrix: + function_matrix : The function mapping dictionary for each candidate dimension that participates in a functional relationship. - Returns: - None. + Returns + ------- + None """ combination = _COMBINATION_JOIN.join(sorted(map(str, group))) @@ -1037,25 +1043,22 @@ def _build_combination_group(space, group, positions, function_matrix): def derive_space(groups, relation_matrix, positions, function_matrix=None): """Determine the relationship between all the candidate dimensions. - Args: - * groups: - A list of all related (chained) inseparable candidate dimensions. - - * relation_matrix: - The relation dictionary for each candidate dimension. - - * positions: - A list containing a dictionary of candidate dimension key to - scalar value pairs for each source-cube. 
- - Kwargs: - * function_matrix: + Parameters + ---------- + groups : + A list of all related (chained) inseparable candidate dimensions. + relation_matrix: + The relation dictionary for each candidate dimension. + positions : + A list containing a dictionary of candidate dimension key to + scalar value pairs for each source-cube. + function_matrix : optional The function mapping dictionary for each candidate dimension that participates in a functional relationship. - Returns: - A space dictionary describing the relationship between each - candidate dimension. + Returns + ------- + A space dictionary describing the relationship between each candidate dimension. """ space = {} @@ -1095,17 +1098,15 @@ def derive_space(groups, relation_matrix, positions, function_matrix=None): class ProtoCube: - """Framework for merging source-cubes into one or more higher - dimensional cubes. - - """ + """Framework for merging source-cubes into one or more higher dimensional cubes.""" def __init__(self, cube): - """Create a new ProtoCube from the given cube and record the cube - as a source-cube. + """Create a new ProtoCube from the given cube. - """ + Create a new ProtoCube from the given cube and record the cube as a + source-cube. + """ # Default hint ordering for candidate dimension coordinates. self._hints = [ "time", @@ -1181,17 +1182,17 @@ def _report_duplicate(self, nd_indexes, group_by_nd_index): raise iris.exceptions.DuplicateDataError(msg) def merge(self, unique=True): - """Returns the list of cubes resulting from merging the registered - source-cubes. - - Kwargs: + """Return the list of cubes resulting from merging the registered source-cubes. - * unique: + Parameters + ---------- + unique : bool, default=True If True, raises `iris.exceptions.DuplicateDataError` if duplicate cubes are detected. - Returns: - A :class:`iris.cube.CubeList` of merged cubes. + Returns + ------- + A :class:`iris.cube.CubeList` of merged cubes. 
""" positions = [ @@ -1279,27 +1280,28 @@ def merge(self, unique=True): return merged_cubes def register(self, cube, error_on_mismatch=False): - """Add a compatible :class:`iris.cube.Cube` as a source-cube for + """Add a compatible :class:`iris.cube.Cube` as a source for merging. + + Add a compatible :class:`iris.cube.Cube` as a source-cube for merging under this :class:`ProtoCube`. A cube will be deemed compatible based on the signature of the cube and the signature of its scalar coordinates and vector coordinates being identical to that of the ProtoCube. - Args: - - * cube: + Parameters + ---------- + cube : Candidate :class:`iris.cube.Cube` to be associated with this :class:`ProtoCube`. - - Kwargs: - - * error_on_mismatch: + error_on_mismatch :bool, default=False If True, raise an informative :class:`~iris.exceptions.MergeError` if registration fails. - Returns: - True iff the :class:`iris.cube.Cube` is compatible with + Returns + ------- + bool + True if the :class:`iris.cube.Cube` is compatible with this :class:`ProtoCube`. """ @@ -1317,7 +1319,7 @@ def register(self, cube, error_on_mismatch=False): return match def _guess_axis(self, name): - """Returns a "best guess" axis name of the candidate dimension. + """Return a "best guess" axis name of the candidate dimension. Heuristic categoration of the candidate dimension (i.e. scalar_defn index) into either label 'T', 'Z', 'Y', 'X' @@ -1326,13 +1328,14 @@ def _guess_axis(self, name): Based on the associated scalar coordinate definition rather than the scalar coordinate itself. - Args: - - * name: + Parameters + ---------- + name : The candidate dimension. - Returns: - 'T', 'Z', 'Y', 'X', or None. + Returns + ------- + axis : {'T', 'Z', 'Y', 'X'} or None. """ axis = None @@ -1344,24 +1347,23 @@ def _guess_axis(self, name): return axis def _define_space(self, space, positions, indexes, function_matrix): - """Given the derived :class:`ProtoCube` space, define this space in + """Define space. 
+
+        Given the derived :class:`ProtoCube` space, define this space in
        terms of its dimensionality, shape, coordinates and associated
        coordinate to space dimension mappings.

-        Args:
-
-        * space:
+        Parameters
+        ----------
+        space :
            A dictionary defining for each candidate dimension its
            dependency on any other candidate dimensions within the
            space.
-
-        * positions:
+        positions :
            A list containing a dictionary of candidate dimension key
            to scalar value pairs for each source-cube.
-
-        * indexes:
+        indexes :
            A cross-reference dictionary for each candidate dimension.
-
-        * function_matrix:
+        function_matrix :
            The function mapping dictionary for each candidate
            dimension that participates in a functional relationship.

@@ -1520,7 +1522,9 @@ def name_in_independents():
        self._shape.extend(signature.data_shape)

    def _get_cube(self, data):
-        """Return a fully constructed cube for the given data, containing
+        """Generate a fully constructed cube.
+
+        Return a fully constructed cube for the given data, containing
        all its coordinates and metadata.

        """
@@ -1560,11 +1564,7 @@ def _get_cube(self, data):
        return cube

    def _nd_index(self, position):
-        """Returns the n-dimensional index of this source-cube (position),
-        within the merged cube.
-
-        """
-
+        """Return the n-dimensional index of the source-cube, within the merged cube."""
        index = []

        # Determine the index of the source-cube cell for each dimension.
@@ -1582,7 +1582,9 @@ def _nd_index(self, position):
        return tuple(index)

    def _build_coordinates(self):
-        """Build the dimension and auxiliary coordinates for the final
+        """Build the dimension and auxiliary coordinates.
+
+        Build the dimension and auxiliary coordinates for the final
        merged cube given that the final dimensionality of the target
        merged cube is known and the associated dimension/s that each
        coordinate maps onto in that merged cube.
@@ -1655,16 +1657,16 @@ def _build_coordinates(self):
    def _build_signature(self, cube):
        """Generate the signature that defines this cube.
- Args: - - * cube: + Parameters + ---------- + cube : The source cube to create the cube signature from. - Returns: - The cube signature. + Returns + ------- + The cube signature. """ - return _CubeSignature( cube.metadata, cube.shape, diff --git a/lib/iris/_representation/__init__.py b/lib/iris/_representation/__init__.py index cc312b5c9c..74de095995 100644 --- a/lib/iris/_representation/__init__.py +++ b/lib/iris/_representation/__init__.py @@ -2,6 +2,4 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Code to make printouts and other representations (e.g. html) of Iris objects. - -""" +"""Code to make printouts and other representations (e.g. html) of Iris objects.""" diff --git a/lib/iris/_representation/cube_printout.py b/lib/iris/_representation/cube_printout.py index 81f60e595d..3c418bde64 100644 --- a/lib/iris/_representation/cube_printout.py +++ b/lib/iris/_representation/cube_printout.py @@ -2,16 +2,16 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Provides text printouts of Iris cubes. - -""" +"""Provides text printouts of Iris cubes.""" from copy import deepcopy from iris._representation.cube_summary import CubeSummary class Table: - """A container of text strings in rows + columns, that can format its content + """A container of text strings in rows and columns. + + A container of text strings in rows + columns, that can format its content into a string per row, with contents in columns of fixed width. Supports left- or right- aligned columns, alignment being set "per row". @@ -52,12 +52,13 @@ def __init__(self, cols, aligns, i_col_unlimited=None): def add_row(self, cols, aligns, i_col_unlimited=None): """Create a new row at the bottom. - Args: - * cols (list of string): + Parameters + ---------- + cols : list of str Per-column content. 
Length must match the other rows (if any). - * aligns (list of {'left', 'right'}): + aligns : list of {'left', 'right'} Per-column alignments. Length must match 'cols'. - * i_col_unlimited (int or None): + i_col_unlimited : int, optional Column beyond which content does not affect the column widths. ( meaning contents will print without limit ). @@ -117,7 +118,9 @@ def __str__(self): class CubePrinter: - """An object created from a + """An object created from a cube summary. + + An object created from a :class:`iris._representation.CubeSummary`, which provides text printout of a :class:`iris.cube.Cube`. @@ -131,14 +134,15 @@ class CubePrinter: N_INDENT_EXTRA = 4 def __init__(self, cube_or_summary): - """An object that provides a printout of a cube. + """Object that provides a printout of a cube. - Args: - - * cube_or_summary (Cube or CubeSummary): + Parameters + ---------- + cube_or_summary : Cube or CubeSummary If a cube, first create a CubeSummary from it. - + Notes + ----- .. note:: The CubePrinter is based on a digest of a CubeSummary, but does not reference or store it. @@ -266,7 +270,6 @@ def _decorated_table(table, name_padding=None): Note: 'name_padding' sets a minimum width for the name column (#0). """ - # Copy the input table + extract the header + its columns. table = table.copy() header = table.rows[0] @@ -320,15 +323,17 @@ def _multiline_summary(self, name_padding): def to_string(self, oneline=False, name_padding=35): """Produce a printable summary. - Args: - * oneline (bool): + Parameters + ---------- + oneline : bool, default=False If set, produce a one-line summary. Default is False = produce full (multiline) summary. - * name_padding (int): + name_padding : int, default=35 The minimum width for the "name" (#0) column. 
- Returns: - result (string) + Returns + ------- + str """ if oneline: diff --git a/lib/iris/_representation/cube_summary.py b/lib/iris/_representation/cube_summary.py index bae63ccb40..64a6aadbf3 100644 --- a/lib/iris/_representation/cube_summary.py +++ b/lib/iris/_representation/cube_summary.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Provides objects describing cube summaries. -""" +"""Provides objects describing cube summaries.""" import re from iris.common.metadata import hexdigest @@ -259,7 +258,7 @@ def __init__(self, title, cell_methods): class CubeSummary: - """This class provides a structure for output representations of an Iris cube.""" + """Provide a structure for output representations of an Iris cube.""" def __init__(self, cube, name_padding=35): self.header = FullHeader(cube, name_padding) diff --git a/lib/iris/_shapefiles.py b/lib/iris/_shapefiles.py new file mode 100644 index 0000000000..351e798ae5 --- /dev/null +++ b/lib/iris/_shapefiles.py @@ -0,0 +1,243 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. + +# Much of this code is originally based off the ASCEND library, developed in +# the Met Office by Chris Kent, Emilie Vanvyve, David Bentley, Joana Mendes +# many thanks to them. Converted to iris by Alex Chamberlain-Clay + + +from itertools import product +import warnings + +import numpy as np +import shapely +import shapely.errors +import shapely.geometry as sgeom +import shapely.ops + +from iris.warnings import IrisDefaultingWarning, IrisUserWarning + + +def create_shapefile_mask( + geometry, + cube, + minimum_weight=0.0, +): + """Make a mask for a cube from a shape. + + Get the mask of the intersection between the + given shapely geometry and cube with x/y DimCoords. 
+ Can take a minimum weight and evaluate area overlaps instead + + Parameters + ---------- + geometry : :class:`shapely.Geometry` + cube : :class:`iris.cube.Cube` + A :class:`~iris.cube.Cube` which has 1d x and y coordinates + minimum_weight : float, default 0.0 + A float between 0 and 1 determining what % of a cell + a shape must cover for the cell to remain unmasked. + eg: 0.1 means that at least 10% of the shape overlaps the cell + to be unmasked. + Requires geometry to be a Polygon or MultiPolygon + Defaults to 0.0 (eg only test intersection) + + Returns + ------- + :class:`np.array` + An array of the shape of the x & y coordinates of the cube, with points + to mask equal to True + + """ + from iris.cube import Cube, CubeList + + try: + msg = "Geometry is not a valid Shapely object" + if not shapely.is_valid(geometry): + raise TypeError(msg) + except Exception: + raise TypeError(msg) + if not isinstance(cube, Cube): + if isinstance(cube, CubeList): + msg = "Received CubeList object rather than Cube - \ + to mask a CubeList iterate over each Cube" + raise TypeError(msg) + else: + msg = "Received non-Cube object where a Cube is expected" + raise TypeError(msg) + if minimum_weight > 0.0 and isinstance( + geometry, + ( + sgeom.Point, + sgeom.LineString, + sgeom.LinearRing, + sgeom.MultiPoint, + sgeom.MultiLineString, + ), + ): + minimum_weight = 0.0 + warnings.warn( + """Shape is of invalid type for minimum weight masking, + must use a Polygon rather than Line shape.\n + Masking based off intersection instead. 
""", + category=IrisDefaultingWarning, + ) + + # prepare 2D cube + y_name, x_name = _cube_primary_xy_coord_names(cube) + cube_2d = cube.slices([y_name, x_name]).next() + for coord in cube_2d.dim_coords: + if not coord.has_bounds(): + coord.guess_bounds() + trans_geo = _transform_coord_system(geometry, cube_2d) + + y_coord, x_coord = [cube_2d.coord(n) for n in (y_name, x_name)] + x_bounds = _get_mod_rebased_coord_bounds(x_coord) + y_bounds = _get_mod_rebased_coord_bounds(y_coord) + # prepare array for dark + box_template = [ + sgeom.box(x[0], y[0], x[1], y[1]) for x, y in product(x_bounds, y_bounds) + ] + # shapely can do lazy evaluation of intersections if it's given a list of grid box shapes + # delayed lets us do it in parallel + intersect_template = shapely.intersects(trans_geo, box_template) + # we want areas not under shapefile to be True (to mask) + intersect_template = np.invert(intersect_template) + # now calc area overlaps if doing weights and adjust mask + if minimum_weight > 0.0: + intersections = np.array(box_template)[~intersect_template] + intersect_template[~intersect_template] = [ + trans_geo.intersection(box).area / box.area <= minimum_weight + for box in intersections + ] + mask_template = np.reshape(intersect_template, cube_2d.shape[::-1]).T + return mask_template + + +def _transform_coord_system(geometry, cube, geometry_system=None): + """Project the shape onto another coordinate system. + + Parameters + ---------- + geometry: :class:`shapely.Geometry` + cube: :class:`iris.cube.Cube` + :class:`~iris.cube.Cube` with the coord_system to be projected to and + a x coordinate + geometry_system: :class:`iris.coord_systems`, optional + A :class:`~iris.coord_systems` object describing + the coord_system of the shapefile. 
Defaults to None, + which is treated as GeogCS + + Returns + ------- + :class:`shapely.Geometry` + A transformed copy of the provided :class:`shapely.Geometry` + + """ + y_name, x_name = _cube_primary_xy_coord_names(cube) + import iris.analysis.cartography + + DEFAULT_CS = iris.coord_systems.GeogCS( + iris.analysis.cartography.DEFAULT_SPHERICAL_EARTH_RADIUS + ) + target_system = cube.coord_system() + if not target_system: + warnings.warn( + "Cube has no coord_system; using default GeogCS lat/lon", + category=IrisDefaultingWarning, + ) + target_system = DEFAULT_CS + if geometry_system is None: + geometry_system = DEFAULT_CS + target_proj = target_system.as_cartopy_projection() + source_proj = geometry_system.as_cartopy_projection() + + trans_geometry = target_proj.project_geometry(geometry, source_proj) + # A GeogCS in iris can be either -180 to 180 or 0 to 360. If cube is 0-360, shift geom to match + if ( + isinstance(target_system, iris.coord_systems.GeogCS) + and cube.coord(x_name).points[-1] > 180 + ): + # chop geom at 0 degree line very finely then transform + prime_meridian_line = shapely.LineString([(0, 90), (0, -90)]) + trans_geometry = trans_geometry.difference(prime_meridian_line.buffer(0.00001)) + trans_geometry = shapely.transform(trans_geometry, _trans_func) + + if (not isinstance(target_system, iris.coord_systems.GeogCS)) and cube.coord( + x_name + ).points[-1] > 180: + # this may lead to incorrect masking or not depending on projection type so warn user + warnings.warn( + """Cube has x-coordinates over 180E and a non-standard projection type.\n + This may lead to incorrect masking. 
\n + If the result is not as expected, you might want to transform the x coordinate points of your cube to -180-180 """, + category=IrisUserWarning, + ) + return trans_geometry + + +def _trans_func(geometry): + """Pocket function for transforming the x coord of a geometry from -180 to 180 to 0-360.""" + for point in geometry: + if point[0] < 0: + point[0] = 360 - np.abs(point[0]) + return geometry + + +def _cube_primary_xy_coord_names(cube): + """Return the primary latitude and longitude coordinate names, or long names, from a cube. + + Parameters + ---------- + cube : :class:`iris.cube.Cube` + + Returns + ------- + tuple of str + The names of the primary latitude and longitude coordinates + + """ + latc = ( + cube.coords(axis="y", dim_coords=True)[0] + if cube.coords(axis="y", dim_coords=True) + else -1 + ) + lonc = ( + cube.coords(axis="x", dim_coords=True)[0] + if cube.coords(axis="x", dim_coords=True) + else -1 + ) + + if -1 in (latc, lonc): + msg = "Error retrieving 1d xy coordinates in cube: {!r}" + raise ValueError(msg.format(cube)) + + latitude = latc.name() + longitude = lonc.name() + return latitude, longitude + + +def _get_mod_rebased_coord_bounds(coord): + """Take in a coord and returns a array of the bounds of that coord rebased to the modulus. + + Parameters + ---------- + coord : :class:`iris.coords.Coord` + An Iris coordinate with a modulus + + Returns + ------- + :class:`np.array` + A 1d Numpy array of [start,end] pairs for bounds of the coord + + """ + modulus = coord.units.modulus + # Force realisation (rather than core_bounds) - more efficient for the + # repeated indexing happening downstream. 
+ result = np.array(coord.bounds) + if modulus: + result[result < 0.0] = (np.abs(result[result < 0.0]) % modulus) * -1 + result[np.isclose(result, modulus, 1e-10)] = 0.0 + return result diff --git a/lib/iris/analysis/__init__.py b/lib/iris/analysis/__init__.py index 83ae07d350..773e804a14 100644 --- a/lib/iris/analysis/__init__.py +++ b/lib/iris/analysis/__init__.py @@ -227,7 +227,7 @@ def _dimensional_metadata_comparison(*cubes, object_get=None): Returns ------- - result (dict mapping string: list of _CoordGroup): + result : dict mapping str, list of _CoordGroup A dictionary whose keys are match categories and values are groups of coordinates, cell-measures or ancillary-variables. @@ -243,42 +243,42 @@ def _dimensional_metadata_comparison(*cubes, object_get=None): Returned Keys: - * grouped_coords - A list of coordinate groups of all the coordinates grouped together - by their coordinate definition - * ungroupable - A list of coordinate groups which contain at least one None, - meaning not all Cubes provide an equivalent coordinate - * not_equal - A list of coordinate groups of which not all are equal - (superset of ungroupable) - * no_data_dimension - A list of coordinate groups of which all have no data dimensions on - their respective cubes - * scalar - A list of coordinate groups of which all have shape (1, ) - * non_equal_data_dimension - A list of coordinate groups of which not all have the same - data dimension on their respective cubes - * non_equal_shape - A list of coordinate groups of which not all have the same shape - * equal_data_dimension - A list of coordinate groups of which all have the same data dimension - on their respective cubes - * equal - A list of coordinate groups of which all are equal - * ungroupable_and_dimensioned - A list of coordinate groups of which not all cubes had an equivalent - (in metadata) coordinate which also describe a data dimension - * dimensioned - A list of coordinate groups of which all describe a data dimension on 
-        their respective cubes
-    * ignorable
-        A list of scalar, ungroupable non_equal coordinate groups
-    * resamplable
-        A list of equal, different data dimensioned coordinate groups
-    * transposable
-        A list of non equal, same data dimensioned, non scalar coordinate groups
+    * **grouped_coords**.
+        A list of coordinate groups of all the coordinates grouped together
+        by their coordinate definition
+    * **ungroupable**.
+        A list of coordinate groups which contain at least one None,
+        meaning not all Cubes provide an equivalent coordinate
+    * **not_equal**.
+        A list of coordinate groups of which not all are equal
+        (superset of ungroupable)
+    * **no_data_dimension**.
+        A list of coordinate groups of which all have no data dimensions on
+        their respective cubes
+    * **scalar**.
+        A list of coordinate groups of which all have shape (1, )
+    * **non_equal_data_dimension**.
+        A list of coordinate groups of which not all have the same
+        data dimension on their respective cubes
+    * **non_equal_shape**.
+        A list of coordinate groups of which not all have the same shape
+    * **equal_data_dimension**.
+        A list of coordinate groups of which all have the same data dimension
+        on their respective cubes
+    * **equal**.
+        A list of coordinate groups of which all are equal
+    * **ungroupable_and_dimensioned**.
+        A list of coordinate groups of which not all cubes had an equivalent
+        (in metadata) coordinate which also describe a data dimension
+    * **dimensioned**.
+        A list of coordinate groups of which all describe a data dimension on
+        their respective cubes
+    * **ignorable**.
+        A list of scalar, ungroupable non_equal coordinate groups
+    * **resamplable**.
+        A list of equal, different data dimensioned coordinate groups
+    * **transposable**.
+        A list of non equal, same data dimensioned, non scalar coordinate groups

    Example usage::

@@ -511,11 +511,11 @@ def lazy_aggregate(self, data, axis, **kwargs):
        ----------
        data : :class:`dask.array.Array`
            A lazy array.
- axis: int or list of int + axis : int or list of int The dimensions to aggregate over -- note that this is defined differently to the 'aggregate' method 'axis' argument, which only accepts a single dimension index. - **kwargs: + **kwargs : dict, optional All keyword arguments are passed through to the data aggregation function. @@ -555,7 +555,7 @@ def aggregate(self, data, axis, **kwargs): mdtol. mdtol=0 means no missing data is tolerated while mdtol=1 will return the resulting value from the aggregation function. Defaults to 1. - **kwargs: + **kwargs : dict, optional All keyword arguments apart from those specified above, are passed through to the data aggregation function. @@ -593,7 +593,7 @@ def update_metadata(self, cube, coords, **kwargs): Source cube that requires metadata update. coords : :class:`iris.coords.Coord` The one or more coordinates that were aggregated. - **kwargs : + **kwargs : dict, optional This function is intended to be used in conjunction with aggregate() and should be passed the same keywords (for example, the "ddof" keyword for a standard deviation aggregator). @@ -617,7 +617,7 @@ def post_process(self, collapsed_cube, data_result, coords, **kwargs): Result from :func:`iris.analysis.Aggregator.aggregate` coords : The one or more coordinates that were aggregated over. - **kwargs : + **kwargs : dict, optional This function is intended to be used in conjunction with aggregate() and should be passed the same keywords (for example, the "ddof" keyword from a standard deviation aggregator). @@ -635,8 +635,9 @@ def aggregate_shape(self, **kwargs): Parameters ---------- - This function is intended to be used in conjunction with aggregate() - and should be passed the same keywords. + **kwargs : dict, optional + This function is intended to be used in conjunction with aggregate() + and should be passed the same keywords. Returns ------- @@ -683,7 +684,7 @@ def __init__(self, units_func=None, **kwargs): value that can be made into one. 
To ensure backwards-compatibility, also accepts a callable with call signature (units). - **kwargs : + **kwargs : dict, optional Passed through to :data:`call_func`, :data:`lazy_func`, and :data:`units_func`. @@ -736,7 +737,7 @@ def aggregate(self, data, axis, **kwargs): mdtol. mdtol=0 means no missing data is tolerated while mdtol=1 will return the resulting value from the aggregation function. Defaults to 1. - **kwargs : + **kwargs : dict, optional All keyword arguments apart from those specified above, are passed through to the data aggregation function. @@ -766,7 +767,7 @@ def lazy_aggregate(self, data, axis, **kwargs): The dimensions to aggregate over -- note that this is defined differently to the 'aggregate' method 'axis' argument, which only accepts a single dimension index. - **kwargs : + **kwargs : dict, optional All keyword arguments are passed through to the data aggregation function. @@ -788,7 +789,7 @@ def post_process(self, collapsed_cube, data_result, coords, **kwargs): Result from :func:`iris.analysis.Aggregator.aggregate` coords : The one or more coordinates that were aggregated over. - **kwargs : + **kwargs : dict, optional This function is intended to be used in conjunction with aggregate() and should be passed the same keywords (for example, the "percent" keywords from a percentile aggregator). @@ -843,7 +844,7 @@ def aggregate_shape(self, **kwargs): Parameters ---------- - **kwargs : + **kwargs : dict, optional This function is intended to be used in conjunction with aggregate() and should be passed the same keywords. @@ -890,7 +891,7 @@ def __init__(self, units_func=None, lazy_func=None, **kwargs): Parameters ---------- - units_func : callable + units_func : callable or None | *Call signature*: ``(units, **kwargs)``. If provided, called to convert a cube's units. @@ -913,7 +914,7 @@ def __init__(self, units_func=None, lazy_func=None, **kwargs): An alternative to :data:`call_func` implementing a lazy aggregation. 
Note that, it need not support all features of the main operation, but should raise an error in unhandled cases. - **kwargs : + **kwargs : dict, optional Passed through to :data:`call_func`, :data:`lazy_func`, and :data:`units_func`. @@ -957,7 +958,7 @@ def post_process(self, collapsed_cube, data_result, coords, **kwargs): Result from :func:`iris.analysis.Aggregator.aggregate` coords : The one or more coordinates that were aggregated over. - **kwargs : + **kwargs : dict, optional This function is intended to be used in conjunction with aggregate() and should be passed the same keywords (for example, the "weights" keyword). @@ -997,7 +998,7 @@ def update_metadata(self, cube, coords, **kwargs): Source cube that requires metadata update. coords : :class:`iris.coords.Coord` The one or more coordinates that were aggregated. - **kwargs : + **kwargs : dict, optional This function is intended to be used in conjunction with aggregate() and should be passed the same keywords (for example, the "ddof" keyword for a standard deviation aggregator). @@ -1063,7 +1064,7 @@ def __init__( An alternative to :data:`call_func` implementing a lazy aggregation. Note that, it need not support all features of the main operation, but should raise an error in unhandled cases. - ** kwargs : + **kwargs : dict, optional Passed through to :data:`call_func`, :data:`lazy_func`, and :data:`units_func`. @@ -1085,7 +1086,7 @@ def uses_weighting(self, **kwargs): Parameters ---------- - **kwargs : + **kwargs : dict, optional Arguments to filter of weighted keywords. Returns @@ -1113,7 +1114,7 @@ def post_process(self, collapsed_cube, data_result, coords, **kwargs): Result from :func:`iris.analysis.Aggregator.aggregate` coords : The one or more coordinates that were aggregated over. - **kwargs : + **kwargs : dict, optional This function is intended to be used in conjunction with aggregate() and should be passed the same keywords (for example, the "weights" keywords from a mean aggregator). 
@@ -1217,7 +1218,7 @@ def create_weighted_aggregator_fn(aggregator_fn, axis, **kwargs): axis : int Axis to aggregate over. This argument is directly passed to ``aggregator_fn``. - **kwargs : + **kwargs : dict, optional Arbitrary keyword arguments passed to ``aggregator_fn``. Should not include ``weights`` (this will be removed if present). @@ -1411,7 +1412,7 @@ def _weighted_quantile_1D(data, weights, quantiles, **kwargs): matching mask. quantiles : float or sequence of floats Quantile(s) to compute. Must have a value between 0 and 1. - **kwargs : + **kwargs : dict, optional passed to `scipy.interpolate.interp1d` Returns @@ -1461,7 +1462,7 @@ def _weighted_percentile(data, axis, weights, percent, returned=False, **kwargs) array with the weights. Must have same shape as data percent : float or sequence of floats Percentile rank/s at which to extract value/s. - returned : bool, optional + returned : bool, default=False Default False. If True, returns a tuple with the percentiles as the first element and the sum of the weights as the second element. @@ -1576,7 +1577,7 @@ def _lazy_max_run(array, axis=-1, **kwargs): stepped_run_lengths = da.reductions.cumreduction( np.maximum.accumulate, np.maximum, - np.NINF, + -np.inf, run_totals, axis=axis, dtype=cum_sum.dtype, @@ -2286,7 +2287,7 @@ def __init__( One or more coordinates (including multidimensional coordinates) that share the same group-by coordinate axis. The `int` identifies which dimension of the coord is on the group-by coordinate axis. - climatological : bool + climatological : bool, default=False Indicates whether the output is expected to be climatological. 
For any aggregated time coord(s), this causes the climatological flag to be set and the point for each cell to equal its first bound, thereby diff --git a/lib/iris/analysis/_area_weighted.py b/lib/iris/analysis/_area_weighted.py index f882860d25..263f83838c 100644 --- a/lib/iris/analysis/_area_weighted.py +++ b/lib/iris/analysis/_area_weighted.py @@ -18,22 +18,18 @@ class AreaWeightedRegridder: - """This class provides support for performing area-weighted regridding.""" + """Provide support for performing area-weighted regridding.""" def __init__(self, src_grid_cube, target_grid_cube, mdtol=1): - """Create an area-weighted regridder for conversions between the source - and target grids. + """Create an area-weighted regridder for conversions between the source and target grids. - Args: - - * src_grid_cube: + Parameters + ---------- + src_grid_cube : :class:`~iris.cube.Cube` The :class:`~iris.cube.Cube` providing the source grid. - * target_grid_cube: + target_grid_cube : :class:`~iris.cube.Cube` The :class:`~iris.cube.Cube` providing the target grid. - - Kwargs: - - * mdtol (float): + mdtol : float, default=1 Tolerance of missing data. The value returned in each element of the returned array will be masked if the fraction of masked data exceeds mdtol. mdtol=0 means no missing data is tolerated while @@ -41,6 +37,8 @@ def __init__(self, src_grid_cube, target_grid_cube, mdtol=1): if all the contributing elements of data are masked. Defaults to 1. + Notes + ----- .. Note:: Both source and target cubes must have an XY grid defined by @@ -76,8 +74,7 @@ def __init__(self, src_grid_cube, target_grid_cube, mdtol=1): ) = _regrid_info def __call__(self, cube): - """Regrid this :class:`~iris.cube.Cube` onto the target grid of - this :class:`AreaWeightedRegridder`. + """Regrid :class:`~iris.cube.Cube` onto target grid :class:`AreaWeightedRegridder`. The given cube must be defined with the same grid as the source grid used to create this :class:`AreaWeightedRegridder`. 
@@ -85,17 +82,21 @@ def __call__(self, cube): If the source cube has lazy data, the returned cube will also have lazy data. - Args: - - * cube: + Parameters + ---------- + cube : :class:`~iris.cube.Cube` A :class:`~iris.cube.Cube` to be regridded. - Returns: + Returns + ------- + :class:`~iris.cube.Cube` A cube defined with the horizontal dimensions of the target and the other dimensions from this cube. The data values of this cube will be converted to values on the new grid using area-weighted regridding. + Notes + ----- .. note:: If the source cube has lazy data, @@ -146,12 +147,14 @@ def _get_xy_coords(cube): have equal coordinate systems and that they do not occupy the same dimension on the cube. - Args: - - * cube: + Parameters + ---------- + cube : :class:`iris.cube.Cube` An instance of :class:`iris.cube.Cube`. - Returns: + Returns + ------- + tuple A tuple containing the cube's x and y coordinates. """ @@ -218,6 +221,7 @@ def _get_bounds_in_units(coord, units, dtype): """Return a copy of coord's bounds in the specified units and dtype. Return as contiguous bounds. + """ # The bounds are cast to dtype before conversion to prevent issues when # mixing float32 and float64 types. diff --git a/lib/iris/analysis/_grid_angles.py b/lib/iris/analysis/_grid_angles.py index d50f55125f..8712dd9ad1 100644 --- a/lib/iris/analysis/_grid_angles.py +++ b/lib/iris/analysis/_grid_angles.py @@ -2,7 +2,10 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Code to implement vector rotation by angles, and inferring gridcell angles + +"""Implement vector rotation by angles. + +Code to implement vector rotation by angles, and inferring gridcell angles from coordinate points and bounds. """ @@ -16,9 +19,9 @@ def _3d_xyz_from_latlon(lon, lat): """Return locations of (lon, lat) in 3D space. 
- Args: - - * lon, lat: (float array) + Parameters + ---------- + lon, lat: float array Arrays of longitudes and latitudes, in degrees. Both the same shape. @@ -45,9 +48,9 @@ def _3d_xyz_from_latlon(lon, lat): def _latlon_from_xyz(xyz): """Return arrays of lons+lats angles from xyz locations. - Args: - - * xyz: (array) + Parameters + ---------- + xyz : array Array of 3-D cartesian coordinates. Shape (3, ). x / y / z values are in xyz[0 / 1 / 2], @@ -67,14 +70,19 @@ def _latlon_from_xyz(xyz): def _angle(p, q, r): - """Estimate grid-angles to true-Eastward direction from positions in the same + """Estimate grid-angles to true-Eastward direction. + + Estimate grid-angles to true-Eastward direction from positions in the same grid row, but at increasing column (grid-Eastward) positions. {P, Q, R} are locations of consecutive points in the same grid row. - These could be successive points in a single grid, - e.g. {T(i-1,j), T(i,j), T(i+1,j)} - or a mixture of U/V and T gridpoints if row positions are aligned, - e.g. {v(i,j), f(i,j), v(i+1,j)}. + These could be successive points in a single grid, e.g.:: + + {T(i-1,j), T(i,j), T(i+1,j)} + + or a mixture of U/V and T gridpoints if row positions are aligned, e.g:: + + {v(i,j), f(i,j), v(i+1,j)}. Method: @@ -84,7 +92,7 @@ def _angle(p, q, r): Discriminate between +/- angles by comparing latitudes of P and R. Return NaN where any P-->R are zero. - .. NOTE:: + .. note:: This method assumes that the vector PR is parallel to the surface at the longitude of Q, as it uses the length of PR as the basis for @@ -96,9 +104,9 @@ def _angle(p, q, r): gridcell-orientation-angle arrays found in files output by the CICE model, which presumably uses an equivalent calculation. - Args: - - * p, q, r : (float array) + Parameters + ---------- + p, q, r : float array Arrays of angles, in degrees. All the same shape. Shape is (2, ). 
@@ -143,34 +151,28 @@ def gridcell_angles(x, y=None, cell_angle_boundpoints="mid-lhs, mid-rhs"): Input can be either two arrays, two coordinates, or a single cube containing two suitable coordinates identified with the 'x' and 'y' axes. - Args: - - The inputs (x [,y]) can be any of the following : + The inputs (x [,y]) can be different, see the parameters section. - * x (:class:`~iris.cube.Cube`): + Parameters + ---------- + x : :class:`~iris.cube.Cube` a grid cube with 2D X and Y coordinates, identified by 'axis'. The coordinates must be 2-dimensional with the same shape. The two dimensions represent grid dimensions in the order Y, then X. - - * x, y (:class:`~iris.coords.Coord`): + x, y : :class:`~iris.coords.Coord` X and Y coordinates, specifying grid locations on the globe. The coordinates must be 2-dimensional with the same shape. The two dimensions represent grid dimensions in the order Y, then X. If there is no coordinate system, they are assumed to be true longitudes and latitudes. Units must convertible to 'degrees'. - - * x, y (2-dimensional arrays of same shape (ny, nx)): + x, y : 2-dimensional arrays of same shape (ny, nx) longitude and latitude cell center locations, in degrees. The two dimensions represent grid dimensions in the order Y, then X. - - * x, y (3-dimensional arrays of same shape (ny, nx, 4)): + x, y : 3-dimensional arrays of same shape (ny, nx, 4) longitude and latitude cell bounds, in degrees. The first two dimensions are grid dimensions in the order Y, then X. The last index maps cell corners anticlockwise from bottom-left. - - Optional Args: - - * cell_angle_boundpoints (string): + cell_angle_boundpoints : str, default="mid-lhs, mid-rhs" Controls which gridcell bounds locations are used to calculate angles, if the inputs are bounds or bounded coordinates. 
Valid values are 'lower-left, lower-right', which takes the angle from @@ -392,7 +394,7 @@ def rotate_grid_vectors(u_cube, v_cube, grid_angles_cube=None, grid_angles_kwarg Can also rotate by arbitrary angles, if they are passed in. - .. Note:: + .. note:: This operation overlaps somewhat in function with :func:`iris.analysis.cartography.rotate_winds`. @@ -404,21 +406,17 @@ def rotate_grid_vectors(u_cube, v_cube, grid_angles_cube=None, grid_angles_kwarg complex meshes defined by two-dimensional coordinates, such as most ocean grids. - Args: - - * u_cube, v_cube : (cube) + Parameters + ---------- + u_cube, v_cube : cube Cubes of grid-u and grid-v vector components. Units should be differentials of true-distance, e.g. 'm/s'. - - Optional args: - - * grid_angles_cube : (cube) + grid_angles_cube : cube, optional gridcell orientation angles. Units must be angular, i.e. can be converted to 'radians'. If not provided, grid angles are estimated from 'u_cube' using the :func:`gridcell_angles` method. - - * grid_angles_kwargs : (dict or None) + **grid_angles_kwargs : dict, optional Additional keyword args to be passed to the :func:`gridcell_angles` method, if it is used. diff --git a/lib/iris/analysis/_interpolation.py b/lib/iris/analysis/_interpolation.py index b6e443c95c..babc414ee4 100644 --- a/lib/iris/analysis/_interpolation.py +++ b/lib/iris/analysis/_interpolation.py @@ -57,7 +57,8 @@ def convert_date(date): def extend_circular_coord(coord, points): - """Return coordinates points with a shape extended by one + """Return coordinate points with a shape extended by one. + This is common when dealing with circular coordinates. """ @@ -67,7 +68,9 @@ def extend_circular_coord(coord, points): def extend_circular_coord_and_data(coord, data, coord_dim): - """Return coordinate points and a data array with a shape extended by one + """Return coordinate points and data with a shape extended by one in the provided axis. 
+ + Return coordinate points and a data array with a shape extended by one in the coord_dim axis. This is common when dealing with circular coordinates. @@ -94,12 +97,14 @@ def get_xy_dim_coords(cube): if the identified x and y coordinates do not have coordinate systems that are equal. - Args: - - * cube: + Parameters + ---------- + cube : :class:`iris.cube.Cube` An instance of :class:`iris.cube.Cube`. - Returns: + Returns + ------- + tuple A tuple containing the cube's x and y dimension coordinates. """ @@ -114,18 +119,17 @@ def get_xy_coords(cube, dim_coords=False): if the identified x and y coordinates do not have coordinate systems that are equal. - Args: - - * cube: + Parameters + ---------- + cube : :class:`iris.cube.Cube` An instance of :class:`iris.cube.Cube`. - - Kwargs: - - * dim_coords: + dim_coords : bool, default=False Set this to True to only return dimension coordinates. Defaults to False. - Returns: + Returns + ------- + tuple A tuple containing the cube's x and y dimension coordinates. """ @@ -154,16 +158,15 @@ def get_xy_coords(cube, dim_coords=False): def snapshot_grid(cube): - """Helper function that returns deep copies of lateral (dimension) coordinates - from a cube. - - """ + """Return deep copies of lateral (dimension) coordinates from a cube.""" x, y = get_xy_dim_coords(cube) return x.copy(), y.copy() class RectilinearInterpolator: - """This class provides support for performing nearest-neighbour or + """Provide support for performing nearest-neighbour or linear interpolation. + + This class provides support for performing nearest-neighbour or linear interpolation over one or more orthogonal dimensions. """ @@ -171,30 +174,30 @@ class RectilinearInterpolator: def __init__(self, src_cube, coords, method, extrapolation_mode): """Perform interpolation over one or more orthogonal coordinates. - Args: - - * src_cube: + Parameters + ---------- + src_cube : :class:`iris.cube.Cube` The :class:`iris.cube.Cube` which is to be interpolated. 
- * coords: + coords : The names or coordinate instances which are to be interpolated over - * method: + method : Either 'linear' or 'nearest'. - * extrapolation_mode: + extrapolation_mode : str Must be one of the following strings: - * 'extrapolate' - The extrapolation points will be calculated - according to the method. The 'linear' method extends the - gradient of the closest two points. The 'nearest' method - uses the value of the closest point. - * 'nan' - The extrapolation points will be be set to NaN. - * 'error' - A ValueError exception will be raised, notifying an - attempt to extrapolate. - * 'mask' - The extrapolation points will always be masked, even - if the source data is not a MaskedArray. - * 'nanmask' - If the source data is a MaskedArray the - extrapolation points will be masked. Otherwise they will be - set to NaN. + * 'extrapolate' - The extrapolation points will be calculated + according to the method. The 'linear' method extends the + gradient of the closest two points. The 'nearest' method + uses the value of the closest point. + * 'nan' - The extrapolation points will be be set to NaN. + * 'error' - A ValueError exception will be raised, notifying an + attempt to extrapolate. + * 'mask' - The extrapolation points will always be masked, even + if the source data is not a MaskedArray. + * 'nanmask' - If the source data is a MaskedArray the + extrapolation points will be masked. Otherwise they will be + set to NaN. """ # Trigger any deferred loading of the source cube's data and snapshot @@ -248,10 +251,7 @@ def extrapolation_mode(self): return self._mode def _account_for_circular(self, points, data): - """Extend the given data array, and re-centralise coordinate points - for circular (1D) coordinates. 
- - """ + """Extend data array, and re-centralise coordinate points for circular (1D) coordinates.""" from iris.analysis.cartography import wrap_lons for circular, modulus, index, dim, offset in self._circulars: @@ -284,10 +284,11 @@ def _interpolate(self, data, interp_points): it to perform interpolation over the data at the given coordinate point values. - * data (ndarray): + Parameters + ---------- + data : ndarray A data array, to be interpolated in its first 'N' dimensions. - - * interp_points (ndarray): + interp_points : ndarray An array of interpolation coordinate values. Its shape is (..., N) where N is the number of interpolation dimensions. @@ -379,7 +380,9 @@ def _resample_coord(self, sample_points, coord, coord_dims): return new_coord def _setup(self): - """Perform initial start-up configuration and validation based on the + """Perform initial start-up configuration and validation. + + Perform initial start-up configuration and validation based on the cube and the specified coordinates to be interpolated over. """ @@ -430,7 +433,9 @@ def _setup(self): self._validate() def _validate(self): - """Perform all sanity checks to ensure that the interpolation request + """Perform checks to ensure interpolation request is valid. + + Perform all sanity checks to ensure that the interpolation request over the cube with the specified coordinates is valid and can be performed. @@ -454,10 +459,7 @@ def _validate(self): raise ValueError(msg.format(coord.name())) def _interpolated_dtype(self, dtype): - """Determine the minimum base dtype required by the - underlying interpolator. - - """ + """Determine the minimum base dtype required by the underlying interpolator.""" if self._method == "nearest": result = dtype else: @@ -465,29 +467,30 @@ def _interpolated_dtype(self, dtype): return result def _points(self, sample_points, data, data_dims=None): - """Interpolate the given data values at the specified list of orthogonal - (coord, points) pairs. 
+ """Interpolate at the specified points. - Args: + Interpolate the given data values at the specified list of orthogonal + (coord, points) pairs. - * sample_points: + Parameters + ---------- + sample_points : A list of N iterables, where N is the number of coordinates passed to the constructor. [sample_values_for_coord_0, sample_values_for_coord_1, ...] - * data: + data : The data to interpolate - not necessarily the data from the cube that was used to construct this interpolator. If the data has fewer dimensions, then data_dims must be defined. - - Kwargs: - - * data_dims: + data_dims : optional The dimensions of the given data array in terms of the original cube passed through to this interpolator's constructor. If None, the data dimensions must map one-to-one onto the increasing dimension order of the cube. - Returns: + Returns + ------- + :class:`~numpy.ndarray` or :class:`~numpy.ma.MaskedArray` An :class:`~numpy.ndarray` or :class:`~numpy.ma.MaskedArray` instance of the interpolated data. @@ -565,20 +568,19 @@ def _points(self, sample_points, data, data_dims=None): def __call__(self, sample_points, collapse_scalar=True): """Construct a cube from the specified orthogonal interpolation points. - Args: - - * sample_points: + Parameters + ---------- + sample_points : A list of N iterables, where N is the number of coordinates passed to the constructor. [sample_values_for_coord_0, sample_values_for_coord_1, ...] - - Kwargs: - - * collapse_scalar: + collapse_scalar : bool, default=True Whether to collapse the dimension of the scalar sample points in the resulting cube. Default is True. - Returns: + Returns + ------- + :class:`iris.cube.Cube` A cube interpolated at the given sample points. The dimensionality of the cube will be the number of original cube dimensions minus the number of scalar coordinates, if collapse_scalar is True. 
diff --git a/lib/iris/analysis/_regrid.py b/lib/iris/analysis/_regrid.py index b2ce99ee7a..b85265e5d9 100644 --- a/lib/iris/analysis/_regrid.py +++ b/lib/iris/analysis/_regrid.py @@ -19,8 +19,8 @@ snapshot_grid, ) from iris.analysis._scipy_interpolate import _RegularGridInterpolator -from iris.exceptions import IrisImpossibleUpdateWarning from iris.util import _meshgrid, guess_coord_axis +from iris.warnings import IrisImpossibleUpdateWarning def _transform_xy_arrays(crs_from, x, y, crs_to): @@ -28,15 +28,16 @@ def _transform_xy_arrays(crs_from, x, y, crs_to): NOTE: copied private function from iris.analysis.cartography. - Args: - - * crs_from, crs_to (:class:`cartopy.crs.Projection`): + Parameters + ---------- + crs_from, crs_to : :class:`cartopy.crs.Projection` The coordinate reference systems. - * x, y (arrays): + x, y : arrays point locations defined in 'crs_from'. - Returns: - x, y : Arrays of locations defined in 'crs_to'. + Returns + ------- + x, y : Arrays of locations defined in 'crs_to'. """ pts = crs_to.transform_points(crs_from, x, y) @@ -50,7 +51,6 @@ def _regrid_weighted_curvilinear_to_rectilinear__prepare(src_cube, weights, grid The 'regrid info' returned can be re-used over many cubes. """ - # Get the source cube x and y 2D auxiliary coordinates. sx, sy = src_cube.coord(axis="x"), src_cube.coord(axis="y") # Get the target grid cube x and y dimension coordinates. @@ -368,25 +368,23 @@ def _regrid_weighted_curvilinear_to_rectilinear__perform(src_cube, regrid_info): class CurvilinearRegridder: - """This class provides support for performing point-in-cell regridding + """Provides support for performing point-in-cell regridding. + + This class provides support for performing point-in-cell regridding between a curvilinear source grid and a rectilinear target grid. """ def __init__(self, src_grid_cube, target_grid_cube, weights=None): - """Create a regridder for conversions between the source - and target grids. 
- - Args: + """Create a regridder for conversions between the source and target grids. - * src_grid_cube: + Parameters + ---------- + src_grid_cube : :class:`~iris.cube.Cube` The :class:`~iris.cube.Cube` providing the source grid. - * tgt_grid_cube: + tgt_grid_cube : :class:`~iris.cube.Cube` The :class:`~iris.cube.Cube` providing the target grid. - - Optional Args: - - * weights: + weights : optional A :class:`numpy.ndarray` instance that defines the weights for the grid cells of the source grid. Must have the same shape as the data of the source grid. @@ -409,19 +407,18 @@ def __init__(self, src_grid_cube, target_grid_cube, weights=None): @staticmethod def _get_horizontal_coord(cube, axis): - """Gets the horizontal coordinate on the supplied cube along the - specified axis. - - Args: + """Get the horizontal coordinate on the supplied cube along the specified axis. - * cube: + Parameters + ---------- + cube : :class:`iris.cube.Cube` An instance of :class:`iris.cube.Cube`. - * axis: + axis : Locate coordinates on `cube` along this axis. - Returns: - The horizontal coordinate on the specified axis of the supplied - cube. + Returns + ------- + The horizontal coordinate on the specified axis of the supplied cube. """ coords = cube.coords(axis=axis, dim_coords=False) @@ -434,7 +431,9 @@ def _get_horizontal_coord(cube, axis): return coords[0] def __call__(self, src): - """Regrid the supplied :class:`~iris.cube.Cube` on to the target grid of + """Regrid onto the target grid. + + Regrid the supplied :class:`~iris.cube.Cube` on to the target grid of this :class:`_CurvilinearRegridder`. The given cube must be defined with the same grid as the source @@ -443,12 +442,14 @@ def __call__(self, src): If the source cube has lazy data, it will be realized before regridding and the returned cube will also have realized data. - Args: - - * src: + Parameters + ---------- + src : :class:`~iris.cube.Cube` A :class:`~iris.cube.Cube` to be regridded. 
- Returns: + Returns + ------- + :class:`~iris.cube.Cube` A cube defined with the horizontal dimensions of the target and the other dimensions from this cube. The data values of this cube will be converted to values on the new grid using @@ -485,37 +486,38 @@ def __call__(self, src): class RectilinearRegridder: - """This class provides support for performing nearest-neighbour or + """Provides support for performing nearest-neighbour or linear regridding. + + This class provides support for performing nearest-neighbour or linear regridding between source and target grids. """ def __init__(self, src_grid_cube, tgt_grid_cube, method, extrapolation_mode): - """Create a regridder for conversions between the source - and target grids. - - Args: + """Create a regridder for conversions between the source and target grids. - * src_grid_cube: + Parameters + ---------- + src_grid_cube : :class:`~iris.cube.Cube` The :class:`~iris.cube.Cube` providing the source grid. - * tgt_grid_cube: + tgt_grid_cube : :class:`~iris.cube.Cube` The :class:`~iris.cube.Cube` providing the target grid. - * method: + method : Either 'linear' or 'nearest'. - * extrapolation_mode: + extrapolation_mode : str Must be one of the following strings: - * 'extrapolate' - The extrapolation points will be - calculated by extending the gradient of the closest two - points. - * 'nan' - The extrapolation points will be be set to NaN. - * 'error' - An exception will be raised, notifying an - attempt to extrapolate. - * 'mask' - The extrapolation points will always be masked, even - if the source data is not a MaskedArray. - * 'nanmask' - If the source data is a MaskedArray the - extrapolation points will be masked. Otherwise they will be - set to NaN. + * 'extrapolate' - The extrapolation points will be + calculated by extending the gradient of the closest two + points. + * 'nan' - The extrapolation points will be be set to NaN. + * 'error' - An exception will be raised, notifying an + attempt to extrapolate. 
+ * 'mask' - The extrapolation points will always be masked, even + if the source data is not a MaskedArray. + * 'nanmask' - If the source data is a MaskedArray the + extrapolation points will be masked. Otherwise they will be + set to NaN. """ from iris.cube import Cube @@ -553,23 +555,27 @@ def extrapolation_mode(self): @staticmethod def _sample_grid(src_coord_system, grid_x_coord, grid_y_coord): - """Convert the rectilinear grid coordinates to a curvilinear grid in + """Convert the rectilinear grid to a curvilinear grid. + + Convert the rectilinear grid coordinates to a curvilinear grid in the source coordinate system. The `grid_x_coord` and `grid_y_coord` must share a common coordinate system. - Args: - - * src_coord_system: + Parameters + ---------- + src_coord_system : :class:`iris.coord_system.CoordSystem` The :class:`iris.coord_system.CoordSystem` for the grid of the source Cube. - * grid_x_coord: + grid_x_coord : :class:`iris.coords.DimCoord` The :class:`iris.coords.DimCoord` for the X coordinate. - * grid_y_coord: + grid_y_coord : :class:`iris.coords.DimCoord` The :class:`iris.coords.DimCoord` for the Y coordinate. - Returns: + Returns + ------- + tuple A tuple of the X and Y coordinate values as 2-dimensional arrays. @@ -602,54 +608,55 @@ def _regrid( """Regrid the given data from the src grid to the sample grid. The result will be a MaskedArray if either/both of: - - the source array is a MaskedArray, - - the extrapolation_mode is 'mask' and the result requires - extrapolation. + + * the source array is a MaskedArray, + * the extrapolation_mode is 'mask' and the result requires + extrapolation. If the result is a MaskedArray the mask for each element will be set if either/both of: - - there is a non-zero contribution from masked items in the input data - - the element requires extrapolation and the extrapolation_mode - dictates a masked value. 
- Args: + * there is a non-zero contribution from masked items in the input data + * the element requires extrapolation and the extrapolation_mode + dictates a masked value. - * src_data: + Parameters + ---------- + src_data : An N-dimensional NumPy array or MaskedArray. - * x_dim: + x_dim : The X dimension within `src_data`. - * y_dim: + y_dim : The Y dimension within `src_data`. - * src_x_coord: + src_x_coord : :class:`iris.coords.DimCoord` The X :class:`iris.coords.DimCoord`. - * src_y_coord: + src_y_coord : :class:`iris.coords.DimCoord` The Y :class:`iris.coords.DimCoord`. - * sample_grid_x: + sample_grid_x : A 2-dimensional array of sample X values. - * sample_grid_y: + sample_grid_y : A 2-dimensional array of sample Y values. - - Kwargs: - - * method: + method: str, default="linear" Either 'linear' or 'nearest'. The default method is 'linear'. - * extrapolation_mode: + extrapolation_mode : str, default="nanmask" Must be one of the following strings: - * 'linear' - The extrapolation points will be calculated by - extending the gradient of the closest two points. - * 'nan' - The extrapolation points will be be set to NaN. - * 'error' - A ValueError exception will be raised, notifying an - attempt to extrapolate. - * 'mask' - The extrapolation points will always be masked, even - if the source data is not a MaskedArray. - * 'nanmask' - If the source data is a MaskedArray the - extrapolation points will be masked. Otherwise they will be - set to NaN. + * 'linear' - The extrapolation points will be calculated by + extending the gradient of the closest two points. + * 'nan' - The extrapolation points will be be set to NaN. + * 'error' - A ValueError exception will be raised, notifying an + attempt to extrapolate. + * 'mask' - The extrapolation points will always be masked, even + if the source data is not a MaskedArray. + * 'nanmask' - If the source data is a MaskedArray the + extrapolation points will be masked. Otherwise they will be + set to NaN. 
The default mode of extrapolation is 'nanmask'. - Returns: + Returns + ------- + NumPy array The regridded data as an N-dimensional NumPy array. The lengths of the X and Y dimensions will now match those of the sample grid. @@ -845,7 +852,9 @@ def _check_units(self, coord): raise ValueError(msg) def __call__(self, src): - """Regrid this :class:`~iris.cube.Cube` on to the target grid of + """Regrid onto target grid. + + Regrid this :class:`~iris.cube.Cube` on to the target grid of this :class:`RectilinearRegridder`. The given cube must be defined with the same grid as the source @@ -854,17 +863,21 @@ def __call__(self, src): If the source cube has lazy data, the returned cube will also have lazy data. - Args: - - * src: + Parameters + ---------- + src : :class:`~iris.cube.Cube` A :class:`~iris.cube.Cube` to be regridded. - Returns: + Returns + ------- + :class:`~iris.cube.Cube` A cube defined with the horizontal dimensions of the target and the other dimensions from this cube. The data values of this cube will be converted to values on the new grid using either nearest-neighbour or linear interpolation. + Notes + ----- .. note:: If the source cube has lazy data, @@ -962,13 +975,15 @@ def regrid_callback(*args, **kwargs): def _create_cube(data, src, src_dims, tgt_coords, num_tgt_dims, regrid_callback): r"""Return a new cube for the result of regridding. + Returned cube represents the result of regridding the source cube onto the horizontal coordinates (e.g. latitude) of the target cube. All the metadata and coordinates of the result cube are copied from the source cube, with two exceptions: - - Horizontal coordinates are copied from the target cube. - - Auxiliary coordinates which span the grid dimensions are - ignored. + + * Horizontal coordinates are copied from the target cube. + * Auxiliary coordinates which span the grid dimensions are + ignored. 
Parameters ---------- @@ -978,10 +993,10 @@ def _create_cube(data, src, src_dims, tgt_coords, num_tgt_dims, regrid_callback) The source Cube. src_dims : tuple of int The dimensions of the X and Y coordinate within the source Cube. - tgt_coords : tuple of :class:`iris.coords.Coord`\\ 's - Either two 1D :class:`iris.coords.DimCoord`\\ 's, two 1D - :class:`iris.experimental.ugrid.DimCoord`\\ 's or two n-D - :class:`iris.coords.AuxCoord`\\ 's representing the new grid's + tgt_coords : tuple of :class:`iris.coords.Coord + Either two 1D :class:`iris.coords.DimCoord`, two 1D + :class:`iris.experimental.ugrid.DimCoord` or two n-D + :class:`iris.coords.AuxCoord` representing the new grid's X and Y coordinates. num_tgt_dims : int The number of dimensions that the `tgt_coords` span. diff --git a/lib/iris/analysis/calculus.py b/lib/iris/analysis/calculus.py index 4dd3171fae..6955e847dc 100644 --- a/lib/iris/analysis/calculus.py +++ b/lib/iris/analysis/calculus.py @@ -22,10 +22,10 @@ import iris.analysis.maths import iris.coord_systems import iris.coords -from iris.exceptions import IrisUserWarning from iris.util import delta +from iris.warnings import IrisUserWarning -__all__ = ["cube_delta", "curl", "differentiate"] +__all__ = ["DIRECTIONAL_NAMES", "cube_delta", "curl", "differentiate"] def _construct_delta_coord(coord): @@ -144,6 +144,8 @@ def cube_delta(cube, coord): change_in_temperature_wrt_pressure = cube_delta(temperature_cube, 'pressure') + Notes + ----- .. note:: Missing data support not yet implemented. .. note:: @@ -471,6 +473,9 @@ def curl(i_cube, j_cube, k_cube=None): Calculate the 2-dimensional or 3-dimensional spherical or cartesian curl of the given vector of cubes. + The cube standard names must match one of the combinations in + :data:`DIRECTIONAL_NAMES`. + As well as the standard x and y coordinates, this function requires each cube to possess a vertical or z-like coordinate (representing some form of height or pressure). 
This can be a scalar or dimension coordinate. @@ -734,12 +739,27 @@ def curl(i_cube, j_cube, k_cube=None): return result +#: Acceptable X-Y-Z standard name combinations that +#: :func:`curl` can use (via :func:`spatial_vectors_with_phenom_name`). +DIRECTIONAL_NAMES: tuple[tuple[str, str, str], ...] = ( + ("u", "v", "w"), + ("x", "y", "z"), + ("i", "j", "k"), + ("eastward", "northward", "upward"), + ("easterly", "northerly", "vertical"), + ("easterly", "northerly", "radial"), +) + + def spatial_vectors_with_phenom_name(i_cube, j_cube, k_cube=None): """Given spatially dependent cubes, return a list of the spatial coordinate names. Given 2 or 3 spatially dependent cubes, return a list of the spatial coordinate names with appropriate phenomenon name. + The cube standard names must match one of the combinations in + :data:`DIRECTIONAL_NAMES`. + This routine is designed to identify the vector quantites which each of the cubes provided represent and return a list of their 3d spatial dimension names and associated phenomenon. @@ -752,20 +772,11 @@ def spatial_vectors_with_phenom_name(i_cube, j_cube, k_cube=None): Notes ----- - This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + This function maintains laziness when called; it does not realise data. + See more at :doc:`/userguide/real_and_lazy_data`. """ - directional_names = ( - ("u", "v", "w"), - ("x", "y", "z"), - ("i", "j", "k"), - ("eastward", "northward", "upward"), - ("easterly", "northerly", "vertical"), - ("easterly", "northerly", "radial"), - ) - # Create a list of the standard_names of our incoming cubes # (excluding the k_cube if it is None). cube_standard_names = [ @@ -795,7 +806,7 @@ def spatial_vectors_with_phenom_name(i_cube, j_cube, k_cube=None): # Get the appropriate direction list from the cube_directions we # have got from the standard name. 
direction = None - for possible_direction in directional_names: + for possible_direction in DIRECTIONAL_NAMES: # If this possible direction (minus the k_cube if it is none) # matches direction from the given cubes use it. if possible_direction[0 : len(cube_directions)] == cube_directions: @@ -804,7 +815,7 @@ def spatial_vectors_with_phenom_name(i_cube, j_cube, k_cube=None): # If we didn't get a match, raise an Exception if direction is None: direction_string = "; ".join( - ", ".join(possible_direction) for possible_direction in directional_names + ", ".join(possible_direction) for possible_direction in DIRECTIONAL_NAMES ) raise ValueError( "{} are not recognised vector cube_directions. " diff --git a/lib/iris/analysis/cartography.py b/lib/iris/analysis/cartography.py index c0613028e3..bd1958581f 100644 --- a/lib/iris/analysis/cartography.py +++ b/lib/iris/analysis/cartography.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Various utilities and numeric transformations relevant to cartography. - -""" +"""Various utilities and numeric transformations relevant to cartography.""" from collections import namedtuple import copy @@ -21,6 +19,7 @@ import iris.coords import iris.exceptions from iris.util import _meshgrid +import iris.warnings from ._grid_angles import gridcell_angles, rotate_grid_vectors @@ -55,12 +54,21 @@ def wrap_lons(lons, base, period): """Wrap longitude values into the range between base and base+period. + Parameters + ---------- + lons : + base : + period : + + Examples + -------- .. testsetup:: import numpy as np from iris.analysis.cartography import wrap_lons - For example: + :: + >>> print(wrap_lons(np.array([185, 30, -200, 75]), -180, 360)) [-175. 30. 160. 75.] 
@@ -76,7 +84,12 @@ def wrap_lons(lons, base, period): def unrotate_pole(rotated_lons, rotated_lats, pole_lon, pole_lat): - """Convert arrays of rotated-pole longitudes and latitudes to unrotated + """Convert rotated-pole to unrotated longitudes and latitudes. + + ``pole_lat`` should describe the location of the rotated pole that + describes the arrays of rotated-pole longitudes and latitudes. + + Convert arrays of rotated-pole longitudes and latitudes to unrotated arrays of longitudes and latitudes. The values of ``pole_lon`` and ``pole_lat`` should describe the location of the rotated pole that describes the arrays of rotated-pole longitudes and latitudes. @@ -85,28 +98,30 @@ def unrotate_pole(rotated_lons, rotated_lats, pole_lon, pole_lat): rectilinear grid, the arrays of rotated-pole longitudes and latitudes must be of the same shape as each other. - Example:: - - lons, lats = unrotate_pole(rotated_lons, rotated_lats, \ - pole_lon, pole_lat) - .. note:: Uses proj.4 to perform the conversion. - Args: + Parameters + ---------- + rotated_lons : + An array of rotated-pole longitude values. + rotated_lats : + An array of rotated-pole latitude values. + pole_lon : + The longitude of the rotated pole that describes the arrays of + rotated-pole longitudes and latitudes. + pole_lat : + The latitude of the rotated pole that describes the arrays of + rotated-pole longitudes and latitudes. + + Returns + ------- + An array of unrotated longitudes and an array of unrotated latitudes. - * rotated_lons: - An array of rotated-pole longitude values. - * rotated_lats: - An array of rotated-pole latitude values. - * pole_lon: - The longitude of the rotated pole that describes the arrays of - rotated-pole longitudes and latitudes. - * pole_lat: - The latitude of the rotated pole that describes the arrays of - rotated-pole longitudes and latitudes. + Examples + -------- + :: - Returns: - An array of unrotated longitudes and an array of unrotated latitudes. 
+ lons, lats = unrotate_pole(rotated_lons, rotated_lats, pole_lon, pole_lat) """ src_proj = ccrs.RotatedGeodetic(pole_longitude=pole_lon, pole_latitude=pole_lat) @@ -119,8 +134,9 @@ def unrotate_pole(rotated_lons, rotated_lats, pole_lon, pole_lat): def rotate_pole(lons, lats, pole_lon, pole_lat): - """Convert arrays of longitudes and latitudes to arrays of rotated-pole - longitudes and latitudes. The values of ``pole_lon`` and ``pole_lat`` + """Convert unrotated longitudes and latitudes to rotated-pole. + + The values of ``pole_lon`` and ``pole_lat`` should describe the rotated pole that the arrays of longitudes and latitudes are to be rotated onto. @@ -128,29 +144,30 @@ def rotate_pole(lons, lats, pole_lon, pole_lat): the arrays of rotated-pole longitudes and latitudes must be of the same shape as each other. - Example:: - - rotated_lons, rotated_lats = rotate_pole(lons, lats,\ - pole_lon, pole_lat) - .. note:: Uses proj.4 to perform the conversion. - Args: + Parameters + ---------- + lons : + An array of longitude values. + lats : + An array of latitude values. + pole_lon : + The longitude of the rotated pole that the arrays of longitudes and + latitudes are to be rotated onto. + pole_lat : + The latitude of the rotated pole that the arrays of longitudes and + latitudes are to be rotated onto. + + Returns + ------- + An array of rotated-pole longitudes and an array of rotated-pole latitudes. - * lons: - An array of longitude values. - * lats: - An array of latitude values. - * pole_lon: - The longitude of the rotated pole that the arrays of longitudes and - latitudes are to be rotated onto. - * pole_lat: - The latitude of the rotated pole that the arrays of longitudes and - latitudes are to be rotated onto. + Examples + -------- + :: - Returns: - An array of rotated-pole longitudes and an array of rotated-pole - latitudes. 
+ rotated_lons, rotated_lats = rotate_pole(lons, lats, pole_lon, pole_lat) """ src_proj = ccrs.Geodetic() @@ -185,15 +202,14 @@ def search_for_coord(coord_iterable, coord_name): def _xy_range(cube, mode=None): """Return the x & y range of this Cube. - Args: - - * cube - The cube for which to calculate xy extents. - - Kwargs: - - * mode - If the coordinate has bounds, set this to specify the - min/max calculation. - Set to iris.coords.POINT_MODE or iris.coords.BOUND_MODE. + Parameters + ---------- + cube : + The cube for which to calculate xy extents. + mode : optional + If the coordinate has bounds, set this to specify the + min/max calculation. + Set to iris.coords.POINT_MODE or iris.coords.BOUND_MODE. """ # Helpful error if we have an inappropriate CoordSystem @@ -249,11 +265,14 @@ def _xy_range(cube, mode=None): def get_xy_grids(cube): """Return 2D X and Y points for a given cube. - Args: - - * cube - The cube for which to generate 2D X and Y points. + Parameters + ---------- + cube : + The cube for which to generate 2D X and Y points. - Example:: + Examples + -------- + :: x, y = get_xy_grids(cube) @@ -261,6 +280,7 @@ def get_xy_grids(cube): ----- This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. + """ x_coord, y_coord = cube.coord(axis="X"), cube.coord(axis="Y") @@ -284,7 +304,13 @@ def get_xy_contiguous_bounded_grids(cube): Returns array of shape (n+1, m+1). - Example:: + Parameters + ---------- + cube : :class:`iris.cube.Cube` + + Examples + -------- + :: xs, ys = get_xy_contiguous_bounded_grids(cube) @@ -306,16 +332,11 @@ def get_xy_contiguous_bounded_grids(cube): def _quadrant_area(radian_lat_bounds, radian_lon_bounds, radius_of_earth): """Calculate spherical segment areas. 
- - radian_lat_bounds -- [n,2] array of latitude bounds (radians) - - radian_lon_bounds -- [n,2] array of longitude bounds (radians) - - radius_of_earth -- radius of the earth - (currently assumed spherical) - Area weights are calculated for each lat/lon cell as: - .. math:: + .. math:: - r^2 (lon_1 - lon_0) ( sin(lat_1) - sin(lat_0)) + r^2 (lon_1 - lon_0) ( sin(lat_1) - sin(lat_0)) The resulting array will have a shape of *(radian_lat_bounds.shape[0], radian_lon_bounds.shape[0])* @@ -323,6 +344,15 @@ def _quadrant_area(radian_lat_bounds, radian_lon_bounds, radius_of_earth): The calculations are done at 64 bit precision and the returned array will be of type numpy.float64. + Parameters + ---------- + radian_lat_bounds : + [n,2] array of latitude bounds (radians) + radian_lon_bounds : + [n,2] array of longitude bounds (radians) + radius_of_earth : + radius of the earth (currently assumed spherical) + """ # ensure pairs of bounds if ( @@ -347,34 +377,35 @@ def _quadrant_area(radian_lat_bounds, radian_lon_bounds, radius_of_earth): def area_weights(cube, normalize=False): - r"""Returns an array of area weights, with the same dimensions as the cube. + r"""Return an array of area weights, with the same dimensions as the cube. This is a 2D lat/lon area weights array, repeated over the non lat/lon dimensions. - Args: - - * cube (:class:`iris.cube.Cube`): - The cube to calculate area weights for. - - Kwargs: - - * normalize (False/True): - If False, weights are grid cell areas. If True, weights are grid - cell areas divided by the total grid area. - The cube must have coordinates 'latitude' and 'longitude' with bounds. Area weights are calculated for each lat/lon cell as: - .. math:: + .. math:: - r^2 (lon_1 - lon_0) (\sin(lat_1) - \sin(lat_0)) + r^2 (lon_1 - lon_0) (\sin(lat_1) - \sin(lat_0)) Currently, only supports a spherical datum. Uses earth radius from the cube, if present and spherical. Defaults to iris.analysis.cartography.DEFAULT_SPHERICAL_EARTH_RADIUS. 
+ Parameters + ---------- + cube : :class:`iris.cube.Cube` + The cube to calculate area weights for. + normalize : bool, default=False + If False, weights are grid cell areas. If True, weights are grid + cell areas divided by the total grid area. + + Returns + ------- + broad_weights : + """ # Get the radius of the earth cs = cube.coord_system("CoordSystem") @@ -382,7 +413,7 @@ def area_weights(cube, normalize=False): if cs.inverse_flattening != 0.0: warnings.warn( "Assuming spherical earth from ellipsoid.", - category=iris.exceptions.IrisDefaultingWarning, + category=iris.warnings.IrisDefaultingWarning, ) radius_of_earth = cs.semi_major_axis elif isinstance(cs, iris.coord_systems.RotatedGeogCS) and ( @@ -391,13 +422,13 @@ def area_weights(cube, normalize=False): if cs.ellipsoid.inverse_flattening != 0.0: warnings.warn( "Assuming spherical earth from ellipsoid.", - category=iris.exceptions.IrisDefaultingWarning, + category=iris.warnings.IrisDefaultingWarning, ) radius_of_earth = cs.ellipsoid.semi_major_axis else: warnings.warn( "Using DEFAULT_SPHERICAL_EARTH_RADIUS.", - category=iris.exceptions.IrisDefaultingWarning, + category=iris.warnings.IrisDefaultingWarning, ) radius_of_earth = DEFAULT_SPHERICAL_EARTH_RADIUS @@ -466,7 +497,9 @@ def area_weights(cube, normalize=False): def cosine_latitude_weights(cube): - r"""Returns an array of latitude weights, with the same dimensions as + r"""Calculate cosine latitude weights, with the same dimensions as the cube. + + Return an array of latitude weights, with the same dimensions as the cube. The weights are the cosine of latitude. These are n-dimensional latitude weights repeated over the dimensions @@ -478,9 +511,13 @@ def cosine_latitude_weights(cube): Weights are calculated for each latitude as: - .. math:: + .. 
math:: - w_l = \cos \phi_l + w_l = \cos \phi_l + + Parameters + ---------- + cube : :class:`iris.cube.Cube` Examples -------- @@ -533,7 +570,7 @@ def cosine_latitude_weights(cube): ): warnings.warn( "Out of range latitude values will be clipped to the valid range.", - category=iris.exceptions.IrisDefaultingWarning, + category=iris.warnings.IrisDefaultingWarning, ) points = lat.points l_weights = np.cos(points).clip(0.0, 1.0) @@ -561,25 +598,29 @@ def project(cube, target_proj, nx=None, ny=None): prevent one from directly visualising the data, e.g. when the longitude and latitude are two dimensional and do not make up a regular grid. - Args: - * cube - An instance of :class:`iris.cube.Cube`. - * target_proj - An instance of the Cartopy Projection class, or an instance of - :class:`iris.coord_systems.CoordSystem` from which a projection - will be obtained. - Kwargs: - * nx - Desired number of sample points in the x direction for a domain - covering the globe. - * ny - Desired number of sample points in the y direction for a domain - covering the globe. - - Returns: + Parameters + ---------- + cube : :class:`iris.cube.Cube` + An instance of :class:`iris.cube.Cube`. + target_proj : :class:`iris.coord_systems.CoordSystem` + An instance of the Cartopy Projection class, or an instance of + :class:`iris.coord_systems.CoordSystem` from which a projection + will be obtained. + nx : optional + Desired number of sample points in the x direction for a domain + covering the globe. + ny : optional + Desired number of sample points in the y direction for a domain + covering the globe. + + Returns + ------- + :class:`iris.cube.Cube` An instance of :class:`iris.cube.Cube` and a list describing the extent of the projection. + Notes + ----- .. note:: If there are both dim and aux latitude-longitude coordinates, only @@ -646,7 +687,7 @@ def project(cube, target_proj, nx=None, ny=None): warnings.warn( "Coordinate system of latitude and longitude " "coordinates is not specified. 
Assuming WGS84 Geodetic.", - category=iris.exceptions.IrisDefaultingWarning, + category=iris.warnings.IrisDefaultingWarning, ) orig_cs = iris.coord_systems.GeogCS( semi_major_axis=6378137.0, inverse_flattening=298.257223563 @@ -837,7 +878,7 @@ def project(cube, target_proj, nx=None, ny=None): lon_coord.name(), [coord.name() for coord in discarded_coords], ), - category=iris.exceptions.IrisIgnoringWarning, + category=iris.warnings.IrisIgnoringWarning, ) # TODO handle derived coords/aux_factories @@ -849,18 +890,19 @@ def project(cube, target_proj, nx=None, ny=None): def _transform_xy(crs_from, x, y, crs_to): - """Shorthand function to transform 2d points between coordinate - reference systems. - - Args: + """Shorthand function to transform 2d points between coordinate reference systems. - * crs_from, crs_to (:class:`cartopy.crs.Projection`): + Parameters + ---------- + crs_from, crs_to : :class:`cartopy.crs.Projection` The coordinate reference systems. - * x, y (arrays): + x, y : array point locations defined in 'crs_from'. - Returns: - x, y : Arrays of locations defined in 'crs_to'. + Returns + ------- + x, y + Arrays of locations defined in 'crs_to'. """ pts = crs_to.transform_points(crs_from, x, y) @@ -872,14 +914,16 @@ def _inter_crs_differentials(crs1, x, y, crs2): Returns dx2/dx1, dy2/dx1, dx2/dy1 and dy2/dy1, at given locations. - Args: - - * crs1, crs2 (`cartopy.crs.Projection`): + Parameters + ---------- + crs1, crs2 : :class:`cartopy.crs.Projection` The coordinate systems, "from" and "to". - * x, y (array): + x, y : array Point locations defined in 'crs1'. - Returns: + Returns + ------- + arrays (dx2/dx1, dy2/dx1, dx2/dy1, dy2/dy1) at given locations. Each element of this tuple will be the same shape as the 'x' and 'y' arrays and will be the partial differentials between the two systems. @@ -918,19 +962,22 @@ def _inter_crs_differentials(crs1, x, y, crs2): def _crs_distance_differentials(crs, x, y): - """Calculate d(distance) / d(x) and ... 
/ d(y) for a coordinate - reference system at specified locations. + """Calculate d(distance) / d(x) and ... / d(y). - Args: + Calculate d(distance) / d(x) and ... / d(y) for a coordinate + reference system at specified locations. - * crs (:class:`cartopy.crs.Projection`): + Parameters + ---------- + crs : :class:`cartopy.crs.Projection` The coordinate reference system. - * x, y (array): + x, y : array Locations at which to calculate the differentials, defined in 'crs' coordinate reference system. - Returns: - (abs(ds/dx), abs(ds/dy)). + Returns + ------- + (abs(ds/dx), abs(ds/dy)) Numerically approximated partial differentials, i.e. scaling factors between changes in distance and changes in coordinate values. @@ -950,26 +997,29 @@ def _crs_distance_differentials(crs, x, y): def _transform_distance_vectors(u_dist, v_dist, ds, dx2, dy2): - """Transform distance vectors from one coordinate reference system to - another, preserving magnitude and physical direction. + """Transform distance vectors to another coordinate reference system. - Args: + Transform distance vectors from one coordinate reference system to + another, preserving magnitude and physical direction. - * u_dist, v_dist (array): + Parameters + ---------- + u_dist, v_dist : array Components of each vector along the x and y directions of the source crs at each location. - * ds (`DistanceDifferential`): + ds : `DistanceDifferential` Distance differentials for the source and the target crs at specified locations. - * dx2, dy2 (`PartialDifferential`): + dx2, dy2 : `PartialDifferential` Partial differentials from the source to the target crs. - Returns: + Returns + ------- + tuple (ut_dist, vt_dist): Tuple of arrays containing the vector components along the x and y directions of the target crs at each location. """ - # Scale input distance vectors --> source-coordinate differentials. u1, v1 = u_dist / ds.dx1, v_dist / ds.dy1 # Transform vectors into the target system. 
@@ -982,24 +1032,28 @@ def _transform_distance_vectors(u_dist, v_dist, ds, dx2, dy2): def _transform_distance_vectors_tolerance_mask(src_crs, x, y, tgt_crs, ds, dx2, dy2): - """Return a mask that can be applied to data array to mask elements + """Return a mask that can be applied to data array to mask elements. + + Return a mask that can be applied to data array to mask elements where the magnitude of vectors are not preserved due to numerical errors introduced by the transformation between coordinate systems. - Args: - * src_crs (`cartopy.crs.Projection`): + Parameters + ---------- + src_crs : `cartopy.crs.Projection` The source coordinate reference systems. - * x, y (array): + x, y : array Locations of each vector defined in 'src_crs'. - * tgt_crs (`cartopy.crs.Projection`): + tgt_crs : `cartopy.crs.Projection` The target coordinate reference systems. - * ds (`DistanceDifferential`): + ds : `DistanceDifferential` Distance differentials for src_crs and tgt_crs at specified locations - * dx2, dy2 (`PartialDifferential`): + dx2, dy2 : `PartialDifferential` Partial differentials from src_crs to tgt_crs. - Returns: - 2d boolean array that is the same shape as x and y. + Returns + ------- + 2d boolean array that is the same shape as x and y. """ if x.shape != y.shape: @@ -1040,23 +1094,27 @@ def rotate_winds(u_cube, v_cube, target_cs): also have two 2-dimensional auxiliary coordinates containing the X and Y locations in the target coordinate system. - Args: - - * u_cube + Parameters + ---------- + u_cube : An instance of :class:`iris.cube.Cube` that contains the x-component of the vector. - * v_cube + v_cube : An instance of :class:`iris.cube.Cube` that contains the y-component of the vector. - * target_cs + target_cs : An instance of :class:`iris.coord_systems.CoordSystem` that specifies the new grid directions. 
- Returns: + Returns + ------- + (u', v') tuple of :class:`iris.cube.Cube` A (u', v') tuple of :class:`iris.cube.Cube` instances that are the u and v components in the requested target coordinate system. The units are the same as the inputs. + Notes + ----- .. note:: The U and V values relate to distance, with units such as 'm s-1'. diff --git a/lib/iris/analysis/geometry.py b/lib/iris/analysis/geometry.py index a07ef61a76..120b6dfaa6 100644 --- a/lib/iris/analysis/geometry.py +++ b/lib/iris/analysis/geometry.py @@ -15,18 +15,19 @@ from shapely.geometry import Polygon import iris.exceptions +import iris.warnings def _extract_relevant_cube_slice(cube, geometry): - """Given a shapely geometry object, this helper method returns - the tuple + """Calculate geometry intersection with spatial region defined by cube. + + This helper method returns the tuple (subcube, x_coord_of_subcube, y_coord_of_subcube, - (min_x_index, min_y_index, max_x_index, max_y_index)). + (min_x_index, min_y_index, max_x_index, max_y_index)). If cube and geometry don't overlap, returns None. 
""" - # Validate the input parameters if not cube.coords(axis="x") or not cube.coords(axis="y"): raise ValueError("The cube must contain x and y axes.") @@ -71,7 +72,7 @@ def _extract_relevant_cube_slice(cube, geometry): except ValueError: warnings.warn( "The geometry exceeds the cube's x dimension at the lower end.", - category=iris.exceptions.IrisGeometryExceedWarning, + category=iris.warnings.IrisGeometryExceedWarning, ) x_min_ix = 0 if x_ascending else x_coord.points.size - 1 @@ -81,7 +82,7 @@ def _extract_relevant_cube_slice(cube, geometry): except ValueError: warnings.warn( "The geometry exceeds the cube's x dimension at the upper end.", - category=iris.exceptions.IrisGeometryExceedWarning, + category=iris.warnings.IrisGeometryExceedWarning, ) x_max_ix = x_coord.points.size - 1 if x_ascending else 0 @@ -91,7 +92,7 @@ def _extract_relevant_cube_slice(cube, geometry): except ValueError: warnings.warn( "The geometry exceeds the cube's y dimension at the lower end.", - category=iris.exceptions.IrisGeometryExceedWarning, + category=iris.warnings.IrisGeometryExceedWarning, ) y_min_ix = 0 if y_ascending else y_coord.points.size - 1 @@ -101,7 +102,7 @@ def _extract_relevant_cube_slice(cube, geometry): except ValueError: warnings.warn( "The geometry exceeds the cube's y dimension at the upper end.", - category=iris.exceptions.IrisGeometryExceedWarning, + category=iris.warnings.IrisGeometryExceedWarning, ) y_max_ix = y_coord.points.size - 1 if y_ascending else 0 @@ -135,7 +136,9 @@ def _extract_relevant_cube_slice(cube, geometry): def geometry_area_weights(cube, geometry, normalize=False): - """Returns the array of weights corresponding to the area of overlap between + """Return the array of weights corresponding to the area of overlap. + + Return the array of weights corresponding to the area of overlap between the cells of cube's horizontal grid, and the given shapely geometry. The returned array is suitable for use with :const:`iris.analysis.MEAN`. 
@@ -161,23 +164,19 @@ def geometry_area_weights(cube, geometry, normalize=False): This function does not maintain laziness when called; it realises data. See more at :doc:`/userguide/real_and_lazy_data`. - Args: - - * cube (:class:`iris.cube.Cube`): + Parameters + ---------- + cube : :class:`iris.cube.Cube` A Cube containing a bounded, horizontal grid definition. - * geometry (a shapely geometry instance): + geometry : shapely geometry instance The geometry of interest. To produce meaningful results this geometry must have a non-zero area. Typically a Polygon or MultiPolygon. - - Kwargs: - - * normalize: + normalize : bool, default=False Calculate each individual cell weight as the cell area overlap between the cell and the given shapely geometry divided by the total cell area. Default is False. """ - # extract smallest subcube containing geometry shape = cube.shape extraction_results = _extract_relevant_cube_slice(cube, geometry) diff --git a/lib/iris/analysis/maths.py b/lib/iris/analysis/maths.py index 3d1df8d66f..caf4aea0a8 100644 --- a/lib/iris/analysis/maths.py +++ b/lib/iris/analysis/maths.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Basic mathematical and statistical operations. - -""" +"""Basic mathematical and statistical operations.""" from functools import lru_cache import inspect @@ -25,6 +23,7 @@ import iris.coords import iris.exceptions import iris.util +import iris.warnings # Configure the logger. logger = get_logger(__name__) @@ -32,27 +31,28 @@ @lru_cache(maxsize=128, typed=True) def _output_dtype(op, first_dtype, second_dtype=None, in_place=False): - """Get the numpy dtype corresponding to the result of applying a unary or - binary operation to arguments of specified dtype. + """Get the numpy dtype. - Args: + Get the numpy dtype corresponding to the result of applying a unary or + binary operation to arguments of specified dtype. 
- * op: + Parameters + ---------- + op : A unary or binary operator which can be applied to array-like objects. - * first_dtype: + first_dtype : The dtype of the first or only argument to the operator. - - Kwargs: - - * second_dtype: + second_dtype : optional The dtype of the second argument to the operator. - - * in_place: + in_place : bool, default=False Whether the operation is to be performed in place. - Returns: - An instance of :class:`numpy.dtype` + Returns + ------- + :class:`numpy.dtype` + Notes + ----- .. note:: The function always returns the dtype which would result if the @@ -74,17 +74,17 @@ def _output_dtype(op, first_dtype, second_dtype=None, in_place=False): def _get_dtype(operand): - """Get the numpy dtype corresponding to the numeric data in the object - provided. - - Args: + """Get the numpy dtype corresponding to the numeric data in the object provided. - * operand: + Parameters + ---------- + operand : An instance of :class:`iris.cube.Cube` or :class:`iris.coords.Coord`, or a number or :class:`numpy.ndarray`. - Returns: - An instance of :class:`numpy.dtype` + Returns + ------- + :class:`numpy.dtype` """ return np.min_scalar_type(operand) if np.isscalar(operand) else operand.dtype @@ -93,18 +93,16 @@ def _get_dtype(operand): def abs(cube, in_place=False): """Calculate the absolute values of the data in the Cube provided. - Args: - - * cube: + Parameters + ---------- + cube : An instance of :class:`iris.cube.Cube`. - - Kwargs: - - * in_place: + in_place : bool, default=False Whether to create a new Cube, or alter the given "cube". - Returns: - An instance of :class:`iris.cube.Cube`. + Returns + ------- + :class:`iris.cube.Cube`. Notes ----- @@ -124,17 +122,21 @@ def intersection_of_cubes(cube, other_cube): .. note:: The intersection of cubes function will ignore all single valued coordinates in checking the intersection. - Args: - - * cube: + Parameters + ---------- + cube : An instance of :class:`iris.cube.Cube`. 
- * other_cube: + other_cube : An instance of :class:`iris.cube.Cube`. - Returns: + Returns + ------- + A paired tuple of :class:`iris.cube.Cube` A pair of :class:`iris.cube.Cube` instances in a tuple corresponding to the original cubes restricted to their intersection. + Notes + ----- .. deprecated:: 3.2.0 Instead use :meth:`iris.cube.CubeList.extract_overlapping`. For example, @@ -153,8 +155,6 @@ def intersection_of_cubes(cube, other_cube): intersections = cubes.extract_overlapping(coords) cube1, cube2 = (intersections[0], intersections[1]) - Notes - ----- This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -207,7 +207,9 @@ def _assert_is_cube(cube): @_lenient_client(services=SERVICES) def add(cube, other, dim=None, in_place=False): - """Calculate the sum of two cubes, or the sum of a cube and a coordinate or + """Calculate the sum. + + Calculate the sum of two cubes, or the sum of a cube and a coordinate or array or scalar value. When summing two cubes, they must both have the same coordinate systems and @@ -220,14 +222,11 @@ def add(cube, other, dim=None, in_place=False): ---------- cube : iris.cube.Cube First operand to add. - other: iris.cube.Cube, iris.coords.Coord, number, numpy.ndarray or dask.array.Array Second operand to add. - dim : int, optional If `other` is a coord which does not exist on the cube, specify the dimension to which it should be mapped. - in_place : bool, default=False If `True`, alters the input cube. Otherwise a new cube is created. @@ -260,7 +259,9 @@ def add(cube, other, dim=None, in_place=False): @_lenient_client(services=SERVICES) def subtract(cube, other, dim=None, in_place=False): - """Calculate the difference between two cubes, or the difference between + """Calculate the difference. + + Calculate the difference between two cubes, or the difference between a cube and a coordinate or array or scalar value. 
When differencing two cubes, they must both have the same coordinate systems @@ -273,14 +274,11 @@ def subtract(cube, other, dim=None, in_place=False): ---------- cube : iris.cube.Cube Cube from which to subtract. - other: iris.cube.Cube, iris.coords.Coord, number, numpy.ndarray or dask.array.Array Object to subtract from the cube. - dim : int, optional If `other` is a coord which does not exist on the cube, specify the dimension to which it should be mapped. - in_place : bool, default=False If `True`, alters the input cube. Otherwise a new cube is created. @@ -320,22 +318,27 @@ def _add_subtract_common( dim=None, in_place=False, ): - """Function which shares common code between addition and subtraction - of cubes. - - operation_function - function which does the operation - (e.g. numpy.subtract) - operation_name - the public name of the operation (e.g. 'divide') - cube - the cube whose data is used as the first argument - to `operation_function` - other - the cube, coord, ndarray, dask array or number whose - data is used as the second argument - new_dtype - the expected dtype of the output. Used in the - case of scalar masked arrays - dim - dimension along which to apply `other` if it's a - coordinate that is not found in `cube` - in_place - whether or not to apply the operation in place to - `cube` and `cube.data` + """Share common code between addition and subtraction of cubes. + + Parameters + ---------- + operation_function : + function which does the operation (e.g. numpy.subtract) + operation_name : + The public name of the operation (e.g. 'divide') + cube : + The cube whose data is used as the first argument to `operation_function` + other : + The cube, coord, ndarray, dask array or number whose + data is used as the second argument + new_dtype : + The expected dtype of the output. 
Used in the case of scalar + masked arrays + dim : optional + Dimension along which to apply `other` if it's a coordinate that is not + found in `cube` + in_place : bool, default=False + Whether or not to apply the operation in place to `cube` and `cube.data` """ _assert_is_cube(cube) @@ -363,7 +366,9 @@ def _add_subtract_common( @_lenient_client(services=SERVICES) def multiply(cube, other, dim=None, in_place=False): - """Calculate the product of two cubes, or the product of a cube and a coordinate + """Calculate the product. + + Calculate the product of two cubes, or the product of a cube and a coordinate or array or scalar value. When multiplying two cubes, they must both have the same coordinate systems @@ -376,14 +381,11 @@ def multiply(cube, other, dim=None, in_place=False): ---------- cube : iris.cube.Cube First operand to multiply. - other: iris.cube.Cube, iris.coords.Coord, number, numpy.ndarray or dask.array.Array Second operand to multiply. - dim : int, optional If `other` is a coord which does not exist on the cube, specify the dimension to which it should be mapped. - in_place : bool, default=False If `True`, alters the input cube. Otherwise a new cube is created. @@ -428,7 +430,9 @@ def multiply(cube, other, dim=None, in_place=False): def _inplace_common_checks(cube, other, math_op): - """Check whether an inplace math operation can take place between `cube` and + """Check if an inplace math operation can take place. + + Check whether an inplace math operation can take place between `cube` and `other`. It cannot if `cube` has integer data and `other` has float data as the operation will always produce float data that cannot be 'safely' cast back to the integer data of `cube`. @@ -447,7 +451,9 @@ def _inplace_common_checks(cube, other, math_op): @_lenient_client(services=SERVICES) def divide(cube, other, dim=None, in_place=False): - """Calculate the ratio of two cubes, or the ratio of a cube and a coordinate + """Calculate the ratio. 
+ + Calculate the ratio of two cubes, or the ratio of a cube and a coordinate or array or scalar value. When dividing a cube by another cube, they must both have the same coordinate @@ -460,14 +466,11 @@ def divide(cube, other, dim=None, in_place=False): ---------- cube : iris.cube.Cube Numerator. - other: iris.cube.Cube, iris.coords.Coord, number, numpy.ndarray or dask.array.Array Denominator. - dim : int, optional If `other` is a coord which does not exist on the cube, specify the dimension to which it should be mapped. - in_place : bool, default=False If `True`, alters the input cube. Otherwise a new cube is created. @@ -518,13 +521,13 @@ def divide(cube, other, dim=None, in_place=False): def exponentiate(cube, exponent, in_place=False): - """Returns the result of the given cube to the power of a scalar. + """Return the result of the given cube to the power of a scalar. - Args: - - * cube: + Parameters + ---------- + cube : An instance of :class:`iris.cube.Cube`. - * exponent: + exponent : The integer or floating point exponent. .. note:: When applied to the cube's unit, the exponent must @@ -532,19 +535,18 @@ def exponentiate(cube, exponent, in_place=False): powers of the basic units. e.g. Unit('meter^-2 kilogram second^-1') - - Kwargs: - - * in_place: + in_place : bool, default=False Whether to create a new Cube, or alter the given "cube". - Returns: - An instance of :class:`iris.cube.Cube`. + Returns + ------- + :class:`iris.cube.Cube`. Notes ----- This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. + """ _assert_is_cube(cube) new_dtype = _output_dtype( @@ -575,25 +577,21 @@ def power(data, out=None): def exp(cube, in_place=False): """Calculate the exponential (exp(x)) of the cube. - Args: - - * cube: + Parameters + ---------- + cube : An instance of :class:`iris.cube.Cube`. - - .. note:: - - Taking an exponential will return a cube with dimensionless units. 
- - Kwargs: - - * in_place: + in_place : bool, default=False Whether to create a new Cube, or alter the given "cube". - Returns: - An instance of :class:`iris.cube.Cube`. + Returns + ------- + :class:`iris.cube.Cube`. Notes ----- + Taking an exponential will return a cube with dimensionless units. + This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -609,18 +607,16 @@ def exp(cube, in_place=False): def log(cube, in_place=False): """Calculate the natural logarithm (base-e logarithm) of the cube. - Args: - - * cube: + Parameters + ---------- + cube : An instance of :class:`iris.cube.Cube`. - - Kwargs: - - * in_place: + in_place : bool, default=False Whether to create a new Cube, or alter the given "cube". - Returns: - An instance of :class:`iris.cube.Cube`. + Returns + ------- + :class:`iris.cube.Cube` Notes ----- @@ -643,18 +639,16 @@ def log(cube, in_place=False): def log2(cube, in_place=False): """Calculate the base-2 logarithm of the cube. - Args: - - * cube: + Parameters + ---------- + cube : An instance of :class:`iris.cube.Cube`. - - Kwargs:lib/iris/tests/unit/analysis/maths/test_subtract.py - - * in_place: + in_place : bool, default=False Whether to create a new Cube, or alter the given "cube". - Returns: - An instance of :class:`iris.cube.Cube`. + Returns + ------- + :class:`iris.cube.Cube` Notes ----- @@ -673,18 +667,16 @@ def log2(cube, in_place=False): def log10(cube, in_place=False): """Calculate the base-10 logarithm of the cube. - Args: - - * cube: + Parameters + ---------- + cube : An instance of :class:`iris.cube.Cube`. - - Kwargs: - - * in_place: + in_place : bool, default=False Whether to create a new Cube, or alter the given "cube". - Returns: - An instance of :class:`iris.cube.Cube`. + Returns + ------- + :class:`iris.cube.Cube`. 
Notes ----- @@ -701,7 +693,9 @@ def log10(cube, in_place=False): def apply_ufunc(ufunc, cube, other=None, new_unit=None, new_name=None, in_place=False): - """Apply a `numpy universal function + """Apply a `numpy universal function `_ to a cube. + + Apply a `numpy universal function `_ to a cube or pair of cubes. @@ -711,34 +705,30 @@ def apply_ufunc(ufunc, cube, other=None, new_unit=None, new_name=None, in_place= It is usually preferable to use these functions rather than :func:`iris.analysis.maths.apply_ufunc` where possible. - Args: - - * ufunc: + Parameters + ---------- + ufunc : An instance of :func:`numpy.ufunc` e.g. :func:`numpy.sin`, :func:`numpy.mod`. - - * cube: + cube : An instance of :class:`iris.cube.Cube`. - - Kwargs: - - * other: + other : :class:`iris.cube.Cube`, optional An instance of :class:`iris.cube.Cube` to be given as the second argument to :func:`numpy.ufunc`. - - * new_unit: + new_unit : optional Unit for the resulting Cube. - - * new_name: + new_name : optional Name for the resulting Cube. - - * in_place: + in_place : bool, default=False Whether to create a new Cube, or alter the given "cube". - Returns: - An instance of :class:`iris.cube.Cube`. + Returns + ------- + :class:`iris.cube.Cube`. - Example:: + Examples + -------- + :: cube = apply_ufunc(numpy.sin, cube, in_place=True) @@ -749,7 +739,6 @@ def apply_ufunc(ufunc, cube, other=None, new_unit=None, new_name=None, in_place= See more at :doc:`/userguide/real_and_lazy_data`. """ - if not isinstance(ufunc, np.ufunc): ufunc_name = getattr(ufunc, "__name__", "function passed to apply_ufunc") emsg = f"{ufunc_name} is not recognised, it is not an instance of numpy.ufunc" @@ -819,24 +808,31 @@ def _binary_op_common( in_place=False, sanitise_metadata=True, ): - """Function which shares common code between binary operations. - - operation_function - function which does the operation - (e.g. numpy.divide) - operation_name - the public name of the operation (e.g.
'divide') - cube - the cube whose data is used as the first argument - to `operation_function` - other - the cube, coord, ndarray, dask array or number whose - data is used as the second argument - new_dtype - the expected dtype of the output. Used in the - case of scalar masked arrays - new_unit - unit for the resulting quantity - dim - dimension along which to apply `other` if it's a - coordinate that is not found in `cube` - in_place - whether or not to apply the operation in place to - `cube` and `cube.data` - sanitise_metadata - whether or not to remove metadata using - _sanitise_metadata function + """Share common code between binary operations. + + Parameters + ---------- + operation_function : + Function which does the operation (e.g. numpy.divide) + operation_name : + The public name of the operation (e.g. 'divide') + cube : + The cube whose data is used as the first argument to `operation_function` + other : + The cube, coord, ndarray, dask array or number whose data is used + as the second argument + new_dtype : + The expected dtype of the output. 
Used in the case of scalar masked arrays + new_unit : optional + Unit for the resulting quantity + dim : optional + Dimension along which to apply `other` if it's a coordinate that is + not found in `cube` + in_place : bool, default=False + whether or not to apply the operation in place to `cube` and `cube.data` + sanitise_metadata : bool, default=True + Whether or not to remove metadata using _sanitise_metadata function + """ from iris.cube import Cube @@ -946,7 +942,7 @@ def _broadcast_cube_coord_data(cube, other, operation_name, dim=None): warnings.warn( "Using {!r} with a bounded coordinate is not well " "defined; ignoring bounds.".format(operation_name), - category=iris.exceptions.IrisIgnoringBoundsWarning, + category=iris.warnings.IrisIgnoringBoundsWarning, ) points = other.points @@ -962,7 +958,9 @@ def _broadcast_cube_coord_data(cube, other, operation_name, dim=None): def _sanitise_metadata(cube, unit): - """As part of the maths metadata contract, clear the necessary or + """Clear appropriate metadata from the resultant cube. + + As part of the maths metadata contract, clear the necessary or unsupported metadata from the resultant cube of the maths operation. """ @@ -1041,25 +1039,24 @@ class IFunc: def __init__(self, data_func, units_func): """Create an ifunc from a data function and units function. - Args: - - * data_func: - + Parameters + ---------- + data_func : Function to be applied to one or two data arrays, which are given as positional arguments. Should return another data array, with the same shape as the first array. - May also have keyword arguments. - - * units_func: - + units_func : Function to calculate the units of the resulting cube. Should take the cube/s as input and return an instance of :class:`cf_units.Unit`. - Returns: - An ifunc. 
+ Returns + ------- + ifunc + Examples + -------- **Example usage 1** Using an existing numpy ufunc, such as numpy.sin for the data function and a simple lambda function for the units function:: @@ -1090,8 +1087,8 @@ def ws_units_func(u_cube, v_cube): cs_ifunc = iris.analysis.maths.IFunc(numpy.cumsum, lambda a: a.units) cs_cube = cs_ifunc(cube, axis=1) - """ + """ self._data_func_name = getattr( data_func, "__name__", "data_func argument passed to IFunc" ) @@ -1162,35 +1159,29 @@ def __call__( new_name=None, **kwargs_data_func, ): - """Applies the ifunc to the cube(s). - - Args: + """Apply the ifunc to the cube(s). - * cube + Parameters + ---------- + cube : An instance of :class:`iris.cube.Cube`, whose data is used as the first argument to the data function. - - Kwargs: - - * other + other : optional A cube, coord, ndarray, dask array or number whose data is used as the second argument to the data function. - - * new_name: + new_name : optional Name for the resulting Cube. - - * in_place: + in_place : bool, default=False Whether to create a new Cube, or alter the given "cube". - - * dim: + dim : optional Dimension along which to apply `other` if it's a coordinate that is not found in `cube` - - * kwargs_data_func: + **kwargs_data_func : Keyword arguments that get passed on to the data_func. - Returns: - An instance of :class:`iris.cube.Cube`. + Returns + ------- + :class:`iris.cube.Cube` """ _assert_is_cube(cube) diff --git a/lib/iris/analysis/stats.py b/lib/iris/analysis/stats.py index e3a01f6933..8df93571f1 100644 --- a/lib/iris/analysis/stats.py +++ b/lib/iris/analysis/stats.py @@ -2,17 +2,18 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Statistical operations between cubes. 
- -""" +"""Statistical operations between cubes.""" +import dask.array as da import numpy as np -import numpy.ma as ma import iris -from iris.util import broadcast_to_shape +from iris.common import SERVICES, Resolve +from iris.common.lenient import _lenient_client +from iris.util import _mask_array +@_lenient_client(services=SERVICES) def pearsonr( cube_a, cube_b, @@ -21,54 +22,58 @@ def pearsonr( mdtol=1.0, common_mask=False, ): - """Calculate the Pearson's r correlation coefficient over specified - dimensions. - - Args: + """Calculate the Pearson's r correlation coefficient over specified dimensions. - * cube_a, cube_b (cubes): + Parameters + ---------- + cube_a, cube_b : :class:`iris.cube.Cube` Cubes between which the correlation will be calculated. The cubes should either be the same shape and have the same dimension coordinates - or one cube should be broadcastable to the other. - * corr_coords (str or list of str): + or one cube should be broadcastable to the other. Broadcasting rules + are the same as those for cube arithmetic (see :ref:`cube maths`). + corr_coords : str or list of str, optional The cube coordinate name(s) over which to calculate correlations. If no names are provided then correlation will be calculated over all common cube dimensions. - * weights (numpy.ndarray, optional): - Weights array of same shape as (the smaller of) cube_a and cube_b. Note - that latitude/longitude area weights can be calculated using + weights : :class:`numpy.ndarray`, optional + Weights array of same shape as (the smaller of) `cube_a` and `cube_b`. + Note that latitude/longitude area weights can be calculated using :func:`iris.analysis.cartography.area_weights`. - * mdtol (float, optional): + mdtol : float, default=1.0 Tolerance of missing data. The missing data fraction is calculated - based on the number of grid cells masked in both cube_a and cube_b. If - this fraction exceed mdtol, the returned value in the corresponding - cell is masked. 
mdtol=0 means no missing data is tolerated while - mdtol=1 means the resulting element will be masked if and only if all - contributing elements are masked in cube_a or cube_b. Defaults to 1. - * common_mask (bool): - If True, applies a common mask to cube_a and cube_b so only cells which - are unmasked in both cubes contribute to the calculation. If False, the - variance for each cube is calculated from all available cells. Defaults - to False. - - Returns: + based on the number of grid cells masked in both `cube_a` and `cube_b`. + If this fraction exceed `mdtol`, the returned value in the + corresponding cell is masked. `mdtol` =0 means no missing data is + tolerated while `mdtol` =1 means the resulting element will be masked + if and only if all contributing elements are masked in `cube_a` or + `cube_b`. + common_mask : bool, default=False + If ``True``, applies a common mask to cube_a and cube_b so only cells + which are unmasked in both cubes contribute to the calculation. If + ``False``, the variance for each cube is calculated from all available + cells. + + Returns + ------- + :class:`~iris.cube.Cube` A cube of the correlation between the two input cubes along the specified dimensions, at each point in the remaining dimensions of the cubes. For example providing two time/altitude/latitude/longitude cubes and - corr_coords of 'latitude' and 'longitude' will result in a + `corr_coords` of 'latitude' and 'longitude' will result in a time/altitude cube describing the latitude/longitude (i.e. pattern) correlation at each time/altitude point. + Notes + ----- + If either of the input cubes has lazy data, the result will have lazy data. + Reference: https://en.wikipedia.org/wiki/Pearson_correlation_coefficient - This operation is non-lazy. - """ - - # Assign larger cube to cube_1 + # Assign larger cube to cube_1 for simplicity. 
if cube_b.ndim > cube_a.ndim: cube_1 = cube_b cube_2 = cube_a @@ -78,90 +83,88 @@ def pearsonr( smaller_shape = cube_2.shape - dim_coords_1 = [coord.name() for coord in cube_1.dim_coords] - dim_coords_2 = [coord.name() for coord in cube_2.dim_coords] - common_dim_coords = list(set(dim_coords_1) & set(dim_coords_2)) + # Get the broadcast, auto-transposed safe versions of the cube operands. + resolver = Resolve(cube_1, cube_2) + lhs_cube_resolved = resolver.lhs_cube_resolved + rhs_cube_resolved = resolver.rhs_cube_resolved + + if lhs_cube_resolved.has_lazy_data() or rhs_cube_resolved.has_lazy_data(): + al = da + array_lhs = lhs_cube_resolved.lazy_data() + array_rhs = rhs_cube_resolved.lazy_data() + else: + al = np + array_lhs = lhs_cube_resolved.data + array_rhs = rhs_cube_resolved.data + # If no coords passed then set to all common dimcoords of cubes. if corr_coords is None: - corr_coords = common_dim_coords - - def _ones_like(cube): - # Return a copy of cube with the same mask, but all data values set to 1. - # The operation is non-lazy. - # For safety we also discard any cell-measures and ancillary-variables, to - # avoid cube arithmetic possibly objecting to them, or inadvertently retaining - # them in the result where they might be inappropriate. - ones_cube = cube.copy() - ones_cube.data = np.ones_like(cube.data) - ones_cube.rename("unknown") - ones_cube.units = 1 - for cm in ones_cube.cell_measures(): - ones_cube.remove_cell_measure(cm) - for av in ones_cube.ancillary_variables(): - ones_cube.remove_ancillary_variable(av) - return ones_cube + dim_coords_1 = {coord.name() for coord in lhs_cube_resolved.dim_coords} + dim_coords_2 = {coord.name() for coord in rhs_cube_resolved.dim_coords} + corr_coords = list(dim_coords_1.intersection(dim_coords_2)) + + # Interpret coords as array dimensions. 
+ corr_dims = set() + if isinstance(corr_coords, str): + corr_coords = [corr_coords] + for coord in corr_coords: + corr_dims.update(lhs_cube_resolved.coord_dims(coord)) + + corr_dims = tuple(corr_dims) # Match up data masks if required. if common_mask: - # Create a cube of 1's with a common mask. - if ma.is_masked(cube_2.data): - mask_cube = _ones_like(cube_2) - else: - mask_cube = 1.0 - if ma.is_masked(cube_1.data): - # Take a slice to avoid unnecessary broadcasting of cube_2. - slice_coords = [ - dim_coords_1[i] - for i in range(cube_1.ndim) - if dim_coords_1[i] not in common_dim_coords - and np.array_equal( - cube_1.data.mask.any(axis=i), cube_1.data.mask.all(axis=i) - ) - ] - cube_1_slice = next(cube_1.slices_over(slice_coords)) - mask_cube = _ones_like(cube_1_slice) * mask_cube - # Apply common mask to data. - if isinstance(mask_cube, iris.cube.Cube): - cube_1 = cube_1 * mask_cube - cube_2 = mask_cube * cube_2 - dim_coords_2 = [coord.name() for coord in cube_2.dim_coords] - - # Broadcast weights to shape of cubes if necessary. - if weights is None or cube_1.shape == smaller_shape: - weights_1 = weights - weights_2 = weights + mask_lhs = al.ma.getmaskarray(array_lhs) + if al is np: + # Reduce all invariant dimensions of mask_lhs to length 1. This avoids + # unnecessary broadcasting of array_rhs. + index = tuple( + slice(0, 1) + if np.array_equal(mask_lhs.any(axis=dim), mask_lhs.all(axis=dim)) + else slice(None) + for dim in range(mask_lhs.ndim) + ) + mask_lhs = mask_lhs[index] + + array_rhs = _mask_array(array_rhs, mask_lhs) + array_lhs = _mask_array(array_lhs, al.ma.getmaskarray(array_rhs)) + + # Broadcast weights to shape of arrays if necessary. 
+ if weights is None: + weights_lhs = weights_rhs = None else: if weights.shape != smaller_shape: - raise ValueError( - "weights array should have dimensions {}".format(smaller_shape) - ) + msg = f"weights array should have dimensions {smaller_shape}" + raise ValueError(msg) - dims_1_common = [ - i for i in range(cube_1.ndim) if dim_coords_1[i] in common_dim_coords - ] - weights_1 = broadcast_to_shape(weights, cube_1.shape, dims_1_common) - if cube_2.shape != smaller_shape: - dims_2_common = [ - i for i in range(cube_2.ndim) if dim_coords_2[i] in common_dim_coords - ] - weights_2 = broadcast_to_shape(weights, cube_2.shape, dims_2_common) - else: - weights_2 = weights + wt_resolver = Resolve(cube_1, cube_2.copy(weights)) + weights = wt_resolver.rhs_cube_resolved.data + weights_rhs = np.broadcast_to(weights, array_rhs.shape) + weights_lhs = np.broadcast_to(weights, array_lhs.shape) # Calculate correlations. - s1 = cube_1 - cube_1.collapsed(corr_coords, iris.analysis.MEAN, weights=weights_1) - s2 = cube_2 - cube_2.collapsed(corr_coords, iris.analysis.MEAN, weights=weights_2) - - covar = (s1 * s2).collapsed( - corr_coords, iris.analysis.SUM, weights=weights_1, mdtol=mdtol + s_lhs = array_lhs - al.ma.average( + array_lhs, axis=corr_dims, weights=weights_lhs, keepdims=True ) - var_1 = (s1**2).collapsed(corr_coords, iris.analysis.SUM, weights=weights_1) - var_2 = (s2**2).collapsed(corr_coords, iris.analysis.SUM, weights=weights_2) + s_rhs = array_rhs - al.ma.average( + array_rhs, axis=corr_dims, weights=weights_rhs, keepdims=True + ) + + s_prod = resolver.cube(s_lhs * s_rhs) - denom = iris.analysis.maths.apply_ufunc( - np.sqrt, var_1 * var_2, new_unit=covar.units + # Use cube collapsed method as it takes care of coordinate collapsing and missing + # data tolerance. 
+ covar = s_prod.collapsed( + corr_coords, iris.analysis.SUM, weights=weights_lhs, mdtol=mdtol ) + + var_lhs = iris.analysis._sum(s_lhs**2, axis=corr_dims, weights=weights_lhs) + var_rhs = iris.analysis._sum(s_rhs**2, axis=corr_dims, weights=weights_rhs) + + denom = np.sqrt(var_lhs * var_rhs) + corr_cube = covar / denom corr_cube.rename("Pearson's r") + corr_cube.units = 1 return corr_cube diff --git a/lib/iris/analysis/trajectory.py b/lib/iris/analysis/trajectory.py index ed5b911b0c..1dd19cd724 100644 --- a/lib/iris/analysis/trajectory.py +++ b/lib/iris/analysis/trajectory.py @@ -2,10 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Defines a Trajectory class, and a routine to extract a sub-cube along a -trajectory. - -""" +"""Defines a Trajectory class, and a routine to extract a sub-cube along a trajectory.""" import math @@ -40,9 +37,18 @@ class Trajectory: """A series of given waypoints with pre-calculated sample points.""" def __init__(self, waypoints, sample_count=10): - """Defines a trajectory using a sequence of waypoints. + """Define a trajectory using a sequence of waypoints. + + Parameters + ---------- + waypoints : + A sequence of dictionaries, mapping coordinate names to values. + sample_count : int, default=10 + The number of sample positions to use along the trajectory. - For example:: + Examples + -------- + :: waypoints = [{'latitude': 45, 'longitude': -60}, {'latitude': 45, 'longitude': 0}] @@ -51,16 +57,6 @@ def __init__(self, waypoints, sample_count=10): .. note:: All the waypoint dictionaries must contain the same coordinate names. - Args: - - * waypoints - A sequence of dictionaries, mapping coordinate names to values. - - Kwargs: - - * sample_count - The number of sample positions to use along the trajectory. 
- """ self.waypoints = waypoints self.sample_count = sample_count @@ -75,7 +71,7 @@ def __init__(self, waypoints, sample_count=10): self.length = sum([seg.length for seg in segments]) # generate our sampled points - #: The trajectory points, as dictionaries of {coord_name: value}. + # The trajectory points, as dictionaries of {coord_name: value}. self.sampled_points = [] sample_step = self.length / (self.sample_count - 1) @@ -116,10 +112,11 @@ def __repr__(self): ) def _get_interp_points(self): - """Translate `self.sampled_points` to the format expected by the - interpolator. + """Translate `self.sampled_points` to the format expected by the interpolator. - Returns: + Returns + ------- + `self.sampled points` `self.sampled points` in the format required by `:func:`~iris.analysis.trajectory.interpolate`. @@ -131,18 +128,23 @@ def _get_interp_points(self): return [(k, v) for k, v in points.items()] def _src_cube_anon_dims(self, cube): - """A helper method to locate the index of anonymous dimensions on the + """Locate the index of anonymous dimensions. + + A helper method to locate the index of anonymous dimensions on the interpolation target, ``cube``. - Returns: - The index of any anonymous dimensions in ``cube``. + Returns + ------- + The index of any anonymous dimensions in ``cube``. """ named_dims = [cube.coord_dims(c)[0] for c in cube.dim_coords] return list(set(range(cube.ndim)) - set(named_dims)) def interpolate(self, cube, method=None): - """Calls :func:`~iris.analysis.trajectory.interpolate` to interpolate + """Interpolate ``cube`` on the defined trajectory. + + Call :func:`~iris.analysis.trajectory.interpolate` to interpolate ``cube`` on the defined trajectory. Assumes that the coordinate names supplied in the waypoints @@ -150,14 +152,11 @@ def interpolate(self, cube, method=None): supplied in the same coord_system as in `cube`, where appropriate (i.e. for horizontal coordinate points). 
- Args: - - * cube + Parameters + ---------- + cube : The source Cube to interpolate. - - Kwargs: - - * method: + method : optional The interpolation method to use; "linear" (default) or "nearest". Only nearest is available when specifying multi-dimensional coordinates. @@ -186,23 +185,20 @@ def interpolate(self, cube, method=None): def interpolate(cube, sample_points, method=None): """Extract a sub-cube at the given n-dimensional points. - Args: - - * cube + Parameters + ---------- + cube : The source Cube. - - * sample_points + sample_points : A sequence of coordinate (name) - values pairs. - - Kwargs: - - * method + method : optional Request "linear" interpolation (default) or "nearest" neighbour. Only nearest neighbour is available when specifying multi-dimensional coordinates. - - For example:: + Examples + -------- + :: sample_points = [('latitude', [45, 45, 45]), ('longitude', [-60, -50, -40])] @@ -484,20 +480,21 @@ def _ll_to_cart(lon, lat): def _cartesian_sample_points(sample_points, sample_point_coord_names): """Replace geographic lat/lon with cartesian xyz. + Generates coords suitable for nearest point calculations with `scipy.spatial.cKDTree`. - Args: + Parameters + ---------- + sample_points : + [coord][datum] list of sample_positions for each datum, formatted for + fast use of :func:`_ll_to_cart()`. + sample_point_coord_names : + [coord] list of n coord names - * sample_points[coord][datum]: - list of sample_positions for each datum, formatted for fast use of - :func:`_ll_to_cart()`. - - * sample_point_coord_names[coord]: - list of n coord names - - Returns: - list of [x,y,z,t,etc] positions, formatted for kdtree. + Returns + ------- + list of [x,y,z,t,etc] positions, formatted for kdtree. 
""" # Find lat and lon coord indices @@ -533,7 +530,9 @@ def _cartesian_sample_points(sample_points, sample_point_coord_names): def _nearest_neighbour_indices_ndcoords(cube, sample_points, cache=None): - """Returns the indices to select the data value(s) closest to the given + """Calculate the cube nearest neighbour indices for the samples. + + Return the indices to select the data value(s) closest to the given coordinate point values. 'sample_points' is of the form [[coord-or-coord-name, point-value(s)]*]. @@ -700,7 +699,9 @@ def _nearest_neighbour_indices_ndcoords(cube, sample_points, cache=None): class UnstructuredNearestNeigbourRegridder: - """Encapsulate the operation of :meth:`iris.analysis.trajectory.interpolate` + """Encapsulate the operation of :meth:`iris.analysis.trajectory.interpolate`. + + Encapsulate the operation of :meth:`iris.analysis.trajectory.interpolate` with given source and target grids. This is the type used by the :class:`~iris.analysis.UnstructuredNearest` @@ -711,35 +712,39 @@ class UnstructuredNearestNeigbourRegridder: # TODO: cache the necessary bits of the operation so reuse can actually # be more efficient. def __init__(self, src_cube, target_grid_cube): - """A nearest-neighbour regridder to perform regridding from the source + """Nearest-neighbour regridder. + + A nearest-neighbour regridder to perform regridding from the source grid to the target grid. This can then be applied to any source data with the same structure as the original 'src_cube'. - Args: - - * src_cube: + Parameters + ---------- + src_cube : :class:`~iris.cube.Cube` The :class:`~iris.cube.Cube` defining the source grid. The X and Y coordinates can have any shape, but must be mapped over the same cube dimensions. - - * target_grid_cube: + target_grid_cube : :class:`~iris.cube.Cube` A :class:`~iris.cube.Cube`, whose X and Y coordinates specify a desired target grid. The X and Y coordinates must be one-dimensional dimension coordinates, mapped to different dimensions. 
All other cube components are ignored. - Returns: - regridder : (object) + Returns + ------- + regridder (object) + A callable object with the interface:: - A callable object with the interface: - `result_cube = regridder(data)` + result_cube = regridder(data) where `data` is a cube with the same grid as the original `src_cube`, that is to be regridded to the `target_grid_cube`. + Notes + ----- .. Note:: For latitude-longitude coordinates, the nearest-neighbour distances diff --git a/lib/iris/aux_factory.py b/lib/iris/aux_factory.py index f447537b7d..d63ab157fa 100644 --- a/lib/iris/aux_factory.py +++ b/lib/iris/aux_factory.py @@ -13,7 +13,7 @@ from iris.common import CFVariableMixin, CoordMetadata, metadata_manager_factory import iris.coords -from iris.exceptions import IrisIgnoringBoundsWarning +from iris.warnings import IrisIgnoringBoundsWarning class AuxCoordFactory(CFVariableMixin, metaclass=ABCMeta): @@ -81,7 +81,7 @@ def make_coord(self, coord_dims_func): Parameters ---------- - coord_dims_func: + coord_dims_func : A callable which can return the list of dimensions relevant to a given coordinate. @@ -97,9 +97,9 @@ def update(self, old_coord, new_coord=None): Parameters ---------- - old_coord: + old_coord : The coordinate to be removed/replaced. - new_coord: optional + new_coord : optional If None, any dependency using old_coord is removed, otherwise any dependency using old_coord is updated to use new_coord. @@ -131,7 +131,7 @@ def derived_dims(self, coord_dims_func): Parameters ---------- - coord_dims_func: + coord_dims_func : A callable which can return the list of dimensions relevant to a given coordinate. See :meth:`iris.cube.Cube.coord_dims()`. @@ -161,7 +161,7 @@ def updated(self, new_coord_mapping): Parameters ---------- - new_coord_mapping: + new_coord_mapping : A dictionary mapping from the object IDs potentially used by this factory, to the coordinate objects that should be used instead. 
@@ -207,7 +207,8 @@ def _nd_bounds(coord, dims, ndim): dims == [3] ndim == 5 - results in: + results in:: + nd_bounds.shape == (1, 1, 1, 70, 1, 2) """ @@ -471,7 +472,7 @@ def make_coord(self, coord_dims_func): Parameters ---------- - coord_dims_func: + coord_dims_func : A callable which can return the list of dimensions relevant to a given coordinate. @@ -546,11 +547,11 @@ def __init__(self, delta=None, sigma=None, orography=None): Parameters ---------- - delta: Coord, optional + delta : Coord, optional The coordinate providing the `a` term. - sigma: Coord, optional + sigma : Coord, optional The coordinate providing the `b` term. - orography: Coord, optional + orography : Coord, optional The coordinate providing the `orog` term. """ @@ -615,7 +616,7 @@ def make_coord(self, coord_dims_func): Parameters ---------- - coord_dims_func: + coord_dims_func : A callable which can return the list of dimensions relevant to a given coordinate. @@ -678,9 +679,9 @@ def update(self, old_coord, new_coord=None): Parameters ---------- - old_coord: + old_coord : The coordinate to be removed/replaced. - new_coord: optional + new_coord : optional If None, any dependency using old_coord is removed, otherwise any dependency using old_coord is updated to use new_coord. @@ -720,11 +721,11 @@ def __init__(self, delta=None, sigma=None, surface_air_pressure=None): Parameters ---------- - delta: Coord, optional + delta : Coord, optional The coordinate providing the `ap` term. - sigma: Coord, optional + sigma : Coord, optional The coordinate providing the `b` term. - surface_air_pressure: Coord, optional + surface_air_pressure : Coord, optional The coordinate providing the `ps` term. """ @@ -821,7 +822,7 @@ def make_coord(self, coord_dims_func): Parameters ---------- - coord_dims_func: + coord_dims_func : A callable which can return the list of dimensions relevant to a given coordinate. 
@@ -1083,7 +1084,7 @@ def make_coord(self, coord_dims_func): Parameters ---------- - coord_dims_func: + coord_dims_func : A callable which can return the list of dimensions relevant to a given coordinate. See :meth:`iris.cube.Cube.coord_dims()`. @@ -1245,7 +1246,7 @@ def make_coord(self, coord_dims_func): Parameters ---------- - coord_dims_func: + coord_dims_func : A callable which can return the list of dimensions relevant to a given coordinate. See :meth:`iris.cube.Cube.coord_dims()`. @@ -1428,7 +1429,7 @@ def make_coord(self, coord_dims_func): Parameters ---------- - coord_dims_func: + coord_dims_func : A callable which can return the list of dimensions relevant to a given coordinate. See :meth:`iris.cube.Cube.coord_dims()`. @@ -1619,7 +1620,7 @@ def make_coord(self, coord_dims_func): Parameters ---------- - coord_dims_func: + coord_dims_func : A callable which can return the list of dimensions relevant to a given coordinate. See :meth:`iris.cube.Cube.coord_dims()`. @@ -1809,7 +1810,7 @@ def make_coord(self, coord_dims_func): Parameters ---------- - coord_dims_func: + coord_dims_func : A callable which can return the list of dimensions relevant to a given coordinate. See :meth:`iris.cube.Cube.coord_dims()`. diff --git a/lib/iris/common/__init__.py b/lib/iris/common/__init__.py index 983238f17d..f9ad2bf207 100644 --- a/lib/iris/common/__init__.py +++ b/lib/iris/common/__init__.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""A package for provisioning common Iris infrastructure. 
- -""" +"""A package for provisioning common Iris infrastructure.""" from .lenient import * from .metadata import * diff --git a/lib/iris/common/_split_attribute_dicts.py b/lib/iris/common/_split_attribute_dicts.py index 95dbcbb7b3..17b3014fb1 100644 --- a/lib/iris/common/_split_attribute_dicts.py +++ b/lib/iris/common/_split_attribute_dicts.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Dictionary operations for dealing with the CubeAttrsDict "split"-style attribute -dictionaries. +"""Dictionary operations for dealing with the CubeAttrsDict "split"-style attribute dictionaries. The idea here is to convert a split-dictionary into a "plain" one for calculations, whose keys are all pairs of the form ('global', ) or ('local', ). @@ -67,7 +66,9 @@ def _convert_pairedkeys_dict_to_splitattrs(dic): def adjust_for_split_attribute_dictionaries(operation): - """Decorator to make a function of attribute-dictionaries work with split attributes. + """Generate attribute-dictionaries to work with split attributes. + + Decorator to make a function of attribute-dictionaries work with split attributes. The wrapped function of attribute-dictionaries is currently always one of "equals", "combine" or "difference", with signatures like : @@ -92,6 +93,7 @@ def adjust_for_split_attribute_dictionaries(operation): "Split" dictionaries are all of class :class:`~iris.cube.CubeAttrsDict`, since the only usage of 'split' attribute dictionaries is in Cubes (i.e. they are not used for cube components). + """ @wraps(operation) diff --git a/lib/iris/common/lenient.py b/lib/iris/common/lenient.py index 614060b9bf..b26e0f1763 100644 --- a/lib/iris/common/lenient.py +++ b/lib/iris/common/lenient.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. 
-"""Provides the infrastructure to support lenient client/service behaviour. - -""" +"""Provides the infrastructure to support lenient client/service behaviour.""" from collections.abc import Iterable from contextlib import contextmanager @@ -30,45 +28,45 @@ def _lenient_client(*dargs, services=None): - """Decorator that allows a client function/method to declare at runtime that + """Allow a client function/method to declare at runtime. + + Decorator that allows a client function/method to declare at runtime that it is executing and requires lenient behaviour from a prior registered lenient service function/method. - This decorator supports being called with no arguments e.g., + This decorator supports being called with no arguments e.g:: @_lenient_client() def func(): pass - This is equivalent to using it as a simple naked decorator e.g., + This is equivalent to using it as a simple naked decorator e.g:: @_lenient_client def func() pass Alternatively, this decorator supports the lenient client explicitly - declaring the lenient services that it wishes to use e.g., + declaring the lenient services that it wishes to use e.g:: @_lenient_client(services=(service1, service2, ...) def func(): pass - Args: - - * dargs (tuple of callable): + Parameters + ---------- + dargs : tuple of callable A tuple containing the callable lenient client function/method to be wrapped by the decorator. This is automatically populated by Python through the decorator interface. No argument requires to be manually provided. - - Kwargs: - - * services (callable or str or iterable of callable/str) + services : callable or str or iterable of callable/str, optional Zero or more function/methods, or equivalent fully qualified string names, of lenient service function/methods. - Returns: - Closure wrapped function/method. + Returns + ------- + Closure wrapped function/method. 
""" ndargs = len(dargs) @@ -91,7 +89,9 @@ def func(): @wraps(func) def lenient_client_inner_naked(*args, **kwargs): - """Closure wrapper function to register the wrapped function/method + """Closure wrapper function. + + Closure wrapper function to register the wrapped function/method as active at runtime before executing it. """ @@ -111,7 +111,9 @@ def lenient_client_inner_naked(*args, **kwargs): def lenient_client_outer(func): @wraps(func) def lenient_client_inner(*args, **kwargs): - """Closure wrapper function to register the wrapped function/method + """Closure wrapper function. + + Closure wrapper function to register the wrapped function/method as active at runtime before executing it. """ @@ -127,33 +129,36 @@ def lenient_client_inner(*args, **kwargs): def _lenient_service(*dargs): - """Decorator that allows a function/method to declare that it supports lenient + """Implement the lenient service protocol. + + Decorator that allows a function/method to declare that it supports lenient behaviour as a service. Registration is at Python interpreter parse time. - The decorator supports being called with no arguments e.g., + The decorator supports being called with no arguments e.g:: @_lenient_service() def func(): pass - This is equivalent to using it as a simple naked decorator e.g., + This is equivalent to using it as a simple naked decorator e.g:: @_lenient_service def func(): pass - Args: - - * dargs (tuple of callable): + Parameters + ---------- + dargs : tuple of callable A tuple containing the callable lenient service function/method to be wrapped by the decorator. This is automatically populated by Python through the decorator interface. No argument requires to be manually provided. - Returns: - Closure wrapped function/method. + Returns + ------- + Closure wrapped function/method. """ ndargs = len(dargs) @@ -195,12 +200,14 @@ def lenient_service_outer(func): def _qualname(func): """Return the fully qualified function/method string name. 
- Args: - - * func (callable): + Parameters + ---------- + func : callable Callable function/method. Non-callable arguments are simply passed through. + Notes + ----- .. note:: Inherited methods will be qualified with the base class that defines the method. @@ -216,16 +223,18 @@ def _qualname(func): class Lenient(threading.local): def __init__(self, **kwargs): - """A container for managing the run-time lenient features and options. + """Container for managing the run-time lenient features and options. - Kwargs: - - * kwargs (dict) + Parameters + ---------- + **kwargs : dict, optional Mapping of lenient key/value options to enable/disable. Note that, only the lenient "maths" options is available, which controls lenient/strict cube arithmetic. - For example:: + Examples + -------- + :: Lenient(maths=False) @@ -280,7 +289,9 @@ def _init(self): @contextmanager def context(self, **kwargs): - """Return a context manager which allows temporary modification of the + """Context manager supporting temporary modification of lenient state. + + Return a context manager which allows temporary modification of the lenient option state within the scope of the context manager. On entry to the context manager, all provided keyword arguments are @@ -288,6 +299,7 @@ def context(self, **kwargs): option state is restored. For example:: + with iris.common.Lenient.context(maths=False): pass @@ -317,23 +329,24 @@ def configure_state(state): class _Lenient(threading.local): def __init__(self, *args, **kwargs): - """A container for managing the run-time lenient services and client - options for pre-defined functions/methods. + """Container for managing the run-time lenient services and client options. - Args: + A container for managing the run-time lenient services and client + options for pre-defined functions/methods. 
- * args (callable or str or iterable of callable/str) + Parameters + ---------- + *args : callable or str or iterable of callable/str A function/method or fully qualified string name of the function/method acting as a lenient service. - - Kwargs: - - * kwargs (dict of callable/str or iterable of callable/str) + **kwargs : dict of callable/str or iterable of callable/str, optional Mapping of lenient client function/method, or fully qualified string name of the function/method, to one or more lenient service function/methods or fully qualified string name of function/methods. - For example:: + Examples + -------- + :: _Lenient(service1, service2, client1=service1, client2=(service1, service2)) @@ -352,16 +365,19 @@ def __init__(self, *args, **kwargs): self.register_client(client, services) def __call__(self, func): - """Determine whether it is valid for the function/method to provide a - lenient service at runtime to the actively executing lenient client. + """Determine whether it is valid for the function/method to provide a lenient service. - Args: + Determine whether it is valid for the function/method to provide a + lenient service at runtime to the actively executing lenient client. - * func (callable or str): + Parameters + ---------- + func : callable or str A function/method or fully qualified string name of the function/method. - Returns: - Boolean. + Returns + ------- + bool """ result = False @@ -430,7 +446,9 @@ def __setitem__(self, name, value): @contextmanager def context(self, *args, **kwargs): - """Return a context manager which allows temporary modification of + """Context manager supporting temporary modification of lenient state. + + Return a context manager which allows temporary modification of the lenient option state for the active thread. On entry to the context manager, all provided keyword arguments are @@ -438,6 +456,7 @@ def context(self, *args, **kwargs): state is restored. 
For example:: + with iris._LENIENT.context(example_lenient_flag=False): # ... code that expects some non-lenient behaviour @@ -509,9 +528,9 @@ def enable(self, state): Setting the state to `False` disables all lenient services, and setting the state to `True` enables all lenient services. - Args: - - * state (bool): + Parameters + ---------- + state : bool Activate state for lenient services. """ @@ -522,22 +541,20 @@ def enable(self, state): self.__dict__["enable"] = state def register_client(self, func, services, append=False): - """Add the provided mapping of lenient client function/method to - required lenient service function/methods. + """Add the lenient client to service mapping. - Args: + Add the provided mapping of lenient client function/method to + required lenient service function/methods. - * func (callable or str): + Parameters + ---------- + func : callable or str A client function/method or fully qualified string name of the client function/method. - - * services (callable or str or iterable of callable/str): + services : callable or str or iterable of callable/str One or more service function/methods or fully qualified string names of the required service function/method. - - Kwargs: - - * append (bool): + append : bool, default=False If True, append the lenient services to any pre-registered lenient services for the provided lenient client. Default is False. @@ -565,12 +582,11 @@ def register_client(self, func, services, append=False): self.__dict__[func] = services def register_service(self, func): - """Add the provided function/method as providing a lenient service and - activate it. - - Args: + """Add the provided function/method as providing a lenient service and activate it. - * func (callable or str): + Parameters + ---------- + func : callable or str A service function/method or fully qualified string name of the service function/method. 
@@ -588,9 +604,9 @@ def register_service(self, func): def unregister_client(self, func): """Remove the provided function/method as a lenient client using lenient services. - Args: - - * func (callable or str): + Parameters + ---------- + func : callable or str A function/method of fully qualified string name of the function/method. """ @@ -614,9 +630,9 @@ def unregister_client(self, func): def unregister_service(self, func): """Remove the provided function/method as providing a lenient service. - Args: - - * func (callable or str): + Parameters + ---------- + func : callable or str A function/method or fully qualified string name of the function/method. """ diff --git a/lib/iris/common/metadata.py b/lib/iris/common/metadata.py index 691e427aa5..403436496f 100644 --- a/lib/iris/common/metadata.py +++ b/lib/iris/common/metadata.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Provides the infrastructure to support the common metadata API. - -""" +"""Provides the infrastructure to support the common metadata API.""" from abc import ABCMeta @@ -59,12 +57,14 @@ def hexdigest(item): This provides a means to compare large and/or complex objects through simple string hexdigest comparison. - Args: - - * item (object): + Parameters + ---------- + item : object The item that requires to have its hexdigest calculated. - Returns: + Returns + ------- + str The string hexadecimal representation of the item's 64-bit hash. """ @@ -94,7 +94,9 @@ def hexdigest(item): class _NamedTupleMeta(ABCMeta): - """Meta-class to support the convenience of creating a namedtuple from + """Meta-class convenience for creating a namedtuple. + + Meta-class to support the convenience of creating a namedtuple from names/members of the metadata class hierarchy. 
""" @@ -154,13 +156,14 @@ class BaseMetadata(metaclass=_NamedTupleMeta): def __eq__(self, other): """Determine whether the associated metadata members are equivalent. - Args: - - * other (metadata): + Parameters + ---------- + other : metadata A metadata instance of the same type. - Returns: - Boolean. + Returns + ------- + bool """ result = NotImplemented @@ -238,30 +241,25 @@ def __str__(self): return f"{type(self).__name__}({', '.join(field_strings)})" def _api_common(self, other, func_service, func_operation, action, lenient=None): - """Common entry-point for lenient metadata API methods. + """Perform common entry-point for lenient metadata API methods. - Args: - - * other (metadata): + Parameters + ---------- + other : metadata A metadata instance of the same type. - - * func_service (callable): + func_service : callable The parent service method offering the API entry-point to the service. - - * func_operation (callable): + func_operation : callable The parent service method that provides the actual service. - - * action (str): + action : str The verb describing the service operation. - - Kwargs: - - * lenient (boolean): + lenient : bool, optional Enable/disable the lenient service operation. The default is to automatically detect whether this lenient service operation is enabled. - Returns: - The result of the service operation to the parent service caller. + Returns + ------- + The result of the service operation to the parent service caller. """ # Ensure that we have similar class instances. @@ -312,13 +310,14 @@ def func(field): def _combine_lenient(self, other): """Perform lenient combination of metadata members. - Args: - - * other (BaseMetadata): + Parameters + ---------- + other : BaseMetadata The other metadata participating in the lenient combination. - Returns: - A list of combined metadata member values. + Returns + ------- + A list of combined metadata member values. 
""" @@ -393,13 +392,14 @@ def _combine_strict_attributes(left, right): def _compare_lenient(self, other): """Perform lenient equality of metadata members. - Args: - - * other (BaseMetadata): + Parameters + ---------- + other : BaseMetadata The other metadata participating in the lenient comparison. - Returns: - Boolean. + Returns + ------- + bool """ result = False @@ -484,13 +484,14 @@ def func(field): def _difference_lenient(self, other): """Perform lenient difference of metadata members. - Args: - - * other (BaseMetadata): + Parameters + ---------- + other : BaseMetadata The other metadata participating in the lenient difference. - Returns: - A list of difference metadata member values. + Returns + ------- + A list of difference metadata member values. """ @@ -576,22 +577,19 @@ def _is_attributes(field, left, right): @lenient_service def combine(self, other, lenient=None): - """Return a new metadata instance created by combining each of the - associated metadata members. + """Return a new metadata instance created by combining each of the associated metadata members. - Args: - - * other (metadata): + Parameters + ---------- + other : metadata A metadata instance of the same type. - - Kwargs: - - * lenient (boolean): + lenient : bool, optional Enable/disable lenient combination. The default is to automatically detect whether this lenient operation is enabled. - Returns: - Metadata instance. + Returns + ------- + Metadata instance. """ result = self._api_common( @@ -601,26 +599,26 @@ def combine(self, other, lenient=None): @lenient_service def difference(self, other, lenient=None): - """Return a new metadata instance created by performing a difference + """Perform lenient metadata difference operation. + + Return a new metadata instance created by performing a difference comparison between each of the associated metadata members. A metadata member returned with a value of "None" indicates that there is no difference between the members being compared. 
Otherwise, a tuple of the different values is returned. - Args: - - * other (metadata): + Parameters + ---------- + other : metadata A metadata instance of the same type. - - Kwargs: - - * lenient (boolean): + lenient : bool, optional Enable/disable lenient difference. The default is to automatically detect whether this lenient operation is enabled. - Returns: - Metadata instance of member differences or None. + Returns + ------- + Metadata instance of member differences or None. """ result = self._api_common( @@ -635,19 +633,17 @@ def difference(self, other, lenient=None): def equal(self, other, lenient=None): """Determine whether the associated metadata members are equivalent. - Args: - - * other (metadata): + Parameters + ---------- + other : metadata A metadata instance of the same type. - - Kwargs: - - * lenient (boolean): + lenient : bool, optional Enable/disable lenient equivalence. The default is to automatically detect whether this lenient operation is enabled. - Returns: - Boolean. + Returns + ------- + bool """ result = self._api_common( @@ -657,18 +653,21 @@ def equal(self, other, lenient=None): @classmethod def from_metadata(cls, other): - """Convert the provided metadata instance from a different type + """Convert metadata instance to this metadata type. + + Convert the provided metadata instance from a different type to this metadata type, using only the relevant metadata members. Non-common metadata members are set to ``None``. - Args: - - * other (metadata): + Parameters + ---------- + other : metadata A metadata instance of any type. - Returns: - New metadata instance. + Returns + ------- + New metadata instance. """ result = None @@ -684,25 +683,26 @@ def from_metadata(cls, other): return result def name(self, default=None, token=False): - """Returns a string name representing the identity of the metadata. + """Return a string name representing the identity of the metadata. 
First it tries standard name, then it tries the long name, then the NetCDF variable name, before falling-back to a default value, which itself defaults to the string 'unknown'. - Kwargs: - - * default: + Parameters + ---------- + default : optional The fall-back string representing the default name. Defaults to the string 'unknown'. - * token: + token : bool, default=False If True, ensures that the name returned satisfies the criteria for the characters required by a valid NetCDF name. If it is not possible to return a valid name, then a ValueError exception is raised. Defaults to False. - Returns: - String. + Returns + ------- + str """ @@ -726,16 +726,19 @@ def _check(item): @classmethod def token(cls, name): - """Determine whether the provided name is a valid NetCDF name and thus - safe to represent a single parsable token. + """Verify validity of provided NetCDF name. - Args: + Determine whether the provided name is a valid NetCDF name and thus + safe to represent a single parsable token. - * name: + Parameters + ---------- + name : str The string name to verify - Returns: - The provided name if valid, otherwise None. + Returns + ------- + The provided name if valid, otherwise None. """ if name is not None: @@ -786,14 +789,15 @@ def __eq__(self, other): def _combine_lenient(self, other): """Perform lenient combination of metadata members for cell measures. - Args: - - * other (CellMeasureMetadata): + Parameters + ---------- + other : CellMeasureMetadata The other cell measure metadata participating in the lenient combination. - Returns: - A list of combined metadata member values. + Returns + ------- + A list of combined metadata member values. """ # Perform "strict" combination for "measure". @@ -807,14 +811,15 @@ def _combine_lenient(self, other): def _compare_lenient(self, other): """Perform lenient equality of metadata members for cell measures. 
- Args: - - * other (CellMeasureMetadata): + Parameters + ---------- + other : CellMeasureMetadata The other cell measure metadata participating in the lenient comparison. - Returns: - Boolean. + Returns + ------- + bool """ # Perform "strict" comparison for "measure". @@ -828,14 +833,15 @@ def _compare_lenient(self, other): def _difference_lenient(self, other): """Perform lenient difference of metadata members for cell measures. - Args: - - * other (CellMeasureMetadata): + Parameters + ---------- + other : CellMeasureMetadata The other cell measure metadata participating in the lenient difference. - Returns: - A list of difference metadata member values. + Returns + ------- + A list of difference metadata member values. """ # Perform "strict" difference for "measure". @@ -908,14 +914,15 @@ def _sort_key(item): def _combine_lenient(self, other): """Perform lenient combination of metadata members for coordinates. - Args: - - * other (CoordMetadata): + Parameters + ---------- + other : CoordMetadata The other coordinate metadata participating in the lenient combination. - Returns: - A list of combined metadata member values. + Returns + ------- + A list of combined metadata member values. """ @@ -936,14 +943,15 @@ def func(field): def _compare_lenient(self, other): """Perform lenient equality of metadata members for coordinates. - Args: - - * other (CoordMetadata): + Parameters + ---------- + other : CoordMetadata The other coordinate metadata participating in the lenient comparison. - Returns: - Boolean. + Returns + ------- + bool """ # Perform "strict" comparison for "coord_system" and "climatological". @@ -962,14 +970,15 @@ def _compare_lenient(self, other): def _difference_lenient(self, other): """Perform lenient difference of metadata members for coordinates. - Args: - - * other (CoordMetadata): + Parameters + ---------- + other : CoordMetadata The other coordinate metadata participating in the lenient difference. 
- Returns: - A list of difference metadata member values. + Returns + ------- + A list of difference metadata member values. """ @@ -1057,13 +1066,14 @@ def _sort_key(item): def _combine_lenient(self, other): """Perform lenient combination of metadata members for cubes. - Args: - - * other (CubeMetadata): + Parameters + ---------- + other : CubeMetadata The other cube metadata participating in the lenient combination. - Returns: - A list of combined metadata member values. + Returns + ------- + A list of combined metadata member values. """ # Perform "strict" combination for "cell_methods". @@ -1077,13 +1087,14 @@ def _combine_lenient(self, other): def _compare_lenient(self, other): """Perform lenient equality of metadata members for cubes. - Args: - - * other (CubeMetadata): + Parameters + ---------- + other : CubeMetadata The other cube metadata participating in the lenient comparison. - Returns: - Boolean. + Returns + ------- + bool """ # Perform "strict" comparison for "cell_methods". @@ -1096,13 +1107,14 @@ def _compare_lenient(self, other): def _difference_lenient(self, other): """Perform lenient difference of metadata members for cubes. - Args: - - * other (CubeMetadata): + Parameters + ---------- + other : CubeMetadata The other cube metadata participating in the lenient difference. - Returns: - A list of difference metadata member values. + Returns + ------- + A list of difference metadata member values. """ # Perform "strict" difference for "cell_methods". @@ -1119,7 +1131,9 @@ def _difference_lenient(self, other): @property def _names(self): - """A tuple containing the value of each name participating in the identity + """A tuple containing the value of each name participating in the identity of a cube. + + A tuple containing the value of each name participating in the identity of a :class:`iris.cube.Cube`. This includes the standard name, long name, NetCDF variable name, and the STASH from the attributes dictionary. 
@@ -1330,53 +1344,45 @@ def metadata_filter( attributes=None, axis=None, ): - """Filter a collection of objects by their metadata to fit the given metadata - criteria. + """Filter a collection of objects by their metadata to fit the given metadata criteria. Criteria can be either specific properties or other objects with metadata to be matched. - Args: - - * instances: + Parameters + ---------- + instances : One or more objects to be filtered. - - Kwargs: - - * item: + item : optional Either, * a :attr:`~iris.common.mixin.CFVariableMixin.standard_name`, :attr:`~iris.common.mixin.CFVariableMixin.long_name`, or :attr:`~iris.common.mixin.CFVariableMixin.var_name` which is compared against the :meth:`~iris.common.mixin.CFVariableMixin.name`. - * a coordinate or metadata instance equal to that of the desired objects e.g., :class:`~iris.coords.DimCoord` or :class:`CoordMetadata`. - - * standard_name: + standard_name : optional The CF standard name of the desired object. If ``None``, does not check for ``standard_name``. - - * long_name: + long_name : optional An unconstrained description of the object. If ``None``, does not check for ``long_name``. - - * var_name: + var_name : optional The NetCDF variable name of the desired object. If ``None``, does not check for ``var_name``. - - * attributes: + attributes : dict, optional A dictionary of attributes desired on the object. If ``None``, does not check for ``attributes``. - - * axis: + axis : optional The desired object's axis, see :func:`~iris.util.guess_coord_axis`. If ``None``, does not check for ``axis``. Accepts the values ``X``, ``Y``, ``Z`` and ``T`` (case-insensitive). - Returns: + Returns + ------- + list of the objects A list of the objects supplied in the ``instances`` argument, limited to only those that matched the given criteria. 
@@ -1494,7 +1500,9 @@ def __ne__(self, other): return match def __reduce__(self): - """Dynamically created classes at runtime cannot be pickled, due to not + """Use the __reduce__ interface to allow 'pickle' to recreate this class instance. + + Dynamically created classes at runtime cannot be pickled, due to not being defined at the top level of a module. As a result, we require to use the __reduce__ interface to allow 'pickle' to recreate this class instance, and dump and load instance state successfully. @@ -1553,26 +1561,26 @@ def values(self): def metadata_manager_factory(cls, **kwargs): - """A class instance factory function responsible for manufacturing + """Manufacturing metadata instances. + + A class instance factory function responsible for manufacturing metadata instances dynamically at runtime. The factory instances returned by the factory are capable of managing their metadata state, which can be proxied by the owning container. - Args: - - * cls: + Parameters + ---------- + cls : A subclass of :class:`~iris.common.metadata.BaseMetadata`, defining the metadata to be managed. - - Kwargs: - - * kwargs: + **kwargs : dict, optional Initial values for the manufactured metadata instance. Unspecified fields will default to a value of 'None'. - Returns: - A manager instance for the provided metadata ``cls``. + Returns + ------- + A manager instance for the provided metadata ``cls``. """ # Check whether kwargs have valid fields for the specified metadata. diff --git a/lib/iris/common/mixin.py b/lib/iris/common/mixin.py index 08ad224ad6..2d9605de83 100644 --- a/lib/iris/common/mixin.py +++ b/lib/iris/common/mixin.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Provides common metadata mixin behaviour. 
- -""" +"""Provides common metadata mixin behaviour.""" from collections.abc import Mapping from functools import wraps @@ -121,7 +119,7 @@ def __setitem__(self, key, value): dict.__setitem__(self, key, value) def update(self, other, **kwargs): - """Standard ``dict.update()`` operation.""" + """Perform standard ``dict.update()`` operation.""" # Gather incoming keys keys = [] if hasattr(other, "keys"): @@ -145,7 +143,7 @@ def name(self, default=None, token=None): return self._metadata_manager.name(default=default, token=token) def rename(self, name): - """Changes the human-readable name. + """Change the human-readable name. If 'name' is a valid standard name it will assign it to :attr:`standard_name`, otherwise it will assign it to diff --git a/lib/iris/common/resolve.py b/lib/iris/common/resolve.py index 84ea6eed24..5a96b52a02 100644 --- a/lib/iris/common/resolve.py +++ b/lib/iris/common/resolve.py @@ -2,7 +2,9 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Provides the infrastructure to support the analysis, identification and +"""Resolve metadata common between two cubes. + +Provides the infrastructure to support the analysis, identification and combination of metadata common between two :class:`~iris.cube.Cube` operands into a single resultant :class:`~iris.cube.Cube`, which will be auto-transposed, and with the appropriate broadcast shape. @@ -97,7 +99,9 @@ def create_coord(self, metadata): class Resolve: - """At present, :class:`~iris.common.resolve.Resolve` is used by Iris solely + """Resolve the metadata of two cubes into one cube. 
+ + At present, :class:`~iris.common.resolve.Resolve` is used by Iris solely during cube maths to combine a left-hand :class:`~iris.cube.Cube` operand and a right-hand :class:`~iris.cube.Cube` operand into a resultant :class:`~iris.cube.Cube` with common metadata, suitably auto-transposed @@ -205,10 +209,12 @@ class Resolve: >>> resolver = Resolve(cube1, cube2) >>> results = [resolver.cube(data) for data in payload] - """ # noqa: D214, D410, D411 + """ # noqa: D214, D406, D407, D410, D411 def __init__(self, lhs=None, rhs=None): - """Resolve the provided ``lhs`` :class:`~iris.cube.Cube` operand and + """Resolve the cube operands. + + Resolve the provided ``lhs`` :class:`~iris.cube.Cube` operand and ``rhs`` :class:`~iris.cube.Cube` operand to determine the metadata that is common between them, and the auto-transposed, broadcast shape of the resultant :class:`~iris.cube.Cube`. @@ -236,8 +242,16 @@ def __init__(self, lhs=None, rhs=None): but this may not be possible when auto-transposition or extended broadcasting is involved during the operation. - For example: + Parameters + ---------- + lhs : :class:`~iris.cube.Cube`, optional + The left-hand-side :class:`~iris.cube.Cube` operand. + rhs : :class:`~iris.cube.Cube`, optional + The right-hand-side :class:`~iris.cube.Cube` operand. + + Examples + -------- .. doctest:: >>> cube1 @@ -249,14 +263,6 @@ def __init__(self, lhs=None, rhs=None): >>> result1 == result2 True - Kwargs: - - * lhs: - The left-hand-side :class:`~iris.cube.Cube` operand. - - * rhs: - The right-hand-side :class:`~iris.cube.Cube` operand. - """ #: The ``lhs`` operand to be resolved into the resultant :class:`~iris.cube.Cube`. self.lhs_cube = None # set in __call__ @@ -323,7 +329,9 @@ def __init__(self, lhs=None, rhs=None): self(lhs, rhs) def __call__(self, lhs, rhs): - """Resolve the ``lhs`` :class:`~iris.cube.Cube` operand and ``rhs`` + """Resolve the cube operands. 
+ + Resolve the ``lhs`` :class:`~iris.cube.Cube` operand and ``rhs`` :class:`~iris.cube.Cube` operand metadata. Involves determining all the common coordinate metadata shared between @@ -332,12 +340,11 @@ def __call__(self, lhs, rhs): :class:`~iris.cube.Cube`, which may be auto-transposed, can be determined. - Args: - - * lhs: + Parameters + ---------- + lhs : :class:`~iris.cube.Cube` The left-hand-side :class:`~iris.cube.Cube` operand. - - * rhs: + rhs : :class:`~iris.cube.Cube` The right-hand-side :class:`~iris.cube.Cube` operand. """ @@ -379,7 +386,9 @@ def __call__(self, lhs, rhs): return self def _as_compatible_cubes(self): - """Determine whether the ``src`` and ``tgt`` :class:`~iris.cube.Cube` can + """Transpose and/or broadcast operands. + + Determine whether the ``src`` and ``tgt`` :class:`~iris.cube.Cube` can be transposed and/or broadcast successfully together. If compatible, the ``_broadcast_shape`` of the resultant resolved cube is @@ -488,7 +497,9 @@ def _aux_coverage( common_aux_metadata, common_scalar_metadata, ): - """Determine the dimensions covered by each of the local and common + """Perform auxiliary coordinate coverage. + + Determine the dimensions covered by each of the local and common auxiliary coordinates of the provided :class:`~iris.cube.Cube`. The cube dimensions not covered by any of the auxiliary coordinates is @@ -496,29 +507,26 @@ def _aux_coverage( The scalar coordinates local to the cube are also determined. - Args: - - * cube: + Parameters + ---------- + cube : :class:`~iris.cube.Cube` The :class:`~iris.cube.Cube` to be analysed for coverage. - - * cube_items_aux: + cube_items_aux : The list of associated :class:`~iris.common.resolve._Item` metadata for each auxiliary coordinate owned by the cube. - - * cube_items_scalar: + cube_items_scalar : The list of associated :class:`~iris.common.resolve._Item` metadata for each scalar coordinate owned by the cube. 
- - * common_aux_metadata: + common_aux_metadata : The list of common auxiliary coordinate metadata shared by both the LHS and RHS cube operands being resolved. - - * common_scalar_metadata: + common_scalar_metadata : The list of common scalar coordinate metadata shared by both the LHS and RHS cube operands being resolved. - Returns: - :class:`~iris.common.resolve._AuxCoverage` + Returns + ------- + :class:`~iris.common.resolve._AuxCoverage` """ common_items_aux = [] @@ -558,27 +566,29 @@ def _aux_coverage( @staticmethod def _aux_mapping(src_coverage, tgt_coverage): - """Establish the mapping of dimensions from the ``src`` to ``tgt`` + """Perform auxiliary coordinate dimension mapping. + + Establish the mapping of dimensions from the ``src`` to ``tgt`` :class:`~iris.cube.Cube` using the auxiliary coordinate metadata common between each of the operands. The ``src`` to ``tgt`` common auxiliary coordinate mapping is held by the :attr:`~iris.common.resolve.Resolve.mapping`. - Args: - - * src_coverage: + Parameters + ---------- + src_coverage : The :class:`~iris.common.resolve._DimCoverage` of the ``src`` :class:`~iris.cube.Cube` i.e., map from the common ``src`` dimensions. - - * tgt_coverage: + tgt_coverage : The :class:`~iris.common.resolve._DimCoverage` of the ``tgt`` :class:`~iris.cube.Cube` i.e., map to the common ``tgt`` dimensions. - Returns: - Dictionary of ``src`` to ``tgt`` dimension mapping. + Returns + ------- + dict of ``src`` to ``tgt`` dimension mapping. """ mapping = {} @@ -624,19 +634,22 @@ def _aux_mapping(src_coverage, tgt_coverage): @staticmethod def _categorise_items(cube): - """Inspect the provided :class:`~iris.cube.Cube` and group its + """Categorise the cube metadata. + + Inspect the provided :class:`~iris.cube.Cube` and group its coordinates and associated metadata into dimension, auxiliary and scalar categories. 
- Args: - - * cube: + Parameters + ---------- + cube : :class:`~iris.cube.Cube` The :class:`~iris.cube.Cube` that will have its coordinates and metadata grouped into their associated dimension, auxiliary and scalar categories. - Returns: - :class:`~iris.common.resolve._CategoryItems` + Returns + ------- + :class:`~iris.common.resolve._CategoryItems` """ category = _CategoryItems(items_dim=[], items_aux=[], items_scalar=[]) @@ -671,39 +684,38 @@ def _create_prepared_item( bounds=None, container=None, ): - """Convenience method that creates a :class:`~iris.common.resolve._PreparedItem` + """Package metadata in preparation for resolution. + + Convenience method that creates a :class:`~iris.common.resolve._PreparedItem` containing the data and metadata required to construct and attach a coordinate to the resultant resolved cube. - Args: - - * coord: + Parameters + ---------- + coord : The coordinate with the ``points`` and ``bounds`` to be extracted. - - * dims (int or tuple): + dims : int or tuple The dimensions that the ``coord`` spans on the resulting resolved :class:`~iris.cube.Cube`. (Can also be a single dimension number). - - * src_metadata: + src_metadata : optional The coordinate metadata from the ``src`` :class:`~iris.cube.Cube`. - - * tgt_metadata: + tgt_metadata : optional The coordinate metadata from the ``tgt`` :class:`~iris.cube.Cube`. - - * points: + points : optional Override points array. When not given, use coord.points. - - * bounds: + bounds : optional Override bounds array. When not given, use coord.bounds. - - * container: + container : optional Override coord type (class constructor). When not given, use type(coord). - Returns: - The :class:`~iris.common.resolve._PreparedItem`. + Returns + ------- + :class:`~iris.common.resolve._PreparedItem`. + Notes + ----- .. note:: If container or type(coord) is DimCoord/AuxCoord (i.e. 
not @@ -796,27 +808,28 @@ def _show(items, heading): @staticmethod def _dim_coverage(cube, cube_items_dim, common_dim_metadata): - """Determine the dimensions covered by each of the local and common + """Perform dimension coordinate coverage. + + Determine the dimensions covered by each of the local and common dimension coordinates of the provided :class:`~iris.cube.Cube`. The cube dimensions not covered by any of the dimension coordinates is also determined; these are known as `free` dimensions. - Args: - - * cube: + Parameters + ---------- + cube : :class:`~iris.cube.Cube` The :class:`~iris.cube.Cube` to be analysed for coverage. - - * cube_items_dim: + cube_items_dim : The list of associated :class:`~iris.common.resolve._Item` metadata for each dimension coordinate owned by the cube. - - * common_dim_metadata: + common_dim_metadata : The list of common dimension coordinate metadata shared by both the LHS and RHS cube operands being resolved. - Returns: - :class:`~iris.common.resolve._DimCoverage` + Returns + ------- + :class:`~iris.common.resolve._DimCoverage` """ ndim = cube.ndim @@ -847,27 +860,29 @@ def _dim_coverage(cube, cube_items_dim, common_dim_metadata): @staticmethod def _dim_mapping(src_coverage, tgt_coverage): - """Establish the mapping of dimensions from the ``src`` to ``tgt`` + """Perform dimension coordinate dimension mapping. + + Establish the mapping of dimensions from the ``src`` to ``tgt`` :class:`~iris.cube.Cube` using the dimension coordinate metadata common between each of the operands. The ``src`` to ``tgt`` common dimension coordinate mapping is held by the :attr:`~iris.common.resolve.Resolve.mapping`. - Args: - - * src_coverage: + Parameters + ---------- + src_coverage : The :class:`~iris.common.resolve._DimCoverage` of the ``src`` :class:`~iris.cube.Cube` i.e., map from the common ``src`` dimensions. 
- - * tgt_coverage: + tgt_coverage : The :class:`~iris.common.resolve._DimCoverage` of the ``tgt`` :class:`~iris.cube.Cube` i.e., map to the common ``tgt`` dimensions. - Returns: - Dictionary of ``src`` to ``tgt`` dimension mapping. + Returns + ------- + dict of ``src`` to ``tgt`` dimension mapping. """ mapping = {} @@ -904,7 +919,9 @@ def _free_mapping( src_aux_coverage, tgt_aux_coverage, ): - """Attempt to update the :attr:`~iris.common.resolve.Resolve.mapping` with + """Associate free dimensions to covered dimensions. + + Attempt to update the :attr:`~iris.common.resolve.Resolve.mapping` with ``src`` to ``tgt`` :class:`~iris.cube.Cube` mappings from unmapped ``src`` dimensions that are free from coordinate metadata coverage to ``tgt`` dimensions that have local metadata coverage (i.e., is not common between @@ -919,24 +936,23 @@ def _free_mapping( An exception will be raised if there are any ``src`` :class:`~iris.cube.Cube` dimensions not mapped to an associated ``tgt`` dimension. - Args: - - * src_dim_coverage: + Parameters + ---------- + src_dim_coverage : The :class:`~iris.common.resolve.._DimCoverage` of the ``src`` :class:`~iris.cube.Cube`. - - * tgt_dim_coverage: + tgt_dim_coverage : The :class:`~iris.common.resolve.._DimCoverage` of the ``tgt`` :class:`~iris.cube.Cube`. - - * src_aux_coverage: + src_aux_coverage : The :class:`~iris.common.resolve._AuxCoverage` of the ``src`` :class:`~iris.cube.Cube`. - - * tgt_aux_coverage: + tgt_aux_coverage : The :class:`~iris.common.resolve._AuxCoverage` of the ``tgt`` :class:`~iris.cube.Cube`. + Notes + ----- .. note:: All unmapped dimensions with an extend >1 are mapped before those @@ -1079,7 +1095,9 @@ def _pop(item, items): logger.debug(f"mapping free dimensions gives, mapping={self.mapping}") def _metadata_coverage(self): - """Using the pre-categorised metadata of the cubes, determine the dimensions + """Determine free and covered dimensions. 
+ + Using the pre-categorised metadata of the cubes, determine the dimensions covered by their associated dimension and auxiliary coordinates, and which dimensions are free of metadata coverage. @@ -1125,34 +1143,42 @@ def _metadata_coverage(self): ) def _metadata_mapping(self): - """Ensure that each ``src`` :class:`~iris.cube.Cube` dimension is mapped to an associated - ``tgt`` :class:`~iris.cube.Cube` dimension using the common dim and aux coordinate metadata. + """Identify equivalent dimensions using metadata. - If the common metadata does not result in a full mapping of ``src`` to ``tgt`` dimensions - then free dimensions are analysed to determine whether the mapping can be completed. + Ensure that each ``src`` :class:`~iris.cube.Cube` dimension is mapped to an + associated ``tgt`` :class:`~iris.cube.Cube` dimension using the common dim + and aux coordinate metadata. - Once the ``src`` has been mapped to the ``tgt``, the cubes are checked to ensure that they - will successfully broadcast, and the ``src`` :class:`~iris.cube.Cube` is transposed appropriately, - if necessary. + If the common metadata does not result in a full mapping of ``src`` to ``tgt`` + dimensions then free dimensions are analysed to determine whether the mapping + can be completed. - The :attr:`~iris.common.resolve.Resolve._broadcast_shape` is set, along with the - :attr:`~iris.common.resolve.Resolve._src_cube_resolved` and :attr:`~iris.common.resolve.Resolve._tgt_cube_resolved`, + Once the ``src`` has been mapped to the ``tgt``, the cubes are checked to + ensure that they will successfully broadcast, and the ``src`` + :class:`~iris.cube.Cube` is transposed appropriately, if necessary. + + The :attr:`~iris.common.resolve.Resolve._broadcast_shape` is set, along with + the :attr:`~iris.common.resolve.Resolve._src_cube_resolved` and + :attr:`~iris.common.resolve.Resolve._tgt_cube_resolved`, which are the broadcast/transposed ``src`` and ``tgt``. .. 
note:: - An exception will be raised if a ``src`` dimension cannot be mapped to a ``tgt`` dimension. + An exception will be raised if a ``src`` dimension cannot be mapped to + a ``tgt`` dimension. .. note:: - An exception will be raised if the full mapped ``src`` :class:`~iris.cube.Cube` cannot be - broadcast or transposed with the ``tgt`` :class:`~iris.cube.Cube`. + An exception will be raised if the full mapped ``src`` + :class:`~iris.cube.Cube` cannot be broadcast or transposed with the + ``tgt`` :class:`~iris.cube.Cube`. .. note:: - The ``src`` and ``tgt`` may be swapped in the case where they both have equal dimensionality and - the ``tgt`` does have the same shape as the resolved broadcast shape (and the ``src`` does) or - the ``tgt`` has more free dimensions than the ``src``. + The ``src`` and ``tgt`` may be swapped in the case where they both have + equal dimensionality and the ``tgt`` does have the same shape as the + resolved broadcast shape (and the ``src`` does) or the ``tgt`` has more + free dimensions than the ``src``. """ # Initialise the state. @@ -1233,7 +1259,9 @@ def _metadata_mapping(self): self._as_compatible_cubes() def _metadata_prepare(self): - """Populate the :attr:`~iris.common.resolve.Resolve.prepared_category` and + """Consolidate metadata for resolved cube. + + Populate the :attr:`~iris.common.resolve.Resolve.prepared_category` and :attr:`~iris.common.resolve.Resolve.prepared_factories` with the necessary metadata to be constructed and attached to the resulting resolved :class:`~iris.cube.Cube`. @@ -1293,7 +1321,9 @@ def _metadata_prepare(self): self._prepare_factory_payload(src_cube, src_category_local) def _metadata_resolve(self): - """Categorise the coordinate metadata of the cubes into three distinct + """Categorise the coordinate metadata. 
+ + Categorise the coordinate metadata of the cubes into three distinct groups; metadata from coordinates only available (local) on the LHS cube, metadata from coordinates only available (local) on the RHS cube, and metadata from coordinates common to both the LHS and RHS @@ -1303,7 +1333,6 @@ def _metadata_resolve(self): 'aux_coords' or 'dim_coords' of the participating cubes. """ - # Determine the cube dim, aux and scalar coordinate items # for each individual cube. self.lhs_cube_category = self._categorise_items(self.lhs_cube) @@ -1402,7 +1431,9 @@ def _prepare_common_aux_payload( prepared_items, ignore_mismatch=None, ): - """Populate the ``prepared_items`` with a :class:`~iris.common.resolve._PreparedItem` containing + """Consolidate common auxiliary coordinate metadata. + + Populate the ``prepared_items`` with a :class:`~iris.common.resolve._PreparedItem` containing the necessary metadata for each auxiliary coordinate to be constructed and attached to the resulting resolved :class:`~iris.cube.Cube`. @@ -1411,24 +1442,19 @@ def _prepare_common_aux_payload( For mixed ``src`` and ``tgt`` coordinate types with matching metadata, an :class:`~iris.coords.AuxCoord` will be nominated for construction. - Args: - - * src_common_items: + Parameters + ---------- + src_common_items : The list of :attr:`~iris.common.resolve._AuxCoverage.common_items_aux` metadata for the ``src`` :class:`~iris.cube.Cube`. - - * tgt_common_items: + tgt_common_items : The list of :attr:`~iris.common.resolve._AuxCoverage.common_items_aux` metadata for the ``tgt`` :class:`~iris.cube.Cube`. - - * prepared_items: + prepared_items : The list of :class:`~iris.common.resolve._PreparedItem` metadata that will be used to construct the auxiliary coordinates that will be attached to the resulting resolved :class:`~iris.cube.Cube`. 
- - Kwargs: - - * ignore_mismatch: + ignore_mismatch : optional When ``False``, an exception will be raised if a difference is detected between corresponding ``src`` and ``tgt`` coordinate ``points`` and/or ``bounds``. When ``True``, the coverage metadata is ignored i.e., a coordinate will not be constructed and @@ -1530,22 +1556,22 @@ def _prepare_common_aux_payload( def _prepare_common_dim_payload( self, src_coverage, tgt_coverage, ignore_mismatch=None ): - """Populate the ``items_dim`` member of :attr:`~iris.common.resolve.Resolve.prepared_category_items` + """Consolidate common dimension coordinate metadata. + + Populate the ``items_dim`` member of :attr:`~iris.common.resolve.Resolve.prepared_category_items` with a :class:`~iris.common.resolve._PreparedItem` containing the necessary metadata for each :class:`~iris.coords.DimCoord` to be constructed and attached to the resulting resolved :class:`~iris.cube.Cube`. - Args: - - * src_coverage: - The :class:`~iris.common.resolve._DimCoverage` metadata for the ``src`` :class:`~iris.cube.Cube`. - - * tgt_coverage: - The :class:`~iris.common.resolve._DimCoverage` metadata for the ``tgt`` :class:`~iris.cube.Cube`. - - Kwargs: - - * ignore_mismatch: + Parameters + ---------- + src_coverage : + The :class:`~iris.common.resolve._DimCoverage` metadata for the + ``src`` :class:`~iris.cube.Cube`. + tgt_coverage : + The :class:`~iris.common.resolve._DimCoverage` metadata for the + ``tgt`` :class:`~iris.cube.Cube`. + ignore_mismatch : optional When ``False``, an exception will be raised if a difference is detected between corresponding ``src`` and ``tgt`` :class:`~iris.coords.DimCoord` ``points`` and/or ``bounds``. 
When ``True``, the coverage metadata is ignored i.e., a :class:`~iris.coords.DimCoord` will not @@ -1591,7 +1617,9 @@ def _prepare_common_dim_payload( def _get_prepared_item( self, metadata, category_local, from_src=True, from_local=False ): - """Find the :attr:`~iris.common.resolve._PreparedItem` from the + """Find the :attr:`~iris.common.resolve._PreparedItem`. + + Find the :attr:`~iris.common.resolve._PreparedItem` from the :attr:`~iris.common.resolve.Resolve.prepared_category` that matches the provided ``metadata``. Alternatively, the ``category_local`` is searched to find a :class:`~iris.common.resolve._Item` @@ -1599,29 +1627,26 @@ def _get_prepared_item( If a match is found, then a new `~iris.common.resolve._PreparedItem` is created and added to :attr:`~iris.common.resolve.Resolve.prepared_category` and returned. See ``from_local``. - Args: - - * metadata: + Parameters + ---------- + metadata : The target metadata of the prepared (or local) item to retrieve. - - * category_local: + category_local : The :class:`~iris.common.resolve._CategoryItems` containing the local metadata of either the ``src`` or ``tgt`` :class:`~iris.cube.Cube`. See ``from_local``. - - Kwargs: - - * from_src: + from_src : bool, default=True Boolean stating whether the ``metadata`` is from the ``src`` (``True``) or ``tgt`` :class:`~iris.cube.Cube`. Defaults to ``True``. - - * from_local: + from_local: bool, default=False Boolean controlling whether the ``metadata`` is used to search the ``category_local`` (``True``) or the :attr:`~iris.common.resolve.Resolve.prepared_category`. Defaults to ``False``. - Returns: + Returns + ------- + :class:`~iris.common.resolve._PreparedItem` The :class:`~iris.common.resolve._PreparedItem` matching the provided ``metadata``. 
""" @@ -1670,9 +1695,12 @@ def _get_prepared_item( return result def _prepare_factory_payload(self, cube, category_local, from_src=True): - """Populate the :attr:`~iris.common.resolve.Resolve.prepared_factories` with a :class:`~iris.common.resolve._PreparedFactory` - containing the necessary metadata for each ``src`` and/or ``tgt`` auxiliary factory to be constructed and - attached to the resulting resolved :class:`~iris.cube.Cube`. + """Consolidate common factory metadata. + + Populate the :attr:`~iris.common.resolve.Resolve.prepared_factories` with a + :class:`~iris.common.resolve._PreparedFactory` containing the necessary + metadata for each ``src`` and/or ``tgt`` auxiliary factory to be constructed + and attached to the resulting resolved :class:`~iris.cube.Cube`. .. note:: @@ -1680,17 +1708,15 @@ def _prepare_factory_payload(self, cube, category_local, from_src=True): :attr:`~iris.common.resolve.Resolve.prepared_category` and therefore this is a legitimate reason to add the associated metadata of the local dependency to the ``prepared_category``. - Args: - - * cube: - The :class:`~iris.cube.Cube` that may contain an auxiliary factory to be prepared. - - * category_local: - The :class:`~iris.common.resolve._CategoryItems` of all metadata local to the provided ``cube``. - - Kwargs: - - * from_src: + Parameters + ---------- + cube : :class:`~iris.cube.Cube` + The :class:`~iris.cube.Cube` that may contain an auxiliary factory + to be prepared. + category_local : :class:`~iris.common.resolve._CategoryItems` + The :class:`~iris.common.resolve._CategoryItems` of all metadata + local to the provided ``cube``. + from_src : bool, default=True Boolean stating whether the provided ``cube`` is either a ``src`` or ``tgt`` :class:`~iris.cube.Cube` - used to retrieve the appropriate metadata from a :class:`~iris.common.resolve._PreparedMetadata`. 
@@ -1748,26 +1774,31 @@ def _prepare_factory_payload(self, cube, category_local, from_src=True): logger.debug(dmsg) def _prepare_local_payload_aux(self, src_aux_coverage, tgt_aux_coverage): - """Populate the ``items_aux`` member of :attr:`~iris.common.resolve.Resolve.prepared_category_items` + """Consolidate local auxiliary coordinate metadata. + + Populate the ``items_aux`` member of :attr:`~iris.common.resolve.Resolve.prepared_category_items` with a :class:`~iris.common.resolve._PreparedItem` containing the necessary metadata for each ``src`` or ``tgt`` local auxiliary coordinate to be constructed and attached to the resulting resolved :class:`~iris.cube.Cube`. .. note:: - In general, lenient behaviour subscribes to the philosophy that it is easier to remove - metadata than it is to find then add metadata. To those ends, lenient behaviour supports - metadata richness by adding both local ``src`` and ``tgt`` auxiliary coordinates. - Alternatively, strict behaviour will only add a ``tgt`` local auxiliary coordinate that - spans dimensions not mapped to by the ``src`` e.g., extra ``tgt`` dimensions. - - Args: - - * src_aux_coverage: - The :class:`~iris.common.resolve.Resolve._AuxCoverage` for the ``src`` :class:`~iris.cube.Cube`. - - * tgt_aux_coverage: - The :class:~iris.common.resolve.Resolve._AuxCoverage` for the ``tgt`` :class:`~iris.cube.Cube`. + In general, lenient behaviour subscribes to the philosophy that + it is easier to remove metadata than it is to find then add + metadata. To those ends, lenient behaviour supports metadata + richness by adding both local ``src`` and ``tgt`` auxiliary + coordinates. Alternatively, strict behaviour will only add a + ``tgt`` local auxiliary coordinate that spans dimensions not + mapped to by the ``src`` e.g., extra ``tgt`` dimensions. + + Parameters + ---------- + src_aux_coverage : + The :class:`~iris.common.resolve.Resolve._AuxCoverage` for the + ``src`` :class:`~iris.cube.Cube`. 
+ tgt_aux_coverage : + The :class:~iris.common.resolve.Resolve._AuxCoverage` for the + ``tgt`` :class:`~iris.cube.Cube`. """ # Determine whether there are tgt dimensions not mapped to by an @@ -1820,7 +1851,9 @@ def _prepare_local_payload_aux(self, src_aux_coverage, tgt_aux_coverage): logger.debug(dmsg) def _prepare_local_payload_dim(self, src_dim_coverage, tgt_dim_coverage): - """Populate the ``items_dim`` member of :attr:`~iris.common.resolve.Resolve.prepared_category_items` + """Consolidate local dimension coordinate metadata. + + Populate the ``items_dim`` member of :attr:`~iris.common.resolve.Resolve.prepared_category_items` with a :class:`~iris.common.resolve._PreparedItem` containing the necessary metadata for each ``src`` or ``tgt`` local :class:`~iris.coords.DimCoord` to be constructed and attached to the resulting resolved :class:`~iris.cube.Cube`. @@ -1832,13 +1865,14 @@ def _prepare_local_payload_dim(self, src_dim_coverage, tgt_dim_coverage): is more liberal, whereas strict behaviour will only add a local ``tgt`` coordinate covering an unmapped "extra" ``tgt`` dimension/s. - Args: - - * src_dim_coverage: - The :class:`~iris.common.resolve.Resolve._DimCoverage` for the ``src`` :class:`~iris.cube.Cube`. - - * tgt_dim_coverage: - The :class:`~iris.common.resolve.Resolve._DimCoverage` for the ``tgt`` :class:`~iris.cube.Cube`. + Parameters + ---------- + src_dim_coverage : + The :class:`~iris.common.resolve.Resolve._DimCoverage` for the + ``src`` :class:`~iris.cube.Cube`. + tgt_dim_coverage : + The :class:`~iris.common.resolve.Resolve._DimCoverage` for the + ``tgt`` :class:`~iris.cube.Cube`. 
""" mapped_tgt_dims = self.mapping.values() @@ -1896,7 +1930,9 @@ def _prepare_local_payload_dim(self, src_dim_coverage, tgt_dim_coverage): self.prepared_category.items_dim.append(prepared_item) def _prepare_local_payload_scalar(self, src_aux_coverage, tgt_aux_coverage): - """Populate the ``items_scalar`` member of :attr:`~iris.common.resolve.Resolve.prepared_category_items` + """Consolidate local scalar coordinate metadata. + + Populate the ``items_scalar`` member of :attr:`~iris.common.resolve.Resolve.prepared_category_items` with a :class:`~iris.common.resolve._PreparedItem` containing the necessary metadata for each ``src`` or ``tgt`` local scalar coordinate to be constructed and attached to the resulting resolved :class:`~iris.cube.Cube`. @@ -1909,13 +1945,14 @@ def _prepare_local_payload_scalar(self, src_aux_coverage, tgt_aux_coverage): Alternatively, strict behaviour will only add a ``tgt`` local scalar coordinate when the ``src`` is a scalar :class:`~iris.cube.Cube` with no local scalar coordinates. - Args: - - * src_aux_coverage: - The :class:`~iris.common.resolve.Resolve._AuxCoverage` for the ``src`` :class:`~iris.cube.Cube`. - - * tgt_aux_coverage: - The :class:~iris.common.resolve.Resolve._AuxCoverage` for the ``tgt`` :class:`~iris.cube.Cube`. + Parameters + ---------- + src_aux_coverage : + The :class:`~iris.common.resolve.Resolve._AuxCoverage` for the + ``src`` :class:`~iris.cube.Cube`. + tgt_aux_coverage : + The :class:~iris.common.resolve.Resolve._AuxCoverage` for the + ``tgt`` :class:`~iris.cube.Cube`. """ # Add all local tgt scalar coordinates iff the src cube is a @@ -1949,24 +1986,27 @@ def _prepare_local_payload( tgt_dim_coverage, tgt_aux_coverage, ): - """Populate the :attr:`~iris.common.resolve.Resolve.prepared_category_items` with a + """Consolidate the local metadata. 
+ + Populate the :attr:`~iris.common.resolve.Resolve.prepared_category_items` with a :class:`~iris.common.resolve._PreparedItem` containing the necessary metadata from the ``src`` and/or ``tgt`` :class:`~iris.cube.Cube` for each coordinate to be constructed and attached to the resulting resolved :class:`~iris.cube.Cube`. - Args: - - * src_dim_coverage: - The :class:`~iris.common.resolve.Resolve._DimCoverage` for the ``src`` :class:`~iris.cube.Cube`. - - * src_aux_coverage: - The :class:`~iris.common.resolve.Resolve._AuxCoverage` for the ``src`` :class:`~iris.cube.Cube`. - - * tgt_dim_coverage: - The :class:`~iris.common.resolve.Resolve._DimCoverage` for the ``tgt`` :class:`~iris.cube.Cube`. - - * tgt_aux_coverage: - The :class:~iris.common.resolve.Resolve._AuxCoverage` for the ``tgt`` :class:`~iris.cube.Cube`. + Parameters + ---------- + src_dim_coverage : + The :class:`~iris.common.resolve.Resolve._DimCoverage` for the + ``src`` :class:`~iris.cube.Cube`. + src_aux_coverage : + The :class:`~iris.common.resolve.Resolve._AuxCoverage` for the + ``src`` :class:`~iris.cube.Cube`. + tgt_dim_coverage : + The :class:`~iris.common.resolve.Resolve._DimCoverage` for the + ``tgt`` :class:`~iris.cube.Cube`. + tgt_aux_coverage : + The :class:~iris.common.resolve.Resolve._AuxCoverage` for the + ``tgt`` :class:`~iris.cube.Cube`. """ # Add local src/tgt dim coordinates. @@ -1981,7 +2021,9 @@ def _prepare_local_payload( def _prepare_points_and_bounds( self, src_coord, tgt_coord, src_dims, tgt_dims, ignore_mismatch=None ): - """Compare the points and bounds of the ``src`` and ``tgt`` coordinates to ensure + """Consolidate points and bounds. + + Compare the points and bounds of the ``src`` and ``tgt`` coordinates to ensure that they are equivalent, taking into account broadcasting when appropriate. .. 
note:: @@ -1994,31 +2036,26 @@ def _prepare_points_and_bounds( An exception will be raised if either the points or bounds are different, however appropriate lenient behaviour concessions are applied. - Args: - - * src_coord: + Parameters + ---------- + src_coord : The ``src`` :class:`~iris.cube.Cube` coordinate with metadata matching the ``tgt_coord``. - - * tgt_coord: + tgt_coord : The ``tgt`` :class`~iris.cube.Cube` coordinate with metadata matching the ``src_coord``. - - * src_dims: + src_dims : The dimension/s of the ``src_coord`` attached to the ``src`` :class:`~iris.cube.Cube`. - - * tgt_dims: + tgt_dims : The dimension/s of the ``tgt_coord`` attached to the ``tgt`` :class:`~iris.cube.Cube`. - - Kwargs: - - * ignore_mismatch: + ignore_mismatch : bool, optional For lenient behaviour only, don't raise an exception if there is a difference between the ``src`` and ``tgt`` coordinate points or bounds. Defaults to ``False``. - Returns: - Tuple of equivalent ``points`` and ``bounds``, otherwise ``None``. + Returns + ------- + Tuple of equivalent ``points`` and ``bounds``, otherwise ``None``. """ from iris.util import array_equal @@ -2258,20 +2295,19 @@ def _tgt_cube_prepare(self, data): cube.remove_ancillary_variable(av) def cube(self, data, in_place=False): - """Create the resultant :class:`~iris.cube.Cube` from the resolved ``lhs`` + """Create the resultant resolved cube. + + Create the resultant :class:`~iris.cube.Cube` from the resolved ``lhs`` and ``rhs`` :class:`~iris.cube.Cube` operands, using the provided ``data``. - Args: - - * data: + Parameters + ---------- + data : The data payload for the resultant :class:`~iris.cube.Cube`, which **must match** the expected resolved :attr:`~iris.common.resolve.Resolve.shape`. - - Kwargs: - - * in_place: + in_place : bool, default=False If ``True``, the ``data`` is inserted into the ``tgt`` :class:`~iris.cube.Cube`. 
The existing metadata of the ``tgt`` :class:`~iris.cube.Cube` is replaced with the resolved metadata from @@ -2279,9 +2315,12 @@ def cube(self, data, in_place=False): a **new** :class:`~iris.cube.Cube` instance is returned. Default is ``False``. - Returns: - :class:`~iris.cube.Cube` + Returns + ------- + :class:`~iris.cube.Cube` + Notes + ----- .. note:: :class:`~iris.common.resolve.Resolve` will determine whether the @@ -2301,8 +2340,8 @@ def cube(self, data, in_place=False): match** the expected resolved :attr:`~iris.common.resolve.Resolve.shape`. - For example: - + Examples + -------- .. testsetup:: in-place import iris @@ -2423,7 +2462,9 @@ def cube(self, data, in_place=False): @property def mapped(self): - """Boolean state representing whether **all** ``src`` :class:`~iris.cube.Cube` + """Whether all ``src`` dimensions have been mapped. + + Boolean state representing whether **all** ``src`` :class:`~iris.cube.Cube` dimensions have been associated with relevant ``tgt`` :class:`~iris.cube.Cube` dimensions. @@ -2493,7 +2534,7 @@ def mapped(self): >>> resolver.map_rhs_to_lhs False - """ # noqa: D214, D410, D411 + """ # noqa: D214, D406, D407, D410, D411 result = None if self.mapping is not None: result = self._src_cube.ndim == len(self.mapping) @@ -2501,7 +2542,9 @@ def mapped(self): @property def shape(self): - """Proposed shape of the final resolved cube given the ``lhs`` + """Proposed shape of the final resolved cube. + + Proposed shape of the final resolved cube given the ``lhs`` :class:`~iris.cube.Cube` operand and the ``rhs`` :class:`~iris.cube.Cube` operand. 
@@ -2554,5 +2597,5 @@ def shape(self): >>> Resolve(cube2, cube1).shape (240, 37, 49) - """ # noqa: D214, D410, D411 + """ # noqa: D214, D406, D407, D410, D411 return self._broadcast_shape diff --git a/lib/iris/config.py b/lib/iris/config.py index c31f856d54..9cec602a95 100644 --- a/lib/iris/config.py +++ b/lib/iris/config.py @@ -21,10 +21,6 @@ The full path to the Iris palette configuration directory -.. py:data:: iris.config.IMPORT_LOGGER - - The [optional] name of the logger to notify when first imported. - ---------- """ @@ -35,7 +31,7 @@ import os.path import warnings -import iris.exceptions +import iris.warnings def get_logger(name, datefmt=None, fmt=None, level=None, propagate=None, handler=True): @@ -46,24 +42,24 @@ def get_logger(name, datefmt=None, fmt=None, level=None, propagate=None, handler Parameters ---------- - name + name : The name of the logger. Typically this is the module filename that owns the logger. - datefmt: optional + datefmt : optional The date format string of the :class:`logging.Formatter`. Defaults to ``%d-%m-%Y %H:%M:%S``. - fmt: optional + fmt : optional The additional format string of the :class:`logging.Formatter`. This is appended to the default format string ``%(asctime)s %(name)s %(levelname)s - %(message)s``. - level: optional + level : optional The threshold level of the logger. Defaults to ``INFO``. - propagate: optional + propagate : optional Sets the ``propagate`` attribute of the :class:`logging.Logger`, which determines whether events logged to this logger will be passed to the handlers of higher level loggers. Defaults to ``False``. - handler: optional + handler : bool, default=True Create and attach a :class:`logging.StreamHandler` to the logger. Defaults to ``True``. 
@@ -143,7 +139,7 @@ def get_dir_option(section, option, default=None): ) warnings.warn( msg.format(section, option, c_path), - category=iris.exceptions.IrisIgnoringWarning, + category=iris.warnings.IrisIgnoringWarning, ) return path @@ -250,7 +246,7 @@ def __setattr__(self, name, value): ) warnings.warn( wmsg.format(value, name, good_value), - category=iris.exceptions.IrisDefaultingWarning, + category=iris.warnings.IrisDefaultingWarning, ) value = good_value self.__dict__[name] = value diff --git a/lib/iris/coord_categorisation.py b/lib/iris/coord_categorisation.py index dbf27ea86e..f4c3aa6cb4 100644 --- a/lib/iris/coord_categorisation.py +++ b/lib/iris/coord_categorisation.py @@ -5,12 +5,13 @@ """Cube functions for coordinate categorisation. All the functions provided here add a new coordinate to a cube. - * The function :func:`add_categorised_coord` performs a generic - coordinate categorisation. - * The other functions all implement specific common cases - (e.g. :func:`add_day_of_month`). - Currently, these are all calendar functions, so they only apply to - "Time coordinates". + +* The function :func:`add_categorised_coord` performs a generic + coordinate categorisation. +* The other functions all implement specific common cases + (e.g. :func:`add_day_of_month`). + Currently, these are all calendar functions, so they only apply to + "Time coordinates". """ @@ -28,21 +29,18 @@ def add_categorised_coord(cube, name, from_coord, category_function, units="1"): Make a new :class:`iris.coords.AuxCoord` from mapped values, and add it to the cube. - Args: - - * cube (:class:`iris.cube.Cube`): - the cube containing 'from_coord'. The new coord will be added into it. - * name (string): + Parameters + ---------- + cube : :class:`iris.cube.Cube` + The cube containing 'from_coord'. The new coord will be added into it. 
+ name : str name of the created coordinate - * from_coord (:class:`iris.coords.Coord` or string): + from_coord : :class:`iris.coords.Coord` or str coordinate in 'cube', or the name of one - * category_function (callable): + category_function : callable function(coordinate, value), returning a category value for a coordinate point-value - - Kwargs: - - * units: + units : str, default="1" units of the category value, typically 'no_unit' or '1'. """ # Interpret coord, if given as a name @@ -90,15 +88,16 @@ def vectorised_fn(*args): def _pt_date(coord, time): """Return the datetime of a time-coordinate point. - Args: - - * coord (Coord): + Parameters + ---------- + coord : Coord coordinate (must be Time-type) - * time (float): + time : float value of a coordinate point - Returns: - cftime.datetime + Returns + ------- + cftime.datetime """ # NOTE: All of the currently defined categorisation functions are @@ -148,10 +147,7 @@ def add_day_of_month(cube, coord, name="day_of_month"): def add_day_of_year(cube, coord, name="day_of_year"): - """Add a categorical day-of-year coordinate, values 1..365 - (1..366 in leap years). - - """ + """Add a categorical day-of-year coordinate, values 1..365 (1..366 in leap years).""" # Note: cftime.datetime objects return a normal tuple from timetuple(), # unlike datetime.datetime objects that return a namedtuple. # Index the time tuple (element 7 is day of year) instead of using named @@ -208,10 +204,7 @@ def add_hour(cube, coord, name="hour"): def _months_in_season(season): - """Returns a list of month numbers corresponding to each month in the - given season. 
- - """ + """Return a list of month numbers corresponding to each month in the given season.""" cyclic_months = "jfmamjjasondjfmamjjasond" m0 = cyclic_months.find(season.lower()) if m0 < 0: @@ -301,22 +294,18 @@ def _month_season_numbers(seasons): def add_season(cube, coord, name="season", seasons=("djf", "mam", "jja", "son")): - """Add a categorical season-of-year coordinate, with user specified - seasons. + """Add a categorical season-of-year coordinate, with user specified seasons. - Args: - - * cube (:class:`iris.cube.Cube`): + Parameters + ---------- + cube : :class:`iris.cube.Cube` The cube containing 'coord'. The new coord will be added into it. - * coord (:class:`iris.coords.Coord` or string): + coord : :class:`iris.coords.Coord` or str Coordinate in 'cube', or its name, representing time. - - Kwargs: - - * name (string): + name : str, default="season" Name of the created coordinate. Defaults to "season". - * seasons (:class:`list` of strings): + seasons : :class:`list` of str, optional List of seasons defined by month abbreviations. Each month must appear once and only once. Defaults to standard meteorological seasons ('djf', 'mam', 'jja', 'son'). @@ -340,22 +329,21 @@ def _season(coord, value): def add_season_number( cube, coord, name="season_number", seasons=("djf", "mam", "jja", "son") ): - """Add a categorical season-of-year coordinate, values 0..N-1 where - N is the number of user specified seasons. + """Add a categorical season-of-year coordinate. - Args: + Add a categorical season-of-year coordinate, values 0..N-1 where + N is the number of user specified seasons. - * cube (:class:`iris.cube.Cube`): + Parameters + ---------- + cube : :class:`iris.cube.Cube` The cube containing 'coord'. The new coord will be added into it. - * coord (:class:`iris.coords.Coord` or string): + coord : :class:`iris.coords.Coord` or str Coordinate in 'cube', or its name, representing time. 
- - Kwargs: - - * name (string): + name : str, default="season" Name of the created coordinate. Defaults to "season_number". - * seasons (:class:`list` of strings): + seasons : :class:`list` of str, optional List of seasons defined by month abbreviations. Each month must appear once and only once. Defaults to standard meteorological seasons ('djf', 'mam', 'jja', 'son'). @@ -423,25 +411,21 @@ def _season_year(coord, value): def add_season_membership(cube, coord, season, name="season_membership"): - """Add a categorical season membership coordinate for a user specified - season. + """Add a categorical season membership coordinate for a user specified season. The coordinate has the value True for every time that is within the given season, and the value False otherwise. - Args: - - * cube (:class:`iris.cube.Cube`): + Parameters + ---------- + cube : :class:`iris.cube.Cube` The cube containing 'coord'. The new coord will be added into it. - * coord (:class:`iris.coords.Coord` or string): + coord : :class:`iris.coords.Coord` or str Coordinate in 'cube', or its name, representing time. - * season (string): + season : str Season defined by month abbreviations. - - Kwargs: - - * name (string): + name : str, default="season_membership" Name of the created coordinate. Defaults to "season_membership". """ diff --git a/lib/iris/coord_systems.py b/lib/iris/coord_systems.py index 33214ef5e0..35eea98764 100644 --- a/lib/iris/coord_systems.py +++ b/lib/iris/coord_systems.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Definitions of coordinate systems. 
- -""" +"""Definitions of coordinate systems.""" from abc import ABCMeta, abstractmethod from functools import cached_property @@ -15,7 +13,7 @@ import numpy as np from iris._deprecation import warn_deprecated -import iris.exceptions +import iris.warnings def _arg_default(value, default, cast_as=float): @@ -81,7 +79,7 @@ def __ne__(self, other): return not (self == other) def xml_element(self, doc, attrs=None): - """Default behaviour for coord systems.""" + """Perform default behaviour for coord systems.""" # attrs - optional list of (k,v) items, used for alternate output xml_element_name = type(self).__name__ @@ -117,10 +115,7 @@ def _ellipsoid_to_globe(ellipsoid, globe_default): @abstractmethod def as_cartopy_crs(self): - """Return a cartopy CRS representing our native coordinate - system. - - """ + """Return a cartopy CRS representing our native coordinate system.""" pass @abstractmethod @@ -143,7 +138,9 @@ def as_cartopy_projection(self): class GeogCS(CoordSystem): - """A geographic (ellipsoidal) coordinate system, defined by the shape of + """A geographic (ellipsoidal) coordinate system. + + A geographic (ellipsoidal) coordinate system, defined by the shape of the Earth and a prime meridian. """ @@ -160,12 +157,12 @@ def __init__( Parameters ---------- - * semi_major_axis, semi_minor_axis: + semi_major_axis, semi_minor_axis : optional Axes of ellipsoid, in metres. At least one must be given (see note below). - * inverse_flattening: + inverse_flattening : optional Can be omitted if both axes given (see note below). Default 0.0 - * longitude_of_prime_meridian: + longitude_of_prime_meridian : optional Specifies the prime meridian on the ellipsoid, in degrees. Default 0.0 Notes @@ -364,7 +361,9 @@ class that invalidates the cache. return ccrs.Geodetic(self._globe) def _wipe_cached_properties(self): - """Wipes the cached properties on the object as part of any update to a + """Wipes the cached properties on the object. 
+ + Wipes the cached properties on the object as part of any update to a value that invalidates the cache. """ try: @@ -385,9 +384,12 @@ def semi_major_axis(self): @semi_major_axis.setter def semi_major_axis(self, value): - """Setting this property to a different value invalidates the current datum + """Assign semi_major_axis. + + Setting this property to a different value invalidates the current datum (if any) because a datum encodes a specific semi-major axis. This also invalidates the cached `cartopy.Globe` and `cartopy.CRS`. + """ value = float(value) if not np.isclose(self.semi_major_axis, value): @@ -404,9 +406,12 @@ def semi_minor_axis(self): @semi_minor_axis.setter def semi_minor_axis(self, value): - """Setting this property to a different value invalidates the current datum + """Assign semi_minor_axis. + + Setting this property to a different value invalidates the current datum (if any) because a datum encodes a specific semi-minor axis. This also invalidates the cached `cartopy.Globe` and `cartopy.CRS`. + """ value = float(value) if not np.isclose(self.semi_minor_axis, value): @@ -423,15 +428,18 @@ def inverse_flattening(self): @inverse_flattening.setter def inverse_flattening(self, value): - """Setting this property to a different value does not affect the behaviour + """Assign inverse_flattening. + + Setting this property to a different value does not affect the behaviour of this object any further than the value of this property. + """ wmsg = ( "Setting inverse_flattening does not affect other properties of " "the GeogCS object. To change other properties set them explicitly" " or create a new GeogCS instance." 
) - warnings.warn(wmsg, category=iris.exceptions.IrisUserWarning) + warnings.warn(wmsg, category=iris.warnings.IrisUserWarning) value = float(value) self._inverse_flattening = value @@ -445,10 +453,13 @@ def datum(self): @datum.setter def datum(self, value): - """Setting this property to a different value invalidates the current + """Assign datum. + + Setting this property to a different value invalidates the current values of the ellipsoid measurements because a datum encodes its own ellipse. This also invalidates the cached `cartopy.Globe` and `cartopy.CRS`. + """ if self._datum != value: self._semi_major_axis = None @@ -485,27 +496,23 @@ def __init__( north_pole_grid_longitude=None, ellipsoid=None, ): - """Constructs a coordinate system with rotated pole, on an - optional :class:`GeogCS`. - - Args: + """Construct a coordinate system with rotated pole, on an optional :class:`GeogCS`. - * grid_north_pole_latitude: + Parameters + ---------- + grid_north_pole_latitude : The true latitude of the rotated pole in degrees. - - * grid_north_pole_longitude: + grid_north_pole_longitude : The true longitude of the rotated pole in degrees. - - Kwargs: - - * north_pole_grid_longitude: + north_pole_grid_longitude : optional Longitude of true north pole in rotated grid, in degrees. - Defaults to 0.0 . - - * ellipsoid (:class:`GeogCS`): + Defaults to 0.0. + ellipsoid : :class:`GeogCS`, optional If given, defines the ellipsoid. - Examples:: + Examples + -------- + :: rotated_cs = RotatedGeogCS(30, 30) another_cs = RotatedGeogCS(30, 30, @@ -594,36 +601,31 @@ def __init__( scale_factor_at_central_meridian=None, ellipsoid=None, ): - """Constructs a TransverseMercator object. + """Construct a TransverseMercator object. - Args: - - * latitude_of_projection_origin: - True latitude of planar origin in degrees. - - * longitude_of_central_meridian: - True longitude of planar origin in degrees. - - Kwargs: - - * false_easting: - X offset from planar origin in metres. - Defaults to 0.0 . 
- - * false_northing: - Y offset from planar origin in metres. - Defaults to 0.0 . - - * scale_factor_at_central_meridian: - Reduces the cylinder to slice through the ellipsoid - (secant form). Used to provide TWO longitudes of zero - distortion in the area of interest. - Defaults to 1.0 . - - * ellipsoid (:class:`GeogCS`): + Parameters + ---------- + latitude_of_projection_origin : + True latitude of planar origin in degrees. + longitude_of_central_meridian : + True longitude of planar origin in degrees. + false_easting : optional + X offset from planar origin in metres. + Defaults to 0.0. + false_northing : optional + Y offset from planar origin in metres. + Defaults to 0.0. + scale_factor_at_central_meridian : optional + Reduces the cylinder to slice through the ellipsoid + (secant form). Used to provide TWO longitudes of zero + distortion in the area of interest. + Defaults to 1.0 . + ellipsoid : :class:`GeogCS`, optional If given, defines the ellipsoid. - Example:: + Examples + -------- + :: airy1830 = GeogCS(6377563.396, 6356256.909) osgb = TransverseMercator(49, -2, 400000, -100000, 0.9996012717, @@ -715,25 +717,19 @@ def __init__( false_northing=None, ellipsoid=None, ): - """Constructs an Orthographic coord system. - - Args: + """Construct an Orthographic coord system. - * latitude_of_projection_origin: + Parameters + ---------- + latitude_of_projection_origin : True latitude of planar origin in degrees. - - * longitude_of_projection_origin: + longitude_of_projection_origin : True longitude of planar origin in degrees. - - Kwargs: - - * false_easting: - X offset from planar origin in metres. Defaults to 0.0 . - - * false_northing: - Y offset from planar origin in metres. Defaults to 0.0 . - - * ellipsoid (:class:`GeogCS`): + false_easting : optional + X offset from planar origin in metres. Defaults to 0.0. + false_northing : optional + Y offset from planar origin in metres. Defaults to 0.0. 
+ ellipsoid : :class:`GeogCS`, optional If given, defines the ellipsoid. """ @@ -772,7 +768,7 @@ def as_cartopy_crs(self): warnings.warn( "Discarding false_easting and false_northing that are " "not used by Cartopy.", - category=iris.exceptions.IrisDefaultingWarning, + category=iris.warnings.IrisDefaultingWarning, ) return ccrs.Orthographic( @@ -799,29 +795,22 @@ def __init__( false_northing=None, ellipsoid=None, ): - """Constructs a Vertical Perspective coord system. + """Construct a Vertical Perspective coord system. - Args: - - * latitude_of_projection_origin: + Parameters + ---------- + latitude_of_projection_origin : True latitude of planar origin in degrees. - - * longitude_of_projection_origin: + longitude_of_projection_origin : True longitude of planar origin in degrees. - - * perspective_point_height: + perspective_point_height : Altitude of satellite in metres above the surface of the ellipsoid. - - Kwargs: - - * false_easting: - X offset from planar origin in metres. Defaults to 0.0 . - - * false_northing: - Y offset from planar origin in metres. Defaults to 0.0 . - - * ellipsoid (:class:`GeogCS`): + false_easting : optional + X offset from planar origin in metres. Defaults to 0.0. + false_northing : optional + Y offset from planar origin in metres. Defaults to 0.0. + ellipsoid : :class:`GeogCS`, optional If given, defines the ellipsoid. """ @@ -891,31 +880,23 @@ def __init__( false_northing=None, ellipsoid=None, ): - """Constructs a Geostationary coord system. + """Construct a Geostationary coord system. - Args: - - * latitude_of_projection_origin: + Parameters + ---------- + latitude_of_projection_origin : True latitude of planar origin in degrees. - - * longitude_of_projection_origin: + longitude_of_projection_origin : True longitude of planar origin in degrees. - - * perspective_point_height: + perspective_point_height : Altitude of satellite in metres above the surface of the ellipsoid. 
- - * sweep_angle_axis (string): + sweep_angle_axis : str The axis along which the satellite instrument sweeps - 'x' or 'y'. - - Kwargs: - - * false_easting: - X offset from planar origin in metres. Defaults to 0.0 . - - * false_northing: - Y offset from planar origin in metres. Defaults to 0.0 . - - * ellipsoid (:class:`GeogCS`): + false_easting : optional + X offset from planar origin in metres. Defaults to 0.0. + false_northing : optional + Y offset from planar origin in metres. Defaults to 0.0. + ellipsoid : :class:`GeogCS`, optional If given, defines the ellipsoid. """ @@ -995,37 +976,31 @@ def __init__( ellipsoid=None, scale_factor_at_projection_origin=None, ): - """Constructs a Stereographic coord system. + """Construct a Stereographic coord system. Parameters ---------- central_lat : float The latitude of the pole. - central_lon : float The central longitude, which aligns with the y axis. - false_easting : float, optional X offset from planar origin in metres. - false_northing : float, optional Y offset from planar origin in metres. - true_scale_lat : float, optional Latitude of true scale. - scale_factor_at_projection_origin : float, optional Scale factor at the origin of the projection - ellipsoid : :class:`GeogCS`, optional If given, defines the ellipsoid. Notes ----- - It is only valid to provide one of true_scale_lat and scale_factor_at_projection_origin + It is only valid to provide one of true_scale_lat and + scale_factor_at_projection_origin """ - #: True latitude of planar origin in degrees. self.central_lat = float(central_lat) @@ -1045,7 +1020,7 @@ def __init__( scale_factor_at_projection_origin, None, cast_as=_float_or_None ) # N.B. the way we use these parameters, we need them to default to None, - # and *not* to 0.0 . + # and *not* to 0.0. if ( self.true_scale_lat is not None @@ -1114,22 +1089,16 @@ def __init__( ---------- central_lat : {90, -90} The latitude of the pole. 
- central_lon : float The central longitude, which aligns with the y axis. - false_easting : float, optional X offset from planar origin in metres. - false_northing : float, optional Y offset from planar origin in metres. - true_scale_lat : float, optional Latitude of true scale. - scale_factor_at_projection_origin : float, optional Scale factor at the origin of the projection - ellipsoid : :class:`GeogCS`, optional If given, defines the ellipsoid. @@ -1140,7 +1109,6 @@ def __init__( """ - super().__init__( central_lat=central_lat, central_lon=central_lon, @@ -1169,29 +1137,26 @@ def __init__( secant_latitudes=None, ellipsoid=None, ): - """Constructs a LambertConformal coord system. - - Kwargs: - - * central_lat: - The latitude of "unitary scale". Defaults to 39.0 . - - * central_lon: - The central longitude. Defaults to -96.0 . + """Construct a LambertConformal coord system. - * false_easting: - X offset from planar origin in metres. Defaults to 0.0 . - - * false_northing: - Y offset from planar origin in metres. Defaults to 0.0 . - - * secant_latitudes (number or iterable of 1 or 2 numbers): - Latitudes of secant intersection. One or two. - Defaults to (33.0, 45.0). - - * ellipsoid (:class:`GeogCS`): + Parameters + ---------- + central_lat : optional + The latitude of "unitary scale". Defaults to 39.0 . + central_lon : optional + The central longitude. Defaults to -96.0 . + false_easting : optional + X offset from planar origin in metres. Defaults to 0.0. + false_northing : optional + Y offset from planar origin in metres. Defaults to 0.0. + secant_latitudes : number or iterable of 1 or 2 numbers, optional + Latitudes of secant intersection. One or two. + Defaults to (33.0, 45.0). + ellipsoid : :class:`GeogCS`, optional If given, defines the ellipsoid. + Notes + ----- .. note: Default arguments are for the familiar USA map: @@ -1200,7 +1165,6 @@ def __init__( secant_latitudes=(33, 45) """ - #: True latitude of planar origin in degrees. 
self.central_lat = _arg_default(central_lat, 39.0) @@ -1276,33 +1240,29 @@ def __init__( false_easting=None, false_northing=None, ): - """Constructs a Mercator coord system. - - Kwargs: + """Construct a Mercator coord system. - * longitude_of_projection_origin: - True longitude of planar origin in degrees. Defaults to 0.0 . - - * ellipsoid (:class:`GeogCS`): + Parameters + ---------- + longitude_of_projection_origin : optional + True longitude of planar origin in degrees. Defaults to 0.0. + ellipsoid : :class:`GeogCS`, optional If given, defines the ellipsoid. - - * standard_parallel: - The latitude where the scale is 1. Defaults to 0.0 . - - * scale_factor_at_projection_origin: + standard_parallel : optional + The latitude where the scale is 1. Defaults to 0.0. + scale_factor_at_projection_origin : optional Scale factor at natural origin. Defaults to unused. - - * false_easting: + false_easting : optional X offset from the planar origin in metres. Defaults to 0.0. - - * false_northing: + false_northing : optional Y offset from the planar origin in metres. Defaults to 0.0. - - * datum: + datum : optional If given, specifies the datumof the coordinate system. Only respected if iris.Future.daum_support is set. - Note: Only one of ``standard_parallel`` and + Notes + ----- + Only one of ``standard_parallel`` and ``scale_factor_at_projection_origin`` should be included. """ @@ -1381,23 +1341,19 @@ def __init__( false_northing=None, ellipsoid=None, ): - """Constructs a Lambert Azimuthal Equal Area coord system. - - Kwargs: - - * latitude_of_projection_origin: - True latitude of planar origin in degrees. Defaults to 0.0 . - - * longitude_of_projection_origin: - True longitude of planar origin in degrees. Defaults to 0.0 . + """Construct a Lambert Azimuthal Equal Area coord system. - * false_easting: - X offset from planar origin in metres. Defaults to 0.0 . - - * false_northing: - Y offset from planar origin in metres. Defaults to 0.0 . 
- - * ellipsoid (:class:`GeogCS`): + Parameters + ---------- + latitude_of_projection_origin : optional + True latitude of planar origin in degrees. Defaults to 0.0. + longitude_of_projection_origin : optional + True longitude of planar origin in degrees. Defaults to 0.0. + false_easting : optional + X offset from planar origin in metres. Defaults to 0.0. + false_northing : optional + Y offset from planar origin in metres. Defaults to 0.0. + ellipsoid : :class:`GeogCS`, optional If given, defines the ellipsoid. """ @@ -1462,28 +1418,23 @@ def __init__( standard_parallels=None, ellipsoid=None, ): - """Constructs a Albers Conical Equal Area coord system. - - Kwargs: + """Construct a Albers Conical Equal Area coord system. - * latitude_of_projection_origin: - True latitude of planar origin in degrees. Defaults to 0.0 . - - * longitude_of_central_meridian: + Parameters + ---------- + latitude_of_projection_origin : optional + True latitude of planar origin in degrees. Defaults to 0.0. + longitude_of_central_meridian : optional True longitude of planar central meridian in degrees. - Defaults to 0.0 . - - * false_easting: - X offset from planar origin in metres. Defaults to 0.0 . - - * false_northing: - Y offset from planar origin in metres. Defaults to 0.0 . - - * standard_parallels (number or iterable of 1 or 2 numbers): + Defaults to 0.0. + false_easting : optional + X offset from planar origin in metres. Defaults to 0.0. + false_northing : optional + Y offset from planar origin in metres. Defaults to 0.0. + standard_parallels : number or iterable of 1 or 2 numbers, optional The one or two latitudes of correct scale. Defaults to (20.0, 50.0). - - * ellipsoid (:class:`GeogCS`): + ellipsoid : :class:`GeogCS`, optional If given, defines the ellipsoid. """ @@ -1567,7 +1518,7 @@ def __init__( scale_factor_at_projection_origin=None, ellipsoid=None, ): - """Constructs an ObliqueMercator object. + """Construct an ObliqueMercator object. 
Parameters ---------- @@ -1580,14 +1531,14 @@ def __init__( The true latitude of the planar origin in degrees. false_easting: float, optional X offset from the planar origin in metres. - Defaults to 0.0 . + Defaults to 0.0. false_northing: float, optional Y offset from the planar origin in metres. - Defaults to 0.0 . + Defaults to 0.0. scale_factor_at_projection_origin: float, optional Scale factor at the central meridian. Defaults to 1.0 . - ellipsoid: :class:`GeogCS`, optional + ellipsoid : :class:`GeogCS`, optional If given, defines the ellipsoid. Examples @@ -1682,7 +1633,7 @@ def __init__( scale_factor_at_projection_origin=None, ellipsoid=None, ): - """Constructs a RotatedMercator object. + """Construct a RotatedMercator object. Parameters ---------- @@ -1692,10 +1643,10 @@ def __init__( The true latitude of the planar origin in degrees. false_easting: float, optional X offset from the planar origin in metres. - Defaults to 0.0 . + Defaults to 0.0. false_northing: float, optional Y offset from the planar origin in metres. - Defaults to 0.0 . + Defaults to 0.0. scale_factor_at_projection_origin: float, optional Scale factor at the central meridian. Defaults to 1.0 . diff --git a/lib/iris/coords.py b/lib/iris/coords.py index 9336f1f4c7..d9de063ea3 100644 --- a/lib/iris/coords.py +++ b/lib/iris/coords.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Definitions of coordinates and other dimensional metadata. 
- -""" +"""Definitions of coordinates and other dimensional metadata.""" from abc import ABCMeta, abstractmethod from collections import namedtuple @@ -34,6 +32,7 @@ import iris.exceptions import iris.time import iris.util +import iris.warnings #: The default value for ignore_axis which controls guess_coord_axis' behaviour DEFAULT_IGNORE_AXIS = False @@ -69,25 +68,22 @@ def __init__( units=None, attributes=None, ): - """Constructs a single dimensional metadata object. - - Args: + """Construct a single dimensional metadata object. - * values: + Parameters + ---------- + values : The values of the dimensional metadata. - - Kwargs: - - * standard_name: + standard_name : optional CF standard name of the dimensional metadata. - * long_name: + long_name : optional Descriptive name of the dimensional metadata. - * var_name: + var_name : optional The netCDF variable name for the dimensional metadata. - * units + units : optional The :class:`~cf_units.Unit` of the dimensional metadata's values. Can be a string, which will be converted to a Unit object. - * attributes + attributes : optional A dictionary containing other cf and user-defined attributes. """ @@ -124,8 +120,7 @@ def __init__( self._bounds_dm = None # Only ever set on Coord-derived instances. def __getitem__(self, keys): - """Returns a new dimensional metadata whose values are obtained by - conventional array indexing. + """Return a new dimensional metadata whose values are obtained by conventional array indexing. .. note:: @@ -162,11 +157,11 @@ def __getitem__(self, keys): return new_metadata def copy(self, values=None): - """Returns a copy of this dimensional metadata object. + """Return a copy of this dimensional metadata object. - Kwargs: - - * values + Parameters + ---------- + values : optional An array of values for the new dimensional metadata object. This may be a different shape to the original values array being copied. 
@@ -240,14 +235,11 @@ def _values(self, values): self._values_dm.data = values def _lazy_values(self): - """Returns a lazy array representing the dimensional metadata values.""" + """Return a lazy array representing the dimensional metadata values.""" return self._values_dm.lazy_data() def _core_values(self): - """The values array of this dimensional metadata which may be a NumPy - array or a dask array. - - """ + """Value array of this dimensional metadata which may be a NumPy array or a dask array.""" result = self._values_dm.core_data() if not _lazy.is_lazy_data(result): result = result.view() @@ -255,10 +247,7 @@ def _core_values(self): return result def _has_lazy_values(self): - """Returns a boolean indicating whether the metadata's values array is a - lazy dask array or not. - - """ + """Indicate whether the metadata's values array is a lazy dask array or not.""" return self._values_dm.has_lazy_data() def summary( @@ -275,26 +264,26 @@ def summary( Parameters ---------- - shorten : bool, default = False + shorten : bool, default=False If True, produce an abbreviated one-line summary. If False, produce a multi-line summary, with embedded newlines. - max_values : int or None, default = None + max_values : int or None If more than this many data values, print truncated data arrays instead of full contents. If 0, print only the shape. The default is 5 if :attr:`shorten`\ =True, or 15 otherwise. This overrides ``numpy.get_printoptions['threshold']``\ . - linewidth : int or None, default = None + linewidth : int or None Character-width controlling line splitting of array outputs. If unset, defaults to ``numpy.get_printoptions['linewidth']``\ . - edgeitems : int = 2 + edgeitems : int, default=2 Controls truncated array output. Overrides ``numpy.getprintoptions['edgeitems']``\ . - precision : int or None, default = None + precision : int or None Controls number decimal formatting. 
When :attr:`shorten`\ =True this is defaults to 3, in which case it overrides ``numpy.get_printoptions()['precision']``\ . - convert_dates : bool, default = True + convert_dates : bool, default=True If the units has a calendar, then print array values as date strings instead of the actual numbers. @@ -303,7 +292,8 @@ def summary( result : str Output text, with embedded newlines when :attr:`shorten`\ =False. - + Notes + ----- .. note:: Arrays are formatted using :meth:`numpy.array2string`. Some aspects of the array formatting are controllable in the usual way, via @@ -586,6 +576,9 @@ def __repr__(self): return self.summary(shorten=True) def __eq__(self, other): + if other is self: + return True + # Note: this method includes bounds handling code, but it only runs # within Coord type instances, as only these allow bounds to be set. @@ -630,7 +623,7 @@ def __hash__(self): return hash(id(self)) def __binary_operator__(self, other, mode_constant): - """Common code which is called by add, sub, mul and div. + """Perform common code which is called by add, sub, mul and div. Mode constant is one of ADD, SUB, MUL, DIV, RDIV @@ -756,10 +749,7 @@ def pointwise_convert(values): self.units = unit def is_compatible(self, other, ignore=None): - """Return whether the current dimensional metadata object is compatible - with another. - - """ + """Return whether the current dimensional metadata object is compatible with another.""" compatible = self.name() == other.name() and self.units == other.units if compatible: @@ -777,25 +767,16 @@ def is_compatible(self, other, ignore=None): @property def dtype(self): - """The NumPy dtype of the current dimensional metadata object, as - specified by its values. - - """ + """The NumPy dtype of the current dimensional metadata object, as specified by its values.""" return self._values_dm.dtype @property def ndim(self): - """Return the number of dimensions of the current dimensional metadata - object. 
- - """ + """Return the number of dimensions of the current dimensional metadata object.""" return self._values_dm.ndim def has_bounds(self): - """Return a boolean indicating whether the current dimensional metadata - object has a bounds array. - - """ + """Indicate whether the current dimensional metadata object has a bounds array.""" # Allows for code to handle unbounded dimensional metadata agnostic of # whether the metadata is a coordinate or not. return False @@ -806,16 +787,20 @@ def shape(self): return self._values_dm.shape def xml_element(self, doc): - """Create the :class:`xml.dom.minidom.Element` that describes this - :class:`_DimensionalMetadata`. + """Create XML element. - Args: + Create the :class:`xml.dom.minidom.Element` that describes this + :class:`_DimensionalMetadata`. - * doc: + Parameters + ---------- + doc : The parent :class:`xml.dom.minidom.Document`. - Returns: - The :class:`xml.dom.minidom.Element` that will describe this + Returns + ------- + :class:`xml.dom.minidom.Element` + :class:`xml.dom.minidom.Element` that will describe this :class:`_DimensionalMetadata`. """ @@ -899,10 +884,7 @@ def _xml_array_repr(data): return result def _value_type_name(self): - """A simple, readable name for the data type of the dimensional metadata - values. - - """ + """Provide a simple name for the data type of the dimensional metadata values.""" dtype = self._core_values().dtype kind = dtype.kind if kind in "SU": @@ -927,25 +909,22 @@ def __init__( units=None, attributes=None, ): - """Constructs a single ancillary variable. + """Construct a single ancillary variable. - Args: - - * data: + Parameters + ---------- + data : The values of the ancillary variable. - - Kwargs: - - * standard_name: + standard_name : optional CF standard name of the ancillary variable. - * long_name: + long_name : optional Descriptive name of the ancillary variable. - * var_name: + var_name : optional The netCDF variable name for the ancillary variable. 
- * units + units : optional The :class:`~cf_units.Unit` of the ancillary variable's values. Can be a string, which will be converted to a Unit object. - * attributes + attributes : optional A dictionary containing other cf and user-defined attributes. """ @@ -980,24 +959,24 @@ def lazy_data(self): If the data have already been loaded for the ancillary variable, the returned Array will be a new lazy array wrapper. - Returns: - A lazy array, representing the ancillary variable data array. + Returns + ------- + A lazy array, representing the ancillary variable data array. """ return super()._lazy_values() def core_data(self): - """The data array at the core of this ancillary variable, which may be a + """Return data array at the core of this ancillary variable. + + The data array at the core of this ancillary variable, which may be a NumPy array or a dask array. """ return super()._core_values() def has_lazy_data(self): - """Return a boolean indicating whether the ancillary variable's data array - is a lazy dask array or not. - - """ + """Indicate whether the ancillary variable's data array is a lazy dask array or not.""" return super()._has_lazy_values() def cube_dims(self, cube): @@ -1010,7 +989,9 @@ def cube_dims(self, cube): class CellMeasure(AncillaryVariable): - """A CF Cell Measure, providing area or volume properties of a cell + """A CF Cell Measure, providing area or volume properties of a cell. + + A CF Cell Measure, providing area or volume properties of a cell where these cannot be inferred from the Coordinates and Coordinate Reference System. @@ -1026,29 +1007,26 @@ def __init__( attributes=None, measure=None, ): - """Constructs a single cell measure. - - Args: + """Construct a single cell measure. - * data: + Parameters + ---------- + data : The values of the measure for each cell. Either a 'real' array (:class:`numpy.ndarray`) or a 'lazy' array (:class:`dask.array.Array`). 
- - Kwargs: - - * standard_name: + standard_name : optional CF standard name of the coordinate. - * long_name: + long_name : optional Descriptive name of the coordinate. - * var_name: + var_name : optional The netCDF variable name for the coordinate. - * units + units : optional The :class:`~cf_units.Unit` of the coordinate's values. Can be a string, which will be converted to a Unit object. - * attributes + attributes : optional A dictionary containing other CF and user-defined attributes. - * measure + measure : optional A string describing the type of measure. Supported values are 'area' and 'volume'. The default is 'area'. @@ -1091,15 +1069,16 @@ def cube_dims(self, cube): return cube.cell_measure_dims(self) def xml_element(self, doc): - """Create the :class:`xml.dom.minidom.Element` that describes this - :class:`CellMeasure`. + """Create the :class:`xml.dom.minidom.Element` that describes this :class:`CellMeasure`. - Args: - - * doc: + Parameters + ---------- + doc : The parent :class:`xml.dom.minidom.Document`. - Returns: + Returns + ------- + :class:`xml.dom.minidom.Element` The :class:`xml.dom.minidom.Element` that describes this :class:`CellMeasure`. @@ -1136,28 +1115,21 @@ def __new__( min_inclusive=True, max_inclusive=True, ): - """Create a CoordExtent for the specified coordinate and range of - values. + """Create a CoordExtent for the specified coordinate and range of values. - Args: - - * name_or_coord + Parameters + ---------- + name_or_coord : Either a coordinate name or a coordinate, as defined in :meth:`iris.cube.Cube.coords()`. - - * minimum + minimum : The minimum value of the range to select. - - * maximum + maximum : The maximum value of the range to select. - - Kwargs: - - * min_inclusive + min_inclusive : bool, default=True If True, coordinate values equal to `minimum` will be included in the selection. Default is True. 
- - * max_inclusive + max_inclusive : bool, default=True If True, coordinate values equal to `maximum` will be included in the selection. Default is True. @@ -1179,7 +1151,7 @@ def __new__( def _get_2d_coord_bound_grid(bounds): - """Creates a grid using the bounds of a 2D coordinate with 4 sided cells. + """Create a grid using the bounds of a 2D coordinate with 4 sided cells. Assumes that the four vertices of the cells are in an anti-clockwise order (bottom-left, bottom-right, top-right, top-left). @@ -1193,12 +1165,14 @@ def _get_2d_coord_bound_grid(bounds): # 0-0-0-0-1 # 3-3-3-3-2 - Args: - * bounds: (array) + Parameters + ---------- + bounds : array Coordinate bounds array of shape (Y, X, 4) - Returns: - * grid: (array) + Returns + ------- + array Grid of shape (Y+1, X+1) """ @@ -1221,7 +1195,9 @@ def _get_2d_coord_bound_grid(bounds): class Cell(namedtuple("Cell", ["point", "bound"])): - """An immutable representation of a single cell of a coordinate, including the + """A coordinate cell containing a single point, or point and bounds. + + An immutable representation of a single cell of a coordinate, including the sample point and/or boundary position. Notes on cell comparison: @@ -1305,10 +1281,7 @@ def __hash__(self): return hash((self.point, bound)) def __eq__(self, other): - """Compares Cell equality depending on the type of the object to be - compared. - - """ + """Compare Cell equality depending on the type of the object to be compared.""" if isinstance(other, (int, float, np.number)) or hasattr(other, "timetuple"): if self.bound is not None: return self.contains_point(other) @@ -1335,14 +1308,15 @@ def __ne__(self, other): return result def __common_cmp__(self, other, operator_method): - """Common method called by the rich comparison operators. The method of + """Common equality comparison. + + Common method called by the rich comparison operators. The method of checking equality depends on the type of the object to be compared. 
Cell vs Cell comparison is used to define a strict order. Non-Cell vs Cell comparison is used to define Constraint matching. - """ - + """ # noqa: D401 if (isinstance(other, list) and len(other) == 1) or ( isinstance(other, np.ndarray) and other.shape == (1,) ): @@ -1441,8 +1415,7 @@ def __str__(self): return str(self.point) def contains_point(self, point): - """For a bounded cell, returns whether the given point lies within the - bounds. + """For a bounded cell, returns whether the given point lies within the bounds. .. note:: The test carried out is equivalent to min(bound) <= point <= max(bound). @@ -1471,26 +1444,25 @@ def __init__( coord_system=None, climatological=False, ): - """Coordinate abstract base class. As of ``v3.0.0`` you **cannot** create an instance of :class:`Coord`. + """Coordinate abstract base class. - Args: + As of ``v3.0.0`` you **cannot** create an instance of :class:`Coord`. - * points: + Parameters + ---------- + points : The values (or value in the case of a scalar coordinate) for each cell of the coordinate. - - Kwargs: - - * standard_name: + standard_name : optional CF standard name of the coordinate. - * long_name: + long_name : optional Descriptive name of the coordinate. - * var_name: + var_name : optional The netCDF variable name for the coordinate. - * units + units : optional The :class:`~cf_units.Unit` of the coordinate's values. Can be a string, which will be converted to a Unit object. - * bounds + bounds : optional An array of values describing the bounds of each cell. Given n bounds for each cell, the shape of the bounds array should be points.shape + (n,). For example, a 1D coordinate with 100 points @@ -1498,13 +1470,13 @@ def __init__( (100, 2) Note if the data is a climatology, `climatological` should be set. - * attributes + attributes : optional A dictionary containing other CF and user-defined attributes. 
- * coord_system + coord_system : optional A :class:`~iris.coord_systems.CoordSystem` representing the coordinate system of the coordinate, e.g., a :class:`~iris.coord_systems.GeogCS` for a longitude coordinate. - * climatological (bool): + climatological : bool, default=False When True: the coordinate is a NetCDF climatological time axis. When True: saving in NetCDF will give the coordinate variable a 'climatology' attribute and will create a boundary variable called @@ -1539,20 +1511,23 @@ def __init__( self._ignore_axis = DEFAULT_IGNORE_AXIS def copy(self, points=None, bounds=None): - """Returns a copy of this coordinate. - - Kwargs: - - * points: A points array for the new coordinate. - This may be a different shape to the points of the coordinate - being copied. - - * bounds: A bounds array for the new coordinate. - Given n bounds for each cell, the shape of the bounds array - should be points.shape + (n,). For example, a 1d coordinate - with 100 points and two bounds per cell would have a bounds - array of shape (100, 2). + """Return a copy of this coordinate. + Parameters + ---------- + points : optional + A points array for the new coordinate. + This may be a different shape to the points of the coordinate + being copied. + bounds : optional + A bounds array for the new coordinate. + Given n bounds for each cell, the shape of the bounds array + should be points.shape + (n,). For example, a 1d coordinate + with 100 points and two bounds per cell would have a bounds + array of shape (100, 2). + + Notes + ----- .. note:: If the points argument is specified and bounds are not, the resulting coordinate will have no bounds. @@ -1610,7 +1585,9 @@ def points(self, points): @property def bounds(self): - """The coordinate bounds values, as a NumPy array, + """Coordinate bounds values. + + The coordinate bounds values, as a NumPy array, or None if no bound values are defined. .. 
note:: The shape of the bound array should be: ``points.shape + @@ -1649,7 +1626,9 @@ def coord_system(self, value): @property def climatological(self): - """A boolean that controls whether the coordinate is a climatological + """Flag for representing a climatological time axis. + + A boolean that controls whether the coordinate is a climatological time axis, in which case the bounds represent a climatological period rather than a normal period. @@ -1684,11 +1663,10 @@ def climatological(self, value): @property def ignore_axis(self): - """A boolean that controls whether guess_coord_axis acts on this - coordinate. + """A boolean controlling if iris.util.guess_coord_axis acts on this coordinate. - Defaults to False, and when set to True it will be skipped by - guess_coord_axis. + Defaults to ``False``, and when set to ``True`` it will be skipped by + :func:`iris.util.guess_coord_axis`. """ return self._ignore_axis @@ -1709,8 +1687,9 @@ def lazy_points(self): If the data have already been loaded for the coord, the returned Array will be a new lazy array wrapper. - Returns: - A lazy array, representing the coord points array. + Returns + ------- + A lazy array, representing the coord points array. """ return super()._lazy_values() @@ -1725,7 +1704,9 @@ def lazy_bounds(self): If the data have already been loaded for the coord, the returned Array will be a new lazy array wrapper. - Returns: + Returns + ------- + lazy array A lazy array representing the coord bounds array or `None` if the coord does not have bounds. @@ -1736,17 +1717,11 @@ def lazy_bounds(self): return lazy_bounds def core_points(self): - """The points array at the core of this coord, which may be a NumPy array - or a dask array. - - """ + """Core points array at the core of this coord, which may be a NumPy array or a dask array.""" return super()._core_values() def core_bounds(self): - """The points array at the core of this coord, which may be a NumPy array - or a dask array. - - """ + """Core bounds. 
The bounds array at the core of this coord, which may be a NumPy array or a dask array.""" result = None if self.has_bounds(): result = self._bounds_dm.core_data() @@ -1755,14 +1730,13 @@ def core_bounds(self): return result def has_lazy_points(self): - """Return a boolean indicating whether the coord's points array is a - lazy dask array or not. - - """ + """Indicate whether the coord's points array is a lazy dask array or not.""" return super()._has_lazy_values() def has_lazy_bounds(self): - """Return a boolean indicating whether the coord's bounds array is a + """Whether coordinate bounds are lazy. + + Return a boolean indicating whether the coord's bounds array is a lazy dask array or not. """ @@ -1789,8 +1763,7 @@ def cube_dims(self, cube): return cube.coord_dims(self) def convert_units(self, unit): - r"""Change the coordinate's units, converting the values in its points - and bounds arrays. + r"""Change the coordinate's units, converting the values in its points and bounds arrays. For example, if a coordinate's :attr:`~iris.coords.Coord.units` attribute is set to radians then:: @@ -1808,7 +1781,7 @@ def convert_units(self, unit): super().convert_units(unit=unit) def cells(self): - """Returns an iterable of Cell instances for this Coord. + """Return an iterable of Cell instances for this Coord. For example:: @@ -1858,20 +1831,20 @@ def _sanity_check_bounds(self): ) def _discontiguity_in_bounds(self, rtol=1e-5, atol=1e-8): - """Checks that the bounds of the coordinate are contiguous. + """Check that the bounds of the coordinate are contiguous. - Kwargs: - * rtol: (float) + rtol : float, default=1e-5 Relative tolerance that is used when checking contiguity. Defaults to 1e-5. - * atol: (float) + atol : float, default=1e-8 Absolute tolerance that is used when checking contiguity. Defaults to 1e-8. - Returns: - * contiguous: (boolean) + Returns + ------- + contiguous : bool True if there are no discontiguities. 
- * diffs: (array or tuple of arrays) + diffs : array or tuple of arrays A boolean array or tuple of boolean arrays which are true where there are discontiguities between neighbouring bounds. If self is a 2D coord of shape (Y, X), a pair of arrays is returned, where @@ -1948,7 +1921,9 @@ def mod360_adjust(compare_axis): return contiguous, diffs def is_contiguous(self, rtol=1e-05, atol=1e-08): - """Return True if, and only if, this Coord is bounded with contiguous + """Whether coordinate has contiguous bounds. + + Return True if, and only if, this Coord is bounded with contiguous bounds to within the specified relative and absolute tolerances. 1D coords are contiguous if the upper bound of a cell aligns, @@ -1959,15 +1934,16 @@ def is_contiguous(self, rtol=1e-05, atol=1e-08): it, and the upper left corner of each cell aligns with the lower left corner of the cell above it. - Args: - - * rtol: + Parameters + ---------- + rtol : float, default=1e-05 The relative tolerance parameter (default is 1e-05). - * atol: + atol : float, default=1e-08 The absolute tolerance parameter (default is 1e-08). - Returns: - Boolean. + Returns + ------- + bool """ if self.has_bounds(): @@ -1977,7 +1953,9 @@ def is_contiguous(self, rtol=1e-05, atol=1e-08): return contiguous def contiguous_bounds(self): - """Returns the N+1 bound values for a contiguous bounded 1D coordinate + """Contiguous bounds of 1D coordinate. + + Return the N+1 bound values for a contiguous bounded 1D coordinate of length N, or the (N+1, M+1) bound values for a contiguous bounded 2D coordinate of shape (N, M). 
@@ -2000,7 +1978,7 @@ def contiguous_bounds(self): warnings.warn( "Coordinate {!r} is not bounded, guessing " "contiguous bounds.".format(self.name()), - category=iris.exceptions.IrisGuessBoundsWarning, + category=iris.warnings.IrisGuessBoundsWarning, ) bounds = self._guess_bounds() elif self.ndim == 2: @@ -2022,7 +2000,6 @@ def contiguous_bounds(self): def is_monotonic(self): """Return True if, and only if, this Coord is monotonic.""" - if self.ndim != 1: raise iris.exceptions.CoordinateMultiDimError(self) @@ -2048,19 +2025,20 @@ def is_compatible(self, other, ignore=None): :attr:`iris.coords.Coord.coord_system` and :attr:`iris.coords.Coord.attributes` that are present in both objects. - Args: - - * other: + Parameters + ---------- + other : An instance of :class:`iris.coords.Coord`, :class:`iris.common.CoordMetadata` or :class:`iris.common.DimCoordMetadata`. - * ignore: + ignore : optional A single attribute key or iterable of attribute keys to ignore when comparing the coordinates. Default is None. To ignore all attributes, set this to other.attributes. - Returns: - Boolean. + Returns + ------- + bool """ compatible = False @@ -2071,7 +2049,9 @@ def is_compatible(self, other, ignore=None): @property def bounds_dtype(self): - """The NumPy dtype of the coord's bounds. Will be `None` if the coord + """The NumPy dtype of the coordinates bounds. + + The NumPy dtype of the coord's bounds. Will be `None` if the coord does not have bounds. """ @@ -2093,12 +2073,15 @@ def has_bounds(self): return self._bounds_dm is not None def cell(self, index): - """Return the single :class:`Cell` instance which results from slicing the + """Point/bound cell at the given coordinate index. + + Return the single :class:`Cell` instance which results from slicing the points/bounds with the given index. 
+ """ index = iris.util._build_full_slice_given_keys(index, self.ndim) - point = tuple(np.array(self.points[index], ndmin=1).flatten()) + point = tuple(np.array(self.core_points()[index], ndmin=1).flatten()) if len(point) != 1: raise IndexError( "The index %s did not uniquely identify a single " @@ -2107,7 +2090,7 @@ def cell(self, index): bound = None if self.has_bounds(): - bound = tuple(np.array(self.bounds[index], ndmin=1).flatten()) + bound = tuple(np.array(self.core_bounds()[index], ndmin=1).flatten()) if self.units.is_time_reference(): point = self.units.num2date(point) @@ -2117,12 +2100,10 @@ def cell(self, index): return Cell(point, bound) def collapsed(self, dims_to_collapse=None): - """Returns a copy of this coordinate, which has been collapsed along - the specified dimensions. + """Return a copy of this coordinate, which has been collapsed along the specified dimensions. Replaces the points & bounds with a simple bounded region. """ - # Ensure dims_to_collapse is a tuple to be able to pass # through to numpy if isinstance(dims_to_collapse, (int, np.integer)): @@ -2158,7 +2139,7 @@ def serialize(x): ) warnings.warn( msg.format(self.name()), - category=iris.exceptions.IrisVagueMetadataWarning, + category=iris.warnings.IrisVagueMetadataWarning, ) else: try: @@ -2171,7 +2152,7 @@ def serialize(x): ) warnings.warn( msg.format(str(exc), self.name()), - category=iris.exceptions.IrisVagueMetadataWarning, + category=iris.warnings.IrisVagueMetadataWarning, ) self.bounds = None else: @@ -2182,7 +2163,7 @@ def serialize(x): ) warnings.warn( msg.format(self.name()), - category=iris.exceptions.IrisVagueMetadataWarning, + category=iris.warnings.IrisVagueMetadataWarning, ) if self.has_bounds(): @@ -2216,15 +2197,18 @@ def serialize(x): def _guess_bounds(self, bound_position=0.5): """Return bounds for this coordinate based on its points. 
- Kwargs: - - * bound_position: + Parameters + ---------- + bound_position : float, default=0.5 The desired position of the bounds relative to the position of the points. - Returns: - A numpy array of shape (len(self.points), 2). + Returns + ------- + A numpy array of shape (len(self.points), 2). + Notes + ----- .. note:: This method only works for coordinates with ``coord.ndim == 1``. @@ -2285,12 +2269,14 @@ def guess_bounds(self, bound_position=0.5): With irregular points, the first and last cells are given the same widths as the ones next to them. - Kwargs: - - * bound_position: + Parameters + ---------- + bound_position : float, default=0.5 The desired position of the bounds relative to the position of the points. + Notes + ----- .. note:: An error is raised if the coordinate already has bounds, is not @@ -2306,14 +2292,14 @@ def guess_bounds(self, bound_position=0.5): self.bounds = self._guess_bounds(bound_position) def intersect(self, other, return_indices=False): - """Returns a new coordinate from the intersection of two coordinates. + """Return a new coordinate from the intersection of two coordinates. Both coordinates must be compatible as defined by :meth:`~iris.coords.Coord.is_compatible`. - Kwargs: - - * return_indices: + Parameters + ---------- + return_indices : bool, default=False If True, changes the return behaviour to return the intersection indices for the "self" coordinate. @@ -2351,7 +2337,7 @@ def intersect(self, other, return_indices=False): return self[self_intersect_indices] def nearest_neighbour_index(self, point): - """Returns the index of the cell nearest to the given point. + """Return the index of the cell nearest to the given point. Only works for one-dimensional coordinates. @@ -2446,15 +2432,16 @@ def nearest_neighbour_index(self, point): return result_index def xml_element(self, doc): - """Create the :class:`xml.dom.minidom.Element` that describes this - :class:`Coord`. 
- - Args: + """Create the :class:`xml.dom.minidom.Element` that describes this :class:`Coord`. - * doc: + Parameters + ---------- + doc : The parent :class:`xml.dom.minidom.Document`. - Returns: + Returns + ------- + :class:`xml.dom.minidom.Element` The :class:`xml.dom.minidom.Element` that will describe this :class:`DimCoord`. @@ -2480,7 +2467,9 @@ def _xml_id_extra(self, unique_value): class DimCoord(Coord): - """A coordinate that is 1D, and numeric, with values that have a strict monotonic ordering. Missing values are not + """A coordinate that is 1D, and numeric. + + With values that have a strict monotonic ordering. Missing values are not permitted in a :class:`DimCoord`. """ @@ -2501,24 +2490,20 @@ def from_regular( climatological=False, with_bounds=False, ): - """Create a :class:`DimCoord` with regularly spaced points, and - optionally bounds. + """Create a :class:`DimCoord` with regularly spaced points, and optionally bounds. The majority of the arguments are defined as for :class:`Coord`, but those which differ are defined below. - Args: - - * zeroth: + Parameters + ---------- + zeroth : The value *prior* to the first point value. - * step: + step : The numeric difference between successive point values. - * count: + count : The number of point values. - - Kwargs: - - * with_bounds: + with_bounds : bool, default=False If True, the resulting DimCoord will possess bound values which are equally spaced around the points. Otherwise no bounds values will be defined. Defaults to False. @@ -2566,25 +2551,22 @@ def __init__( Missing values are not permitted. - Args: - - * points: + Parameters + ---------- + points : 1D numpy array-like of values (or single value in the case of a scalar coordinate) for each cell of the coordinate. The values must be strictly monotonic and masked values are not allowed. - - Kwargs: - - * standard_name: + standard_name : optional CF standard name of the coordinate. 
- * long_name: + long_name : optional Descriptive name of the coordinate. - * var_name: + var_name : optional The netCDF variable name for the coordinate. - * units: + units : :class:`~cf_units.Unit`, optional The :class:`~cf_units.Unit` of the coordinate's values. Can be a string, which will be converted to a Unit object. - * bounds: + bounds : optional An array of values describing the bounds of each cell. Given n bounds and m cells, the shape of the bounds array should be (m, n). For each bound, the values must be strictly monotonic along @@ -2595,16 +2577,16 @@ def __init__( in the same direction. Masked values are not allowed. Note if the data is a climatology, `climatological` should be set. - * attributes: + attributes : optional A dictionary containing other CF and user-defined attributes. - * coord_system: + coord_system : :class:`~iris.coord_systems.CoordSystem`, optional A :class:`~iris.coord_systems.CoordSystem` representing the coordinate system of the coordinate, e.g., a :class:`~iris.coord_systems.GeogCS` for a longitude coordinate. - * circular (bool): + circular : bool, default=False Whether the coordinate wraps by the :attr:`~iris.coords.DimCoord.units.modulus` i.e., the longitude coordinate wraps around the full great circle. - * climatological (bool): + climatological : bool, default=False When True: the coordinate is a NetCDF climatological time axis. When True: saving in NetCDF will give the coordinate variable a 'climatology' attribute and will create a boundary variable called @@ -2698,12 +2680,15 @@ def collapsed(self, dims_to_collapse=None): return coord def _new_points_requirements(self, points): - """Confirm that a new set of coord points adheres to the requirements for + """Confirm that a new set of coord points adheres to the requirements. 
+ + Confirm that a new set of coord points adheres to the requirements for :class:`~iris.coords.DimCoord` points, being: - * points are scalar or 1D, - * points are numeric, - * points are not masked, and - * points are monotonic. + + * points are scalar or 1D, + * points are numeric, + * points are not masked, and + * points are monotonic. """ if points.ndim not in (0, 1): @@ -2744,12 +2729,15 @@ def _values(self, points): points.flags.writeable = False def _new_bounds_requirements(self, bounds): - """Confirm that a new set of coord bounds adheres to the requirements for + """Confirm that a new set of coord bounds adheres to the requirements. + + Confirm that a new set of coord bounds adheres to the requirements for :class:`~iris.coords.DimCoord` bounds, being: - * bounds are compatible in shape with the points - * bounds are numeric, - * bounds are not masked, and - * bounds are monotonic in the first dimension. + + * bounds are compatible in shape with the points + * bounds are numeric, + * bounds are not masked, and + * bounds are monotonic in the first dimension. Also reverse the order of the second dimension if necessary to match the first dimension's direction. I.e. both should increase or both should @@ -2837,15 +2825,16 @@ def is_monotonic(self): return True def xml_element(self, doc): - """Create the :class:`xml.dom.minidom.Element` that describes this - :class:`DimCoord`. + """Create the :class:`xml.dom.minidom.Element` that describes this :class:`DimCoord`. - Args: - - * doc: + Parameters + ---------- + doc : The parent :class:`xml.dom.minidom.Document`. - Returns: + Returns + ------- + :class:`xml.dom.minidom.Element` The :class:`xml.dom.minidom.Element` that describes this :class:`DimCoord`. @@ -2862,24 +2851,21 @@ class AuxCoord(Coord): def __init__(self, *args, **kwargs): """Create a coordinate with **mutable** points and bounds. 
- Args:
-
- * points:
+ Parameters
+ ----------
+ points :
The values (or value in the case of a scalar coordinate) for
each cell of the coordinate.
-
- Kwargs:
-
- * standard_name:
+ standard_name : optional
CF standard name of the coordinate.
- * long_name:
+ long_name : optional
Descriptive name of the coordinate.
- * var_name:
+ var_name : optional
The netCDF variable name for the coordinate.
- * units
+ units : :class:`~cf_units.Unit`, optional
The :class:`~cf_units.Unit` of the coordinate's values.
Can be a string, which will be converted to a Unit object.
- * bounds
+ bounds : optional
An array of values describing the bounds of each cell. Given n
bounds for each cell, the shape of the bounds array should be
points.shape + (n,). For example, a 1D coordinate with 100 points
@@ -2887,13 +2873,13 @@ def __init__(self, *args, **kwargs):
(100, 2)
Note if the data is a climatology, `climatological`
should be set.
- * attributes
+ attributes : optional
A dictionary containing other CF and user-defined attributes.
- * coord_system
+ coord_system : :class:`~iris.coord_systems.CoordSystem`, optional
A :class:`~iris.coord_systems.CoordSystem` representing the
coordinate system of the coordinate,
e.g., a :class:`~iris.coord_systems.GeogCS` for a longitude coordinate.
- * climatological (bool):
+ climatological : bool, optional
When True: the coordinate is a NetCDF climatological time axis.
When True: saving in NetCDF will give the coordinate variable a
'climatology' attribute and will create a boundary variable called
@@ -2936,22 +2922,17 @@ class CellMethod(iris.util._OrderedHashable):
def __init__(self, method, coords=None, intervals=None, comments=None):
"""Call Method initialise.
- Args:
-
- * method:
+ Parameters
+ ----------
+ method :
The name of the operation.
-
- Kwargs:
-
- * coords:
+ coords : :class:`.Coord` instances, optional
A single instance or sequence of
:class:`.Coord` instances or coordinate names.
- - * intervals: + intervals : optional A single string, or a sequence strings, describing the intervals within the cell method. - - * comments: + comments : optional A single string, or a sequence strings, containing any additional comments. @@ -3020,15 +3001,16 @@ def __add__(self, other): return NotImplemented def xml_element(self, doc): - """Create the :class:`xml.dom.minidom.Element` that describes this - :class:`CellMethod`. + """Create the :class:`xml.dom.minidom.Element` that describes this :class:`CellMethod`. - Args: - - * doc: + Parameters + ---------- + doc : The parent :class:`xml.dom.minidom.Document`. - Returns: + Returns + ------- + :class:`xml.dom.minidom.Element` The :class:`xml.dom.minidom.Element` that describes this :class:`CellMethod`. diff --git a/lib/iris/cube.py b/lib/iris/cube.py index 1199831b7b..e77646993e 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -3,9 +3,7 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Classes for representing multi-dimensional data with metadata. - -""" +"""Classes for representing multi-dimensional data with metadata.""" from collections import OrderedDict import copy @@ -47,6 +45,7 @@ import iris.coords import iris.exceptions import iris.util +import iris.warnings __all__ = ["Cube", "CubeAttrsDict", "CubeList"] @@ -68,21 +67,17 @@ def __len__(self): return len(self.cubes) def add(self, cube): - """Adds the appropriate (sub)cube to the list of cubes where it - matches the constraint. - - """ + """Add the appropriate (sub)cube to the list of cubes where it matches the constraint.""" sub_cube = self.constraint.extract(cube) if sub_cube is not None: self.cubes.append(sub_cube) def merged(self, unique=False): - """Returns a new :class:`_CubeFilter` by merging the list of - cubes. + """Return a new :class:`_CubeFilter` by merging the list of cubes. 
- Kwargs: - - * unique: + Parameters + ---------- + unique : bool, default=False If True, raises `iris.exceptions.DuplicateDataError` if duplicate cubes are detected. @@ -95,10 +90,7 @@ class _CubeFilterCollection: @staticmethod def from_cubes(cubes, constraints=None): - """Creates a new collection from an iterable of cubes, and some - optional constraints. - - """ + """Create a new collection from an iterable of cubes, and some optional constraints.""" constraints = iris._constraints.list_of_constraints(constraints) pairs = [_CubeFilter(constraint) for constraint in constraints] collection = _CubeFilterCollection(pairs) @@ -110,30 +102,23 @@ def __init__(self, pairs): self.pairs = pairs def add_cube(self, cube): - """Adds the given :class:`~iris.cube.Cube` to all of the relevant - constraint pairs. - - """ + """Add the given :class:`~iris.cube.Cube` to all of the relevant constraint pairs.""" for pair in self.pairs: pair.add(cube) def cubes(self): - """Returns all the cubes in this collection concatenated into a - single :class:`CubeList`. - - """ + """Return all the cubes in this collection concatenated into a single :class:`CubeList`.""" result = CubeList() for pair in self.pairs: result.extend(pair.cubes) return result def merged(self, unique=False): - """Returns a new :class:`_CubeFilterCollection` by merging all the cube - lists of this collection. - - Kwargs: + """Return a new :class:`_CubeFilterCollection` by merging all the cube lists of this collection. - * unique: + Parameters + ---------- + unique : bool, default=False If True, raises `iris.exceptions.DuplicateDataError` if duplicate cubes are detected. @@ -142,10 +127,7 @@ def merged(self, unique=False): class CubeList(list): - """All the functionality of a standard :class:`list` with added "Cube" - context. 
- - """ + """All the functionality of a standard :class:`list` with added "Cube" context.""" def __init__(self, *args, **kwargs): """Given an iterable of cubes, return a CubeList instance.""" @@ -156,7 +138,7 @@ def __init__(self, *args, **kwargs): self._assert_is_cube(cube) def __str__(self): - """Runs short :meth:`Cube.summary` on every cube.""" + """Run short :meth:`Cube.summary` on every cube.""" result = [ "%s: %s" % (i, cube.summary(shorten=True)) for i, cube in enumerate(self) ] @@ -167,7 +149,7 @@ def __str__(self): return result def __repr__(self): - """Runs repr on every cube.""" + """Run repr on every cube.""" return "[%s]" % ",\n".join([repr(cube) for cube in self]) @staticmethod @@ -227,10 +209,11 @@ def append(self, cube): def extend(self, other_cubes): """Extend cubelist by appending the cubes contained in other_cubes. - Args: - - * other_cubes: + Parameters + ---------- + other_cubes : A cubelist or other sequence of cubes. + """ super(CubeList, self).extend(CubeList(other_cubes)) @@ -241,7 +224,6 @@ def insert(self, index, cube): def xml(self, checksum=False, order=True, byteorder=True): """Return a string of the XML that this list of cubes represents.""" - doc = Document() cubes_xml_element = doc.createElement("cubes") cubes_xml_element.setAttribute("xmlns", XML_NAMESPACE_URI) @@ -260,17 +242,16 @@ def xml(self, checksum=False, order=True, byteorder=True): return doc.toprettyxml(indent=" ") def extract(self, constraints): - """Filter each of the cubes which can be filtered by the given - constraints. + """Filter each of the cubes which can be filtered by the given constraints. This method iterates over each constraint given, and subsets each of the cubes in this CubeList where possible. Thus, a CubeList of length **n** when filtered with **m** constraints can generate a maximum of **m * n** cubes. 
- Args: - - * constraints (:class:`~iris.Constraint` or iterable of constraints): + Parameters + ---------- + constraints : :class:`~iris.Constraint` or iterable of constraints A single constraint or an iterable. """ @@ -278,15 +259,18 @@ def extract(self, constraints): def extract_cube(self, constraint): """Extract a single cube from a CubeList, and return it. - Raise an error if the extract produces no cubes, or more than one. - Args: + Extract a single cube from a CubeList, and return it. + Raise an error if the extract produces no cubes, or more than one. - * constraint (:class:`~iris.Constraint`): + Parameters + ---------- + constraint : :class:`~iris.Constraint` The constraint to extract with. - .. see also:: - :meth:`~iris.cube.CubeList.extract` + See Also + -------- + :meth:`~iris.cube.CubeList.extract` """ # Just validate this, so we can accept strings etc, but not multiples. @@ -297,16 +281,19 @@ def extract_cube(self, constraint): def extract_cubes(self, constraints): """Extract specific cubes from a CubeList, one for each given constraint. + + Extract specific cubes from a CubeList, one for each given constraint. Each constraint must produce exactly one cube, otherwise an error is raised. - Args: - - * constraints (iterable of, or single, :class:`~iris.Constraint`): + Parameters + ---------- + constraints : iter of, or single, :class:`~iris.Constraint` The constraints to extract with. - .. see also:: - :meth:`~iris.cube.CubeList.extract` + See Also + -------- + :meth:`~iris.cube.CubeList.extract` """ return self._extract_and_merge( @@ -351,13 +338,15 @@ def _extract_and_merge(cubes, constraints, strict=False, return_single_cube=Fals return result def extract_overlapping(self, coord_names): - """Returns a :class:`CubeList` of cubes extracted over regions + """Return a :class:`CubeList` of cubes extracted over regions. + + Return a :class:`CubeList` of cubes extracted over regions where the coordinates overlap, for the coordinates in coord_names. 
- Args: - - * coord_names: + Parameters + ---------- + coord_names : str or list of str A string or list of strings of the names of the coordinates over which to perform the extraction. @@ -378,8 +367,7 @@ def overlap_fn(cell): return self.extract(iris.Constraint(coord_values=coord_values)) def merge_cube(self): - """Return the merged contents of the :class:`CubeList` as a single - :class:`Cube`. + """Return the merged contents of the :class:`CubeList` as a single :class:`Cube`. If it is not possible to merge the `CubeList` into a single `Cube`, a :class:`~iris.exceptions.MergeError` will be raised @@ -414,15 +402,16 @@ def merge_cube(self): return merged_cube def merge(self, unique=True): - """Returns the :class:`CubeList` resulting from merging this - :class:`CubeList`. + """Return the :class:`CubeList` resulting from merging this :class:`CubeList`. - Kwargs: - - * unique: + Parameters + ---------- + unique : bool, default=True If True, raises `iris.exceptions.DuplicateDataError` if duplicate cubes are detected. + Examples + -------- This combines cubes with different values of an auxiliary scalar coordinate, by constructing a new dimension. @@ -436,7 +425,7 @@ def merge(self, unique=True): c2 = c1.copy() c2.coord('y_vals').points = [200] - For example:: + :: >>> print(c1) some_parameter / (unknown) (x_vals: 3) @@ -515,32 +504,28 @@ def concatenate_cube( check_ancils=True, check_derived_coords=True, ): - """Return the concatenated contents of the :class:`CubeList` as a single - :class:`Cube`. + """Return the concatenated contents of the :class:`CubeList` as a single :class:`Cube`. If it is not possible to concatenate the `CubeList` into a single `Cube`, a :class:`~iris.exceptions.ConcatenateError` will be raised describing the reason for the failure. - Kwargs: - - * check_aux_coords + Parameters + ---------- + check_aux_coords : bool, default=True Checks if the points and bounds of auxiliary coordinates of the cubes match. 
This check is not applied to auxiliary coordinates that span the dimension the concatenation is occurring along. Defaults to True. - - * check_cell_measures + check_cell_measures : bool, default=True Checks if the data of cell measures of the cubes match. This check is not applied to cell measures that span the dimension the concatenation is occurring along. Defaults to True. - - * check_ancils + check_ancils : bool, default=True Checks if the data of ancillary variables of the cubes match. This check is not applied to ancillary variables that span the dimension the concatenation is occurring along. Defaults to True. - - * check_derived_coords + check_derived_coords : bool, default=True Checks if the points and bounds of derived coordinates of the cubes match. This check is not applied to derived coordinates that span the dimension the concatenation is occurring along. Note that @@ -549,6 +534,8 @@ def concatenate_cube( coordinates used to derive the coordinates can be ignored with `check_aux_coords`. Defaults to True. + Notes + ----- .. note:: Concatenation cannot occur along an anonymous dimension. @@ -596,25 +583,22 @@ def concatenate( ): """Concatenate the cubes over their common dimensions. - Kwargs: - - * check_aux_coords + Parameters + ---------- + check_aux_coords : bool, default=True Checks if the points and bounds of auxiliary coordinates of the cubes match. This check is not applied to auxiliary coordinates that span the dimension the concatenation is occurring along. Defaults to True. - - * check_cell_measures + check_cell_measures : bool, default=True Checks if the data of cell measures of the cubes match. This check is not applied to cell measures that span the dimension the concatenation is occurring along. Defaults to True. - - * check_ancils + check_ancils : bool, default=True Checks if the data of ancillary variables of the cubes match. This check is not applied to ancillary variables that span the dimension the concatenation is occurring along. 
Defaults to True. - - * check_derived_coords + check_derived_coords : bool, default=True Checks if the points and bounds of derived coordinates of the cubes match. This check is not applied to derived coordinates that span the dimension the concatenation is occurring along. Note that @@ -623,10 +607,14 @@ def concatenate( coordinates used to derive the coordinates can be ignored with `check_aux_coords`. Defaults to True. - Returns: + Returns + ------- + :class:`iris.cube.CubeList` A new :class:`iris.cube.CubeList` of concatenated :class:`iris.cube.Cube` instances. + Notes + ----- This combines cubes with a common dimension coordinate, but occupying different regions of the coordinate value. The cubes are joined across that dimension. @@ -752,12 +740,14 @@ def _is_single_item(testee): class CubeAttrsDict(MutableMapping): - r"""A :class:`dict`\\-like object for :attr:`iris.cube.Cube.attributes`, + """A :class:`dict`-like object for :attr:`iris.cube.Cube.attributes`. + + A :class:`dict`-like object for :attr:`iris.cube.Cube.attributes`, providing unified user access to combined cube "local" and "global" attributes dictionaries, with the access behaviour of an ordinary (single) dictionary. Properties :attr:`globals` and :attr:`locals` are regular - :class:`~iris.common.mixin.LimitedAttributeDict`\\s, which can be accessed and + :class:`~iris.common.mixin.LimitedAttributeDict`, which can be accessed and modified separately. The :class:`CubeAttrsDict` itself contains *no* additional state, but simply provides a 'combined' view of both global + local attributes. @@ -1126,18 +1116,21 @@ class Cube(CFVariableMixin): @classmethod def _sort_xml_attrs(cls, doc): - """Takes an xml document and returns a copy with all element + """Return a copy with all element attributes sorted in alphabetical order. + + Take an xml document and returns a copy with all element attributes sorted in alphabetical order. 
This is a private utility method required by iris to maintain legacy xml behaviour beyond python 3.7. - Args: - - * doc: - The :class:`xml.dom.minidom.Document`. + Parameters + ---------- + doc : :class:`xml.dom.minidom.Document`. - Returns: + Returns + ------- + :class:`xml.dom.minidom.Document` The :class:`xml.dom.minidom.Document` with sorted element attributes. @@ -1146,7 +1139,6 @@ def _sort_xml_attrs(cls, doc): def _walk_nodes(node): """Note: _walk_nodes is called recursively on child elements.""" - # we don't want to copy the children here, so take a shallow copy new_node = node.cloneNode(deep=False) @@ -1191,14 +1183,14 @@ def __init__( cell_measures_and_dims=None, ancillary_variables_and_dims=None, ): - """Creates a cube with data and optional metadata. + """Create a cube with data and optional metadata. Not typically used - normally cubes are obtained by loading data (e.g. :func:`iris.load`) or from manipulating existing cubes. - Args: - - * data + Parameters + ---------- + data : This object defines the shape of the cube and the phenomenon value in each cell. @@ -1208,39 +1200,39 @@ def __init__( array_like (as described in :func:`numpy.asarray`). See :attr:`Cube.data`. - - Kwargs: - - * standard_name + standard_name : optional The standard name for the Cube's data. - * long_name + long_name : optional An unconstrained description of the cube. - * var_name + var_name : optional The NetCDF variable name for the cube. - * units + units : optional The unit of the cube, e.g. ``"m s-1"`` or ``"kelvin"``. - * attributes + attributes : optional A dictionary of cube attributes - * cell_methods + cell_methods : optional A tuple of CellMethod objects, generally set by Iris, e.g. ``(CellMethod("mean", coords='latitude'), )``. - * dim_coords_and_dims + dim_coords_and_dims : optional A list of coordinates with scalar dimension mappings, e.g ``[(lat_coord, 0), (lon_coord, 1)]``. 
- * aux_coords_and_dims + aux_coords_and_dims : optional A list of coordinates with dimension mappings, e.g ``[(lat_coord, 0), (lon_coord, (0, 1))]``. See also :meth:`Cube.add_dim_coord()` and :meth:`Cube.add_aux_coord()`. - * aux_factories + aux_factories : optional A list of auxiliary coordinate factories. See :mod:`iris.aux_factory`. - * cell_measures_and_dims + cell_measures_and_dims : optional A list of CellMeasures with dimension mappings. - * ancillary_variables_and_dims + ancillary_variables_and_dims : optional A list of AncillaryVariables with dimension mappings. - For example:: + Examples + -------- + :: + >>> from iris.coords import DimCoord >>> from iris.cube import Cube >>> latitude = DimCoord(np.linspace(-90, 90, 4), @@ -1328,7 +1320,9 @@ def __init__( @property def _names(self): - """A tuple containing the value of each name participating in the identity + """Tuple containing the value of each name participating in the identity of a :class:`iris.cube.Cube`. + + A tuple containing the value of each name participating in the identity of a :class:`iris.cube.Cube`. This includes the standard name, long name, NetCDF variable name, and the STASH from the attributes dictionary. @@ -1345,14 +1339,18 @@ def attributes(self) -> CubeAttrsDict: @attributes.setter def attributes(self, attributes: Optional[Mapping]): - """An override to CfVariableMixin.attributes.setter, which ensures that Cube + """Override to CfVariableMixin.attributes.setter. + + An override to CfVariableMixin.attributes.setter, which ensures that Cube attributes are stored in a way which distinguishes global + local ones. """ self._metadata_manager.attributes = CubeAttrsDict(attributes or {}) def _dimensional_metadata(self, name_or_dimensional_metadata): - """Return a single _DimensionalMetadata instance that matches the given + """Return a single _DimensionalMetadata instance that matches. + + Return a single _DimensionalMetadata instance that matches the given name_or_dimensional_metadata. 
If one is not found, raise an error. """ @@ -1379,19 +1377,22 @@ def is_compatible(self, other, ignore=None): :attr:`iris.cube.Cube.units`, :attr:`iris.cube.Cube.cell_methods` and :attr:`iris.cube.Cube.attributes` that are present in both objects. - Args: - - * other: + Parameters + ---------- + other : An instance of :class:`iris.cube.Cube` or :class:`iris.cube.CubeMetadata`. - * ignore: + ignore : optional A single attribute key or iterable of attribute keys to ignore when comparing the cubes. Default is None. To ignore all attributes set this to other.attributes. - Returns: - Boolean. + Returns + ------- + bool + Notes + ----- .. seealso:: :meth:`iris.util.describe_diff()` @@ -1465,24 +1466,26 @@ def add_cell_method(self, cell_method): self.cell_methods += (cell_method,) def add_aux_coord(self, coord, data_dims=None): - """Adds a CF auxiliary coordinate to the cube. + """Add a CF auxiliary coordinate to the cube. - Args: - - * coord + Parameters + ---------- + coord : The :class:`iris.coords.DimCoord` or :class:`iris.coords.AuxCoord` instance to add to the cube. - - Kwargs: - - * data_dims + data_dims : optional Integer or iterable of integers giving the data dimensions spanned by the coordinate. - Raises a ValueError if a coordinate with identical metadata already - exists on the cube. + Raises + ------ + ValueError + Raises a ValueError if a coordinate with identical metadata already + exists on the cube. - See also :meth:`Cube.remove_coord()`. + See Also + -------- + :meth:`Cube.remove_coord()`. """ if self.coords(coord): # TODO: just fail on duplicate object @@ -1572,11 +1575,11 @@ def _add_unique_aux_coord(self, coord, data_dims): self._aux_coords_and_dims.append((coord, data_dims)) def add_aux_factory(self, aux_factory): - """Adds an auxiliary coordinate factory to the cube. + """Add an auxiliary coordinate factory to the cube. - Args: - - * aux_factory + Parameters + ---------- + aux_factory : The :class:`iris.aux_factory.AuxCoordFactory` instance to add. 
""" @@ -1604,24 +1607,25 @@ def coordsonly(coords_and_dims): self._aux_factories.append(aux_factory) def add_cell_measure(self, cell_measure, data_dims=None): - """Adds a CF cell measure to the cube. - - Args: + """Add a CF cell measure to the cube. - * cell_measure + Parameters + ---------- + cell_measure : The :class:`iris.coords.CellMeasure` instance to add to the cube. - - Kwargs: - - * data_dims + data_dims : optional Integer or iterable of integers giving the data dimensions spanned by the coordinate. - Raises a ValueError if a cell_measure with identical metadata already - exists on the cube. + Raises + ------ + ValueError + Raises a ValueError if a cell_measure with identical metadata already + exists on the cube. - See also + See Also + -------- :meth:`Cube.remove_cell_measure()`. """ @@ -1636,24 +1640,23 @@ def add_cell_measure(self, cell_measure, data_dims=None): ) def add_ancillary_variable(self, ancillary_variable, data_dims=None): - """Adds a CF ancillary variable to the cube. - - Args: + """Add a CF ancillary variable to the cube. - * ancillary_variable + Parameters + ---------- + ancillary_variable : The :class:`iris.coords.AncillaryVariable` instance to be added to the cube - - Kwargs: - - * data_dims + data_dims : optional Integer or iterable of integers giving the data dimensions spanned by the ancillary variable. - Raises a ValueError if an ancillary variable with identical metadata - already exists on the cube. + Raises + ------ + ValueError + Raises a ValueError if an ancillary variable with identical metadata + already exists on the cube. """ - if self.ancillary_variables(ancillary_variable): raise iris.exceptions.CannotAddError( "Duplicate ancillary variables not permitted" @@ -1668,18 +1671,23 @@ def add_ancillary_variable(self, ancillary_variable, data_dims=None): def add_dim_coord(self, dim_coord, data_dim): """Add a CF coordinate to the cube. 
- Args: - - * dim_coord + Parameters + ---------- + dim_coord : :class:`iris.coords.DimCoord` The :class:`iris.coords.DimCoord` instance to add to the cube. - * data_dim + data_dim : Integer giving the data dimension spanned by the coordinate. - Raises a ValueError if a coordinate with identical metadata already - exists on the cube or if a coord already exists for the - given dimension. + Raises + ------ + ValueError + Raises a ValueError if a coordinate with identical metadata already + exists on the cube or if a coord already exists for the + given dimension. - See also :meth:`Cube.remove_coord()`. + See Also + -------- + :meth:`Cube.remove_coord()`. """ if self.coords(dim_coord): @@ -1731,7 +1739,7 @@ def _add_unique_dim_coord(self, dim_coord, data_dim): self._dim_coords_and_dims.append((dim_coord, int(data_dim))) def remove_aux_factory(self, aux_factory): - """Removes the given auxiliary coordinate factory from the cube.""" + """Remove the given auxiliary coordinate factory from the cube.""" self._aux_factories.remove(aux_factory) def _remove_coord(self, coord): @@ -1750,15 +1758,17 @@ def _remove_coord(self, coord): self.remove_aux_factory(aux_factory) def remove_coord(self, coord): - """Removes a coordinate from the cube. - - Args: + """Remove a coordinate from the cube. - * coord (string or coord) + Parameters + ---------- + coord : str or coord The (name of the) coordinate to remove from the cube. - See also :meth:`Cube.add_dim_coord()` - and :meth:`Cube.add_aux_coord()`. + See Also + -------- + :meth:`Cube.add_dim_coord()` + :meth:`Cube.add_aux_coord()` """ coord = self.coord(coord) @@ -1768,30 +1778,32 @@ def remove_coord(self, coord): factory.update(coord) def remove_cell_measure(self, cell_measure): - """Removes a cell measure from the cube. - - Args: + """Remove a cell measure from the cube. - * cell_measure (string or cell_measure) + Parameters + ---------- + cell_measure : str or cell_measure The (name of the) cell measure to remove from the cube. 
As either - (a) a :attr:`standard_name`, :attr:`long_name`, or - :attr:`var_name`. Defaults to value of `default` - (which itself defaults to `unknown`) as defined in - :class:`iris.common.CFVariableMixin`. + * (a) a :attr:`standard_name`, :attr:`long_name`, or + :attr:`var_name`. Defaults to value of `default` + (which itself defaults to `unknown`) as defined in + :class:`iris.common.CFVariableMixin`. - (b) a cell_measure instance with metadata equal to that of - the desired cell_measures. + * (b) a cell_measure instance with metadata equal to that of + the desired cell_measures. + Notes + ----- .. note:: If the argument given does not represent a valid cell_measure on the cube, an :class:`iris.exceptions.CellMeasureNotFoundError` is raised. - .. seealso:: - - :meth:`Cube.add_cell_measure()` + See Also + -------- + :meth:`Cube.add_cell_measure()` """ cell_measure = self.cell_measure(cell_measure) @@ -1803,11 +1815,11 @@ def remove_cell_measure(self, cell_measure): ] def remove_ancillary_variable(self, ancillary_variable): - """Removes an ancillary variable from the cube. - - Args: + """Remove an ancillary variable from the cube. - * ancillary_variable (string or AncillaryVariable) + Parameters + ---------- + ancillary_variable : str or AncillaryVariable The (name of the) AncillaryVariable to remove from the cube. """ @@ -1834,17 +1846,16 @@ def replace_coord(self, new_coord): factory.update(old_coord, new_coord) def coord_dims(self, coord): - """Returns a tuple of the data dimensions relevant to the given - coordinate. + """Return a tuple of the data dimensions relevant to the given coordinate. When searching for the given coordinate in the cube the comparison is made using coordinate metadata equality. Hence the given coordinate instance need not exist on the cube, and may contain different coordinate values. - Args: - - * coord (string or coord) + Parameters + ---------- + coord : str or coord The (name of the) coord to look for. 
""" @@ -1895,10 +1906,11 @@ def matcher(factory): return match def cell_measure_dims(self, cell_measure): - """Returns a tuple of the data dimensions relevant to the given - CellMeasure. + """Return a tuple of the data dimensions relevant to the given CellMeasure. - * cell_measure (string or CellMeasure) + Parameters + ---------- + cell_measure : str or CellMeasure The (name of the) cell measure to look for. """ @@ -1916,10 +1928,11 @@ def cell_measure_dims(self, cell_measure): return matches[0] def ancillary_variable_dims(self, ancillary_variable): - """Returns a tuple of the data dimensions relevant to the given - AncillaryVariable. + """Return a tuple of the data dimensions relevant to the given AncillaryVariable. - * ancillary_variable (string or AncillaryVariable) + Parameters + ---------- + ancillary_variable : str or AncillaryVariable The (name of the) AncillaryVariable to look for. """ @@ -1941,23 +1954,27 @@ def ancillary_variable_dims(self, ancillary_variable): return matches[0] def aux_factory(self, name=None, standard_name=None, long_name=None, var_name=None): - """Returns the single coordinate factory that matches the criteria, - or raises an error if not found. + """Return the single coordinate factory that matches the criteria. - Kwargs: + Return the single coordinate factory that matches the criteria, + or raises an error if not found. - * name + Parameters + ---------- + name : optional If not None, matches against factory.name(). - * standard_name + standard_name : optional The CF standard name of the desired coordinate factory. If None, does not check for standard name. - * long_name + long_name : optional An unconstrained description of the coordinate factory. If None, does not check for long_name. - * var_name + var_name : optional The NetCDF variable name of the desired coordinate factory. If None, does not check for var_name. + Notes + ----- .. 
note:: If the arguments given do not result in precisely 1 coordinate @@ -2016,16 +2033,11 @@ def coords( dim_coords=None, mesh_coords=None, ): - r"""Return a list of coordinates from the :class:`Cube` that match the - provided criteria. - - .. seealso:: - - :meth:`Cube.coord` for matching exactly one coordinate. - - Kwargs: + r"""Return a list of coordinates from the :class:`Cube` that match the provided criteria. - * name_or_coord: + Parameters + ---------- + name_or_coord : optional Either, * a :attr:`~iris.common.mixin.CFVariableMixin.standard_name`, @@ -2036,57 +2048,52 @@ def coords( * a coordinate or metadata instance equal to that of the desired coordinate e.g., :class:`~iris.coords.DimCoord` or :class:`~iris.common.metadata.CoordMetadata`. - - * standard_name: + standard_name : optional The CF standard name of the desired coordinate. If ``None``, does not check for ``standard name``. - - * long_name: + long_name : optional An unconstrained description of the coordinate. If ``None``, does not check for ``long_name``. - - * var_name: + var_name : optional The NetCDF variable name of the desired coordinate. If ``None``, does not check for ``var_name``. - - * attributes: + attributes : optional A dictionary of attributes desired on the coordinates. If ``None``, does not check for ``attributes``. - - * axis: + axis : optional The desired coordinate axis, see :func:`iris.util.guess_coord_axis`. If ``None``, does not check for ``axis``. Accepts the values ``X``, ``Y``, ``Z`` and ``T`` (case-insensitive). - - * contains_dimension: + contains_dimension : optional The desired coordinate contains the data dimension. If ``None``, does not check for the dimension. - - * dimensions: + dimensions : optional The exact data dimensions of the desired coordinate. Coordinates with no data dimension can be found with an empty ``tuple`` or ``list`` i.e., ``()`` or ``[]``. If ``None``, does not check for dimensions. 
- - * coord_system: + coord_system : optional Whether the desired coordinates have a coordinate system equal to the given coordinate system. If ``None``, no check is done. - - * dim_coords: + dim_coords : optional Set to ``True`` to only return coordinates that are the cube's dimension coordinates. Set to ``False`` to only return coordinates that are the cube's auxiliary, mesh and derived coordinates. If ``None``, returns all coordinates. - - * mesh_coords: + mesh_coords : optional Set to ``True`` to return only coordinates which are - :class:`~iris.experimental.ugrid.MeshCoord`\\ s. + :class:`~iris.experimental.ugrid.MeshCoord`\'s. Set to ``False`` to return only non-mesh coordinates. If ``None``, returns all coordinates. - Returns: - A list containing zero or more coordinates matching the provided - criteria. + Returns + ------- + A list containing zero or more coordinates matching the provided criteria. + + See Also + -------- + :meth:`Cube.coord` for matching exactly one coordinate. + """ coords_and_factories = [] @@ -2182,21 +2189,11 @@ def coord( dim_coords=None, mesh_coords=None, ): - r"""Return a single coordinate from the :class:`Cube` that matches the - provided criteria. - - .. note:: - - If the arguments given do not result in **precisely one** coordinate, - then a :class:`~iris.exceptions.CoordinateNotFoundError` is raised. - - .. seealso:: - - :meth:`Cube.coords` for matching zero or more coordinates. + r"""Return a single coordinate from the :class:`Cube` that matches the provided criteria. - Kwargs: - - * name_or_coord: + Parameters + ---------- + name_or_coord : optional Either, * a :attr:`~iris.common.mixin.CFVariableMixin.standard_name`, @@ -2207,57 +2204,58 @@ def coord( * a coordinate or metadata instance equal to that of the desired coordinate e.g., :class:`~iris.coords.DimCoord` or :class:`~iris.common.metadata.CoordMetadata`. - - * standard_name: + standard_name : optional The CF standard name of the desired coordinate. 
If ``None``, does not check for ``standard name``. - - * long_name: + long_name : optional An unconstrained description of the coordinate. If ``None``, does not check for ``long_name``. - - * var_name: + var_name : optional The NetCDF variable name of the desired coordinate. If ``None``, does not check for ``var_name``. - - * attributes: + attributes : optional A dictionary of attributes desired on the coordinates. If ``None``, does not check for ``attributes``. - - * axis: + axis : optional The desired coordinate axis, see :func:`iris.util.guess_coord_axis`. If ``None``, does not check for ``axis``. Accepts the values ``X``, ``Y``, ``Z`` and ``T`` (case-insensitive). - - * contains_dimension: + contains_dimension : optional The desired coordinate contains the data dimension. If ``None``, does not check for the dimension. - - * dimensions: + dimensions : optional The exact data dimensions of the desired coordinate. Coordinates with no data dimension can be found with an empty ``tuple`` or ``list`` i.e., ``()`` or ``[]``. If ``None``, does not check for dimensions. - - * coord_system: + coord_system : optional Whether the desired coordinates have a coordinate system equal to the given coordinate system. If ``None``, no check is done. - - * dim_coords: + dim_coords : optional Set to ``True`` to only return coordinates that are the cube's dimension coordinates. Set to ``False`` to only return coordinates that are the cube's auxiliary, mesh and derived coordinates. If ``None``, returns all coordinates. - - * mesh_coords: + mesh_coords : optional Set to ``True`` to return only coordinates which are - :class:`~iris.experimental.ugrid.MeshCoord`\\ s. + :class:`~iris.experimental.ugrid.MeshCoord`\'s. Set to ``False`` to return only non-mesh coordinates. If ``None``, returns all coordinates. - Returns: - The coordinate that matches the provided criteria. + Returns + ------- + The coordinate that matches the provided criteria. + + Notes + ----- + .. 
note:: + If the arguments given do not result in **precisely one** coordinate, + then a :class:`~iris.exceptions.CoordinateNotFoundError` is raised. + + See Also + -------- + :meth:`Cube.coords` for matching zero or more coordinates. """ coords = self.coords( name_or_coord=name_or_coord, @@ -2304,9 +2302,9 @@ def coord_system(self, spec=None): If no target coordinate system is provided then find any available coordinate system. - Kwargs: - - * spec: + Parameters + ---------- + spec : optional The the name or type of a coordinate system subclass. E.g. :: @@ -2319,8 +2317,9 @@ def coord_system(self, spec=None): If spec is None, then find any available coordinate systems within the :class:`iris.cube.Cube`. - Returns: - The :class:`iris.coord_systems.CoordSystem` or None. + Returns + ------- + :class:`iris.coord_systems.CoordSystem` or None. """ if isinstance(spec, str) or spec is None: @@ -2357,9 +2356,11 @@ def _any_meshcoord(self): @property def mesh(self): - r"""Return the unstructured :class:`~iris.experimental.ugrid.Mesh` + r"""Return the unstructured :class:`~iris.experimental.ugrid.Mesh` associated with the cube. + + Return the unstructured :class:`~iris.experimental.ugrid.Mesh` associated with the cube, if the cube has any - :class:`~iris.experimental.ugrid.MeshCoord`\\ s, + :class:`~iris.experimental.ugrid.MeshCoord`, or ``None`` if it has none. Returns @@ -2377,15 +2378,17 @@ def mesh(self): @property def location(self): - r"""Return the mesh "location" of the cube data, if the cube has any - :class:`~iris.experimental.ugrid.MeshCoord`\\ s, + r"""Return the mesh "location" of the cube data. + + Return the mesh "location" of the cube data, if the cube has any + :class:`~iris.experimental.ugrid.MeshCoord`, or ``None`` if it has none. Returns ------- location : str or None The mesh location of the cube - :class:`~iris.experimental.ugrid.MeshCoord`'s + :class:`~iris.experimental.ugrid.MeshCoords` (i.e. one of 'face' / 'edge' / 'node'), or ``None``. 
""" @@ -2395,15 +2398,17 @@ def location(self): return result def mesh_dim(self): - r"""Return the cube dimension of the mesh, if the cube has any - :class:`~iris.experimental.ugrid.MeshCoord`\\ s, + r"""Return the cube dimension of the mesh. + + Return the cube dimension of the mesh, if the cube has any + :class:`~iris.experimental.ugrid.MeshCoord`, or ``None`` if it has none. Returns ------- mesh_dim : int or None The cube dimension which the cube - :class:`~iris.experimental.ugrid.MeshCoord`\\s map to, + :class:`~iris.experimental.ugrid.MeshCoord` map to, or ``None``. """ @@ -2415,20 +2420,22 @@ def mesh_dim(self): def cell_measures(self, name_or_cell_measure=None): """Return a list of cell measures in this cube fitting the given criteria. - Kwargs: - - * name_or_cell_measure + Parameters + ---------- + name_or_cell_measure : optional Either - (a) a :attr:`standard_name`, :attr:`long_name`, or - :attr:`var_name`. Defaults to value of `default` - (which itself defaults to `unknown`) as defined in - :class:`iris.common.CFVariableMixin`. + * (a) a :attr:`standard_name`, :attr:`long_name`, or + :attr:`var_name`. Defaults to value of `default` + (which itself defaults to `unknown`) as defined in + :class:`iris.common.CFVariableMixin`. - (b) a cell_measure instance with metadata equal to that of - the desired cell_measures. + * (b) a cell_measure instance with metadata equal to that of + the desired cell_measures. - See also :meth:`Cube.cell_measure()`. + See Also + -------- + :meth:`Cube.cell_measure()`. """ name = None @@ -2450,19 +2457,20 @@ def cell_measures(self, name_or_cell_measure=None): return cell_measures def cell_measure(self, name_or_cell_measure=None): - """Return a single cell_measure given the same arguments as - :meth:`Cube.cell_measures`. + """Return a single cell_measure given the same arguments as :meth:`Cube.cell_measures`. + Notes + ----- .. 
note:: If the arguments given do not result in precisely 1 cell_measure being matched, an :class:`iris.exceptions.CellMeasureNotFoundError` is raised. - .. seealso:: - - :meth:`Cube.cell_measures()` - for full keyword documentation. + See Also + -------- + :meth:`Cube.cell_measures()` + For full keyword documentation. """ cell_measures = self.cell_measures(name_or_cell_measure) @@ -2497,23 +2505,23 @@ def cell_measure(self, name_or_cell_measure=None): return cell_measures[0] def ancillary_variables(self, name_or_ancillary_variable=None): - """Return a list of ancillary variable in this cube fitting the given - criteria. - - Kwargs: + """Return a list of ancillary variable in this cube fitting the given criteria. - * name_or_ancillary_variable + Parameters + ---------- + name_or_ancillary_variable : optional Either - (a) a :attr:`standard_name`, :attr:`long_name`, or - :attr:`var_name`. Defaults to value of `default` - (which itself defaults to `unknown`) as defined in - :class:`iris.common.CFVariableMixin`. + * (a) a :attr:`standard_name`, :attr:`long_name`, or + :attr:`var_name`. Defaults to value of `default` + (which itself defaults to `unknown`) as defined in + :class:`iris.common.CFVariableMixin`. - (b) a ancillary_variable instance with metadata equal to that of - the desired ancillary_variables. + * (b) a ancillary_variable instance with metadata equal to that of + the desired ancillary_variables. - See also + See Also + -------- :meth:`Cube.ancillary_variable()`. """ @@ -2536,19 +2544,20 @@ def ancillary_variables(self, name_or_ancillary_variable=None): return ancillary_variables def ancillary_variable(self, name_or_ancillary_variable=None): - """Return a single ancillary_variable given the same arguments as - :meth:`Cube.ancillary_variables`. + """Return a single ancillary_variable given the same arguments as :meth:`Cube.ancillary_variables`. + Notes + ----- .. 
note:: If the arguments given do not result in precisely 1 ancillary_variable being matched, an :class:`iris.exceptions.AncillaryVariableNotFoundError` is raised. - .. seealso:: - - :meth:`Cube.ancillary_variables()` - for full keyword documentation. + See Also + -------- + :meth:`Cube.ancillary_variables()` + For full keyword documentation. """ ancillary_variables = self.ancillary_variables(name_or_ancillary_variable) @@ -2587,7 +2596,9 @@ def ancillary_variable(self, name_or_ancillary_variable=None): @property def cell_methods(self): - """Tuple of :class:`iris.coords.CellMethod` representing the processing + """Tuple of :class:`iris.coords.CellMethod`. + + Tuple of :class:`iris.coords.CellMethod` representing the processing done on the phenomenon. """ @@ -2613,7 +2624,9 @@ def cell_methods(self, cell_methods: Iterable): self._metadata_manager.cell_methods = cell_methods def core_data(self): - """Retrieve the data array of this :class:`~iris.cube.Cube` in its + """Retrieve the data array of this :class:`~iris.cube.Cube`. + + Retrieve the data array of this :class:`~iris.cube.Cube` in its current state, which will either be real or lazy. If this :class:`~iris.cube.Cube` has lazy data, accessing its data @@ -2631,10 +2644,7 @@ def shape(self): @property def dtype(self): - """The data type of the values in the data array of this - :class:`~iris.cube.Cube`. - - """ + """The data type of the values in the data array of this :class:`~iris.cube.Cube`.""" return self._data_manager.dtype @property @@ -2643,7 +2653,9 @@ def ndim(self): return self._data_manager.ndim def lazy_data(self): - """Return a "lazy array" representing the Cube data. A lazy array + """Return a "lazy array" representing the Cube data. + + Return a "lazy array" representing the Cube data. A lazy array describes an array whose data values have not been loaded into memory from disk. 
@@ -2657,17 +2669,19 @@ def lazy_data(self): does _not_ make the Cube data lazy again; the Cube data remains loaded in memory. - Returns: - A lazy array, representing the Cube data. + Returns + ------- + A lazy array, representing the Cube data. """ return self._data_manager.lazy_data() @property def data(self): - """The :class:`numpy.ndarray` representing the multi-dimensional data of - the cube. + """The :class:`numpy.ndarray` representing the multi-dimensional data of the cube. + Notes + ----- .. note:: Cubes obtained from NetCDF, PP, and FieldsFile files will only @@ -2704,8 +2718,9 @@ def data(self, data): def has_lazy_data(self): """Details whether this :class:`~iris.cube.Cube` has lazy data. - Returns: - Boolean. + Returns + ------- + bool """ return self._data_manager.has_lazy_data() @@ -2736,10 +2751,7 @@ def dim_coords(self): @property def aux_coords(self): - """Return a tuple of all the auxiliary coordinates, ordered by - dimension(s). - - """ + """Return a tuple of all the auxiliary coordinates, ordered by dimension(s).""" return tuple( ( coord @@ -2752,10 +2764,7 @@ def aux_coords(self): @property def derived_coords(self): - """Return a tuple of all the coordinates generated by the coordinate - factories. - - """ + """Return a tuple of all the coordinates generated by the coordinate factories.""" return tuple( factory.make_coord(self.coord_dims) for factory in sorted( @@ -2769,16 +2778,18 @@ def aux_factories(self): return tuple(self._aux_factories) def summary(self, shorten=False, name_padding=35): - """String summary of the Cube with name+units, a list of dim coord names - versus length and, optionally, a summary of all other components. + """Summary of the Cube. - Kwargs: + String summary of the Cube with name+units, a list of dim coord names + versus length and, optionally, a summary of all other components. 
- * shorten (bool): + Parameters + ---------- + shorten : bool, default=False If set, produce a one-line summary of minimal width, showing only the cube name, units and dimensions. When not set (default), produces a full multi-line summary string. - * name_padding (int): + name_padding : int, default=35 Control the *minimum* width of the cube name + units, i.e. the indent of the dimension map section. @@ -2806,7 +2817,9 @@ def _repr_html_(self): __iter__ = None def __getitem__(self, keys): - """Cube indexing (through use of square bracket notation) has been + """Cube indexing has been implemented at the data level. + + Cube indexing (through use of square bracket notation) has been implemented at the data level. That is, the indices provided to this method should be aligned to the data of the cube, and thus the indices requested must be applicable directly to the cube.data attribute. All @@ -2913,7 +2926,9 @@ def new_ancillary_variable_dims(av_): return cube def subset(self, coord): - """Get a subset of the cube by providing the desired resultant + """Get a subset of the cube by providing the desired resultant coordinate. + + Get a subset of the cube by providing the desired resultant coordinate. If the coordinate provided applies to the whole cube; the whole cube is returned. As such, the operation is not strict. @@ -2962,56 +2977,48 @@ def subset(self, coord): return result def extract(self, constraint): - """Filter the cube by the given constraint using - :meth:`iris.Constraint.extract` method. - - """ + """Filter cube by the given constraint using :meth:`iris.Constraint.extract`.""" # Cast the constraint into a proper constraint if it is not so already constraint = iris._constraints.as_constraint(constraint) return constraint.extract(self) def intersection(self, *args, **kwargs): - """Return the intersection of the cube with specified coordinate - ranges. + """Return the intersection of the cube with specified coordinate ranges. 
Coordinate ranges can be specified as: - (a) positional arguments: instances of :class:`iris.coords.CoordExtent`, - or equivalent tuples of 3-5 items: - - * coord - Either a :class:`iris.coords.Coord`, or coordinate name - (as defined in :meth:`iris.cube.Cube.coords()`) - - * minimum - The minimum value of the range to select. - - * maximum - The maximum value of the range to select. + * (a) positional arguments: instances of :class:`iris.coords.CoordExtent`, + or equivalent tuples of 3-5 items: - * min_inclusive - If True, coordinate values equal to `minimum` will be included - in the selection. Default is True. + * (b) keyword arguments, where the keyword name specifies the name + of the coordinate, and the value defines the corresponding range of + coordinate values as a tuple. The tuple must contain two, three, or + four items, corresponding to `(minimum, maximum, min_inclusive, + max_inclusive)` as defined above. - * max_inclusive - If True, coordinate values equal to `maximum` will be included - in the selection. Default is True. - - (b) keyword arguments, where the keyword name specifies the name - of the coordinate, and the value defines the corresponding range of - coordinate values as a tuple. The tuple must contain two, three, or - four items, corresponding to `(minimum, maximum, min_inclusive, - max_inclusive)` as defined above. - - Kwargs: - - * ignore_bounds: + Parameters + ---------- + coord : + Either a :class:`iris.coords.Coord`, or coordinate name + (as defined in :meth:`iris.cube.Cube.coords()`) + minimum : + The minimum value of the range to select. + maximum : + The maximum value of the range to select. + min_inclusive : + If True, coordinate values equal to `minimum` will be included + in the selection. Default is True. + max_inclusive: + If True, coordinate values equal to `maximum` will be included + in the selection. Default is True. + ignore_bounds : optional Intersect based on points only. Default False. 
- - * threshold: + threshold : optional Minimum proportion of a bounded cell that must overlap with the specified range. Default 0. + Notes + ----- .. note:: For ranges defined over "circular" coordinates (i.e. those @@ -3020,10 +3027,10 @@ def intersection(self, *args, **kwargs): range that covers the entire modulus, a split cell will preferentially be placed at the ``minimum`` end. - .. warning:: - - Currently this routine only works with "circular" - coordinates (as defined in the previous note.) + Warnings + -------- + Currently this routine only works with "circular" + coordinates (as defined in the previous note.) For example:: @@ -3032,7 +3039,7 @@ def intersection(self, *args, **kwargs): >>> print(cube.coord('longitude').points[::10]) [ 0. 37.49999237 74.99998474 112.49996948 \ 149.99996948 - 187.49995422 224.99993896 262.49993896 299.99993896 \ + 187.49995422 224.99993896 262.49993896 299.99993896 \ 337.49990845] >>> subset = cube.intersection(longitude=(30, 50)) >>> print(subset.coord('longitude').points) @@ -3041,7 +3048,9 @@ def intersection(self, *args, **kwargs): >>> print(subset.coord('longitude').points) [-7.50012207 -3.75012207 0. 3.75 7.5 ] - Returns: + Returns + ------- + :class:`~iris.cube.Cube` A new :class:`~iris.cube.Cube` giving the subset of the cube which intersects with the requested coordinate intervals. @@ -3364,32 +3373,41 @@ def _as_list_of_coords(self, names_or_coords): return coords def slices_over(self, ref_to_slice): - """Return an iterator of all subcubes along a given coordinate or - dimension index, or multiple of these. + """Return an iterator of all subcubes. - Args: + Return an iterator of all subcubes along a given coordinate or + dimension index, or multiple of these. - * ref_to_slice (string, coord, dimension index or a list of these): + Parameters + ---------- + ref_to_slice: str, coord, dimension index or a list of these Determines which dimensions will be iterated along (i.e. 
the dimensions that are not returned in the subcubes). A mix of input types can also be provided. - Returns: - An iterator of subcubes. + Returns + ------- + An iterator of subcubes. + Examples + -------- For example, to get all subcubes along the time dimension:: for sub_cube in cube.slices_over('time'): print(sub_cube) - .. seealso:: :meth:`iris.cube.Cube.slices`. - + Notes + ----- .. note:: The order of dimension references to slice along does not affect the order of returned items in the iterator; instead the ordering is based on the fastest-changing dimension. + See Also + -------- + :meth:`iris.cube.Cube.slices`. + """ # Required to handle a mix between types. if _is_single_item(ref_to_slice): @@ -3419,34 +3437,39 @@ def slices_over(self, ref_to_slice): return self.slices(opposite_dims, ordered=False) def slices(self, ref_to_slice, ordered=True): - """Return an iterator of all subcubes given the coordinates or dimension - indices desired to be present in each subcube. + """Return an iterator of all subcubes given the coordinates or dimension indices. - Args: + Return an iterator of all subcubes given the coordinates or dimension + indices desired to be present in each subcube. - * ref_to_slice (string, coord, dimension index or a list of these): + Parameters + ---------- + ref_to_slice : str, coord, dimension index or a list of these Determines which dimensions will be returned in the subcubes (i.e. the dimensions that are not iterated over). A mix of input types can also be provided. They must all be orthogonal (i.e. point to different dimensions). - - Kwargs: - - * ordered: if True, the order which the coords to slice or data_dims + ordered : bool, default=True + if True, the order which the coords to slice or data_dims are given will be the order in which they represent the data in the resulting cube slices. If False, the order will follow that of the source cube. Default is True. - Returns: - An iterator of subcubes. 
+ Returns + ------- + An iterator of subcubes. + Examples + -------- For example, to get all 2d longitude/latitude subcubes from a multi-dimensional cube:: for sub_cube in cube.slices(['longitude', 'latitude']): print(sub_cube) - .. seealso:: :meth:`iris.cube.Cube.slices_over`. + See Also + -------- + :meth:`iris.cube.Cube.slices_over`. """ if not isinstance(ordered, bool): @@ -3503,13 +3526,19 @@ def slices(self, ref_to_slice, ordered=True): def transpose(self, new_order=None): """Re-order the data dimensions of the cube in-place. - new_order - list of ints, optional - By default, reverse the dimensions, otherwise permute the - axes according to the values given. + Parameters + ---------- + new_order : list of ints, optional + By default, reverse the dimensions, otherwise permute the + axes according to the values given. + Notes + ----- .. note:: If defined, new_order must span all of the data dimensions. - Example usage:: + Examples + -------- + :: # put the second dimension first, followed by the third dimension, # and finally put the first dimension third:: @@ -3559,7 +3588,7 @@ def remap_cube_metadata(metadata_and_dims): ) def xml(self, checksum=False, order=True, byteorder=True): - """Returns a fully valid CubeML string representation of the Cube.""" + """Return a fully valid CubeML string representation of the Cube.""" doc = Document() cube_xml_element = self._xml_element( @@ -3732,15 +3761,16 @@ def _order(array): return cube_xml_element def copy(self, data=None): - """Returns a deep copy of this cube. - - Kwargs: + """Return a deep copy of this cube. - * data: + Parameters + ---------- + data : optional Replace the data of the cube copy with provided data payload. - Returns: - A copy instance of the :class:`Cube`. + Returns + ------- + A copy instance of the :class:`Cube`. 
""" memo = {} @@ -3795,6 +3825,9 @@ def _deepcopy(self, memo, data=None): # START OPERATOR OVERLOADS def __eq__(self, other): + if other is self: + return True + result = NotImplemented if isinstance(other, Cube): @@ -3833,7 +3866,13 @@ def __eq__(self, other): if result: # TODO: why do we use allclose() here, but strict equality in # _DimensionalMetadata (via util.array_equal())? - result = da.allclose(self.core_data(), other.core_data()).compute() + result = bool( + np.allclose( + self.core_data(), + other.core_data(), + equal_nan=True, + ) + ) return result # Must supply __ne__, Python does not defer to __eq__ for negative equality @@ -3899,8 +3938,7 @@ def __neg__(self): # END OPERATOR OVERLOADS def collapsed(self, coords, aggregator, **kwargs): - """Collapse one or more dimensions over the cube given the coordinate/s - and an aggregation. + """Collapse one or more dimensions over the cube given the coordinate/s and an aggregation. Examples of aggregations that may be used include :data:`~iris.analysis.COUNT` and :data:`~iris.analysis.MAX`. @@ -3927,25 +3965,22 @@ def collapsed(self, coords, aggregator, **kwargs): it will be used wherever possible when this cube's data is itself a deferred array. - Args: - - * coords (string, coord or a list of strings/coords): + Parameters + ---------- + coords : str, coord or a list of strings/coords Coordinate names/coordinates over which the cube should be collapsed. - - * aggregator (:class:`iris.analysis.Aggregator`): + aggregator : :class:`iris.analysis.Aggregator` Aggregator to be applied for collapse operation. - - Kwargs: - - * kwargs: + **kwargs : dict, optional Aggregation function keyword arguments. - Returns: - Collapsed cube. - - For example: + Returns + ------- + Collapsed cube. 
+ Examples + -------- >>> import iris >>> import iris.analysis >>> path = iris.sample_data_path('ostia_monthly.nc') @@ -3969,7 +4004,8 @@ def collapsed(self, coords, aggregator, **kwargs): Conventions 'CF-1.5' STASH m01s00i024 - + Notes + ----- .. note:: Some aggregations are not commutative and hence the order of @@ -4016,7 +4052,7 @@ def collapsed(self, coords, aggregator, **kwargs): for coord in lat_match: warnings.warn( msg.format(coord.name()), - category=iris.exceptions.IrisUserWarning, + category=iris.warnings.IrisUserWarning, ) # Determine the dimensions we need to collapse (and those we don't) @@ -4171,20 +4207,17 @@ def aggregated_by(self, coords, aggregator, climatological=False, **kwargs): performed. aggregator : :class:`iris.analysis.Aggregator` Aggregator to be applied to each group. - climatological : bool + climatological : bool, default=False Indicates whether the output is expected to be climatological. For any aggregated time coord(s), this causes the climatological flag to be set and the point for each cell to equal its first bound, thereby preserving the time of year. + **kwargs : dict, optional + Aggregator and aggregation function keyword arguments. Returns ------- - :class:`iris.cube.Cube` - - Other Parameters - ---------------- - kwargs: - Aggregator and aggregation function keyword arguments. + :class:`iris.cube.Cube` Examples -------- @@ -4429,22 +4462,21 @@ def aggregated_by(self, coords, aggregator, climatological=False, **kwargs): return aggregateby_cube def rolling_window(self, coord, aggregator, window, **kwargs): - """Perform rolling window aggregation on a cube given a coordinate, an - aggregation method and a window size. + """Perform rolling window aggregation on a cube. - Args: + Perform rolling window aggregation on a cube given a coordinate, an + aggregation method and a window size. 
- * coord (string/:class:`iris.coords.Coord`): + Parameters + ---------- + coord : str or :class:`iris.coords.Coord` The coordinate over which to perform the rolling window aggregation. - * aggregator (:class:`iris.analysis.Aggregator`): + aggregator : :class:`iris.analysis.Aggregator` Aggregator to be applied to the data. - * window (int): + window : int Size of window to use. - - Kwargs: - - * kwargs: + **kwargs : dict, optional Aggregator and aggregation function keyword arguments. The weights argument to the aggregator, if any, should be a 1d array, cube, or (names of) :meth:`~iris.cube.Cube.coords`, @@ -4452,15 +4484,18 @@ def rolling_window(self, coord, aggregator, window, **kwargs): :meth:`~iris.cube.Cube.ancillary_variables` with the same length as the chosen window. - Returns: - :class:`iris.cube.Cube`. + Returns + ------- + :class:`iris.cube.Cube`. + Notes + ----- .. note:: This operation does not yet have support for lazy evaluation. - For example: - + Examples + -------- >>> import iris, iris.analysis >>> fname = iris.sample_data_path('GloSea4', 'ensemble_010.pp') >>> air_press = iris.load_cube(fname, 'surface_temperature') @@ -4488,7 +4523,6 @@ def rolling_window(self, coord, aggregator, window, **kwargs): 'Data from Met Office Unified Model' um_version '7.6' - >>> print(air_press.rolling_window('time', iris.analysis.MEAN, 3)) surface_temperature / (K) \ (time: 4; latitude: 145; longitude: 192) @@ -4517,8 +4551,7 @@ def rolling_window(self, coord, aggregator, window, **kwargs): Notice that the forecast_period dimension now represents the 4 possible windows of size 3 from the original cube. - """ # noqa: D214, D410, D411 - + """ # noqa: D214, D406, D407, D410, D411 # Update weights kwargs (if necessary) to handle different types of # weights weights_info = None @@ -4573,7 +4606,7 @@ def rolling_window(self, coord, aggregator, window, **kwargs): warnings.warn( "The bounds of coordinate %r were ignored in " "the rolling window operation." 
% coord_.name(), - category=iris.exceptions.IrisIgnoringBoundsWarning, + category=iris.warnings.IrisIgnoringBoundsWarning, ) if coord_.ndim != 1: @@ -4636,40 +4669,41 @@ def rolling_window(self, coord, aggregator, window, **kwargs): return result def interpolate(self, sample_points, scheme, collapse_scalar=True): - """Interpolate from this :class:`~iris.cube.Cube` to the given - sample points using the given interpolation scheme. + """Interpolate from this :class:`~iris.cube.Cube` to the given sample points. - Args: + Interpolate from this :class:`~iris.cube.Cube` to the given + sample points using the given interpolation scheme. - * sample_points: + Parameters + ---------- + sample_points : A sequence of (coordinate, points) pairs over which to interpolate. The values for coordinates that correspond to dates or times may optionally be supplied as datetime.datetime or cftime.datetime instances. The N pairs supplied will be used to create an N-d grid of points that will then be sampled (rather than just N points). - * scheme: + scheme : An instance of the type of interpolation to use to interpolate from this :class:`~iris.cube.Cube` to the given sample points. The interpolation schemes currently available in Iris are: - * :class:`iris.analysis.Linear`, and - * :class:`iris.analysis.Nearest`. - - Kwargs: - - * collapse_scalar: + * :class:`iris.analysis.Linear`, and + * :class:`iris.analysis.Nearest`. + collapse_scalar : bool, default=True Whether to collapse the dimension of scalar sample points in the resulting cube. Default is True. - Returns: + Returns + ------- + cube A cube interpolated at the given sample points. If `collapse_scalar` is True then the dimensionality of the cube will be the number of original cube dimensions minus the number of scalar coordinates. 
- For example: - + Examples + -------- >>> import datetime >>> import iris >>> path = iris.sample_data_path('uk_hires.pp') @@ -4722,26 +4756,30 @@ def interpolate(self, sample_points, scheme, collapse_scalar=True): return interp(points, collapse_scalar=collapse_scalar) def regrid(self, grid, scheme): - r"""Regrid this :class:`~iris.cube.Cube` on to the given target `grid` - using the given regridding `scheme`. + r"""Regrid this :class:`~iris.cube.Cube` on to the given target `grid`. - Args: + Regrid this :class:`~iris.cube.Cube` on to the given target `grid` + using the given regridding `scheme`. - * grid: + Parameters + ---------- + grid : A :class:`~iris.cube.Cube` that defines the target grid. - * scheme: + scheme : An instance of the type of regridding to use to regrid this cube onto the target grid. The regridding schemes in Iris currently include: - * :class:`iris.analysis.Linear`\*, - * :class:`iris.analysis.Nearest`\*, - * :class:`iris.analysis.AreaWeighted`\*, - * :class:`iris.analysis.UnstructuredNearest`, - * :class:`iris.analysis.PointInCell`, + * :class:`iris.analysis.Linear`\*, + * :class:`iris.analysis.Nearest`\*, + * :class:`iris.analysis.AreaWeighted`\*, + * :class:`iris.analysis.UnstructuredNearest`, + * :class:`iris.analysis.PointInCell`, \* Supports lazy regridding. - Returns: + Returns + ------- + :class:`~iris.cube` A cube defined with the horizontal dimensions of the target grid and the other dimensions from this cube. The data values of this cube will be converted to values on the new grid @@ -4750,6 +4788,8 @@ def regrid(self, grid, scheme): The returned cube will have lazy data if the original cube has lazy data and the regridding scheme supports lazy regridding. + Notes + ----- .. note:: Both the source and target cubes must have a CoordSystem, otherwise @@ -4836,7 +4876,9 @@ def keys(self): def sorted_axes(axes): - """Returns the axis names sorted alphabetically, with the exception that + """Return the axis names sorted alphabetically. 
+ + Return the axis names sorted alphabetically, with the exception that 't', 'z', 'y', and, 'x' are sorted to the end. """ diff --git a/lib/iris/exceptions.py b/lib/iris/exceptions.py index 36523b8ed6..d6d2084d3c 100644 --- a/lib/iris/exceptions.py +++ b/lib/iris/exceptions.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Exceptions specific to the Iris package. - -""" +"""Exceptions specific to the Iris package.""" class IrisError(Exception): @@ -69,10 +67,7 @@ class InvalidCubeError(IrisError): class ConstraintMismatchError(IrisError): - """Raised when a constraint operation has failed to find the correct number - of results. - - """ + """Raised when a constraint operation has failed to find the correct number of results.""" pass @@ -100,18 +95,17 @@ class IgnoreCubeException(IrisError): class ConcatenateError(IrisError): - """Raised when concatenate is expected to produce a single cube, but fails to - do so. - - """ + """Raised when concatenate is expected to produce a single cube, but fails to do so.""" def __init__(self, differences): - """Creates a ConcatenateError with a list of textual descriptions of - the differences which prevented a concatenate. + """Create a ConcatenateError with a list of textual descriptions of differences. - Args: + Create a ConcatenateError with a list of textual descriptions of + the differences which prevented a concatenate. - * differences: + Parameters + ---------- + differences : list of str The list of strings which describe the differences. """ @@ -124,18 +118,17 @@ def __str__(self): class MergeError(IrisError): - """Raised when merge is expected to produce a single cube, but fails to - do so. 
- - """ + """Raised when merge is expected to produce a single cube, but fails to do so.""" def __init__(self, differences): - """Creates a MergeError with a list of textual descriptions of - the differences which prevented a merge. + """Create a MergeError with a list of textual descriptions of the differences. - Args: + Creates a MergeError with a list of textual descriptions of + the differences which prevented a merge. - * differences: + Parameters + ---------- + differences : list of str The list of strings which describe the differences. """ @@ -168,186 +161,3 @@ class CannotAddError(ValueError): """Raised when an object (e.g. coord) cannot be added to a :class:`~iris.cube.Cube`.""" pass - - -############################################################################### -# WARNINGS -# Please namespace all warning objects (i.e. prefix with Iris...). - - -class IrisUserWarning(UserWarning): - r"""Base class for :class:`UserWarning`\\ s generated by Iris.""" - - pass - - -class IrisLoadWarning(IrisUserWarning): - """Any warning relating to loading.""" - - pass - - -class IrisSaveWarning(IrisUserWarning): - """Any warning relating to saving.""" - - pass - - -class IrisCfWarning(IrisUserWarning): - """Any warning relating to :term:`CF Conventions` .""" - - pass - - -class IrisIgnoringWarning(IrisUserWarning): - """Any warning that involves an Iris operation not using some information. - - E.g. :class:`~iris.aux_factory.AuxCoordFactory` generation disregarding - bounds. - """ - - pass - - -class IrisDefaultingWarning(IrisUserWarning): - """Any warning that involves Iris changing invalid/missing information. - - E.g. creating a :class:`~iris.coords.AuxCoord` from an invalid - :class:`~iris.coords.DimCoord` definition. 
- """ - - pass - - -class IrisVagueMetadataWarning(IrisUserWarning): - """Warnings where object metadata may not be fully descriptive.""" - - pass - - -class IrisUnsupportedPlottingWarning(IrisUserWarning): - """Warnings where support for a plotting module/function is not guaranteed.""" - - pass - - -class IrisImpossibleUpdateWarning(IrisUserWarning): - """Warnings where it is not possible to update an object. - - Mainly generated during regridding where the necessary information for - updating an :class:`~iris.aux_factory.AuxCoordFactory` is no longer - present. - """ - - pass - - -class IrisGeometryExceedWarning(IrisUserWarning): - """:mod:`iris.analysis.geometry` warnings about geometry exceeding dimensions.""" - - pass - - -class IrisMaskValueMatchWarning(IrisUserWarning): - """Warnings where the value representing masked data is actually present in data.""" - - pass - - -######## - - -class IrisCfLoadWarning(IrisCfWarning, IrisLoadWarning): - """Any warning relating to both loading and :term:`CF Conventions` .""" - - pass - - -class IrisCfSaveWarning(IrisCfWarning, IrisSaveWarning): - """Any warning relating to both saving and :term:`CF Conventions` .""" - - pass - - -class IrisCfInvalidCoordParamWarning(IrisCfLoadWarning): - """Warnings where incorrect information for CF coord construction is in a file.""" - - pass - - -class IrisCfMissingVarWarning(IrisCfLoadWarning): - """Warnings where a CF variable references another variable that is not in the file.""" - - pass - - -class IrisCfLabelVarWarning(IrisCfLoadWarning, IrisIgnoringWarning): - """Warnings where a CF string/label variable is being used inappropriately.""" - - pass - - -class IrisCfNonSpanningVarWarning(IrisCfLoadWarning, IrisIgnoringWarning): - """Warnings where a CF variable is ignored because it does not span the required dimension.""" - - pass - - -######## - - -class IrisIgnoringBoundsWarning(IrisIgnoringWarning): - """Warnings where bounds information has not been used by an Iris 
operation.""" - - pass - - -class IrisCannotAddWarning(IrisIgnoringWarning): - """Warnings where a member object cannot be added to a :class:`~iris.cube.Cube` .""" - - pass - - -class IrisGuessBoundsWarning(IrisDefaultingWarning): - """Warnings where Iris has filled absent bounds information with a best estimate.""" - - pass - - -class IrisPpClimModifiedWarning(IrisSaveWarning, IrisDefaultingWarning): - """Warnings where a climatology has been modified while saving :term:`Post Processing (PP) Format` .""" - - pass - - -class IrisFactoryCoordNotFoundWarning(IrisLoadWarning): - """Warnings where a referenced factory coord can not be found when loading a variable in :term:`NetCDF Format`.""" - - pass - - -class IrisNimrodTranslationWarning(IrisLoadWarning): - """For unsupported vertical coord types in :mod:`iris.file_formats.nimrod_load_rules`. - - (Pre-dates the full categorisation of Iris UserWarnings). - """ - - pass - - -class IrisUnknownCellMethodWarning(IrisCfLoadWarning): - """If a loaded :class:`~iris.coords.CellMethod` is not one the method names known to Iris. - - (Pre-dates the full categorisation of Iris UserWarnings). - """ - - pass - - -class IrisSaverFillValueWarning(IrisMaskValueMatchWarning, IrisSaveWarning): - """For fill value complications during Iris file saving :term:`NetCDF Format`. - - (Pre-dates the full categorisation of Iris UserWarnings). - """ - - pass diff --git a/lib/iris/experimental/raster.py b/lib/iris/experimental/raster.py index 16421947f6..ba7efc68b0 100644 --- a/lib/iris/experimental/raster.py +++ b/lib/iris/experimental/raster.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Experimental module for importing/exporting raster data from Iris cubes using -the GDAL library. +"""Experimental module for importing/exporting raster data from Iris cubes using the GDAL library. 
See also: `GDAL - Geospatial Data Abstraction Library `_. @@ -42,20 +41,31 @@ def _gdal_write_array(x_min, x_step, y_max, y_step, coord_system, data, fname, ftype): """Use GDAL WriteArray to export data as a 32-bit raster image. + Requires the array data to be of the form: North-at-top and West-on-left. - Args: - * x_min: Minimum X coordinate bounds value. - * x_step: Change in X coordinate per cell. - * y_max: Maximum Y coordinate bounds value. - * y_step: Change in Y coordinate per cell. - * coord_system (iris.coord_systems.CoordSystem): - Coordinate system for X and Y. - * data (numpy.ndarray): 2d array of values to export - * fname (string): Output file name. - * ftype (string): Export file type. - + Parameters + ---------- + x_min : + Minimum X coordinate bounds value. + x_step : + Change in X coordinate per cell. + y_max : + Maximum Y coordinate bounds value. + y_step : + Change in Y coordinate per cell. + coord_system : iris.coord_systems.CoordSystem + Coordinate system for X and Y. + data : numpy.ndarray + 2d array of values to export + fname : str + Output file name. + ftype : str + Export file type. + + Notes + ----- .. note:: Projection information is currently not written to the output. @@ -97,7 +107,7 @@ def _gdal_write_array(x_min, x_step, y_max, y_step, coord_system, data, fname, f def export_geotiff(cube, fname): - """Writes cube data to raster file format as a PixelIsArea GeoTiff image. + """Write cube data to raster file format as a PixelIsArea GeoTiff image. .. deprecated:: 3.2.0 @@ -107,11 +117,16 @@ def export_geotiff(cube, fname): Developers to discuss how to retain it (which could include reversing the deprecation). - Args: - * cube (Cube): The 2D regularly gridded cube slice to be exported. - The cube must have regular, contiguous bounds. - * fname (string): Output file name. + Parameters + ---------- + cube : Cube + The 2D regularly gridded cube slice to be exported. + The cube must have regular, contiguous bounds. 
+ fname : str + Output file name. + Notes + ----- .. note:: For more details on GeoTiff specification and PixelIsArea, see: diff --git a/lib/iris/experimental/regrid.py b/lib/iris/experimental/regrid.py index b0ebfc5679..c1d209cac0 100644 --- a/lib/iris/experimental/regrid.py +++ b/lib/iris/experimental/regrid.py @@ -37,8 +37,8 @@ import iris.analysis.cartography import iris.coord_systems import iris.cube -from iris.exceptions import IrisImpossibleUpdateWarning from iris.util import _meshgrid +from iris.warnings import IrisImpossibleUpdateWarning wmsg = ( "The 'iris.experimental.regrid' package is deprecated since version 3.2, " @@ -51,7 +51,9 @@ def regrid_area_weighted_rectilinear_src_and_grid(src_cube, grid_cube, mdtol=0): - """Return a new cube with data values calculated using the area weighted + """Regrid using the area weighted mean of data values. + + Return a new cube with data values calculated using the area weighted mean of data values from src_grid regridded onto the horizontal grid of grid_cube. @@ -82,18 +84,15 @@ def regrid_area_weighted_rectilinear_src_and_grid(src_cube, grid_cube, mdtol=0): or entirely outside of the horizontal extent of the src_cube will be masked irrespective of the value of mdtol. - Args: - - * src_cube: + Parameters + ---------- + src_cube : :class:`iris.cube.Cube` An instance of :class:`iris.cube.Cube` that supplies the data, metadata and coordinates. - * grid_cube: + grid_cube : :class:`iris.cube.Cube` An instance of :class:`iris.cube.Cube` that supplies the desired horizontal grid definition. - - Kwargs: - - * mdtol: + mdtol : int, default=0 Tolerance of missing data. The value returned in each element of the returned cube's data array will be masked if the fraction of masked data in the overlapping cells of the source cube exceeds mdtol. 
This @@ -102,8 +101,9 @@ def regrid_area_weighted_rectilinear_src_and_grid(src_cube, grid_cube, mdtol=0): will mean the resulting element will be masked if and only if all the overlapping cells of the source cube are masked. Defaults to 0. - Returns: - A new :class:`iris.cube.Cube` instance. + Returns + ------- + A new :class:`iris.cube.Cube` instance. """ wmsg = ( @@ -125,7 +125,9 @@ def regrid_area_weighted_rectilinear_src_and_grid(src_cube, grid_cube, mdtol=0): def regrid_weighted_curvilinear_to_rectilinear(src_cube, weights, grid_cube): - r"""Return a new cube with the data values calculated using the weighted + r"""Regrid using the weighted mean and the weights. + + Return a new cube with the data values calculated using the weighted mean of data values from :data:`src_cube` and the weights from :data:`weights` regridded onto the horizontal grid of :data:`grid_cube`. @@ -165,24 +167,25 @@ def regrid_weighted_curvilinear_to_rectilinear(src_cube, weights, grid_cube): .. warning:: - * All coordinates that span the :data:`src_cube` that don't define - the horizontal curvilinear grid will be ignored. + All coordinates that span the :data:`src_cube` that don't define + the horizontal curvilinear grid will be ignored. - Args: - - * src_cube: + Parameters + ---------- + src_cube : :class:`iris.cube.Cube` A :class:`iris.cube.Cube` instance that defines the source variable grid to be regridded. - * weights (array or None): + weights : array or None A :class:`numpy.ndarray` instance that defines the weights for the source variable grid cells. Must have the same shape as the X and Y coordinates. If weights is None, all-ones will be used. - * grid_cube: + grid_cube : :class:`iris.cube.Cube` A :class:`iris.cube.Cube` instance that defines the target rectilinear grid. - Returns: - A :class:`iris.cube.Cube` instance. + Returns + ------- + A :class:`iris.cube.Cube` instance. 
""" wmsg = ( @@ -201,7 +204,9 @@ def regrid_weighted_curvilinear_to_rectilinear(src_cube, weights, grid_cube): class PointInCell: - """This class describes the point-in-cell regridding scheme for use + """Describe the point-in-cell regridding scheme. + + This class describes the point-in-cell regridding scheme for use typically with :meth:`iris.cube.Cube.regrid()`. .. warning:: @@ -214,8 +219,7 @@ class PointInCell: """ def __init__(self, weights=None): - """Point-in-cell regridding scheme suitable for regridding over one - or more orthogonal coordinates. + """Point-in-cell regridding scheme for regridding over one or more orthogonal coordinates. .. warning:: @@ -235,21 +239,20 @@ def __init__(self, weights=None): class _ProjectedUnstructuredRegridder: - """This class provides regridding that uses scipy.interpolate.griddata.""" + """Regridding that uses scipy.interpolate.griddata.""" def __init__(self, src_cube, tgt_grid_cube, method, projection=None): - """Create a regridder for conversions between the source - and target grids. - - Args: + """Create a regridder for conversions between the source and target grids. - * src_cube: + Parameters + ---------- + src_cube : :class:`~iris.cube.Cube` The :class:`~iris.cube.Cube` providing the source points. - * tgt_grid_cube: + tgt_grid_cube : :class:`~iris.cube.Cube` The :class:`~iris.cube.Cube` providing the target grid. - * method: + method : Either 'linear' or 'nearest'. - * projection: + projection : optional The projection in which the interpolation is performed. If None, a PlateCarree projection is used. Defaults to None. @@ -392,42 +395,43 @@ def _create_cube( grid_y_coord, regrid_callback, ): - """Return a new Cube for the result of regridding the source Cube onto - the new grid. + """Return a new Cube for the result of regridding the source Cube onto the new grid. 
All the metadata and coordinates of the result Cube are copied from the source Cube, with two exceptions: - - Grid dimension coordinates are copied from the grid Cube. - - Auxiliary coordinates which span the grid dimensions are - ignored, except where they provide a reference surface for an - :class:`iris.aux_factory.AuxCoordFactory`. - Args: + * Grid dimension coordinates are copied from the grid Cube. + * Auxiliary coordinates which span the grid dimensions are + ignored, except where they provide a reference surface for an + :class:`iris.aux_factory.AuxCoordFactory`. - * data: + Parameters + ---------- + data : The regridded data as an N-dimensional NumPy array. - * src: + src : :class:`~iris.cube.Cube` The source Cube. - * src_xy_dim: + src_xy_dim : The dimension the X and Y coord span within the source Cube. - * src_x_coord: + src_x_coord : The X coordinate (either :class:`iris.coords.AuxCoord` or :class:`iris.coords.DimCoord`). - * src_y_coord: + src_y_coord : The Y coordinate (either :class:`iris.coords.AuxCoord` or :class:`iris.coords.DimCoord`). - * grid_x_coord: + grid_x_coord : The :class:`iris.coords.DimCoord` for the new grid's X coordinate. - * grid_y_coord: + grid_y_coord : The :class:`iris.coords.DimCoord` for the new grid's Y coordinate. - * regrid_callback: + regrid_callback : The routine that will be used to calculate the interpolated values of any reference surfaces. - Returns: - The new, regridded Cube. + Returns + ------- + The new, regridded Cube. """ # Create a result cube with the appropriate metadata @@ -517,18 +521,22 @@ def regrid_reference_surface( return result def __call__(self, src_cube): - """Regrid this :class:`~iris.cube.Cube` on to the target grid of + """Regrid to the target grid. + + Regrid this :class:`~iris.cube.Cube` on to the target grid of this :class:`UnstructuredProjectedRegridder`. The given cube must be defined with the same grid as the source grid used to create this :class:`UnstructuredProjectedRegridder`. 
- Args: - - * src_cube: + Parameters + ---------- + src_cube : :class:`~iris.cube.Cube` A :class:`~iris.cube.Cube` to be regridded. - Returns: + Returns + ------- + :class:`~iris.cube.Cube` A cube defined with the horizontal dimensions of the target and the other dimensions from this cube. The data values of this cube will be converted to values on the new grid using @@ -598,7 +606,9 @@ def __call__(self, src_cube): class ProjectedUnstructuredLinear: - """This class describes the linear regridding scheme which uses the + """Describe the linear regridding scheme. + + This class describes the linear regridding scheme which uses the scipy.interpolate.griddata to regrid unstructured data on to a grid. The source cube and the target cube will be projected into a common @@ -607,7 +617,9 @@ class ProjectedUnstructuredLinear: """ def __init__(self, projection=None): - """Linear regridding scheme that uses scipy.interpolate.griddata on + """Linear regridding scheme. + + Linear regridding scheme that uses scipy.interpolate.griddata on projected unstructured data. .. note:: @@ -620,9 +632,9 @@ def __init__(self, projection=None): Developers to discuss how to retain it (which could include reversing the deprecation). - Optional Args: - - * projection: `cartopy.crs instance` + Parameters + ---------- + projection : `cartopy.crs instance`, optional The projection that the scipy calculation is performed in. If None is given, a PlateCarree projection is used. Defaults to None. @@ -637,7 +649,9 @@ def __init__(self, projection=None): warn_deprecated(wmsg) def regridder(self, src_cube, target_grid): - """Creates a linear regridder to perform regridding, using + """Create a linear regridder to perform regridding. + + Creates a linear regridder to perform regridding, using scipy.interpolate.griddata from unstructured source points to the target grid. The regridding calculation is performed in the given projection. 
@@ -649,16 +663,18 @@ def regridder(self, src_cube, target_grid): Does not support lazy regridding. - Args: - - * src_cube: + Parameters + ---------- + src_cube : :class:`~iris.cube.Cube` The :class:`~iris.cube.Cube` defining the unstructured source points. - * target_grid: + target_grid : :class:`~iris.cube.Cube` The :class:`~iris.cube.Cube` defining the target grid. - Returns: - A callable with the interface: + Returns + ------- + callable + A callable with the interface:: `callable(cube)` @@ -672,7 +688,9 @@ def regridder(self, src_cube, target_grid): class ProjectedUnstructuredNearest: - """This class describes the nearest regridding scheme which uses the + """Describe the nearest regridding scheme which uses scipy.interpolate.griddata. + + This class describes the nearest regridding scheme which uses the scipy.interpolate.griddata to regrid unstructured data on to a grid. The source cube and the target cube will be projected into a common @@ -687,8 +705,7 @@ class ProjectedUnstructuredNearest: """ def __init__(self, projection=None): - """Nearest regridding scheme that uses scipy.interpolate.griddata on - projected unstructured data. + """Nearest regridding scheme that uses scipy.interpolate.griddata on projected unstructured data. .. note:: @@ -701,9 +718,9 @@ def __init__(self, projection=None): contact the Iris Developers to discuss how to retain it (which could include reversing the deprecation). - Optional Args: - - * projection: `cartopy.crs instance` + Parameters + ---------- + projection : `cartopy.crs instance`, optional The projection that the scipy calculation is performed in. If None is given, a PlateCarree projection is used. Defaults to None. @@ -719,7 +736,9 @@ def __init__(self, projection=None): warn_deprecated(wmsg) def regridder(self, src_cube, target_grid): - """Creates a nearest-neighbour regridder to perform regridding, using + """Create a nearest-neighbour regridder to perform regridding. 
+ + Create a nearest-neighbour regridder to perform regridding, using scipy.interpolate.griddata from unstructured source points to the target grid. The regridding calculation is performed in the given projection. @@ -731,16 +750,18 @@ def regridder(self, src_cube, target_grid): Does not support lazy regridding. - Args: - - * src_cube: + Parameters + ---------- + src_cube : :class:`~iris.cube.Cube` The :class:`~iris.cube.Cube` defining the unstructured source points. - * target_grid: + target_grid : :class:`~iris.cube.Cube` The :class:`~iris.cube.Cube` defining the target grid. - Returns: - A callable with the interface: + Returns + ------- + callable + A callable with the interface:: `callable(cube)` diff --git a/lib/iris/experimental/regrid_conservative.py b/lib/iris/experimental/regrid_conservative.py index 45ac0505ed..e15b1c29a5 100644 --- a/lib/iris/experimental/regrid_conservative.py +++ b/lib/iris/experimental/regrid_conservative.py @@ -60,17 +60,15 @@ def _make_esmpy_field(x_coord, y_coord, ref_name="field", data=None, mask=None): Add a grid mask if provided. Create and return a Field mapped on this Grid, setting data if provided. - Args: - - * x_coord, y_coord (:class:`iris.coords.Coord`): + Parameters + ---------- + x_coord, y_coord : :class:`iris.coords.Coord` One-dimensional coordinates of shape (nx,) and (ny,). Their contiguous bounds define an ESMF.Grid of shape (nx, ny). - - Kwargs: - - * data (:class:`numpy.ndarray`, shape (nx,ny)): + ref_name : stre, default="field" + data : :class:`numpy.ndarray`, shape (nx,ny), optional Set the Field data content. - * mask (:class:`numpy.ndarray`, boolean, shape (nx,ny)): + mask : :class:`numpy.ndarray`, bool, shape (nx,ny), optional Add a mask item to the grid, assigning it 0/1 where mask=False/True. """ @@ -164,19 +162,23 @@ def regrid_conservative_via_esmpy(source_cube, grid_cube): Regrids the data of a source cube onto a new grid defined by a destination cube. 
- Args: - - * source_cube (:class:`iris.cube.Cube`): + Parameters + ---------- + source_cube : :class:`iris.cube.Cube` Source data. Must have two identifiable horizontal dimension coordinates. - * grid_cube (:class:`iris.cube.Cube`): + grid_cube : :class:`iris.cube.Cube` Define the target horizontal grid: Only the horizontal dimension coordinates are actually used. - Returns: + Returns + ------- + :class:`iris.cube.Cube` A new cube derived from source_cube, regridded onto the specified horizontal grid. + Notes + ----- Any additional coordinates which map onto the horizontal dimensions are removed, while all other metadata is retained. If there are coordinate factories with 2d horizontal reference surfaces, diff --git a/lib/iris/experimental/representation.py b/lib/iris/experimental/representation.py index fd063a5475..4ffe176e3a 100644 --- a/lib/iris/experimental/representation.py +++ b/lib/iris/experimental/representation.py @@ -3,9 +3,7 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Definitions of how Iris objects should be represented. - -""" +"""Definitions of how Iris objects should be represented.""" from html import escape import re @@ -118,7 +116,9 @@ def __init__(self, cube): self.units = escape(str(self.cube.units)) def _get_dim_names(self): - """Get dimension-describing coordinate names, or '--' if no coordinate] + """Get dimension-describing coordinate names. + + Get dimension-describing coordinate names, or '--' if no coordinate] describes the dimension. Note: borrows from `cube.summary`. @@ -148,7 +148,9 @@ def _get_lines(self): return self.cube_str.split("\n") def _get_bits(self, bits): - """Parse the body content (`bits`) of the cube string in preparation for + """Parse the body content (`bits`) of the cube string. + + Parse the body content (`bits`) of the cube string in preparation for being converted into table rows. 
""" @@ -177,7 +179,9 @@ def _get_bits(self, bits): self.sections_data[str_heading_name] = content def _make_header(self): - """Make the table header. This is similar to the summary of the cube, + """Make the table header. + + Make the table header. This is similar to the summary of the cube, but does not include dim shapes. These are included on the next table row down, and produced with `make_shapes_row`. @@ -201,16 +205,25 @@ def _make_shapes_row(self): return "\n".join(cell for cell in cells) def _make_row(self, title, body=None, col_span=0): - """Produce one row for the table body; i.e. - Coord namex-.... + """Produce one row for the table body. + + Parameters + ---------- + body : str, optional + Contains the content for each cell not in the left-most (title) column. + If None, indicates this row is a title row (see below). + title : stre, optional + Contains the row heading. If `body` is None, indicates + that the row contains a sub-heading; + e.g. 'Dimension coordinates:'. + col_span : int, default=0 + Indicates how many columns the string should span. + + Examples + -------- + :: - `body` contains the content for each cell not in the left-most (title) - column. - If None, indicates this row is a title row (see below). - `title` contains the row heading. If `body` is None, indicates - that the row contains a sub-heading; - e.g. 'Dimension coordinates:'. - `col_span` indicates how many columns the string should span. + Coord namex-.... """ row = [''] @@ -281,7 +294,7 @@ def _make_content(self): return "\n".join(element for element in elements) def repr_html(self): - """The `repr` interface for Jupyter.""" + """Represent html, the `repr` interface for Jupyter.""" # Deal with the header first. 
header = self._make_header() diff --git a/lib/iris/experimental/stratify.py b/lib/iris/experimental/stratify.py index d79102cc08..50f8c21dcf 100644 --- a/lib/iris/experimental/stratify.py +++ b/lib/iris/experimental/stratify.py @@ -2,10 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Routines for putting data on new strata (aka. isosurfaces), often in the -Z direction. - -""" +"""Routines for putting data on new strata (aka. isosurfaces), often in the Z direction.""" from functools import partial @@ -17,16 +14,14 @@ def _copy_coords_without_z_dim(src, tgt, z_dim): - """Helper function to copy across non z-dimenson coordinates between cubes. + """Copy across non z-dimenson coordinates between cubes. Parameters ---------- src : :class:`~iris.cube.Cube` Incoming cube containing the coordinates to be copied from. - tgt : :class:`~iris.cube.Cube` Outgoing cube for the coordinates to be copied to. - z_dim : int Dimension within the `src` cube that is the z-dimension. This dimension will not be copied. For example, the incoming @@ -52,7 +47,9 @@ def _copy_coords_without_z_dim(src, tgt, z_dim): def relevel(cube, src_levels, tgt_levels, axis=None, interpolator=None): - """Interpolate the cube onto the specified target levels, given the + """Perform vertical interpolation. + + Interpolate the cube onto the specified target levels, given the source levels of the cube. For example, suppose we have two datasets `P(i,j,k)` and `H(i,j,k)` @@ -64,31 +61,27 @@ def relevel(cube, src_levels, tgt_levels, axis=None, interpolator=None): that are generally monotonic in the direction of interpolation, such as height/pressure or salinity/depth. - Args: - + Parameters + ---------- cube : :class:`~iris.cube.Cube` The phenomenon data to be re-levelled. 
- - src_levels : :class:`~iris.cube.Cube`, :class:`~iris.coord.Coord` or string + src_levels : :class:`~iris.cube.Cube`, :class:`~iris.coord.Coord` or str Describes the source levels of the `cube` that will be interpolated over. The `src_levels` must be in the same system as the `tgt_levels`. The dimensions of `src_levels` must be broadcastable to the dimensions of the `cube`. Note that, the coordinate name containing the source levels in the `cube` may be provided. - tgt_levels : array-like Describes the target levels of the `cube` to be interpolated to. The `tgt_levels` must be in the same system as the `src_levels`. The dimensions of the `tgt_levels` must be broadcastable to the dimensions of the `cube`, except in the nominated axis of interpolation. - - axis : int, :class:`~iris.coords.Coord` or string + axis : int, :class:`~iris.coords.Coord` or str, optional The axis of interpolation. Defaults to the first dimension of the `cube`, which is typically the z-dimension. Note that, the coordinate name specifying the z-dimension of the `cube` may be provided. - - interpolator : callable or None + interpolator : callable or None, optional The interpolator to use when computing the interpolation. The function will be passed the following positional arguments:: diff --git a/lib/iris/experimental/ugrid/__init__.py b/lib/iris/experimental/ugrid/__init__.py index 7cae55a1bd..ccdf05a387 100644 --- a/lib/iris/experimental/ugrid/__init__.py +++ b/lib/iris/experimental/ugrid/__init__.py @@ -3,8 +3,9 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Infra-structure for unstructured mesh support, based on -CF UGRID Conventions (v1.0), https://ugrid-conventions.github.io/ugrid-conventions/. +"""Infra-structure for unstructured mesh support. + +Based on CF UGRID Conventions (v1.0), https://ugrid-conventions.github.io/ugrid-conventions/. .. 
note:: diff --git a/lib/iris/experimental/ugrid/cf.py b/lib/iris/experimental/ugrid/cf.py index 10e76cc11b..6897b4ca67 100644 --- a/lib/iris/experimental/ugrid/cf.py +++ b/lib/iris/experimental/ugrid/cf.py @@ -10,13 +10,15 @@ """ import warnings -from ...exceptions import IrisCfLabelVarWarning, IrisCfMissingVarWarning from ...fileformats import cf +from ...warnings import IrisCfLabelVarWarning, IrisCfMissingVarWarning from .mesh import Connectivity class CFUGridConnectivityVariable(cf.CFVariable): - """A CF_UGRID connectivity variable points to an index variable identifying + """A CF_UGRID connectivity variable. + + A CF_UGRID connectivity variable points to an index variable identifying for every element (edge/face/volume) the indices of its corner nodes. The connectivity array will thus be a matrix of size n-elements x n-corners. For the indexing one may use either 0- or 1-based indexing; the convention @@ -86,7 +88,9 @@ def identify(cls, variables, ignore=None, target=None, warn=True): class CFUGridAuxiliaryCoordinateVariable(cf.CFVariable): - """A CF-UGRID auxiliary coordinate variable is a CF-netCDF auxiliary + """A CF-UGRID auxiliary coordinate variable. + + A CF-UGRID auxiliary coordinate variable is a CF-netCDF auxiliary coordinate variable representing the element (node/edge/face/volume) locations (latitude, longitude or other spatial coordinates, and optional elevation or other coordinates). These auxiliary coordinate variables will @@ -163,7 +167,9 @@ def identify(cls, variables, ignore=None, target=None, warn=True): class CFUGridMeshVariable(cf.CFVariable): - """A CF-UGRID mesh variable is a dummy variable for storing topology + """A CF-UGRID mesh variable is a dummy variable for storing topology information as attributes. + + A CF-UGRID mesh variable is a dummy variable for storing topology information as attributes. The mesh variable has the ``cf_role`` 'mesh_topology'. 
@@ -230,7 +236,9 @@ def identify(cls, variables, ignore=None, target=None, warn=True): class CFUGridGroup(cf.CFGroup): - """Represents a collection of 'NetCDF Climate and Forecast (CF) Metadata + """Represents a collection of CF Metadata Conventions variables and netCDF global attributes. + + Represents a collection of 'NetCDF Climate and Forecast (CF) Metadata Conventions' variables and netCDF global attributes. Specialisation of :class:`~iris.fileformats.cf.CFGroup` that includes extra @@ -255,10 +263,7 @@ def meshes(self): @property def non_data_variable_names(self): - """:class:`set` of the names of the CF-netCDF/CF-UGRID variables that are - not the data pay-load. - - """ + """:class:`set` of names of the CF-netCDF/CF-UGRID variables that are not the data pay-load.""" extra_variables = (self.connectivities, self.ugrid_coords, self.meshes) extra_result = set() for variable in extra_variables: @@ -267,7 +272,9 @@ def non_data_variable_names(self): class CFUGridReader(cf.CFReader): - """This class allows the contents of a netCDF file to be interpreted according + """Allows the contents of a netCDF file to be interpreted. + + This class allows the contents of a netCDF file to be interpreted according to the 'NetCDF Climate and Forecast (CF) Metadata Conventions'. Specialisation of :class:`~iris.fileformats.cf.CFReader` that can also diff --git a/lib/iris/experimental/ugrid/load.py b/lib/iris/experimental/ugrid/load.py index 07cc20a65a..630c179fd9 100644 --- a/lib/iris/experimental/ugrid/load.py +++ b/lib/iris/experimental/ugrid/load.py @@ -3,8 +3,10 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -r"""Extensions to Iris' NetCDF loading to allow the construction of -:class:`~iris.experimental.ugrid.mesh.Mesh`\\ es from UGRID data in the file. +r"""Allow the construction of :class:`~iris.experimental.ugrid.mesh.Mesh`.
+ +Extensions to Iris' NetCDF loading to allow the construction of +:class:`~iris.experimental.ugrid.mesh.Mesh` from UGRID data in the file. Eventual destination: :mod:`iris.fileformats.netcdf`. @@ -17,11 +19,11 @@ from ...config import get_logger from ...coords import AuxCoord -from ...exceptions import IrisCfWarning, IrisDefaultingWarning, IrisIgnoringWarning from ...fileformats._nc_load_rules.helpers import get_attr_units, get_names from ...fileformats.netcdf import loader as nc_loader from ...io import decode_uri, expand_filespecs from ...util import guess_coord_axis +from ...warnings import IrisCfWarning, IrisDefaultingWarning, IrisIgnoringWarning from .cf import ( CFUGridAuxiliaryCoordinateVariable, CFUGridConnectivityVariable, @@ -48,7 +50,9 @@ class _WarnComboCfDefaultingIgnoring(_WarnComboCfDefaulting, IrisIgnoringWarning class ParseUGridOnLoad(threading.local): def __init__(self): - """A flag for dictating whether to use the experimental UGRID-aware + """Thread-safe state to enable UGRID-aware NetCDF loading. + + A flag for dictating whether to use the experimental UGRID-aware version of Iris NetCDF loading. Object is thread-safe. Use via the run-time switch @@ -97,7 +101,7 @@ def context(self): def _meshes_from_cf(cf_reader): - """Common behaviour for extracting meshes from a CFReader. + """Mesh from cf, common behaviour for extracting meshes from a CFReader. Simple now, but expected to increase in complexity as Mesh sharing develops. @@ -123,10 +127,10 @@ def load_mesh(uris, var_name=None): ---------- uris : str or iterable of str One or more filenames/URI's. Filenames can include wildcards. Any URI's - must support OpenDAP. + must support OpenDAP. var_name : str, optional Only return a :class:`~iris.experimental.ugrid.mesh.Mesh` if its - var_name matches this value. + var_name matches this value. Returns ------- @@ -149,17 +153,17 @@ def load_meshes(uris, var_name=None): ---------- uris : str or iterable of str One or more filenames/URI's.
Filenames can include wildcards. Any URI's - must support OpenDAP. + must support OpenDAP. var_name : str, optional - Only return :class:`~iris.experimental.ugrid.mesh.Mesh`\\ es that have - var_names matching this value. + Only return :class:`~iris.experimental.ugrid.mesh.Mesh` that have + var_names matching this value. Returns ------- dict A dictionary mapping each mesh-containing file path/URL in the input - ``uris`` to a list of the - :class:`~iris.experimental.ugrid.mesh.Mesh`\\ es returned from each. + ``uris`` to a list of the + :class:`~iris.experimental.ugrid.mesh.Mesh` returned from each. """ # TODO: rationalise UGRID/mesh handling once experimental.ugrid is folded @@ -230,7 +234,9 @@ def load_meshes(uris, var_name=None): def _build_aux_coord(coord_var, file_path): - """Construct a :class:`~iris.coords.AuxCoord` from a given + """Construct a :class:`~iris.coords.AuxCoord`. + + Construct a :class:`~iris.coords.AuxCoord` from a given :class:`~iris.experimental.ugrid.cf.CFUGridAuxiliaryCoordinateVariable`, and guess its mesh axis. @@ -283,7 +289,9 @@ def _build_aux_coord(coord_var, file_path): def _build_connectivity(connectivity_var, file_path, element_dims): - """Construct a :class:`~iris.experimental.ugrid.mesh.Connectivity` from a + """Construct a :class:`~iris.experimental.ugrid.mesh.Connectivity`. + + Construct a :class:`~iris.experimental.ugrid.mesh.Connectivity` from a given :class:`~iris.experimental.ugrid.cf.CFUGridConnectivityVariable`, and identify the name of its first dimension. @@ -325,10 +333,12 @@ def _build_connectivity(connectivity_var, file_path, element_dims): def _build_mesh(cf, mesh_var, file_path): - """Construct a :class:`~iris.experimental.ugrid.mesh.Mesh` from a given + """Construct a :class:`~iris.experimental.ugrid.mesh.Mesh`. + + Construct a :class:`~iris.experimental.ugrid.mesh.Mesh` from a given :class:`~iris.experimental.ugrid.cf.CFUGridMeshVariable`. - todo: integrate with standard loading API post-pyke. 
+ TODO: integrate with standard loading API post-pyke. """ # TODO: integrate with standard saving API when no longer 'experimental'. @@ -458,11 +468,13 @@ def _build_mesh(cf, mesh_var, file_path): def _build_mesh_coords(mesh, cf_var): - """Construct a tuple of :class:`~iris.experimental.ugrid.mesh.MeshCoord` using + """Construct a tuple of :class:`~iris.experimental.ugrid.mesh.MeshCoord`. + + Construct a tuple of :class:`~iris.experimental.ugrid.mesh.MeshCoord` from a given :class:`~iris.experimental.ugrid.mesh.Mesh` and :class:`~iris.fileformats.cf.CFVariable`. - todo: integrate with standard loading API post-pyke. + TODO: integrate with standard loading API post-pyke. """ # TODO: integrate with standard saving API when no longer 'experimental'. diff --git a/lib/iris/experimental/ugrid/mesh.py b/lib/iris/experimental/ugrid/mesh.py index 14bb313474..a2519f250b 100644 --- a/lib/iris/experimental/ugrid/mesh.py +++ b/lib/iris/experimental/ugrid/mesh.py @@ -3,7 +3,7 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Iris' data model representation of CF UGrid's Mesh and its constituent parts. +"""Iris data model representation of CF UGrid's Mesh and its constituent parts. Eventual destination: dedicated module in :mod:`iris` root. @@ -84,7 +84,9 @@ class Connectivity(_DimensionalMetadata): - """A CF-UGRID topology connectivity, describing the topological relationship + """CF-UGRID topology. + + A CF-UGRID topology connectivity, describing the topological relationship between two types of mesh element. One or more connectivities make up a CF-UGRID topology - a constituent of a CF-UGRID mesh. @@ -117,11 +119,11 @@ def __init__( start_index=0, location_axis=0, ): - """Constructs a single connectivity. - - Args: + """Construct a single connectivity.
- * indices (:class:`numpy.ndarray` or :class:`numpy.ma.core.MaskedArray` or :class:`dask.array.Array`): + Parameters + ---------- + indices : :class:`numpy.ndarray` or :class:`numpy.ma.core.MaskedArray` or :class:`dask.array.Array` 2D array giving the topological connection relationship between :attr:`location` elements and :attr:`connected` elements. The :attr:`location_axis` dimension indexes over the @@ -135,34 +137,31 @@ def __init__( elements: use a :class:`numpy.ma.core.MaskedArray` and mask the :attr:`location` elements' unused index 'slots'. Use a :class:`dask.array.Array` to keep indices 'lazy'. - * cf_role (str): + cf_role : str Denotes the topological relationship that this connectivity describes. Made up of this array's :attr:`location`, and the :attr:`connected` element type that is indexed by the array. See :attr:`UGRID_CF_ROLES` for valid arguments. - - Kwargs: - - * standard_name (str): + standard_name : str, optional CF standard name of the connectivity. (NOTE: this is not expected by the UGRID conventions, but will be handled in Iris' standard way if provided). - * long_name (str): + long_name : str, optional Descriptive name of the connectivity. - * var_name (str): + var_name : str, optional The NetCDF variable name for the connectivity. - * units (cf_units.Unit): + units : cf_units.Unit, optional The :class:`~cf_units.Unit` of the connectivity's values. Can be a string, which will be converted to a Unit object. (NOTE: this is not expected by the UGRID conventions, but will be handled in Iris' standard way if provided). - * attributes (dict): + attributes : dict, optional A dictionary containing other cf and user-defined attributes. - * start_index (int): + start_index : int, optional Either ``0`` or ``1``. Default is ``0``. Denotes whether :attr:`indices` uses 0-based or 1-based indexing (allows support for Fortran and legacy NetCDF files). - * location_axis (int): + location_axis : int, optional Either ``0`` or ``1``. Default is ``0``. 
Denotes which axis of :attr:`indices` varies over the :attr:`location` elements (the alternate axis therefore varying over :attr:`connected` elements). @@ -218,8 +217,8 @@ def _values(self, values): @property def cf_role(self): - """The category of topological relationship that this connectivity - describes. + """The category of topological relationship that this connectivity describes. + **Read-only** - validity of :attr:`indices` is dependent on :attr:`cf_role`. A new :class:`Connectivity` must therefore be defined if a different :attr:`cf_role` is needed. @@ -229,7 +228,9 @@ def cf_role(self): @property def location(self): - """Derived from the connectivity's :attr:`cf_role` - the first part, e.g. + """Derived from the connectivity's :attr:`cf_role`. + + Derived from the connectivity's :attr:`cf_role` - the first part, e.g. ``face`` in ``face_node_connectivity``. Refers to the elements that vary along the :attr:`location_axis` of the connectivity's :attr:`indices` array. @@ -239,7 +240,9 @@ def location(self): @property def connected(self): - """Derived from the connectivity's :attr:`cf_role` - the second part, e.g. + """Derived from the connectivity's :attr:`cf_role`. + + Derived from the connectivity's :attr:`cf_role` - the second part, e.g. ``node`` in ``face_node_connectivity``. Refers to the elements indexed by the values in the connectivity's :attr:`indices` array. @@ -248,8 +251,8 @@ def connected(self): @property def start_index(self): - """The base value of the connectivity's :attr:`indices` array; either - ``0`` or ``1``. + """The base value of the connectivity's :attr:`indices` array; either ``0`` or ``1``. + **Read-only** - validity of :attr:`indices` is dependent on :attr:`start_index`. A new :class:`Connectivity` must therefore be defined if a different :attr:`start_index` is needed. 
@@ -259,7 +262,9 @@ def start_index(self): @property def location_axis(self): - """The axis of the connectivity's :attr:`indices` array that varies + """The axis of the connectivity's :attr:`indices` array. + + The axis of the connectivity's :attr:`indices` array that varies over the connectivity's :attr:`location` elements. Either ``0`` or ``1``. **Read-only** - validity of :attr:`indices` is dependent on :attr:`location_axis`. Use :meth:`transpose` to create a new, transposed @@ -270,7 +275,9 @@ def location_axis(self): @property def connected_axis(self): - """Derived as the alternate value of :attr:`location_axis` - each must + """Derived as the alternate value of :attr:`location_axis`. + + Derived as the alternate value of :attr:`location_axis` - each must equal either ``0`` or ``1``. The axis of the connectivity's :attr:`indices` array that varies over the :attr:`connected` elements associated with each :attr:`location` element. @@ -280,11 +287,14 @@ def connected_axis(self): @property def indices(self): - """The index values describing the topological relationship of the + """The index values describing the topological relationship of the connectivity. + + The index values describing the topological relationship of the connectivity, as a NumPy array. Masked points indicate a :attr:`location` element with fewer :attr:`connected` elements than other :attr:`location` elements described in this array - unused index 'slots' are masked. + **Read-only** - index values are only meaningful when combined with an appropriate :attr:`cf_role`, :attr:`start_index` and :attr:`location_axis`. A new :class:`Connectivity` must therefore be @@ -294,19 +304,23 @@ def indices(self): return self._values def indices_by_location(self, indices=None): - """Return a view of the indices array with :attr:`location_axis` **always** as + """Return a view of the indices array. 
+ + Return a view of the indices array with :attr:`location_axis` **always** as the first axis - transposed if necessary. Can optionally pass in an identically shaped array on which to perform this operation (e.g. the output from :meth:`core_indices` or :meth:`lazy_indices`). - Kwargs: - - * indices (array): + Parameters + ---------- + indices : array, optional The array on which to operate. If ``None``, will operate on :attr:`indices`. Default is ``None``. - Returns: - A view of the indices array, transposed - if necessary - to put + Returns + ------- + result : + A view of the indices array, transposed - if necessary - to put :attr:`location_axis` first. """ @@ -391,7 +405,9 @@ def indices_error(message): ) def validate_indices(self): - """Perform a thorough validity check of this connectivity's + """Perform a thorough validity check of this connectivity's :attr:`indices`. + + Perform a thorough validity check of this connectivity's :attr:`indices`. Includes checking the number of :attr:`connected` elements associated with each :attr:`location` element (specified using masks on the :attr:`indices` array) against the :attr:`cf_role`. @@ -435,10 +451,14 @@ def __eq__(self, other): return eq def transpose(self): - """Create a new :class:`Connectivity`, identical to this one but with the + """Transpose :class:`Connectivity`. + + Create a new :class:`Connectivity`, identical to this one but with the :attr:`indices` array transposed and the :attr:`location_axis` value flipped. - Returns: + Returns + ------- + :class:`Connectivity` A new :class:`Connectivity` that is the transposed equivalent of the original. @@ -466,34 +486,40 @@ def lazy_indices(self): If the :attr:`indices` have already been loaded for the connectivity, the returned Array will be a new lazy array wrapper. - Returns: - A lazy array, representing the connectivity indices array. + Returns + ------- + A lazy array, representing the connectivity indices array.
""" return super()._lazy_values() def core_indices(self): - """The indices array at the core of this connectivity, which may be a + """Return the indices array at the core of this connectivity. + + The indices array at the core of this connectivity, which may be a NumPy array or a Dask array. - Returns: - :class:`numpy.ndarray` or :class:`numpy.ma.core.MaskedArray` or :class:`dask.array.Array` + Returns + ------- + :class:`numpy.ndarray` or :class:`numpy.ma.core.MaskedArray` or :class:`dask.array.Array` """ return super()._core_values() def has_lazy_indices(self): - """Return a boolean indicating whether the connectivity's :attr:`indices` - array is a lazy Dask array or not. + """Check if the connectivity's :attr:`indices` array is a lazy Dask array or not. - Returns: - boolean + Returns + ------- + bool """ return super()._has_lazy_values() def lazy_location_lengths(self): - """Return a lazy array representing the number of :attr:`connected` + """Return a lazy array representing the number of :attr:`connected` elements. + + Return a lazy array representing the number of :attr:`connected` elements associated with each of the connectivity's :attr:`location` elements, accounting for masks if present. @@ -504,9 +530,11 @@ def lazy_location_lengths(self): The returned Array will be lazy regardless of whether the :attr:`indices` have already been loaded. - Returns: + Returns + ------- + lazy array A lazy array, representing the number of :attr:`connected` - elements associated with each :attr:`location` element. + elements associated with each :attr:`location` element. """ location_mask_counts = da.sum( @@ -516,13 +544,17 @@ def lazy_location_lengths(self): return max_location_size - location_mask_counts def location_lengths(self): - """Return a NumPy array representing the number of :attr:`connected` + """Return a NumPy array representing the number of :attr:`connected` elements. 
+ + Return a NumPy array representing the number of :attr:`connected` elements associated with each of the connectivity's :attr:`location` elements, accounting for masks if present. - Returns: + Returns + ------- + NumPy array A NumPy array, representing the number of :attr:`connected` - elements associated with each :attr:`location` element. + elements associated with each :attr:`location` element. """ return self.lazy_location_lengths().compute() @@ -544,7 +576,9 @@ def xml_element(self, doc): class Mesh(CFVariableMixin): - """A container representing the UGRID ``cf_role`` ``mesh_topology``, supporting + """A container representing the UGRID ``cf_role`` ``mesh_topology``. + + A container representing the UGRID ``cf_role`` ``mesh_topology``, supporting 1D network, 2D triangular, and 2D flexible mesh topologies. .. note:: @@ -659,8 +693,7 @@ def normalise(element, axis): @classmethod def from_coords(cls, *coords): - r"""Construct a :class:`Mesh` by derivation from one or more - :class:`~iris.coords.Coord`\\ s. + r"""Construct a :class:`Mesh` by derivation from one or more :class:`~iris.coords.Coord`. The :attr:`~Mesh.topology_dimension`, :class:`~iris.coords.Coord` membership and :class:`Connectivity` membership are all determined @@ -668,14 +701,12 @@ def from_coords(cls, *coords): * ``None`` or ``(n, <2)``: Not supported - * ``(n, 2)``: :attr:`~Mesh.topology_dimension` = ``1``. :attr:`~Mesh.node_coords` and :attr:`~Mesh.edge_node_connectivity` constructed from :attr:`~iris.coords.Coord.bounds`. :attr:`~Mesh.edge_coords` constructed from :attr:`~iris.coords.Coord.points`. - * ``(n, >=3)``: :attr:`~Mesh.topology_dimension` = ``2``. :attr:`~Mesh.node_coords` and :attr:`~Mesh.face_node_connectivity` @@ -683,26 +714,29 @@ def from_coords(cls, *coords): :attr:`~Mesh.face_coords` constructed from :attr:`~iris.coords.Coord.points`. 
- Args: - - * \\*coords (Iterable of :class:`~iris.coords.Coord`): + Parameters + ---------- + *coords : Iterable of :class:`~iris.coords.Coord` Coordinates to pass into the :class:`Mesh`. All :attr:`~iris.coords.Coord.points` must have the same shapes; all :attr:`~iris.coords.Coord.bounds` must have the same shapes, and must not be ``None``. - Returns: - :class:`Mesh` + Returns + ------- + :class:`Mesh` + Notes + ----- .. note:: Any resulting duplicate nodes are not currently removed, due to the computational intensity. .. note:: :class:`Mesh` currently requires ``X`` and ``Y`` - :class:`~iris.coords.Coord`\\ s specifically. + :class:`~iris.coords.Coord` specifically. :meth:`iris.util.guess_coord_axis` is therefore attempted, else the - first two :class:`~iris.coords.Coord`\\ s are taken. + first two :class:`~iris.coords.Coord` are taken. .. testsetup:: @@ -717,7 +751,9 @@ def from_coords(cls, *coords): with PARSE_UGRID_ON_LOAD.context(): cube_w_mesh = load_cube(file_path) - For example:: + Examples + -------- + :: # Reconstruct a cube-with-mesh after subsetting it. @@ -891,7 +927,7 @@ def summary(self, shorten=False): Parameters ---------- - shorten : bool, default = False + shorten : bool, default=False If True, produce a oneline string form of the form . If False, produce a multi-line detailed print output. @@ -1056,10 +1092,7 @@ def _set_dimension_names(self, node, edge, face, reset=False): @property def all_connectivities(self): - """All the :class:`~iris.experimental.ugrid.mesh.Connectivity` instances - of the :class:`Mesh`. - - """ + """All the :class:`~iris.experimental.ugrid.mesh.Connectivity` instances of the :class:`Mesh`.""" return self._connectivity_manager.all_members @property @@ -1069,7 +1102,9 @@ def all_coords(self): @property def boundary_node_connectivity(self): - """The *optional* UGRID ``boundary_node_connectivity`` + """The *optional* UGRID ``boundary_node_connectivity`` :class:`~iris.experimental.ugrid.mesh.Connectivity`. 
+ + The *optional* UGRID ``boundary_node_connectivity`` :class:`~iris.experimental.ugrid.mesh.Connectivity` of the :class:`Mesh`. @@ -1078,18 +1113,12 @@ def boundary_node_connectivity(self): @property def edge_coords(self): - """The *optional* UGRID ``edge`` :class:`~iris.coords.AuxCoord` coordinates - of the :class:`Mesh`. - - """ + """The *optional* UGRID ``edge`` :class:`~iris.coords.AuxCoord` coordinates of the :class:`Mesh`.""" return self._coord_manager.edge_coords @property def edge_dimension(self): - """The *optionally required* UGRID NetCDF variable name for the ``edge`` - dimension. - - """ + """The *optionally required* UGRID NetCDF variable name for the ``edge`` dimension.""" return self._metadata_manager.edge_dimension @edge_dimension.setter @@ -1102,7 +1131,9 @@ def edge_dimension(self, name): @property def edge_face_connectivity(self): - """The *optional* UGRID ``edge_face_connectivity`` + """The *optional* UGRID ``edge_face_connectivity`` :class:`~iris.experimental.ugrid.mesh.Connectivity`. + + The *optional* UGRID ``edge_face_connectivity`` :class:`~iris.experimental.ugrid.mesh.Connectivity` of the :class:`Mesh`. @@ -1111,7 +1142,9 @@ def edge_face_connectivity(self): @property def edge_node_connectivity(self): - """The UGRID ``edge_node_connectivity`` + """The UGRID ``edge_node_connectivity`` :class:`~iris.experimental.ugrid.mesh.Connectivity`. + + The UGRID ``edge_node_connectivity`` :class:`~iris.experimental.ugrid.mesh.Connectivity` of the :class:`Mesh`, which is **required** for :attr:`Mesh.topology_dimension` of ``1``, and *optionally required* for @@ -1122,18 +1155,12 @@ def edge_node_connectivity(self): @property def face_coords(self): - """The *optional* UGRID ``face`` :class:`~iris.coords.AuxCoord` coordinates - of the :class:`Mesh`. 
- - """ + """The *optional* UGRID ``face`` :class:`~iris.coords.AuxCoord` coordinates of the :class:`Mesh`.""" return self._coord_manager.face_coords @property def face_dimension(self): - """The *optionally required* UGRID NetCDF variable name for the ``face`` - dimension. - - """ + """The *optional* UGRID NetCDF variable name for the ``face`` dimension.""" return self._metadata_manager.face_dimension @face_dimension.setter @@ -1155,7 +1182,9 @@ def face_dimension(self, name): @property def face_edge_connectivity(self): - """The *optional* UGRID ``face_edge_connectivity`` + """The *optional* UGRID ``face_edge_connectivity``:class:`~iris.experimental.ugrid.mesh.Connectivity`. + + The *optional* UGRID ``face_edge_connectivity`` :class:`~iris.experimental.ugrid.mesh.Connectivity` of the :class:`Mesh`. @@ -1165,7 +1194,9 @@ def face_edge_connectivity(self): @property def face_face_connectivity(self): - """The *optional* UGRID ``face_face_connectivity`` + """The *optional* UGRID ``face_face_connectivity`` :class:`~iris.experimental.ugrid.mesh.Connectivity`. + + The *optional* UGRID ``face_face_connectivity`` :class:`~iris.experimental.ugrid.mesh.Connectivity` of the :class:`Mesh`. @@ -1174,7 +1205,9 @@ def face_face_connectivity(self): @property def face_node_connectivity(self): - """The UGRID ``face_node_connectivity`` + """Return ``face_node_connectivity``:class:`~iris.experimental.ugrid.mesh.Connectivity`. + + The UGRID ``face_node_connectivity`` :class:`~iris.experimental.ugrid.mesh.Connectivity` of the :class:`Mesh`, which is **required** for :attr:`Mesh.topology_dimension` of ``2``, and *optionally required* for :attr:`Mesh.topology_dimension` @@ -1185,10 +1218,7 @@ def face_node_connectivity(self): @property def node_coords(self): - """The **required** UGRID ``node`` :class:`~iris.coords.AuxCoord` coordinates - of the :class:`Mesh`. 
- - """ + """The **required** UGRID ``node`` :class:`~iris.coords.AuxCoord` coordinates of the :class:`Mesh`.""" return self._coord_manager.node_coords @property @@ -1207,9 +1237,9 @@ def node_dimension(self, name): def add_connectivities(self, *connectivities): """Add one or more :class:`~iris.experimental.ugrid.mesh.Connectivity` instances to the :class:`Mesh`. - Args: - - * connectivities (iterable of object): + Parameters + ---------- + *connectivities : iterable of object A collection of one or more :class:`~iris.experimental.ugrid.mesh.Connectivity` instances to add to the :class:`Mesh`. @@ -1228,24 +1258,19 @@ def add_coords( ): """Add one or more :class:`~iris.coords.AuxCoord` coordinates to the :class:`Mesh`. - Kwargs: - - * node_x (object): + Parameters + ---------- + node_x : optional The ``x-axis`` like ``node`` :class:`~iris.coords.AuxCoord`. - - * node_y (object): + node_y : optional The ``y-axis`` like ``node`` :class:`~iris.coords.AuxCoord`. - - * edge_x (object): + edge_x : optional The ``x-axis`` like ``edge`` :class:`~iris.coords.AuxCoord`. - - * edge_y (object): + edge_y : optional The ``y-axis`` like ``edge`` :class:`~iris.coords.AuxCoord`. - - * face_x (object): + face_x : optional The ``x-axis`` like ``face`` :class:`~iris.coords.AuxCoord`. - - * face_y (object): + face_y : optional The ``y-axis`` like ``face`` :class:`~iris.coords.AuxCoord`. """ @@ -1275,7 +1300,9 @@ def connectivities( contains_edge=None, contains_face=None, ): - """Return all :class:`~iris.experimental.ugrid.mesh.Connectivity` + """Return all :class:`~iris.experimental.ugrid.mesh.Connectivity`. + + Return all :class:`~iris.experimental.ugrid.mesh.Connectivity` instances from the :class:`Mesh` that match the provided criteria. Criteria can be either specific properties or other objects with @@ -1285,9 +1312,9 @@ def connectivities( :meth:`Mesh.connectivity` for matching exactly one connectivity. 
- Kwargs: - - * item (str or object): + Parameters + ---------- + item : str or object Either, * a :attr:`~iris.common.mixin.CFVariableMixin.standard_name`, @@ -1299,47 +1326,41 @@ def connectivities( the desired objects e.g., :class:`~iris.experimental.ugrid.mesh.Connectivity` or :class:`~iris.experimental.ugrid.metadata.ConnectivityMetadata`. - - * standard_name (str): + standard_name : str, optional The CF standard name of the desired :class:`~iris.experimental.ugrid.mesh.Connectivity`. If ``None``, does not check for ``standard_name``. - - * long_name (str): + long_name : str, optional An unconstrained description of the :class:`~iris.experimental.ugrid.mesh.Connectivity`. If ``None``, does not check for ``long_name``. - - * var_name (str): + var_name : str, optional The NetCDF variable name of the desired :class:`~iris.experimental.ugrid.mesh.Connectivity`. If ``None``, does not check for ``var_name``. - - * attributes (dict): + attributes : dict, optional A dictionary of attributes desired on the :class:`~iris.experimental.ugrid.mesh.Connectivity`. If ``None``, does not check for ``attributes``. - - * cf_role (str): + cf_role : str, optional The UGRID ``cf_role`` of the desired :class:`~iris.experimental.ugrid.mesh.Connectivity`. - - * contains_node (bool): + contains_node : bool, optional Contains the ``node`` element as part of the :attr:`~iris.experimental.ugrid.metadata.ConnectivityMetadata.cf_role` in the list of objects to be matched. - - * contains_edge (bool): + contains_edge : bool, optional Contains the ``edge`` element as part of the :attr:`~iris.experimental.ugrid.metadata.ConnectivityMetadata.cf_role` in the list of objects to be matched. - - * contains_face (bool): + contains_face : bool, optional Contains the ``face`` element as part of the :attr:`~iris.experimental.ugrid.metadata.ConnectivityMetadata.cf_role` in the list of objects to be matched. 
- Returns: + Returns + ------- + list of :class:`~iris.experimental.ugrid.mesh.Connectivity` A list of :class:`~iris.experimental.ugrid.mesh.Connectivity` instances from the :class:`Mesh` that matched the given criteria. @@ -1369,7 +1390,9 @@ def connectivity( contains_edge=None, contains_face=None, ): - """Return a single :class:`~iris.experimental.ugrid.mesh.Connectivity` + """Return a single :class:`~iris.experimental.ugrid.mesh.Connectivity`. + + Return a single :class:`~iris.experimental.ugrid.mesh.Connectivity` from the :class:`Mesh` that matches the provided criteria. Criteria can be either specific properties or other objects with @@ -1385,9 +1408,9 @@ def connectivity( :meth:`Mesh.connectivities` for matching zero or more connectivities. - Kwargs: - - * item (str or object): + Parameters + ---------- + item : str or object Either, * a :attr:`~iris.common.mixin.CFVariableMixin.standard_name`, @@ -1399,52 +1422,45 @@ def connectivity( the desired object e.g., :class:`~iris.experimental.ugrid.mesh.Connectivity` or :class:`~iris.experimental.ugrid.metadata.ConnectivityMetadata`. - - * standard_name (str): + standard_name : str, optional The CF standard name of the desired :class:`~iris.experimental.ugrid.mesh.Connectivity`. If ``None``, does not check for ``standard_name``. - - * long_name (str): + long_name : str, optional An unconstrained description of the :class:`~iris.experimental.ugrid.mesh.Connectivity`. If ``None``, does not check for ``long_name``. - - * var_name (str): + var_name : str, optional The NetCDF variable name of the desired :class:`~iris.experimental.ugrid.mesh.Connectivity`. If ``None``, does not check for ``var_name``. - - * attributes (dict): + attributes : dict, optional A dictionary of attributes desired on the :class:`~iris.experimental.ugrid.mesh.Connectivity`. If ``None``, does not check for ``attributes``. 
- - * cf_role (str): + cf_role : str, optional The UGRID ``cf_role`` of the desired :class:`~iris.experimental.ugrid.mesh.Connectivity`. - - * contains_node (bool): + contains_node : bool, optional Contains the ``node`` element as part of the :attr:`~iris.experimental.ugrid.metadata.ConnectivityMetadata.cf_role` in the list of objects to be matched. - - * contains_edge (bool): + contains_edge : bool, optional Contains the ``edge`` element as part of the :attr:`~iris.experimental.ugrid.metadata.ConnectivityMetadata.cf_role` in the list of objects to be matched. - - * contains_face (bool): + contains_face : bool, optional Contains the ``face`` element as part of the :attr:`~iris.experimental.ugrid.metadata.ConnectivityMetadata.cf_role` in the list of objects to be matched. - Returns: + Returns + ------- + :class:`~iris.experimental.ugrid.mesh.Connectivity` The :class:`~iris.experimental.ugrid.mesh.Connectivity` from the :class:`Mesh` that matched the given criteria. """ - result = self._connectivity_manager.filter( item=item, standard_name=standard_name, @@ -1470,7 +1486,9 @@ def coord( include_edges=None, include_faces=None, ): - """Return a single :class:`~iris.coords.AuxCoord` coordinate from the + """Return a single :class:`~iris.coords.AuxCoord` coordinate. + + Return a single :class:`~iris.coords.AuxCoord` coordinate from the :class:`Mesh` that matches the provided criteria. Criteria can be either specific properties or other objects with @@ -1485,9 +1503,9 @@ def coord( :meth:`Mesh.coords` for matching zero or more coordinates. - Kwargs: - - * item (str or object): + Parameters + ---------- + item : str or object, optional Either, * a :attr:`~iris.common.mixin.CFVariableMixin.standard_name`, @@ -1498,38 +1516,32 @@ def coord( * a coordinate or metadata instance equal to that of the desired coordinate e.g., :class:`~iris.coords.AuxCoord` or :class:`~iris.common.metadata.CoordMetadata`. 
- - * standard_name (str): + standard_name : str, optional The CF standard name of the desired coordinate. If ``None``, does not check for ``standard_name``. - - * long_name (str): + long_name : str, optional An unconstrained description of the coordinate. If ``None``, does not check for ``long_name``. - - * var_name (str): + var_name : str, optional The NetCDF variable name of the desired coordinate. If ``None``, does not check for ``var_name``. - - * attributes (dict): + attributes : dict, optional A dictionary of attributes desired on the coordinates. If ``None``, does not check for ``attributes``. - - * axis (str): + axis : str, optional The desired coordinate axis, see :func:`~iris.util.guess_coord_axis`. If ``None``, does not check for ``axis``. Accepts the values ``X``, ``Y``, ``Z`` and ``T`` (case-insensitive). - - * include_node (bool): + include_node : bool, optional Include all ``node`` coordinates in the list of objects to be matched. - - * include_edge (bool): + include_edge : bool, optional Include all ``edge`` coordinates in the list of objects to be matched. - - * include_face (bool): + include_face : bool, optional Include all ``face`` coordinates in the list of objects to be matched. - Returns: + Returns + ------- + :class:`~iris.coords.AuxCoord` The :class:`~iris.coords.AuxCoord` coordinate from the :class:`Mesh` that matched the given criteria. @@ -1559,7 +1571,9 @@ def coords( include_edges=None, include_faces=None, ): - """Return all :class:`~iris.coords.AuxCoord` coordinates from the :class:`Mesh` that + """Return all :class:`~iris.coords.AuxCoord` coordinates from the :class:`Mesh`. + + Return all :class:`~iris.coords.AuxCoord` coordinates from the :class:`Mesh` that match the provided criteria. Criteria can be either specific properties or other objects with @@ -1569,9 +1583,9 @@ def coords( :meth:`Mesh.coord` for matching exactly one coordinate. 
- Kwargs: - - * item (str or object): + Parameters + ---------- + item : str or object, optional Either, * a :attr:`~iris.common.mixin.CFVariableMixin.standard_name`, @@ -1582,38 +1596,32 @@ def coords( * a coordinate or metadata instance equal to that of the desired coordinates e.g., :class:`~iris.coords.AuxCoord` or :class:`~iris.common.metadata.CoordMetadata`. - - * standard_name (str): + standard_name : str, optional The CF standard name of the desired coordinate. If ``None``, does not check for ``standard_name``. - - * long_name (str): + long_name : str, optional An unconstrained description of the coordinate. If ``None``, does not check for ``long_name``. - - * var_name (str): + var_name : str, optional The NetCDF variable name of the desired coordinate. If ``None``, does not check for ``var_name``. - - * attributes (dict): + attributes : dict, optional A dictionary of attributes desired on the coordinates. If ``None``, does not check for ``attributes``. - - * axis (str): + axis : str, optional The desired coordinate axis, see :func:`~iris.util.guess_coord_axis`. If ``None``, does not check for ``axis``. Accepts the values ``X``, ``Y``, ``Z`` and ``T`` (case-insensitive). - - * include_node (bool): + include_node : bool, optional Include all ``node`` coordinates in the list of objects to be matched. - - * include_edge (bool): + include_edge : bool, optional Include all ``edge`` coordinates in the list of objects to be matched. - - * include_face (bool): + include_face : bool, optional Include all ``face`` coordinates in the list of objects to be matched. - Returns: + Returns + ------- + list of :class:`~iris.coords.AuxCoord` A list of :class:`~iris.coords.AuxCoord` coordinates from the :class:`Mesh` that matched the given criteria. 
@@ -1643,15 +1651,17 @@ def remove_connectivities( contains_edge=None, contains_face=None, ): - """Remove one or more :class:`~iris.experimental.ugrid.mesh.Connectivity` + """Remove one or more :class:`~iris.experimental.ugrid.mesh.Connectivity`. + + Remove one or more :class:`~iris.experimental.ugrid.mesh.Connectivity` from the :class:`Mesh` that match the provided criteria. Criteria can be either specific properties or other objects with metadata to be matched. - Kwargs: - - * item (str or object): + Parameters + ---------- + item : str or object, optional Either, * a :attr:`~iris.common.mixin.CFVariableMixin.standard_name`, @@ -1663,47 +1673,41 @@ def remove_connectivities( the desired objects e.g., :class:`~iris.experimental.ugrid.mesh.Connectivity` or :class:`~iris.experimental.ugrid.metadata.ConnectivityMetadata`. - - * standard_name (str): + standard_name : str, optional The CF standard name of the desired :class:`~iris.experimental.ugrid.mesh.Connectivity`. If ``None``, does not check for ``standard_name``. - - * long_name (str): + long_name : str, optional An unconstrained description of the - :class:`~iris.experimental.ugrid.mesh.Connectivity. If ``None``, + :class:`~iris.experimental.ugrid.mesh.Connectivity`. If ``None``, does not check for ``long_name``. - - * var_name (str): + var_name : str, optional The NetCDF variable name of the desired :class:`~iris.experimental.ugrid.mesh.Connectivity`. If ``None``, does not check for ``var_name``. - - * attributes (dict): + attributes : dict, optional A dictionary of attributes desired on the :class:`~iris.experimental.ugrid.mesh.Connectivity`. If ``None``, does not check for ``attributes``. - - * cf_role (str): + cf_role : str, optional The UGRID ``cf_role`` of the desired :class:`~iris.experimental.ugrid.mesh.Connectivity`. 
- - * contains_node (bool): + contains_node : bool, optional Contains the ``node`` element as part of the :attr:`~iris.experimental.ugrid.metadata.ConnectivityMetadata.cf_role` in the list of objects to be matched for potential removal. - - * contains_edge (bool): + contains_edge : bool, optional Contains the ``edge`` element as part of the :attr:`~iris.experimental.ugrid.metadata.ConnectivityMetadata.cf_role` in the list of objects to be matched for potential removal. - - * contains_face (bool): + contains_face : bool, optional Contains the ``face`` element as part of the :attr:`~iris.experimental.ugrid.metadata.ConnectivityMetadata.cf_role` in the list of objects to be matched for potential removal. - Returns: + Returns + ------- + list of :class:`~iris.experimental.ugrid.mesh.Connectivity` A list of :class:`~iris.experimental.ugrid.mesh.Connectivity` instances removed from the :class:`Mesh` that matched the given criteria. @@ -1733,15 +1737,17 @@ def remove_coords( include_edges=None, include_faces=None, ): - """Remove one or more :class:`~iris.coords.AuxCoord` from the :class:`Mesh` + """Remove one or more :class:`~iris.coords.AuxCoord` from the :class:`Mesh`. + + Remove one or more :class:`~iris.coords.AuxCoord` from the :class:`Mesh` that match the provided criteria. Criteria can be either specific properties or other objects with metadata to be matched. - Kwargs: - - * item (str or object): + Parameters + ---------- + item : str or object, optional Either, * a :attr:`~iris.common.mixin.CFVariableMixin.standard_name`, @@ -1752,41 +1758,35 @@ def remove_coords( * a coordinate or metadata instance equal to that of the desired coordinates e.g., :class:`~iris.coords.AuxCoord` or :class:`~iris.common.metadata.CoordMetadata`. - - * standard_name (str): + standard_name : str, optional The CF standard name of the desired coordinate. If ``None``, does not check for ``standard_name``. 
- - * long_name (str): + long_name : str, optional An unconstrained description of the coordinate. If ``None``, does not check for ``long_name``. - - * var_name (str): + var_name : str, optional The NetCDF variable name of the desired coordinate. If ``None``, does not check for ``var_name``. - - * attributes (dict): + attributes : dict, optional A dictionary of attributes desired on the coordinates. If ``None``, does not check for ``attributes``. - - * axis (str): + axis : str, optional The desired coordinate axis, see :func:`~iris.util.guess_coord_axis`. If ``None``, does not check for ``axis``. Accepts the values ``X``, ``Y``, ``Z`` and ``T`` (case-insensitive). - - * include_node (bool): + include_node : bool, optional Include all ``node`` coordinates in the list of objects to be matched for potential removal. - - * include_edge (bool): + include_edge : bool, optional Include all ``edge`` coordinates in the list of objects to be matched for potential removal. - - * include_face (bool): + include_face : bool, optional Include all ``face`` coordinates in the list of objects to be matched for potential removal. - Returns: + Returns + ------- + list of :class:`~iris.coords.AuxCoord` A list of :class:`~iris.coords.AuxCoord` coordinates removed from the :class:`Mesh` that matched the given criteria. @@ -1809,15 +1809,16 @@ def remove_coords( return self._coord_manager.remove(**kwargs) def xml_element(self, doc): - """Create the :class:`xml.dom.minidom.Element` that describes this - :class:`Mesh`. + """Create the :class:`xml.dom.minidom.Element` that describes this :class:`Mesh`. - Args: - - * doc (object): + Parameters + ---------- + doc : object The parent :class:`xml.dom.minidom.Document`. - Returns: + Returns + ------- + :class:`xml.dom.minidom.Element` The :class:`xml.dom.minidom.Element` that will describe this :class:`Mesh`, and the dictionary of attributes that require to be added to this element. 
@@ -1839,7 +1840,9 @@ def xml_element(self, doc): # # return the lazy AuxCoord(...), AuxCoord(...) def to_MeshCoord(self, location, axis): - """Generate a :class:`~iris.experimental.ugrid.mesh.MeshCoord` that + """Generate a :class:`~iris.experimental.ugrid.mesh.MeshCoord`. + + Generate a :class:`~iris.experimental.ugrid.mesh.MeshCoord` that references the current :class:`Mesh`, and passing through the ``location`` and ``axis`` arguments. @@ -1847,17 +1850,18 @@ def to_MeshCoord(self, location, axis): :meth:`to_MeshCoords` for generating a series of mesh coords. - Args: - - * location (str) + Parameters + ---------- + location : str The ``location`` argument for :class:`~iris.experimental.ugrid.mesh.MeshCoord` instantiation. - - * axis (str) + axis : str The ``axis`` argument for :class:`~iris.experimental.ugrid.mesh.MeshCoord` instantiation. - Returns: + Returns + ------- + :class:`~iris.experimental.ugrid.mesh.MeshCoord` A :class:`~iris.experimental.ugrid.mesh.MeshCoord` referencing the current :class:`Mesh`. @@ -1865,8 +1869,10 @@ def to_MeshCoord(self, location, axis): return MeshCoord(mesh=self, location=location, axis=axis) def to_MeshCoords(self, location): - r"""Generate a tuple of - :class:`~iris.experimental.ugrid.mesh.MeshCoord`\\ s, each referencing + r"""Generate a tuple of :class:`~iris.experimental.ugrid.mesh.MeshCoord`. + + Generate a tuple of + :class:`~iris.experimental.ugrid.mesh.MeshCoord`, each referencing the current :class:`Mesh`, one for each :attr:`AXES` value, passing through the ``location`` argument. @@ -1874,13 +1880,15 @@ def to_MeshCoords(self, location): :meth:`to_MeshCoord` for generating a single mesh coord. - Args: - - * location (str) + Parameters + ---------- + location : str The ``location`` argument for :class:`MeshCoord` instantiation. 
- Returns: - tuple of :class:`~iris.experimental.ugrid.mesh.MeshCoord`\\ s + Returns + ------- + tuple of :class:`~iris.experimental.ugrid.mesh.MeshCoord` + tuple of :class:`~iris.experimental.ugrid.mesh.MeshCoord` referencing the current :class:`Mesh`. One for each value in :attr:`AXES`, using the value for the ``axis`` argument. @@ -1890,20 +1898,20 @@ def to_MeshCoords(self, location): return tuple(result) def dimension_names_reset(self, node=False, edge=False, face=False): - """Reset the name used for the NetCDF variable representing the ``node``, - ``edge`` and/or ``face`` dimension to ``None``. + """Reset the name used for the NetCDF variable. - Kwargs: + Reset the name used for the NetCDF variable representing the ``node``, + ``edge`` and/or ``face`` dimension to ``None``. - * node (bool): + Parameters + ---------- + node : bool, optional, default=False Reset the name of the ``node`` dimension if ``True``. Default is ``False``. - - * edge (bool): + edge : bool, default=False Reset the name of the ``edge`` dimension if ``True``. Default is ``False``. - - * face (bool): + face : bool, default=False Reset the name of the ``face`` dimension if ``True``. Default is ``False``. @@ -1911,24 +1919,24 @@ def dimension_names_reset(self, node=False, edge=False, face=False): return self._set_dimension_names(node, edge, face, reset=True) def dimension_names(self, node=None, edge=None, face=None): - """Assign the name to be used for the NetCDF variable representing + """Assign the name to be used for the NetCDF variable. + + Assign the name to be used for the NetCDF variable representing the ``node``, ``edge`` and ``face`` dimension. The default value of ``None`` will not be assigned to clear the associated ``node``, ``edge`` or ``face``. Instead use :meth:`Mesh.dimension_names_reset`. - Kwargs: - - * node (str): + Parameters + ---------- + node : str, optional The name to be used for the NetCDF variable representing the ``node`` dimension. 
- - * edge (str): + edge : str, optional The name to be used for the NetCDF variable representing the ``edge`` dimension. - - * face (str): + face : str, optional The name to be used for the NetCDF variable representing the ``face`` dimension. @@ -1942,7 +1950,9 @@ def cf_role(self): @property def topology_dimension(self): - """The UGRID ``topology_dimension`` attribute represents the highest + """UGRID ``topology_dimension`` attribute. + + The UGRID ``topology_dimension`` attribute represents the highest dimensionality of all the geometric elements (node, edge, face) represented within the :class:`Mesh`. @@ -1951,7 +1961,8 @@ def topology_dimension(self): class _Mesh1DCoordinateManager: - """TBD: require clarity on coord_systems validation + """TBD: require clarity on coord_systems validation. + TBD: require clarity on __eq__ support TBD: rationalise self.coords() logic with other manager and Cube. @@ -2140,7 +2151,9 @@ def _add(self, coords): setattr(self, member_y, coords[1]) def add(self, node_x=None, node_y=None, edge_x=None, edge_y=None): - """Use self.remove(edge_x=True) to remove a coordinate e.g., using the + """Use self.remove(edge_x=True) to remove a coordinate. + + Use self.remove(edge_x=True) to remove a coordinate e.g., using the pattern self.add(edge_x=None) will not remove the edge_x coordinate. """ @@ -2827,9 +2840,9 @@ def __getitem__(self, keys): def copy(self, points=None, bounds=None): """Make a copy of the MeshCoord. - Kwargs: - - * points, bounds (array): + Parameters + ---------- + points, bounds : array, optional Provided solely for signature compatibility with other types of :class:`~iris.coords.Coord`. In this case, if either is not 'None', an error is raised. @@ -2850,8 +2863,9 @@ def copy(self, points=None, bounds=None): return new_coord def __deepcopy__(self, memo): - """Make this equivalent to "shallow" copy, returning a new MeshCoord based - on the same Mesh. + """Make this equivalent to "shallow" copy. 
+ + Returns a new MeshCoord based on the same Mesh. Required to prevent cube copying from copying the Mesh, which would prevent "cube.copy() == cube" : see notes for :meth:`copy`. @@ -2936,11 +2950,14 @@ def summary(self, *args, **kwargs): return result def _construct_access_arrays(self): - """Build lazy points and bounds arrays, providing dynamic access via the + """Build lazy points and bounds arrays. + + Build lazy points and bounds arrays, providing dynamic access via the Mesh, according to the location and axis. - Returns: - * points, bounds (array or None): + Returns + ------- + points, bounds : array or None lazy arrays which calculate the correct points and bounds from the Mesh data, based on the location and axis. The Mesh coordinates accessed are not identified on construction, diff --git a/lib/iris/experimental/ugrid/metadata.py b/lib/iris/experimental/ugrid/metadata.py index 153f71bfcb..8969ab72a1 100644 --- a/lib/iris/experimental/ugrid/metadata.py +++ b/lib/iris/experimental/ugrid/metadata.py @@ -37,14 +37,15 @@ def __eq__(self, other): def _combine_lenient(self, other): """Perform lenient combination of metadata members for connectivities. - Args: - - * other (ConnectivityMetadata): + Parameters + ---------- + other : ConnectivityMetadata The other connectivity metadata participating in the lenient combination. - Returns: - A list of combined metadata member values. + Returns + ------- + A list of combined metadata member values. """ @@ -65,14 +66,15 @@ def func(field): def _compare_lenient(self, other): """Perform lenient equality of metadata members for connectivities. - Args: - - * other (ConnectivityMetadata): + Parameters + ---------- + other : ConnectivityMetadata The other connectivity metadata participating in the lenient comparison. - Returns: - Boolean. + Returns + ------- + bool """ # Perform "strict" comparison for "cf_role", "start_index". 
@@ -93,14 +95,15 @@ def _compare_lenient(self, other): def _difference_lenient(self, other): """Perform lenient difference of metadata members for connectivities. - Args: - - * other (ConnectivityMetadata): + Parameters + ---------- + other : ConnectivityMetadata The other connectivity metadata participating in the lenient difference. - Returns: - A list of difference metadata member values. + Returns + ------- + A list of difference metadata member values. """ @@ -156,14 +159,15 @@ def __eq__(self, other): def _combine_lenient(self, other): """Perform lenient combination of metadata members for meshes. - Args: - - * other (MeshMetadata): + Parameters + ---------- + other : MeshMetadata The other mesh metadata participating in the lenient combination. - Returns: - A list of combined metadata member values. + Returns + ------- + A list of combined metadata member values. """ @@ -185,14 +189,15 @@ def func(field): def _compare_lenient(self, other): """Perform lenient equality of metadata members for meshes. - Args: - - * other (MeshMetadata): + Parameters + ---------- + other : MeshMetadata The other mesh metadata participating in the lenient comparison. - Returns: - Boolean. + Returns + ------- + bool """ # Perform "strict" comparison for "topology_dimension". @@ -208,14 +213,15 @@ def _compare_lenient(self, other): def _difference_lenient(self, other): """Perform lenient difference of metadata members for meshes. - Args: - - * other (MeshMetadata): + Parameters + ---------- + other : MeshMetadata The other mesh metadata participating in the lenient difference. - Returns: - A list of difference metadata member values. + Returns + ------- + A list of difference metadata member values. """ @@ -271,13 +277,14 @@ def __eq__(self, other): def _combine_lenient(self, other): """Perform lenient combination of metadata members for MeshCoord. 
- Args: - - * other (MeshCoordMetadata): + Parameters + ---------- + other : MeshCoordMetadata The other metadata participating in the lenient combination. - Returns: - A list of combined metadata member values. + Returns + ------- + A list of combined metadata member values. """ @@ -298,13 +305,14 @@ def func(field): def _compare_lenient(self, other): """Perform lenient equality of metadata members for MeshCoord. - Args: - - * other (MeshCoordMetadata): + Parameters + ---------- + other : MeshCoordMetadata The other metadata participating in the lenient comparison. - Returns: - Boolean. + Returns + ------- + bool """ # Perform "strict" comparison for the MeshCoord specific members @@ -321,14 +329,15 @@ def _compare_lenient(self, other): def _difference_lenient(self, other): """Perform lenient difference of metadata members for MeshCoord. - Args: - - * other (MeshCoordMetadata): + Parameters + ---------- + other : MeshCoordMetadata The other MeshCoord metadata participating in the lenient difference. - Returns: - A list of different metadata member values. + Returns + ------- + A list of different metadata member values. """ diff --git a/lib/iris/experimental/ugrid/save.py b/lib/iris/experimental/ugrid/save.py index 00891b3044..40d1c42e90 100644 --- a/lib/iris/experimental/ugrid/save.py +++ b/lib/iris/experimental/ugrid/save.py @@ -3,8 +3,7 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Extensions to Iris' NetCDF saving to allow -:class:`~iris.experimental.ugrid.mesh.Mesh` saving in UGRID format. +"""Extension to Iris' NetCDF saving to allow :class:`~iris.experimental.ugrid.mesh.Mesh` saving in UGRID format. Eventual destination: :mod:`iris.fileformats.netcdf`. @@ -17,17 +16,13 @@ def save_mesh(mesh, filename, netcdf_format="NETCDF4"): """Save mesh(es) to a netCDF file. - Args: - - * mesh (:class:`iris.experimental.ugrid.Mesh` or iterable): - mesh(es) to save. 
- - * filename (string): + Parameters + ---------- + mesh : :class:`iris.experimental.ugrid.Mesh` or iterable + Mesh(es) to save. + filename : str Name of the netCDF file to create. - - Kwargs: - - * netcdf_format (string): + netcdf_format : str, default="NETCDF4" Underlying netCDF file format, one of 'NETCDF4', 'NETCDF4_CLASSIC', 'NETCDF3_CLASSIC' or 'NETCDF3_64BIT'. Default is 'NETCDF4' format. diff --git a/lib/iris/experimental/ugrid/utils.py b/lib/iris/experimental/ugrid/utils.py index fc0464077a..fce1036c6d 100644 --- a/lib/iris/experimental/ugrid/utils.py +++ b/lib/iris/experimental/ugrid/utils.py @@ -3,9 +3,7 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Utility operations specific to unstructured data. - -""" +"""Utility operations specific to unstructured data.""" from typing import AnyStr, Iterable, Union import dask.array as da diff --git a/lib/iris/fileformats/__init__.py b/lib/iris/fileformats/__init__.py index cd967881c7..174c4d390c 100644 --- a/lib/iris/fileformats/__init__.py +++ b/lib/iris/fileformats/__init__.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""A package for converting cubes to and from specific file formats. - -""" +"""A package for converting cubes to and from specific file formats.""" from iris.io.format_picker import ( DataSourceObjectProtocol, diff --git a/lib/iris/fileformats/_ff.py b/lib/iris/fileformats/_ff.py index 88bbfd45d5..18c1aa766e 100644 --- a/lib/iris/fileformats/_ff.py +++ b/lib/iris/fileformats/_ff.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Provides UK Met Office Fields File (FF) format specific capabilities. 
- -""" +"""Provides UK Met Office Fields File (FF) format specific capabilities.""" import os import warnings @@ -12,12 +10,11 @@ import numpy as np from iris.exceptions import ( - IrisDefaultingWarning, - IrisLoadWarning, NotYetImplementedError, ) from iris.fileformats._ff_cross_references import STASH_TRANS +from ..warnings import IrisDefaultingWarning, IrisLoadWarning from . import pp IMDI = -32768 @@ -127,10 +124,7 @@ class _WarnComboLoadingDefaulting(IrisDefaultingWarning, IrisLoadWarning): class Grid: - """An abstract class representing the default/file-level grid - definition for a FieldsFile. - - """ + """An abstract class representing the default/file-level grid definition for a FieldsFile.""" def __init__( self, @@ -141,18 +135,15 @@ def __init__( ): """Create a Grid from the relevant sections of the FFHeader. - Args: - - * column_dependent_constants (numpy.ndarray): + Parameters + ---------- + column_dependent_constants : numpy.ndarray The `column_dependent_constants` from a FFHeader. - - * row_dependent_constants (numpy.ndarray): + row_dependent_constants : numpy.ndarray The `row_dependent_constants` from a FFHeader. - - * real_constants (numpy.ndarray): + real_constants : numpy.ndarray The `real_constants` from a FFHeader. - - * horiz_grid_type (integer): + horiz_grid_type : int `horiz_grid_type` from a FFHeader. """ @@ -183,16 +174,16 @@ def regular_y(self, subgrid): raise NotImplementedError() def vectors(self, subgrid): - """Return the X and Y coordinate vectors for the given sub-grid of - this grid. - - Args: + """Return the X and Y coordinate vectors for the given sub-grid of this grid. - * subgrid (integer): + Parameters + ---------- + subgrid : int A "grid type code" as described in UM documentation paper C4. - Returns: - A 2-tuple of X-vector, Y-vector. + Returns + ------- + A 2-tuple of X-vector, Y-vector. 
""" x_p, x_u = self._x_vectors() @@ -222,16 +213,16 @@ def _x_vectors(self): return x_p, x_u def regular_x(self, subgrid): - """Return the "zeroth" value and step for the X coordinate on the - given sub-grid of this grid. - - Args: + """Return the "zeroth" value & step for the X coord on the given sub-grid of this grid. - * subgrid (integer): + Parameters + ---------- + subgrid : int A "grid type code" as described in UM documentation paper C4. - Returns: - A 2-tuple of BZX, BDX. + Returns + ------- + A 2-tuple of BZX, BDX. """ bdx = self.ew_spacing @@ -241,16 +232,19 @@ def regular_x(self, subgrid): return bzx, bdx def regular_y(self, subgrid): - """Return the "zeroth" value and step for the Y coordinate on the - given sub-grid of this grid. + """Return the "zeroth" value & step for the Y coord on the given sub-grid of this grid. - Args: + Return the "zeroth" value and step for the Y coordinate on the + given sub-grid of this grid. - * subgrid (integer): + Parameters + ---------- + subgrid : int A "grid type code" as described in UM documentation paper C4. - Returns: - A 2-tuple of BZY, BDY. + Returns + ------- + A 2-tuple of BZY, BDY. """ bdy = self.ns_spacing @@ -302,20 +296,23 @@ class FFHeader: GRID_STAGGERING_CLASS = {3: NewDynamics, 6: ENDGame} def __init__(self, filename, word_depth=DEFAULT_FF_WORD_DEPTH): - """Create a FieldsFile header instance by reading the + """Create a FieldsFile header instance. + + Create a FieldsFile header instance by reading the FIXED_LENGTH_HEADER section of the FieldsFile, making the names defined in FF_HEADER available as attributes of a FFHeader instance. - Args: - - * filename (string): + Parameters + ---------- + filename : str Specify the name of the FieldsFile. + word_depth : int, default=DEFAULT_FF_WORD_DEPTH - Returns: - FFHeader object. + Returns + ------- + FFHeader object. """ - #: File name of the FieldsFile. 
self.ff_filename = filename self._word_depth = word_depth @@ -397,19 +394,18 @@ def _attribute_is_pointer_and_needs_addressing(self, name): return is_referenceable def shape(self, name): - """Return the dimension shape of the FieldsFile FIXED_LENGTH_HEADER - pointer attribute. - - Args: + """Return the dimension shape of the FieldsFile FIXED_LENGTH_HEADER pointer attribute. - * name (string): + Parameters + ---------- + name : str Specify the name of the FIXED_LENGTH_HEADER attribute. - Returns: - Dimension tuple. + Returns + ------- + Dimension tuple. """ - if name in _FF_HEADER_POINTERS: value = getattr(self, name)[1:] else: @@ -440,30 +436,32 @@ class FF2PP: """A class to extract the individual PPFields from within a FieldsFile.""" def __init__(self, filename, read_data=False, word_depth=DEFAULT_FF_WORD_DEPTH): - """Create a FieldsFile to Post Process instance that returns a generator - of PPFields contained within the FieldsFile. + """Create a generator of PPFields contained within the FieldsFile. - Args: + Create a FieldsFile to Post Process instance that returns a generator + of PPFields contained within the FieldsFile. - * filename (string): + Parameters + ---------- + filename : str Specify the name of the FieldsFile. - - Kwargs: - - * read_data (boolean): + read_data : bool, default=False Specify whether to read the associated PPField data within the FieldsFile. Default value is False. + word_depth : int, default=DEFAULT_FF_WORD_DEPTH - Returns: - PPField generator. + Returns + ------- + PPField generator. - For example:: + Examples + -------- + :: >>> for field in ff.FF2PP(filename): ... print(field) """ - self._ff_header = FFHeader(filename, word_depth=word_depth) self._word_depth = word_depth self._filename = filename @@ -807,22 +805,20 @@ def __iter__(self): def _parse_binary_stream(file_like, dtype=np.float64, count=-1): - """Replacement :func:`numpy.fromfile` due to python3 performance issues. 
-
-    Args:
-
-    * file_like - Standard python file_like object.
-
-    Kwargs:
-
-    * dtype - Data type to be parsed out, used to work out bytes read in.
-
-    * count - The number of values required to be generated from the parsing.
+    """Parse binary stream, replacement :func:`numpy.fromfile` due to python3 performance issues.
+
+    Parameters
+    ----------
+    file_like :
+        Standard python file_like object.
+    dtype : np.float64, optional
+        Data type to be parsed out, used to work out bytes read in.
+    count : int, default=-1
+        The number of values required to be generated from the parsing.
         The default is -1, which will read the entire contexts of the file_like
         object and generate as many values as possible.
     """
-
     # There are a wide range of types supported, we just need to know the byte
     # size of the object, so we just make sure we've go an instance of a
     # np.dtype
@@ -840,17 +836,17 @@ def _parse_binary_stream(file_like, dtype=np.float64, count=-1):
 
 
 def load_cubes(filenames, callback, constraints=None):
-    """Loads cubes from a list of fields files filenames.
+    """Load cubes from a list of fields files filenames.
 
-    Args:
-
-    * filenames - list of fields files filenames to load
-
-    Kwargs:
-
-    * callback - a function which can be passed on to
-        :func:`iris.io.run_callback`
+    Parameters
+    ----------
+    filenames :
+        List of fields files filenames to load
+    callback :
+        A function which can be passed on to :func:`iris.io.run_callback`
 
+    Notes
+    -----
     .. note::
 
         The resultant cubes may not be in the order that they are in the
@@ -864,11 +860,12 @@ def load_cubes(filenames, callback, constraints=None):
 
 
 def load_cubes_32bit_ieee(filenames, callback, constraints=None):
-    """Loads cubes from a list of 32bit ieee converted fieldsfiles filenames.
-
-    .. seealso::
- :func:`load_cubes` for keyword details + See Also + -------- + :func:`load_cubes` + For keyword details """ return pp._load_cubes_variable_loader( diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py index 92d46c6693..1611ef7160 100644 --- a/lib/iris/fileformats/_nc_load_rules/actions.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -7,6 +7,7 @@ For now, we are still emulating various aspects of how our original Pyke-based code used the Pyke 'engine' to hold translation data, both Pyke-specific and not : + 1) basic details from the iris.fileformats.cf analysis of the file are recorded before translating each output cube, using "engine.assert_case_specific_fact(name, args)". @@ -42,9 +43,9 @@ import warnings from iris.config import get_logger -import iris.exceptions import iris.fileformats.cf import iris.fileformats.pp as pp +import iris.warnings from . import helpers as hh @@ -53,8 +54,8 @@ class _WarnComboCfLoadIgnoring( - iris.exceptions.IrisCfLoadWarning, - iris.exceptions.IrisIgnoringWarning, + iris.warnings.IrisCfLoadWarning, + iris.warnings.IrisIgnoringWarning, ): """One-off combination of warning classes - enhances user filtering.""" @@ -62,8 +63,8 @@ class _WarnComboCfLoadIgnoring( class _WarnComboLoadIgnoring( - iris.exceptions.IrisLoadWarning, - iris.exceptions.IrisIgnoringWarning, + iris.warnings.IrisLoadWarning, + iris.warnings.IrisIgnoringWarning, ): """One-off combination of warning classes - enhances user filtering.""" @@ -103,7 +104,7 @@ def inner(engine, *args, **kwargs): @action_function def action_default(engine): - """Standard operations for every cube.""" + """Perform standard operations for every cube.""" hh.build_cube_metadata(engine) @@ -544,7 +545,6 @@ def run_actions(engine): The specific cube being translated is "engine.cube". """ - # default (all cubes) action, always runs action_default(engine) # This should run the default rules. 
diff --git a/lib/iris/fileformats/_nc_load_rules/engine.py b/lib/iris/fileformats/_nc_load_rules/engine.py index e43a1c5c4b..111e8320b6 100644 --- a/lib/iris/fileformats/_nc_load_rules/engine.py +++ b/lib/iris/fileformats/_nc_load_rules/engine.py @@ -2,7 +2,9 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""A simple mimic of the Pyke 'knowledge_engine', for interfacing to the routines +"""A simple mimic of the Pyke 'knowledge_engine'. + +A simple mimic of the Pyke 'knowledge_engine', for interfacing to the routines in 'iris.fileformats.netcdf' with minimal changes to that code. This allows us to replace the Pyke rules operation with the simpler pure-Python @@ -102,10 +104,7 @@ def get_kb(self): return self.facts def print_stats(self): - """No-op, called by - :meth:`iris.fileformats.netcdf._action_activation_stats`. - - """ + """No-op, called by :meth:`iris.fileformats.netcdf._action_activation_stats`.""" pass def add_case_specific_fact(self, fact_name, fact_arglist): diff --git a/lib/iris/fileformats/_nc_load_rules/helpers.py b/lib/iris/fileformats/_nc_load_rules/helpers.py index 31cc4aaa6c..dc68274a36 100644 --- a/lib/iris/fileformats/_nc_load_rules/helpers.py +++ b/lib/iris/fileformats/_nc_load_rules/helpers.py @@ -2,7 +2,9 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""All the pure-Python 'helper' functions which were previously included in the +"""Helper functions for NetCDF loading rules. + +All the pure-Python 'helper' functions which were previously included in the Pyke rules database 'fc_rules_cf.krb'. The 'action' routines now call these, as the rules used to do. @@ -11,8 +13,10 @@ build routines, and which it does not use. 
""" +from __future__ import annotations + import re -from typing import List +from typing import TYPE_CHECKING, List, Optional import warnings import cf_units @@ -32,6 +36,13 @@ from iris.fileformats.netcdf.loader import _get_cf_var_data import iris.std_names import iris.util +import iris.warnings + +if TYPE_CHECKING: + from numpy.ma import MaskedArray + from numpy.typing import ArrayLike + + from iris.fileformats.cf import CFBoundaryVariable # TODO: should un-addable coords / cell measures / etcetera be skipped? iris#5068. @@ -152,7 +163,6 @@ CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN = "scale_factor_at_projection_origin" CF_ATTR_GRID_SCALE_FACTOR_AT_CENT_MERIDIAN = "scale_factor_at_central_meridian" CF_ATTR_GRID_LON_OF_CENT_MERIDIAN = "longitude_of_central_meridian" -CF_ATTR_GRID_STANDARD_PARALLEL = "standard_parallel" CF_ATTR_GRID_PERSPECTIVE_HEIGHT = "perspective_point_height" CF_ATTR_GRID_SWEEP_ANGLE_AXIS = "sweep_angle_axis" CF_ATTR_GRID_AZIMUTH_CENT_LINE = "azimuth_of_central_line" @@ -222,8 +232,8 @@ class _WarnComboIgnoringLoad( - iris.exceptions.IrisIgnoringWarning, - iris.exceptions.IrisLoadWarning, + iris.warnings.IrisIgnoringWarning, + iris.warnings.IrisLoadWarning, ): """One-off combination of warning classes - enhances user filtering.""" @@ -231,8 +241,8 @@ class _WarnComboIgnoringLoad( class _WarnComboDefaultingLoad( - iris.exceptions.IrisDefaultingWarning, - iris.exceptions.IrisLoadWarning, + iris.warnings.IrisDefaultingWarning, + iris.warnings.IrisLoadWarning, ): """One-off combination of warning classes - enhances user filtering.""" @@ -240,8 +250,8 @@ class _WarnComboDefaultingLoad( class _WarnComboDefaultingCfLoad( - iris.exceptions.IrisCfLoadWarning, - iris.exceptions.IrisDefaultingWarning, + iris.warnings.IrisCfLoadWarning, + iris.warnings.IrisDefaultingWarning, ): """One-off combination of warning classes - enhances user filtering.""" @@ -249,8 +259,8 @@ class _WarnComboDefaultingCfLoad( class _WarnComboIgnoringCfLoad( - 
iris.exceptions.IrisIgnoringWarning, - iris.exceptions.IrisCfLoadWarning, + iris.warnings.IrisIgnoringWarning, + iris.warnings.IrisCfLoadWarning, ): """One-off combination of warning classes - enhances user filtering.""" @@ -258,24 +268,29 @@ class _WarnComboIgnoringCfLoad( def _split_cell_methods(nc_cell_methods: str) -> List[re.Match]: - """Split a CF cell_methods attribute string into a list of zero or more cell + """Split a CF cell_methods. + + Split a CF cell_methods attribute string into a list of zero or more cell methods, each of which is then parsed with a regex to return a list of match objects. - Args: - - * nc_cell_methods: The value of the cell methods attribute to be split. + Parameters + ---------- + nc_cell_methods : str + The value of the cell methods attribute to be split. Returns ------- nc_cell_methods_matches: list of re.Match objects A list of re.Match objects associated with each parsed cell method + Notes + ----- Splitting is done based on words followed by colons outside of any brackets. Validation of anything other than being laid out in the expected format is left to the calling function. 
- """ + """ # Find name candidates name_start_inds = [] for m in _CM_PARSE_NAME.finditer(nc_cell_methods): @@ -295,7 +310,7 @@ def _split_cell_methods(nc_cell_methods: str) -> List[re.Match]: ) warnings.warn( msg, - category=iris.exceptions.IrisCfLoadWarning, + category=iris.warnings.IrisCfLoadWarning, stacklevel=2, ) if bracket_depth > 0 and ind in name_start_inds: @@ -314,39 +329,41 @@ def _split_cell_methods(nc_cell_methods: str) -> List[re.Match]: nc_cell_method_match = _CM_PARSE.match(nc_cell_method_str.strip()) if not nc_cell_method_match: msg = f"Failed to fully parse cell method string: {nc_cell_methods}" - warnings.warn(msg, category=iris.exceptions.IrisCfLoadWarning, stacklevel=2) + warnings.warn(msg, category=iris.warnings.IrisCfLoadWarning, stacklevel=2) continue nc_cell_methods_matches.append(nc_cell_method_match) return nc_cell_methods_matches -class UnknownCellMethodWarning(iris.exceptions.IrisUnknownCellMethodWarning): - """Backwards compatible form of :class:`iris.exceptions.IrisUnknownCellMethodWarning`.""" +class UnknownCellMethodWarning(iris.warnings.IrisUnknownCellMethodWarning): + """Backwards compatible form of :class:`iris.warnings.IrisUnknownCellMethodWarning`.""" # TODO: remove at the next major release. pass -def parse_cell_methods(nc_cell_methods): - """Parse a CF cell_methods attribute string into a tuple of zero or - more CellMethod instances. +def parse_cell_methods(nc_cell_methods, cf_name=None): + """Parse a CF cell_methods attribute string into a tuple of zero or more CellMethod instances. - Args: - - * nc_cell_methods (str): + Parameters + ---------- + nc_cell_methods : str The value of the cell methods attribute to be parsed. + cf_name : optional Returns ------- iterable of :class:`iris.coords.CellMethod`. + Notes + ----- Multiple coordinates, intervals and comments are supported. If a method has a non-standard name a warning will be issued, but the results are not affected. 
""" - + msg = None cell_methods = [] if nc_cell_methods is not None: for m in _split_cell_methods(nc_cell_methods): @@ -358,10 +375,16 @@ def parse_cell_methods(nc_cell_methods): method_words = method.split() if method_words[0].lower() not in _CM_KNOWN_METHODS: msg = "NetCDF variable contains unknown cell method {!r}" - warnings.warn( - msg.format("{}".format(method_words[0])), - category=UnknownCellMethodWarning, - ) + msg = msg.format(method_words[0]) + if cf_name: + name = "{}".format(cf_name) + msg = msg.replace("variable", "variable {!r}".format(name)) + else: + warnings.warn( + msg, + category=UnknownCellMethodWarning, + ) + msg = None d[_CM_METHOD] = method name = d[_CM_NAME] name = name.replace(" ", "") @@ -419,6 +442,9 @@ def parse_cell_methods(nc_cell_methods): comments=d[_CM_COMMENT], ) cell_methods.append(cell_method) + # only prints one warning, rather than each loop + if msg: + warnings.warn(msg, category=UnknownCellMethodWarning) return tuple(cell_methods) @@ -449,21 +475,7 @@ def build_cube_metadata(engine): # Incorporate cell methods nc_att_cell_methods = getattr(cf_var, CF_ATTR_CELL_METHODS, None) - with warnings.catch_warnings(record=True) as warning_records: - cube.cell_methods = parse_cell_methods(nc_att_cell_methods) - # Filter to get the warning we are interested in. - warning_records = [ - record - for record in warning_records - if issubclass(record.category, UnknownCellMethodWarning) - ] - if len(warning_records) > 0: - # Output an enhanced warning message. - warn_record = warning_records[0] - name = "{}".format(cf_var.cf_name) - msg = warn_record.message.args[0] - msg = msg.replace("variable", "variable {!r}".format(name)) - warnings.warn(message=msg, category=UnknownCellMethodWarning) + cube.cell_methods = parse_cell_methods(nc_att_cell_methods, cf_var.cf_name) # Set the cube global attributes. 
for attr_name, attr_value in cf_var.cf_group.global_attributes.items(): @@ -479,8 +491,11 @@ def build_cube_metadata(engine): ################################################################################ def _get_ellipsoid(cf_grid_var): - """Return a :class:`iris.coord_systems.GeogCS` using the relevant properties of + """Build a :class:`iris.coord_systems.GeogCS`. + + Return a :class:`iris.coord_systems.GeogCS` using the relevant properties of `cf_grid_var`. Returns None if no relevant properties are specified. + """ major = getattr(cf_grid_var, CF_ATTR_GRID_SEMI_MAJOR_AXIS, None) minor = getattr(cf_grid_var, CF_ATTR_GRID_SEMI_MINOR_AXIS, None) @@ -545,7 +560,7 @@ def build_rotated_coordinate_system(engine, cf_grid_var): if north_pole_latitude is None or north_pole_longitude is None: warnings.warn( "Rotated pole position is not fully specified", - category=iris.exceptions.IrisCfLoadWarning, + category=iris.warnings.IrisCfLoadWarning, ) north_pole_grid_lon = getattr(cf_grid_var, CF_ATTR_GRID_NORTH_POLE_GRID_LON, 0.0) @@ -562,10 +577,7 @@ def build_rotated_coordinate_system(engine, cf_grid_var): ################################################################################ def build_transverse_mercator_coordinate_system(engine, cf_grid_var): - """Create a transverse Mercator coordinate system from the CF-netCDF - grid mapping variable. - - """ + """Create a transverse Mercator coordinate system from the CF-netCDF grid mapping variable.""" ellipsoid = _get_ellipsoid(cf_grid_var) latitude_of_projection_origin = getattr( @@ -605,10 +617,7 @@ def build_transverse_mercator_coordinate_system(engine, cf_grid_var): ################################################################################ def build_lambert_conformal_coordinate_system(engine, cf_grid_var): - """Create a Lambert conformal conic coordinate system from the CF-netCDF - grid mapping variable. 
- - """ + """Create a Lambert conformal conic coordinate system from the CF-netCDF grid mapping variable.""" ellipsoid = _get_ellipsoid(cf_grid_var) latitude_of_projection_origin = getattr( @@ -635,10 +644,7 @@ def build_lambert_conformal_coordinate_system(engine, cf_grid_var): ################################################################################ def build_stereographic_coordinate_system(engine, cf_grid_var): - """Create a stereographic coordinate system from the CF-netCDF - grid mapping variable. - - """ + """Create a stereographic coordinate system from the CF-netCDF grid mapping variable.""" ellipsoid = _get_ellipsoid(cf_grid_var) latitude_of_projection_origin = getattr( @@ -669,10 +675,7 @@ def build_stereographic_coordinate_system(engine, cf_grid_var): ################################################################################ def build_polar_stereographic_coordinate_system(engine, cf_grid_var): - """Create a polar stereographic coordinate system from the CF-netCDF - grid mapping variable. - - """ + """Create a polar stereographic coordinate system from the CF-netCDF grid mapping variable.""" ellipsoid = _get_ellipsoid(cf_grid_var) latitude_of_projection_origin = getattr( @@ -704,10 +707,7 @@ def build_polar_stereographic_coordinate_system(engine, cf_grid_var): ################################################################################ def build_mercator_coordinate_system(engine, cf_grid_var): - """Create a Mercator coordinate system from the CF-netCDF - grid mapping variable. 
- - """ + """Create a Mercator coordinate system from the CF-netCDF grid mapping variable.""" ellipsoid = _get_ellipsoid(cf_grid_var) longitude_of_projection_origin = getattr( @@ -734,10 +734,7 @@ def build_mercator_coordinate_system(engine, cf_grid_var): ################################################################################ def build_lambert_azimuthal_equal_area_coordinate_system(engine, cf_grid_var): - """Create a lambert azimuthal equal area coordinate system from the CF-netCDF - grid mapping variable. - - """ + """Create a lambert azimuthal equal area coordinate system from the CF-netCDF grid mapping variable.""" ellipsoid = _get_ellipsoid(cf_grid_var) latitude_of_projection_origin = getattr( @@ -762,10 +759,7 @@ def build_lambert_azimuthal_equal_area_coordinate_system(engine, cf_grid_var): ################################################################################ def build_albers_equal_area_coordinate_system(engine, cf_grid_var): - """Create a albers conical equal area coordinate system from the CF-netCDF - grid mapping variable. - - """ + """Create a albers conical equal area coordinate system from the CF-netCDF grid mapping variable.""" ellipsoid = _get_ellipsoid(cf_grid_var) latitude_of_projection_origin = getattr( @@ -792,10 +786,7 @@ def build_albers_equal_area_coordinate_system(engine, cf_grid_var): ################################################################################ def build_vertical_perspective_coordinate_system(engine, cf_grid_var): - """Create a vertical perspective coordinate system from the CF-netCDF - grid mapping variable. 
- - """ + """Create a vertical perspective coordinate system from the CF-netCDF grid mapping variables.""" ellipsoid = _get_ellipsoid(cf_grid_var) latitude_of_projection_origin = getattr( @@ -824,10 +815,7 @@ def build_vertical_perspective_coordinate_system(engine, cf_grid_var): ################################################################################ def build_geostationary_coordinate_system(engine, cf_grid_var): - """Create a geostationary coordinate system from the CF-netCDF - grid mapping variable. - - """ + """Create a geostationary coordinate system from the CF-netCDF grid mapping variable.""" ellipsoid = _get_ellipsoid(cf_grid_var) latitude_of_projection_origin = getattr( @@ -858,10 +846,7 @@ def build_geostationary_coordinate_system(engine, cf_grid_var): ################################################################################ def build_oblique_mercator_coordinate_system(engine, cf_grid_var): - """Create an oblique mercator coordinate system from the CF-netCDF - grid mapping variable. - - """ + """Create an oblique mercator coordinate system from the CF-netCDF grid mapping variable.""" ellipsoid = _get_ellipsoid(cf_grid_var) azimuth_of_central_line = getattr(cf_grid_var, CF_ATTR_GRID_AZIMUTH_CENT_LINE, None) @@ -920,8 +905,9 @@ def get_attr_units(cf_var, attributes): cf_units.as_unit(attr_units) except ValueError: # Using converted unicode message. Can be reverted with Python 3. - msg = "Ignoring netCDF variable {!r} invalid units {!r}".format( - cf_var.cf_name, attr_units + msg = ( + f"Ignoring invalid units {attr_units!r} on netCDF variable " + f"{cf_var.cf_name!r}." 
) warnings.warn( msg, @@ -952,7 +938,6 @@ def get_attr_units(cf_var, attributes): ################################################################################ def get_names(cf_coord_var, coord_name, attributes): """Determine the standard_name, long_name and var_name attributes.""" - standard_name = getattr(cf_coord_var, CF_ATTR_STD_NAME, None) long_name = getattr(cf_coord_var, CF_ATTR_LONG_NAME, None) cf_name = str(cf_coord_var.cf_name) @@ -988,10 +973,7 @@ def get_names(cf_coord_var, coord_name, attributes): ################################################################################ def get_cf_bounds_var(cf_coord_var): - """Return the CF variable representing the bounds of a coordinate - variable. - - """ + """Return the CF variable representing the bounds of a coordinate variable.""" attr_bounds = getattr(cf_coord_var, CF_ATTR_BOUNDS, None) attr_climatology = getattr(cf_coord_var, CF_ATTR_CLIMATOLOGY, None) @@ -1023,8 +1005,7 @@ def get_cf_bounds_var(cf_coord_var): ################################################################################ def reorder_bounds_data(bounds_data, cf_bounds_var, cf_coord_var): - """Return a bounds_data array with the vertex dimension as the most - rapidly varying. + """Return a bounds_data array with the vertex dimension as the most rapidly varying. .. note:: @@ -1053,12 +1034,62 @@ def reorder_bounds_data(bounds_data, cf_bounds_var, cf_coord_var): return bounds_data +################################################################################ +def _normalise_bounds_units( + points_units: str, cf_bounds_var: CFBoundaryVariable, bounds_data: ArrayLike +) -> Optional[MaskedArray]: + """Ensure bounds have units compatible with points. + + If required, the `bounds_data` will be converted to the `points_units`. + If the bounds units are not convertible, a warning will be issued and + the `bounds_data` will be ignored. + + Bounds with invalid units will be gracefully left unconverted and passed through. 
+ + Parameters + ---------- + points_units : str + The units of the coordinate points. + cf_bounds_var : CFBoundaryVariable + The serialized NetCDF bounds variable. + bounds_data : MaskedArray + The pre-processed data of the bounds variable. + + Returns + ------- + MaskedArray or None + The bounds data with the same units as the points, or ``None`` + if the bounds units are not convertible to the points units. + + """ + bounds_units = get_attr_units(cf_bounds_var, {}) + + if bounds_units != UNKNOWN_UNIT_STRING: + points_units = cf_units.Unit(points_units) + bounds_units = cf_units.Unit(bounds_units) + + if bounds_units != points_units: + if bounds_units.is_convertible(points_units): + bounds_data = bounds_units.convert(bounds_data, points_units) + else: + wmsg = ( + f"Ignoring bounds on NetCDF variable {cf_bounds_var.cf_name!r}. " + f"Expected units compatible with {points_units.origin!r}, got " + f"{bounds_units.origin!r}." + ) + warnings.warn( + wmsg, category=iris.warnings.IrisCfLoadWarning, stacklevel=2 + ) + bounds_data = None + + return bounds_data + + ################################################################################ def build_dimension_coordinate( engine, cf_coord_var, coord_name=None, coord_system=None ): """Create a dimension coordinate (DimCoord) and add it to the cube.""" - cf_var = engine.cf_var cube = engine.cube attributes = {} @@ -1091,6 +1122,8 @@ def build_dimension_coordinate( # dimension names. 
if cf_bounds_var.shape[:-1] != cf_coord_var.shape: bounds_data = reorder_bounds_data(bounds_data, cf_bounds_var, cf_coord_var) + + bounds_data = _normalise_bounds_units(attr_units, cf_bounds_var, bounds_data) else: bounds_data = None @@ -1162,7 +1195,7 @@ def build_dimension_coordinate( except iris.exceptions.CannotAddError as e_msg: warnings.warn( coord_skipped_msg.format(error=e_msg), - category=iris.exceptions.IrisCannotAddWarning, + category=iris.warnings.IrisCannotAddWarning, ) coord_skipped = True else: @@ -1176,7 +1209,7 @@ def build_dimension_coordinate( except iris.exceptions.CannotAddError as e_msg: warnings.warn( coord_skipped_msg.format(error=e_msg), - category=iris.exceptions.IrisCannotAddWarning, + category=iris.warnings.IrisCannotAddWarning, ) coord_skipped = True @@ -1190,7 +1223,6 @@ def build_auxiliary_coordinate( engine, cf_coord_var, coord_name=None, coord_system=None ): """Create an auxiliary coordinate (AuxCoord) and add it to the cube.""" - cf_var = engine.cf_var cube = engine.cube attributes = {} @@ -1217,6 +1249,8 @@ def build_auxiliary_coordinate( # compatibility with array creators (i.e. dask) bounds_data = np.asarray(bounds_data) bounds_data = reorder_bounds_data(bounds_data, cf_bounds_var, cf_coord_var) + + bounds_data = _normalise_bounds_units(attr_units, cf_bounds_var, bounds_data) else: bounds_data = None @@ -1251,7 +1285,7 @@ def build_auxiliary_coordinate( msg = "{name!r} coordinate not added to Cube: {error}" warnings.warn( msg.format(name=str(cf_coord_var.cf_name), error=e_msg), - category=iris.exceptions.IrisCannotAddWarning, + category=iris.warnings.IrisCannotAddWarning, ) else: # Make a list with names, stored on the engine, so we can find them all later. 
@@ -1303,7 +1337,7 @@ def build_cell_measures(engine, cf_cm_var): msg = "{name!r} cell measure not added to Cube: {error}" warnings.warn( msg.format(name=str(cf_cm_var.cf_name), error=e_msg), - category=iris.exceptions.IrisCannotAddWarning, + category=iris.warnings.IrisCannotAddWarning, ) else: # Make a list with names, stored on the engine, so we can find them all later. @@ -1351,7 +1385,7 @@ def build_ancil_var(engine, cf_av_var): msg = "{name!r} ancillary variable not added to Cube: {error}" warnings.warn( msg.format(name=str(cf_av_var.cf_name), error=e_msg), - category=iris.exceptions.IrisCannotAddWarning, + category=iris.warnings.IrisCannotAddWarning, ) else: # Make a list with names, stored on the engine, so we can find them all later. @@ -1362,8 +1396,10 @@ def build_ancil_var(engine, cf_av_var): def _is_lat_lon(cf_var, ud_units, std_name, std_name_grid, axis_name, prefixes): """Determine whether the CF coordinate variable is a latitude/longitude variable. - Ref: [CF] Section 4.1 Latitude Coordinate. - [CF] Section 4.2 Longitude Coordinate. + Ref: + + * [CF] Section 4.1 Latitude Coordinate. + * [CF] Section 4.2 Longitude Coordinate. """ is_valid = False @@ -1434,10 +1470,7 @@ def is_longitude(engine, cf_name): ################################################################################ def is_projection_x_coordinate(engine, cf_name): - """Determine whether the CF coordinate variable is a - projection_x_coordinate variable. - - """ + """Determine whether the CF coordinate variable is a projection_x_coordinate variable.""" cf_var = engine.cf_var.cf_group[cf_name] attr_name = getattr(cf_var, CF_ATTR_STD_NAME, None) or getattr( cf_var, CF_ATTR_LONG_NAME, None @@ -1447,10 +1480,7 @@ def is_projection_x_coordinate(engine, cf_name): ################################################################################ def is_projection_y_coordinate(engine, cf_name): - """Determine whether the CF coordinate variable is a - projection_y_coordinate variable. 
- - """ + """Determine whether the CF coordinate variable is a projection_y_coordinate variable.""" cf_var = engine.cf_var.cf_group[cf_name] attr_name = getattr(cf_var, CF_ATTR_STD_NAME, None) or getattr( cf_var, CF_ATTR_LONG_NAME, None @@ -1499,7 +1529,6 @@ def is_time_period(engine, cf_name): ################################################################################ def is_grid_mapping(engine, cf_name, grid_mapping): """Determine whether the CF grid mapping variable is of the appropriate type.""" - is_valid = False cf_var = engine.cf_var.cf_group[cf_name] attr_mapping_name = getattr(cf_var, CF_ATTR_GRID_MAPPING_NAME, None) @@ -1513,7 +1542,6 @@ def is_grid_mapping(engine, cf_name, grid_mapping): ################################################################################ def _is_rotated(engine, cf_name, cf_attr_value): """Determine whether the CF coordinate variable is rotated.""" - is_valid = False cf_var = engine.cf_var.cf_group[cf_name] attr_std_name = getattr(cf_var, CF_ATTR_STD_NAME, None) @@ -1547,7 +1575,6 @@ def has_supported_mercator_parameters(engine, cf_name): Determine whether the CF grid mapping variable has the supported values for the parameters of the Mercator projection. """ - is_valid = True cf_grid_var = engine.cf_var.cf_group[cf_name] @@ -1560,7 +1587,7 @@ def has_supported_mercator_parameters(engine, cf_name): warnings.warn( "It does not make sense to provide both " '"scale_factor_at_projection_origin" and "standard_parallel".', - category=iris.exceptions.IrisCfInvalidCoordParamWarning, + category=iris.warnings.IrisCfInvalidCoordParamWarning, ) is_valid = False @@ -1569,10 +1596,12 @@ def has_supported_mercator_parameters(engine, cf_name): ################################################################################ def has_supported_polar_stereographic_parameters(engine, cf_name): - """Determine whether the CF grid mapping variable has the supported + """Determine whether CF grid mapping variable supports Polar Stereographic. 
+ + Determine whether the CF grid mapping variable has the supported values for the parameters of the Polar Stereographic projection. - """ + """ is_valid = True cf_grid_var = engine.cf_var.cf_group[cf_name] @@ -1588,7 +1617,7 @@ def has_supported_polar_stereographic_parameters(engine, cf_name): if latitude_of_projection_origin != 90 and latitude_of_projection_origin != -90: warnings.warn( '"latitude_of_projection_origin" must be +90 or -90.', - category=iris.exceptions.IrisCfInvalidCoordParamWarning, + category=iris.warnings.IrisCfInvalidCoordParamWarning, ) is_valid = False @@ -1596,7 +1625,7 @@ def has_supported_polar_stereographic_parameters(engine, cf_name): warnings.warn( "It does not make sense to provide both " '"scale_factor_at_projection_origin" and "standard_parallel".', - category=iris.exceptions.IrisCfInvalidCoordParamWarning, + category=iris.warnings.IrisCfInvalidCoordParamWarning, ) is_valid = False @@ -1604,7 +1633,7 @@ def has_supported_polar_stereographic_parameters(engine, cf_name): warnings.warn( 'One of "scale_factor_at_projection_origin" and ' '"standard_parallel" is required.', - category=iris.exceptions.IrisCfInvalidCoordParamWarning, + category=iris.warnings.IrisCfInvalidCoordParamWarning, ) is_valid = False diff --git a/lib/iris/fileformats/_structured_array_identification.py b/lib/iris/fileformats/_structured_array_identification.py index 05bd04036b..8dada77458 100644 --- a/lib/iris/fileformats/_structured_array_identification.py +++ b/lib/iris/fileformats/_structured_array_identification.py @@ -2,8 +2,11 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -r"""The purpose of this module is to provide utilities for the identification +r"""Identification of multi-dimensional structure in a flat sequence of homogeneous objects. 
+ +The purpose of this module is to provide utilities for the identification of multi-dimensional structure in a flat sequence of homogeneous objects. + One application of this is to efficiently identify a higher dimensional structure from a sorted sequence of PPField instances; for an example, given a list of 12 PPFields, identification that there are 3 unique "time" values @@ -47,14 +50,13 @@ class _UnstructuredArrayException(Exception): - """Raised when an array has been incorrectly assumed to be - structured in a specific way. - - """ + """Raised when an array has been incorrectly assumed to be structured in a specific way.""" class ArrayStructure(namedtuple("ArrayStructure", ["stride", "unique_ordered_values"])): - """Represents the identified structure of an array, where stride is the + """Represent the identified structure of an array. + + Represents the identified structure of an array, where stride is the step between each unique value being seen in order in the flattened version of the array. @@ -92,7 +94,9 @@ def __new__(cls, stride, unique_ordered_values): @property def size(self): - """The ``size`` attribute is the number of the unique values in the + """Number of unique values in the original array. + + The ``size`` attribute is the number of the unique values in the original array. It is **not** the length of the original array. """ @@ -114,7 +118,9 @@ def __ne__(self, other): return not (self == other) def construct_array(self, size): - """The inverse operation of :func:`ArrayStructure.from_array`, returning + """Build 1D array. + + The inverse operation of :func:`ArrayStructure.from_array`, returning a 1D array of the given length with the appropriate repetition pattern. @@ -125,8 +131,7 @@ def construct_array(self, size): ) def nd_array_and_dims(self, original_array, target_shape, order="c"): - """Given a 1D array, and a target shape, construct an ndarray - and associated dimensions. 
+ """Given a 1D array and a target shape, construct an ndarray and associated dimensions. Raises an _UnstructuredArrayException if no optimised shape array can be returned, in which case, simply reshaping the original_array would @@ -199,7 +204,9 @@ def nd_array_and_dims(self, original_array, target_shape, order="c"): @classmethod def from_array(cls, arr): - """Return the computed ArrayStructure for the given flat array + """Return the computed ArrayStructure for the given flat array. + + Return the computed ArrayStructure for the given flat array (if a structure exists, otherwise return None). """ @@ -284,7 +291,9 @@ def from_array(cls, arr): class GroupStructure: - """The GroupStructure class represents a collection of array structures along + """Represent a collection of array structures. + + The GroupStructure class represents a collection of array structures along with additional information such as the length of the arrays and the array order in which they are found (row-major or column-major). @@ -307,14 +316,17 @@ def __init__(self, length, component_structure, array_order="c"): @classmethod def from_component_arrays(cls, component_arrays, array_order="c"): - """Given a dictionary of component name to flattened numpy array, + """From component arrays. + + Given a dictionary of component name to flattened numpy array, return an :class:`GroupStructure` instance which is representative of the underlying array structures. - Args: - - * component_arrays - A dictionary mapping component name to the - full sized 1d (flattened) numpy array. + Parameters + ---------- + component_arrays : + A dictionary mapping component name to the full sized 1d (flattened) + numpy array. 
""" cmpt_structure = { @@ -329,7 +341,8 @@ def from_component_arrays(cls, component_arrays, array_order="c"): return cls(sizes[0], cmpt_structure, array_order=array_order) def _potentially_flattened_components(self): - """Return a generator of the components which could form non-trivial + """Return a generator of the components which could form non-trivial. + (i.e. ``length > 1``) array dimensions. """ @@ -342,8 +355,7 @@ def is_row_major(self): return self._array_order == "c" def possible_structures(self): - """Return a tuple containing the possible structures that this group - could have. + """Return a tuple containing the possible structures that this group could have. A structure in this case is an iterable of ``(name, ArrayStructure)`` pairs, one per dimension, of a possible @@ -443,7 +455,9 @@ def __str__(self): return "\n".join(result) def build_arrays(self, shape, elements_arrays): - """Given the target shape, and a dictionary mapping name to 1D array of + """Build Arrays. + + Given the target shape, and a dictionary mapping name to 1D array of :attr:`.length`, return a dictionary mapping element name to ``(ndarray, dims)``. diff --git a/lib/iris/fileformats/abf.py b/lib/iris/fileformats/abf.py index 3f7a6b18e0..6dd8dfd14f 100644 --- a/lib/iris/fileformats/abf.py +++ b/lib/iris/fileformats/abf.py @@ -65,11 +65,14 @@ class ABFField: def __init__(self, filename): """Create an ABFField object from the given filename. - Args: + Parameters + ---------- + filename : str + An ABF filename. - * filename - An ABF filename. - - Example:: + Examples + -------- + :: field = ABFField("AVHRRBUVI01.1985feba.abl") @@ -116,7 +119,6 @@ def _read(self): def to_cube(self): """Return a new :class:`~iris.cube.Cube` from this ABFField.""" - cube = iris.cube.Cube(self.data) # Name. @@ -193,16 +195,17 @@ def to_cube(self): def load_cubes(filespecs, callback=None): - """Loads cubes from a list of ABF filenames. 
- - Args: - - * filenames - list of ABF filenames to load - - Kwargs: + """Load cubes from a list of ABF filenames. - * callback - a function that can be passed to :func:`iris.io.run_callback` + Parameters + ---------- + filespecs : + List of ABF filenames to load + callback : optional + A function that can be passed to :func:`iris.io.run_callback` + Notes + ----- .. note:: The resultant cubes may not be in the same order as in the file. diff --git a/lib/iris/fileformats/cf.py b/lib/iris/fileformats/cf.py index 737955b9a7..0dc505d522 100644 --- a/lib/iris/fileformats/cf.py +++ b/lib/iris/fileformats/cf.py @@ -2,10 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Provides the capability to load netCDF files and interpret them +"""Provide capability to load netCDF files and interpret them. + +Provides the capability to load netCDF files and interpret them according to the 'NetCDF Climate and Forecast (CF) Metadata Conventions'. -References: +References +---------- [CF] NetCDF Climate and Forecast (CF) Metadata conventions. [NUG] NetCDF User's Guide, https://www.unidata.ucar.edu/software/netcdf/documentation/NUG/ @@ -20,9 +23,9 @@ import numpy as np import numpy.ma as ma -import iris.exceptions from iris.fileformats.netcdf import _thread_safe_nc import iris.util +import iris.warnings # # CF parse pattern common to both formula terms and measure CF variables. @@ -112,40 +115,41 @@ def _identify_common(variables, ignore, target): def identify(self, variables, ignore=None, target=None, warn=True): """Identify all variables that match the criterion for this CF-netCDF variable class. - Args: - - * variables: + Parameters + ---------- + variables : Dictionary of netCDF4.Variable instance by variable name. - - Kwargs: - - * ignore: + ignore : optional List of variable names to ignore. - * target: + target : optional Name of a single variable to check.
- * warn: + warn : bool, default=True Issue a warning if a missing variable is referenced. - Returns: - Dictionary of CFVariable instance by variable name. + Returns + ------- + Dictionary of CFVariable instance by variable name. """ pass def spans(self, cf_variable): - """Determine whether the dimensionality of this variable + """Determine dimensionality coverage. + + Determine whether the dimensionality of this variable is a subset of the specified target variable. Note that, by default scalar variables always span the dimensionality of the target variable. - Args: - - * cf_variable: + Parameters + ---------- + cf_variable : Compare dimensionality with the :class:`CFVariable`. - Returns: - Boolean. + Returns + ------- + bool """ result = set(self.dimensions).issubset(cf_variable.dimensions) @@ -215,15 +219,18 @@ def cf_attrs_reset(self): def add_formula_term(self, root, term): """Register the participation of this CF-netCDF variable in a CF-netCDF formula term. - Args: - - * root (string): - The name of CF-netCDF variable that defines the CF-netCDF formula_terms attribute. - * term (string): - The associated term name of this variable in the formula_terms definition. + Parameters + ---------- + root : str + The name of CF-netCDF variable that defines the CF-netCDF + formula_terms attribute. + term : str + The associated term name of this variable in the formula_terms + definition. - Returns: - None. + Returns + ------- + None """ self.cf_terms_by_root[root] = term @@ -231,15 +238,18 @@ def add_formula_term(self, root, term): def has_formula_terms(self): """Determine whether this CF-netCDF variable participates in a CF-netcdf formula term. - Returns: - Boolean. + Returns + ------- + bool """ return bool(self.cf_terms_by_root) class CFAncillaryDataVariable(CFVariable): - """A CF-netCDF ancillary data variable is a variable that provides metadata + """CF-netCDF ancillary data variable. 
+ + A CF-netCDF ancillary data variable is a variable that provides metadata about the individual values of another data variable. Identified by the CF-netCDF variable attribute 'ancillary_variables'. @@ -268,7 +278,7 @@ def identify(cls, variables, ignore=None, target=None, warn=True): message = "Missing CF-netCDF ancillary data variable %r, referenced by netCDF variable %r" warnings.warn( message % (name, nc_var_name), - category=iris.exceptions.IrisCfMissingVarWarning, + category=iris.warnings.IrisCfMissingVarWarning, ) else: result[name] = CFAncillaryDataVariable( @@ -279,7 +289,9 @@ def identify(cls, variables, ignore=None, target=None, warn=True): class CFAuxiliaryCoordinateVariable(CFVariable): - """A CF-netCDF auxiliary coordinate variable is any netCDF variable that contains + """CF-netCDF auxiliary coordinate variable. + + A CF-netCDF auxiliary coordinate variable is any netCDF variable that contains coordinate data, but is not a CF-netCDF coordinate variable by definition. There is no relationship between the name of a CF-netCDF auxiliary coordinate @@ -288,8 +300,10 @@ class CFAuxiliaryCoordinateVariable(CFVariable): Identified by the CF-netCDF variable attribute 'coordinates'. Also see :class:`iris.fileformats.cf.CFLabelVariable`. - Ref: [CF] Chapter 5. Coordinate Systems. - [CF] Section 6.2. Alternative Coordinates. + Ref: + + * [CF] Chapter 5. Coordinate Systems. + * [CF] Section 6.2. Alternative Coordinates. """ @@ -313,7 +327,7 @@ def identify(cls, variables, ignore=None, target=None, warn=True): message = "Missing CF-netCDF auxiliary coordinate variable %r, referenced by netCDF variable %r" warnings.warn( message % (name, nc_var_name), - category=iris.exceptions.IrisCfMissingVarWarning, + category=iris.warnings.IrisCfMissingVarWarning, ) else: # Restrict to non-string type i.e. not a CFLabelVariable. 
@@ -326,7 +340,9 @@ def identify(cls, variables, ignore=None, target=None, warn=True): class CFBoundaryVariable(CFVariable): - """A CF-netCDF boundary variable is associated with a CF-netCDF variable that contains + """CF-netCDF boundary variable. + + A CF-netCDF boundary variable is associated with a CF-netCDF variable that contains coordinate data. When a data value provides information about conditions in a cell occupying a region of space/time or some other dimension, the boundary variable provides a description of cell extent. @@ -361,7 +377,7 @@ def identify(cls, variables, ignore=None, target=None, warn=True): message = "Missing CF-netCDF boundary variable %r, referenced by netCDF variable %r" warnings.warn( message % (name, nc_var_name), - category=iris.exceptions.IrisCfMissingVarWarning, + category=iris.warnings.IrisCfMissingVarWarning, ) else: result[name] = CFBoundaryVariable(name, variables[name]) @@ -369,19 +385,22 @@ def identify(cls, variables, ignore=None, target=None, warn=True): return result def spans(self, cf_variable): - """Determine whether the dimensionality of this variable + """Determine dimensionality coverage. + + Determine whether the dimensionality of this variable is a subset of the specified target variable. Note that, by default scalar variables always span the dimensionality of the target variable. - Args: - - * cf_variable: + Parameters + ---------- + cf_variable : Compare dimensionality with the :class:`CFVariable`. - Returns: - Boolean. + Returns + ------- + bool """ # Scalar variables always span the target variable. @@ -397,7 +416,9 @@ def spans(self, cf_variable): class CFClimatologyVariable(CFVariable): - """A CF-netCDF climatology variable is associated with a CF-netCDF variable that contains + """CF-netCDF climatology variable. + + A CF-netCDF climatology variable is associated with a CF-netCDF variable that contains coordinate data. 
When a data value provides information about conditions in a cell occupying a region of space/time or some other dimension, the climatology variable provides a climatological description of cell extent. @@ -432,7 +453,7 @@ def identify(cls, variables, ignore=None, target=None, warn=True): message = "Missing CF-netCDF climatology variable %r, referenced by netCDF variable %r" warnings.warn( message % (name, nc_var_name), - category=iris.exceptions.IrisCfMissingVarWarning, + category=iris.warnings.IrisCfMissingVarWarning, ) else: result[name] = CFClimatologyVariable(name, variables[name]) @@ -440,19 +461,22 @@ def identify(cls, variables, ignore=None, target=None, warn=True): return result def spans(self, cf_variable): - """Determine whether the dimensionality of this variable + """Determine dimensionality coverage. + + Determine whether the dimensionality of this variable is a subset of the specified target variable. Note that, by default scalar variables always span the dimensionality of the target variable. - Args: - - * cf_variable: + Parameters + ---------- + cf_variable : :class:`CFVariable` Compare dimensionality with the :class:`CFVariable`. - Returns: - Boolean. + Returns + ------- + bool """ # Scalar variables always span the target variable. @@ -468,7 +492,9 @@ def spans(self, cf_variable): class CFCoordinateVariable(CFVariable): - """A CF-netCDF coordinate variable is a one-dimensional variable with the same name + """A CF-netCDF coordinate variable. + + A CF-netCDF coordinate variable is a one-dimensional variable with the same name as its dimension, and it is defined as a numeric data type with values that are ordered monotonically. Missing values are not allowed in CF-netCDF coordinate variables. Also see [NUG] Section 2.3.1. 
@@ -522,14 +548,18 @@ def identify(cls, variables, ignore=None, target=None, warn=True): class _CFFormulaTermsVariable(CFVariable): - """A CF-netCDF formula terms variable corresponds to a term in a formula that + """CF-netCDF formula terms variable. + + A CF-netCDF formula terms variable corresponds to a term in a formula that allows dimensional vertical coordinate values to be computed from dimensionless vertical coordinate values and associated variables at specific grid points. Identified by the CF-netCDF variable attribute 'formula_terms'. - Ref: [CF] Section 4.3.2. Dimensional Vertical Coordinate. - [CF] Appendix D. Dimensionless Vertical Coordinates. + Ref: + + * [CF] Section 4.3.2. Dimensional Vertical Coordinate. + * [CF] Appendix D. Dimensionless Vertical Coordinates. """ @@ -563,7 +593,7 @@ def identify(cls, variables, ignore=None, target=None, warn=True): message = "Missing CF-netCDF formula term variable %r, referenced by netCDF variable %r" warnings.warn( message % (variable_name, nc_var_name), - category=iris.exceptions.IrisCfMissingVarWarning, + category=iris.warnings.IrisCfMissingVarWarning, ) else: if variable_name not in result: @@ -590,7 +620,9 @@ def __repr__(self): class CFGridMappingVariable(CFVariable): - """A CF-netCDF grid mapping variable contains a list of specific attributes that + """CF-netCDF grid mapping variable. + + A CF-netCDF grid mapping variable contains a list of specific attributes that define a particular grid mapping. A CF-netCDF grid mapping variable must contain the attribute 'grid_mapping_name'. @@ -600,8 +632,10 @@ class CFGridMappingVariable(CFVariable): Identified by the CF-netCDF variable attribute 'grid_mapping'. - Ref: [CF] Section 5.6. Horizontal Coordinate Reference Systems, Grid Mappings, and Projections. - [CF] Appendix F. Grid Mappings. + Ref: + + * [CF] Section 5.6. Horizontal Coordinate Reference Systems, Grid Mappings, and Projections. + * [CF] Appendix F. Grid Mappings. 
""" @@ -626,7 +660,7 @@ def identify(cls, variables, ignore=None, target=None, warn=True): message = "Missing CF-netCDF grid mapping variable %r, referenced by netCDF variable %r" warnings.warn( message % (name, nc_var_name), - category=iris.exceptions.IrisCfMissingVarWarning, + category=iris.warnings.IrisCfMissingVarWarning, ) else: result[name] = CFGridMappingVariable(name, variables[name]) @@ -635,7 +669,9 @@ def identify(cls, variables, ignore=None, target=None, warn=True): class CFLabelVariable(CFVariable): - """A CF-netCDF CF label variable is any netCDF variable that contain string + """Cariable is any netCDF variable that contain string textual information, or labels. + + A CF-netCDF CF label variable is any netCDF variable that contain string textual information, or labels. Identified by the CF-netCDF variable attribute 'coordinates'. @@ -665,7 +701,7 @@ def identify(cls, variables, ignore=None, target=None, warn=True): message = "Missing CF-netCDF label variable %r, referenced by netCDF variable %r" warnings.warn( message % (name, nc_var_name), - category=iris.exceptions.IrisCfMissingVarWarning, + category=iris.warnings.IrisCfMissingVarWarning, ) else: # Register variable, but only allow string type. @@ -678,16 +714,17 @@ def identify(cls, variables, ignore=None, target=None, warn=True): def cf_label_data(self, cf_data_var): """Return the associated CF-netCDF label variable strings. - Args: - - * cf_data_var (:class:`iris.fileformats.cf.CFDataVariable`): - The CF-netCDF data variable which the CF-netCDF label variable describes. + Parameters + ---------- + cf_data_var : :class:`iris.fileformats.cf.CFDataVariable` + The CF-netCDF data variable which the CF-netCDF label variable + describes. - Returns: - String labels. + Returns + ------- + str labels """ - if not isinstance(cf_data_var, CFDataVariable): raise TypeError( "cf_data_var argument should be of type CFDataVariable. Got %r." 
@@ -744,16 +781,17 @@ def cf_label_data(self, cf_data_var): def cf_label_dimensions(self, cf_data_var): """Return the name of the associated CF-netCDF label variable data dimensions. - Args: - - * cf_data_var (:class:`iris.fileformats.cf.CFDataVariable`): - The CF-netCDF data variable which the CF-netCDF label variable describes. + Parameters + ---------- + cf_data_var : :class:`iris.fileformats.cf.CFDataVariable` + The CF-netCDF data variable which the CF-netCDF label variable + describes. - Returns: - Tuple of label data dimension names. + Returns + ------- + Tuple of label data dimension names. """ - if not isinstance(cf_data_var, CFDataVariable): raise TypeError( "cf_data_var argument should be of type CFDataVariable. Got %r." @@ -769,19 +807,22 @@ def cf_label_dimensions(self, cf_data_var): ) def spans(self, cf_variable): - """Determine whether the dimensionality of this variable + """Determine dimensionality coverage. + + Determine whether the dimensionality of this variable is a subset of the specified target variable. Note that, by default scalar variables always span the dimensionality of the target variable. - Args: - - * cf_variable: + Parameters + ---------- + cf_variable : Compare dimensionality with the :class:`CFVariable`. - Returns: - Boolean. + Returns + ------- + bool """ # Scalar variables always span the target variable. 
@@ -835,7 +876,7 @@ def identify(cls, variables, ignore=None, target=None, warn=True): message = "Missing CF-netCDF measure variable %r, referenced by netCDF variable %r" warnings.warn( message % (variable_name, nc_var_name), - category=iris.exceptions.IrisCfMissingVarWarning, + category=iris.warnings.IrisCfMissingVarWarning, ) else: result[variable_name] = CFMeasureVariable( @@ -849,7 +890,9 @@ def identify(cls, variables, ignore=None, target=None, warn=True): ################################################################################ class CFGroup(MutableMapping): - """Represents a collection of 'NetCDF Climate and Forecast (CF) Metadata + """Collection of 'NetCDF CF Metadata Conventions' variables and netCDF global attributes. + + Represents a collection of 'NetCDF Climate and Forecast (CF) Metadata Conventions' variables and netCDF global attributes. """ @@ -926,10 +969,7 @@ def cell_measures(self): @property def non_data_variable_names(self): - """:class:`set` of the names of the CF-netCDF variables that are not - the data pay-load. - - """ + """:class:`set` names of the CF-netCDF variables that are not the data pay-load.""" non_data_variables = ( self.ancillary_variables, self.auxiliary_coordinates, @@ -996,7 +1036,9 @@ def __repr__(self): ################################################################################ class CFReader: - """This class allows the contents of a netCDF file to be interpreted according + """Allows the contents of a netCDF file to be interpreted. + + This class allows the contents of a netCDF file to be interpreted according to the 'NetCDF Climate and Forecast (CF) Metadata Conventions'.
""" @@ -1041,7 +1083,7 @@ def __init__(self, file_source, warn=False, monotonic=False): warnings.warn( "Optimise CF-netCDF loading by converting data from NetCDF3 " 'to NetCDF4 file format using the "nccopy" command.', - category=iris.exceptions.IrisLoadWarning, + category=iris.warnings.IrisLoadWarning, ) self._check_monotonic = monotonic @@ -1073,7 +1115,6 @@ def __repr__(self): def _translate(self): """Classify the netCDF variables into CF-netCDF variables.""" - netcdf_variable_names = list(self._dataset.variables.keys()) # Identify all CF coordinate variables first. This must be done @@ -1180,7 +1221,7 @@ def _build(cf_variable): ) warnings.warn( msg, - category=iris.exceptions.IrisCfNonSpanningVarWarning, + category=iris.warnings.IrisCfNonSpanningVarWarning, ) # Build CF data variable relationships. @@ -1229,7 +1270,7 @@ def _build(cf_variable): ) warnings.warn( msg, - category=iris.exceptions.IrisCfNonSpanningVarWarning, + category=iris.warnings.IrisCfNonSpanningVarWarning, ) # Add the CF group to the variable. @@ -1293,10 +1334,7 @@ def __del__(self): def _getncattr(dataset, attr, default=None): - """Simple wrapper round `netCDF4.Dataset.getncattr` to make it behave - more like `getattr`. - - """ + """Wrap `netCDF4.Dataset.getncattr` to make it behave more like `getattr`.""" try: value = dataset.getncattr(attr) except AttributeError: diff --git a/lib/iris/fileformats/dot.py b/lib/iris/fileformats/dot.py index 3c24145073..53f85794c6 100644 --- a/lib/iris/fileformats/dot.py +++ b/lib/iris/fileformats/dot.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Provides Creation and saving of DOT graphs for a :class:`iris.cube.Cube`. - -""" +"""Provides Creation and saving of DOT graphs for a :class:`iris.cube.Cube`.""" import os import subprocess @@ -57,10 +55,10 @@ def _dot_path(): def save(cube, target): """Save a dot representation of the cube. 
- Args - ---- + Parameters + ---------- cube: :class:`iris.cube.Cube`. - target + target : A filename or open file handle. See Also @@ -85,19 +83,19 @@ def save(cube, target): def save_png(source, target, launch=False): - """Produce a "dot" instance diagram by calling dot and optionally launching + """Produce a "dot" instance diagram by calling dot. + + Produce a "dot" instance diagram by calling dot and optionally launching the resulting image. - Args - ---- + Parameters + ---------- source: :class:`iris.cube.Cube`, or dot filename. - target + target : A filename or open file handle. If passing a file handle, take care to open it for binary output. - - **kwargs - * launch - Display the image. Default is False. + launch : bool, default=False + Display the image. Default is False. See Also -------- @@ -150,9 +148,9 @@ def save_png(source, target, launch=False): def cube_text(cube): """Return a DOT text representation a `iris.cube.Cube`. - Args - ---- - cube + Parameters + ---------- + cube : The cube for which to create DOT text. """ @@ -278,14 +276,13 @@ def cube_text(cube): def _coord_text(label, coord): - """Return a string containing the dot representation for a single coordinate - node. + """Return a string containing the dot representation for a single coordinate node. - Args - ---- - label + Parameters + ---------- + label : The dot ID of the coordinate node. - coord + coord : The coordinate to convert. """ @@ -308,14 +305,13 @@ def _coord_text(label, coord): def _coord_system_text(cs, uid): - """Return a string containing the dot representation for a single coordinate - system node. + """Return string containing dot representation for a single coordinate system node. - Args - ---- - cs + Parameters + ---------- + cs : The coordinate system to convert. - uid + uid : The uid allows/distinguishes non-identical CoordSystems of the same type. @@ -343,8 +339,9 @@ def _dot_node(indent, id, name, attributes): The ID of the node. name : The visual name of the node. 
- attributes: + attributes : An iterable of (name, value) attribute pairs. + """ # noqa: D410, D411 attributes = r"\n".join("%s: %s" % item for item in attributes) template = """%(indent)s"%(id)s" [ diff --git a/lib/iris/fileformats/name.py b/lib/iris/fileformats/name.py index 4742ec9001..bc1bb690c2 100644 --- a/lib/iris/fileformats/name.py +++ b/lib/iris/fileformats/name.py @@ -6,7 +6,9 @@ def _get_NAME_loader(filename): - """Return the appropriate load function for a NAME file based + """Return a NAME load function. + + Return the appropriate load function for a NAME file based on the contents of its header. """ @@ -43,21 +45,18 @@ def _get_NAME_loader(filename): def load_cubes(filenames, callback): - """Return a generator of cubes given one or more filenames and an - optional callback. - - Args: + """Return a generator of cubes given one or more filenames and an optional callback. - * filenames (string/list): + Parameters + ---------- + filenames : str or list One or more NAME filenames to load. - - Kwargs: - - * callback (callable function): + callback : callable function, optional A function which can be passed on to :func:`iris.io.run_callback`. - Returns: - A generator of :class:`iris.cubes.Cube` instances. + Returns + ------- + A generator of :class:`iris.cubes.Cube` instances. 
""" from iris.io import run_callback diff --git a/lib/iris/fileformats/name_loaders.py b/lib/iris/fileformats/name_loaders.py index e6acb6aa66..fe53308cb0 100644 --- a/lib/iris/fileformats/name_loaders.py +++ b/lib/iris/fileformats/name_loaders.py @@ -16,8 +16,9 @@ import iris.coord_systems from iris.coords import AuxCoord, CellMethod, DimCoord import iris.cube -from iris.exceptions import IrisLoadWarning, TranslationError +from iris.exceptions import TranslationError import iris.util +from iris.warnings import IrisLoadWarning EARTH_RADIUS = 6371229.0 NAMEIII_DATETIME_FORMAT = "%d/%m/%Y %H:%M %Z" @@ -45,16 +46,19 @@ def _split_name_and_units(name): def read_header(file_handle): - """Return a dictionary containing the header information extracted - from the the provided NAME file object. + """Return a dictionary containing the header information. - Args: + Return a dictionary containing the header information extracted + from the the provided NAME file object. - * file_handle (file-like object): + Parameters + ---------- + file_handle : file-like object A file-like object from which to read the header information. - Returns: - A dictionary containing the extracted header information. + Returns + ------- + A dictionary containing the extracted header information. """ header = {} @@ -95,7 +99,9 @@ def read_header(file_handle): def _read_data_arrays(file_handle, n_arrays, shape): - """Return a list of NumPy arrays containing the data extracted from + """Return a list of NumPy arrays containing the data extracted. + + Return a list of NumPy arrays containing the data extracted from the provided file object. The number and shape of the arrays must be specified. @@ -124,10 +130,12 @@ def _read_data_arrays(file_handle, n_arrays, shape): def _build_lat_lon_for_NAME_field( header, dimindex, x_or_y, coord_names=["longitude", "latitude"] ): - """Return regular latitude and longitude coordinates extracted from + """Return regular latitude and longitude coordinates. 
+ + Return regular latitude and longitude coordinates extracted from the provided header dictionary. - """ + """ if x_or_y == "X": start = header["X grid origin"] step = header["X grid resolution"] @@ -145,7 +153,9 @@ def _build_lat_lon_for_NAME_field( def _build_lat_lon_for_NAME_timeseries(column_headings): - """Return regular latitude and longitude coordinates extracted from + """Return regular latitude and longitude coordinates. + + Return regular latitude and longitude coordinates extracted from the provided column_headings dictionary. """ @@ -184,7 +194,9 @@ def _build_lat_lon_for_NAME_timeseries(column_headings): def _calc_integration_period(time_avgs): - """Return a list of datetime.timedelta objects determined from the provided + """Calculate averaging/integration time periods. + + Return a list of datetime.timedelta objects determined from the provided list of averaging/integration period column headings. """ @@ -221,16 +233,16 @@ def _parse_units(units): * Units where the characters used are non-standard (i.e. 'mc' for micro instead of 'u') - Args: - - * units (string): + Parameters + ---------- + units : str NAME units. - Returns: - An instance of :class:`cf_units.Unit`. + Returns + ------- + An instance of :class:`cf_units.Unit`. """ - unit_mapper = { "Risks/m3": "1", # Used for Bluetongue "TCID50s/m3": "1", # Used for Foot and Mouth @@ -268,17 +280,18 @@ def _cf_height_from_name(z_coord, lower_bound=None, upper_bound=None): and flight level etc. This function returns an iris coordinate representing this field heading. - Args: - - * z_coord (list): + Parameters + ---------- + z_coord : list A field heading, specifically the z component. - Returns: + Returns + ------- + :class:`iris.coords.AuxCoord` An instance of :class:`iris.coords.AuxCoord` representing the interpretation of the supplied field heading. """ - # NAMEII - integer/float support. # Match against height agl, asl and Pa. 
pattern = re.compile( @@ -384,7 +397,9 @@ def _cf_height_from_name(z_coord, lower_bound=None, upper_bound=None): def _generate_cubes(header, column_headings, coords, data_arrays, cell_methods=None): - """Yield :class:`iris.cube.Cube` instances given + """Generate NAME cubes. + + Yield :class:`iris.cube.Cube` instances given the headers, column headings, coords and data_arrays extracted from a NAME file. @@ -546,21 +561,25 @@ def _generate_cubes(header, column_headings, coords, data_arrays, cell_methods=N def _build_cell_methods(av_or_ints, coord): - """Return a list of :class:`iris.coords.CellMethod` instances + """Create cell-methods. + + Return a list of :class:`iris.coords.CellMethod` instances based on the provided list of column heading entries and the associated coordinate. If a given entry does not correspond to a cell method (e.g. "No time averaging"), a value of None is inserted. - Args: - - * av_or_ints (iterable of strings): + Parameters + ---------- + av_or_ints : iterable of str An iterable of strings containing the column heading entries to be parsed. - * coord (string or :class:`iris.coords.Coord`): + coord : str or :class:`iris.coords.Coord` The coordinate name (or :class:`iris.coords.Coord` instance) to which the column heading entries refer. - Returns: + Returns + ------- + list of :class:`iris.coords.CellMethod` or None. A list that is the same length as `av_or_ints` containing :class:`iris.coords.CellMethod` instances or values of None. @@ -583,16 +602,19 @@ def _build_cell_methods(av_or_ints, coord): def load_NAMEIII_field(filename): - """Load a NAME III grid output file returning a - generator of :class:`iris.cube.Cube` instances. + """Load NAME III cubes. - Args: + Load a NAME III grid output file returning a + generator of :class:`iris.cube.Cube` instances. - * filename (string): + Parameters + ---------- + filename : str Name of file to load. - Returns: - A generator :class:`iris.cube.Cube` instances. 
+ Returns + ------- + A generator of :class:`iris.cube.Cube` instances. """ # Loading a file gives a generator of lines which can be progressed using @@ -677,16 +699,19 @@ def load_NAMEIII_field(filename): def load_NAMEII_field(filename): - """Load a NAME II grid output file returning a - generator of :class:`iris.cube.Cube` instances. + """Load a NAME II grid output file. - Args: + Load a NAME II grid output file returning a generator of + :class:`iris.cube.Cube` instances. - * filename (string): + Parameters + ---------- + filename : str Name of file to load. - Returns: - A generator :class:`iris.cube.Cube` instances. + Returns + ------- + A generator of :class:`iris.cube.Cube` instances. """ with open(filename, "r") as file_handle: @@ -764,16 +789,19 @@ def load_NAMEII_field(filename): def load_NAMEIII_timeseries(filename): - """Load a NAME III time series file returning a - generator of :class:`iris.cube.Cube` instances. + """Load a NAME III time series file. - Args: + Load a NAME III time series file returning a generator of + :class:`iris.cube.Cube` instances. - * filename (string): + Parameters + ---------- + filename : str Name of file to load. - Returns: - A generator :class:`iris.cube.Cube` instances. + Returns + ------- + A generator of :class:`iris.cube.Cube` instances. """ with open(filename, "r") as file_handle: @@ -852,16 +880,19 @@ def load_NAMEIII_timeseries(filename): def load_NAMEII_timeseries(filename): - """Load a NAME II Time Series file returning a - generator of :class:`iris.cube.Cube` instances. + """Load a NAME II time series file. - Args: + Load a NAME II Time Series file returning a generator of + :class:`iris.cube.Cube` instances. - * filename (string): + Parameters + ---------- + filename : str Name of file to load. - Returns: - A generator :class:`iris.cube.Cube` instances. + Returns + ------- + A generator of :class:`iris.cube.Cube` instances.
""" with open(filename, "r") as file_handle: @@ -922,19 +953,21 @@ def load_NAMEII_timeseries(filename): def load_NAMEIII_version2(filename): - """Load a NAME III version 2 file returning a - generator of :class:`iris.cube.Cube` instances. + """Load a NAME III version 2 file. - Args: + Load a NAME III version 2 file returning a generator of + :class:`iris.cube.Cube` instances. - * filename (string): + Parameters + ---------- + filename : str Name of file to load. - Returns: - A generator :class:`iris.cube.Cube` instances. + Returns + ------- + A generator :class:`iris.cube.Cube` instances. """ - # loading a file gives a generator of lines which can be progressed # using the next() method. This will come in handy as we wish to # progress through the file line by line. @@ -1141,16 +1174,19 @@ def load_NAMEIII_version2(filename): def load_NAMEIII_trajectory(filename): - """Load a NAME III trajectory file returning a - generator of :class:`iris.cube.Cube` instances. + """Load a NAME III trajectory file. - Args: + Load a NAME III trajectory file returning a generator of + :class:`iris.cube.Cube` instances. - * filename (string): + Parameters + ---------- + filename : str Name of file to load. - Returns: - A generator :class:`iris.cube.Cube` instances. + Returns + ------- + A generator :class:`iris.cube.Cube` instances. """ time_unit = cf_units.Unit("hours since epoch", calendar=cf_units.CALENDAR_STANDARD) diff --git a/lib/iris/fileformats/netcdf/loader.py b/lib/iris/fileformats/netcdf/loader.py index 31b1774f19..4e205ad7f3 100644 --- a/lib/iris/fileformats/netcdf/loader.py +++ b/lib/iris/fileformats/netcdf/loader.py @@ -34,12 +34,12 @@ import iris.config import iris.coord_systems import iris.coords -import iris.exceptions import iris.fileformats.cf from iris.fileformats.netcdf import _thread_safe_nc from iris.fileformats.netcdf.saver import _CF_ATTRS import iris.io import iris.util +import iris.warnings # Show actions activation statistics. 
DEBUG = False @@ -53,8 +53,8 @@ class _WarnComboIgnoringBoundsLoad( - iris.exceptions.IrisIgnoringBoundsWarning, - iris.exceptions.IrisLoadWarning, + iris.warnings.IrisIgnoringBoundsWarning, + iris.warnings.IrisLoadWarning, ): """One-off combination of warning classes - enhances user filtering.""" @@ -415,7 +415,7 @@ def coord_from_term(term): return coord warnings.warn( "Unable to find coordinate for variable {!r}".format(name), - category=iris.exceptions.IrisFactoryCoordNotFoundWarning, + category=iris.warnings.IrisFactoryCoordNotFoundWarning, ) if formula_type == "atmosphere_sigma_coordinate": @@ -562,10 +562,8 @@ def load_cubes(file_sources, callback=None, constraints=None): file_sources : str or list One or more NetCDF filenames/OPeNDAP URLs to load from. OR open datasets. - callback : function, optional Function which can be passed on to :func:`iris.io.run_callback`. - constraints : optional Returns @@ -648,7 +646,7 @@ def load_cubes(file_sources, callback=None, constraints=None): except ValueError as e: warnings.warn( "{}".format(e), - category=iris.exceptions.IrisLoadWarning, + category=iris.warnings.IrisLoadWarning, ) # Perform any user registered callback function. @@ -662,7 +660,11 @@ def load_cubes(file_sources, callback=None, constraints=None): class ChunkControl(threading.local): + """Provide user control of Chunk Control.""" + class Modes(Enum): + """Modes Enums.""" + DEFAULT = auto() FROM_FILE = auto() AS_DASK = auto() @@ -704,9 +706,9 @@ def set( ---------- var_names : str or list of str, default=None apply the `dimension_chunksizes` controls only to these variables, - or when building :class:`~iris.cube.Cube`\\ s from these data variables. + or when building :class:`~iris.cube.Cube` from these data variables. If ``None``, settings apply to all loaded variables. - dimension_chunksizes : dict of {str: int} + **dimension_chunksizes : dict of {str: int} Kwargs specifying chunksizes for dimensions of file variables. 
Each key-value pair defines a chunk size for a named file dimension, e.g. ``{'time': 10, 'model_levels':1}``. @@ -775,7 +777,7 @@ def from_file(self) -> None: ------ KeyError If any NetCDF data variables - those that become - :class:`~iris.cube.Cube`\\ s - do not specify chunk sizes. + :class:`~iris.cube.Cube` - do not specify chunk sizes. Notes ----- @@ -797,6 +799,7 @@ def as_dask(self) -> None: Notes ----- This function acts as a context manager, for use in a ``with`` block. + """ old_mode = self.mode old_var_dim_chunksizes = deepcopy(self.var_dim_chunksizes) diff --git a/lib/iris/fileformats/netcdf/saver.py b/lib/iris/fileformats/netcdf/saver.py index b35b85bbae..42616d7fd1 100644 --- a/lib/iris/fileformats/netcdf/saver.py +++ b/lib/iris/fileformats/netcdf/saver.py @@ -49,6 +49,7 @@ from iris.fileformats.netcdf import _dask_locks, _thread_safe_nc import iris.io import iris.util +import iris.warnings # Get the logger : shared logger for all in 'iris.fileformats.netcdf'. from . import logger @@ -161,8 +162,8 @@ class _WarnComboMaskSave( - iris.exceptions.IrisMaskValueMatchWarning, - iris.exceptions.IrisSaveWarning, + iris.warnings.IrisMaskValueMatchWarning, + iris.warnings.IrisSaveWarning, ): """One-off combination of warning classes - enhances user filtering.""" @@ -182,9 +183,9 @@ def append(self, name, coord): Parameters ---------- - name: + name : CF name of the associated coordinate. - coord: + coord : The coordinate of the associated CF name. Returns @@ -209,7 +210,7 @@ def name(self, coord): Parameters ---------- - coord: + coord : The coordinate of the associated CF name. Returns @@ -229,12 +230,13 @@ def coord(self, name): Parameters ---------- - name: + name : CF name of the associated coordinate, or None if not recognised. Returns ------- CF name or None. 
+ """ result = None for pair in self._map: @@ -317,8 +319,8 @@ def _data_fillvalue_check(arraylib, data, check_value): return is_masked, contains_value -class SaverFillValueWarning(iris.exceptions.IrisSaverFillValueWarning): - """Backwards compatible form of :class:`iris.exceptions.IrisSaverFillValueWarning`.""" +class SaverFillValueWarning(iris.warnings.IrisSaverFillValueWarning): + """Backwards compatible form of :class:`iris.warnings.IrisSaverFillValueWarning`.""" # TODO: remove at the next major release. pass @@ -338,7 +340,7 @@ def _fillvalue_report(fill_info, is_masked, contains_fill_value, warn=False): whether the data array was masked contains_fill_value : bool whether the data array contained the fill-value - warn : bool, optional + warn : bool, default=False if True, also issue any resulting warning immediately. Returns @@ -390,11 +392,9 @@ def __init__(self, filename, netcdf_format, compute=True): filename : str or netCDF4.Dataset Name of the netCDF file to save the cube. OR a writeable object supporting the :class:`netCF4.Dataset` api. - netcdf_format : str Underlying netCDF file format, one of 'NETCDF4', 'NETCDF4_CLASSIC', 'NETCDF3_CLASSIC' or 'NETCDF3_64BIT'. Default is 'NETCDF4' format. - compute : bool, default=True If ``True``, delayed variable saves will be completed on exit from the Saver context (after first closing the target file), equivalent to @@ -404,7 +404,7 @@ def __init__(self, filename, netcdf_format, compute=True): variables for which the source data was lazy. These writes can be completed later, see :meth:`delayed_completion`. - .. Note:: + .. note:: If ``filename`` is an open dataset, rather than a filepath, then the caller must specify ``compute=False``, **close the dataset**, and complete delayed saving afterwards. @@ -543,39 +543,38 @@ def write( ---------- cube : :class:`iris.cube.Cube` A :class:`iris.cube.Cube` to be saved to a netCDF file. 
- local_keys : iterable of str + local_keys : iterable of str, optional An interable of cube attribute keys. Any cube attributes with matching keys will become attributes on the data variable rather than global attributes. - .. Note:: + .. note:: Has no effect if :attr:`iris.FUTURE.save_split_attrs` is ``True``. - - unlimited_dimensions : iterable of str and/or :class:`iris.coords.Coord` + unlimited_dimensions : iterable of str and/or :class:`iris.coords.Coord`, optional List of coordinate names (or coordinate objects) corresponding to coordinate dimensions of `cube` to save with the NetCDF dimension variable length 'UNLIMITED'. By default, no unlimited dimensions are saved. Only the 'NETCDF4' format supports multiple 'UNLIMITED' dimensions. - zlib : bool + zlib : bool, default=False If `True`, the data will be compressed in the netCDF file using gzip compression (default `False`). - complevel : int + complevel : int, default=4 An integer between 1 and 9 describing the level of compression desired (default 4). Ignored if `zlib=False`. - shuffle : bool + shuffle : bool, default=True If `True`, the HDF5 shuffle filter will be applied before compressing the data (default `True`). This significantly improves compression. Ignored if `zlib=False`. - fletcher32 : bool + fletcher32 : bool, default=False If `True`, the Fletcher32 HDF5 checksum algorithm is activated to detect errors. Default `False`. - contiguous : bool + contiguous : bool, default=False If `True`, the variable data is stored contiguously on disk. Default `False`. Setting to `True` for a variable with an unlimited dimension will trigger an error. - chunksizes : tuple of int + chunksizes : tuple of int, optional Used to manually specify the HDF5 chunksizes for each dimension of the variable. 
A detailed discussion of HDF chunking and I/O performance is available @@ -583,7 +582,7 @@ def write( Basically, you want the chunk size for each dimension to match as closely as possible the size of the data block that users will read from the file. `chunksizes` cannot be set if `contiguous=True`. - endian : str + endian : str, default="native" Used to control whether the data is stored in little or big endian format on disk. Possible values are 'little', 'big' or 'native' (default). The library will automatically handle endian conversions @@ -591,7 +590,7 @@ def write( on a computer with the opposite format as the one used to create the file, there may be some performance advantage to be gained by setting the endian-ness. - least_significant_digit : int + least_significant_digit : int, optional If `least_significant_digit` is specified, variable data will be truncated (quantized). In conjunction with `zlib=True` this produces 'lossy', but significantly more efficient compression. For @@ -603,7 +602,7 @@ def write( "least_significant_digit -- power of ten of the smallest decimal place in unpacked data that is a reliable value". Default is `None`, or no quantization, or 'lossless' compression. - packing : type or str or dict or list + packing : type or str or dict or list, optional A numpy integer datatype (signed or unsigned) or a string that describes a numpy integer dtype(i.e. 'i2', 'short', 'u4') or a dict of packing parameters as described below. This provides @@ -618,7 +617,7 @@ def write( manually using a dict to avoid this. The default is `None`, in which case the datatype is determined from the cube and no packing will occur. - fill_value: + fill_value : optional The value to use for the `_FillValue` attribute on the netCDF variable. If `packing` is specified the value of `fill_value` should be in the domain of the packed data. 
@@ -739,7 +738,7 @@ def write( cf_patch(profile, self._dataset, cf_var_cube) else: msg = "cf_profile is available but no {} defined.".format("cf_patch") - warnings.warn(msg, category=iris.exceptions.IrisCfSaveWarning) + warnings.warn(msg, category=iris.warnings.IrisCfSaveWarning) @staticmethod def check_attribute_compliance(container, data_dtype): @@ -783,7 +782,7 @@ def update_global_attributes(self, attributes=None, **kwargs): Parameters ---------- - attributes : dict or iterable of key, value pairs + attributes : dict or iterable of key, value pairs, optional CF global attributes to be updated. """ # TODO: when when iris.FUTURE.save_split_attrs is removed, this routine will @@ -807,12 +806,14 @@ def _create_cf_dimensions(self, cube, dimension_names, unlimited_dimensions=None ---------- cube : :class:`iris.cube.Cube` A :class:`iris.cube.Cube` in which to lookup coordinates. + dimension_names : unlimited_dimensions : iterable of strings and/or :class:`iris.coords.Coord` objects): List of coordinates to make unlimited (None by default). Returns ------- None. + """ unlimited_dim_names = [] if unlimited_dimensions is not None: @@ -1133,7 +1134,7 @@ def _add_aux_factories(self, cube, cf_var_cube, dimension_names): msg = "Unable to determine formula terms for AuxFactory: {!r}".format( factory ) - warnings.warn(msg, category=iris.exceptions.IrisSaveWarning) + warnings.warn(msg, category=iris.warnings.IrisSaveWarning) else: # Override `standard_name`, `long_name`, and `axis` of the # primary coord that signals the presence of a dimensionless @@ -1497,7 +1498,7 @@ def _create_cf_bounds(self, coord, cf_var, cf_name): ---------- coord : :class:`iris.coords.Coord` A coordinate of a cube. - cf_var: + cf_var : CF-netCDF variable cf_name : str name of the CF-NetCDF variable. @@ -1763,12 +1764,12 @@ def _create_generic_cf_array_var( An Iris :class:`iris.coords._DimensionalMetadata`, belonging to the cube. Provides data, units and standard/long/var names. 
Not used if 'element_dims' is not None. - element_dims : list of str, or None + element_dims : list of str, optional If set, contains the variable dimension (names), otherwise these are taken from `element.cube_dims[cube]`. For Mesh components (element coordinates and connectivities), this *must* be passed in, as "element.cube_dims" does not function. - fill_value : number or None + fill_value : number, optional If set, create the variable with this fill-value, and fill any masked data points with this value. If not set, standard netcdf4-python behaviour : the variable has no @@ -2084,7 +2085,7 @@ def add_ellipsoid(ellipsoid): elif isinstance(cs, iris.coord_systems.OSGB): warnings.warn( "OSGB coordinate system not yet handled", - category=iris.exceptions.IrisSaveWarning, + category=iris.warnings.IrisSaveWarning, ) # lambert azimuthal equal area @@ -2172,7 +2173,7 @@ def add_ellipsoid(ellipsoid): "Unable to represent the horizontal " "coordinate system. The coordinate system " "type %r is not yet implemented." % type(cs), - category=iris.exceptions.IrisSaveWarning, + category=iris.warnings.IrisSaveWarning, ) self._coord_systems.append(cs) @@ -2207,6 +2208,8 @@ def _create_cf_data_variable( fill_value : optional See :func:`iris.fileformats.netcdf.Saver.write` + Notes + ----- All other keywords are passed through to the dataset's `createVariable` method. @@ -2340,7 +2343,7 @@ def set_packing_ncattrs(cfvar): "attribute, but {attr_name!r} should only be a CF " "global attribute.".format(attr_name=attr_name) ) - warnings.warn(msg, category=iris.exceptions.IrisCfSaveWarning) + warnings.warn(msg, category=iris.warnings.IrisCfSaveWarning) _setncattr(cf_var, attr_name, value) @@ -2563,7 +2566,7 @@ def complete(self, issue_warnings=True) -> List[Warning]: if issue_warnings: # Issue any delayed warnings from the compute.
for delayed_warning in result_warnings: - warnings.warn(delayed_warning, category=iris.exceptions.IrisSaveWarning) + warnings.warn(delayed_warning, category=iris.warnings.IrisSaveWarning) return result_warnings @@ -2590,7 +2593,7 @@ def save( * Iris will write CF 1.7 compliant NetCDF files. * **If split-attribute saving is disabled**, i.e. - :data:`iris.FUTURE`\\ ``.save_split_attrs`` is ``False``, then attributes + :data:`iris.FUTURE` ``.save_split_attrs`` is ``False``, then attributes dictionaries on each cube in the saved cube list will be compared, and common attributes saved as NetCDF global attributes where appropriate. @@ -2619,7 +2622,7 @@ def save( When saving to a dataset, ``compute`` **must** be ``False`` : See the ``compute`` parameter. - netcdf_format : str + netcdf_format : str, default="NETCDF4" Underlying netCDF file format, one of 'NETCDF4', 'NETCDF4_CLASSIC', 'NETCDF3_CLASSIC' or 'NETCDF3_64BIT'. Default is 'NETCDF4' format. local_keys : iterable of str, optional @@ -2637,20 +2640,20 @@ def save( variable length 'UNLIMITED'. By default, no unlimited dimensions are saved. Only the 'NETCDF4' format supports multiple 'UNLIMITED' dimensions. - zlib : bool, optional + zlib : bool, default=False If `True`, the data will be compressed in the netCDF file using gzip compression (default `False`). - complevel : int + complevel : int, default=4 An integer between 1 and 9 describing the level of compression desired (default 4). Ignored if `zlib=False`. - shuffle : bool, optional + shuffle : bool, default=True If `True`, the HDF5 shuffle filter will be applied before compressing the data (default `True`). This significantly improves compression. Ignored if `zlib=False`. - fletcher32 : bool, optional + fletcher32 : bool, default=False If `True`, the Fletcher32 HDF5 checksum algorithm is activated to detect errors. Default `False`. - contiguous : bool, optional + contiguous : bool, default=False If `True`, the variable data is stored contiguously on disk.
Default `False`. Setting to `True` for a variable with an unlimited dimension will trigger an error. @@ -2662,7 +2665,7 @@ def save( Basically, you want the chunk size for each dimension to match as closely as possible the size of the data block that users will read from the file. `chunksizes` cannot be set if `contiguous=True`. - endian : str + endian : str, default="native" Used to control whether the data is stored in little or big endian format on disk. Possible values are 'little', 'big' or 'native' (default). The library will automatically handle endian conversions @@ -2707,7 +2710,7 @@ def save( same number of elements as `cube` if `cube` is a :class:`iris.cube.CubeList`, or a single element, and each element of this argument will be applied to each cube separately. - compute : bool, optional + compute : bool, default=True Default is ``True``, meaning complete the file immediately, and return ``None``. When ``False``, create the output file but don't write any lazy array content to @@ -2717,12 +2720,12 @@ def save( Several such data saves can be performed in parallel, by passing a list of them into a :func:`dask.compute` call. - .. Note:: + .. note:: when computed, the returned :class:`dask.delayed.Delayed` object returns a list of :class:`Warning` : These are any warnings which *would* have been issued in the save call, if ``compute`` had been ``True``. - .. Note:: + .. note:: If saving to an open dataset instead of a filepath, then the caller **must** specify ``compute=False``, and complete delayed saves **after closing the dataset**. @@ -2732,7 +2735,7 @@ def save( Returns ------- - result: None or dask.delayed.Delayed + result : None or dask.delayed.Delayed If `compute=True`, returns `None`. Otherwise returns a :class:`dask.delayed.Delayed`, which implements delayed writing to fill in the variables data. @@ -2813,7 +2816,7 @@ def attr_values_equal(val1, val2): f"Saving the cube global attributes {sorted(invalid_globals)} as local " "(i.e. 
data-variable) attributes, where possible, since they are not " "the same on all input cubes.", - category=iris.exceptions.IrisSaveWarning, + category=iris.warnings.IrisSaveWarning, ) cubes = cubes.copy() # avoiding modifying the actual input arg. for i_cube in range(len(cubes)): @@ -2829,7 +2832,7 @@ def attr_values_equal(val1, val2): f"Global cube attributes {sorted(blocked_attrs)} " f'of cube "{cube.name()}" were not saved, overlaid ' "by existing local attributes with the same names.", - category=iris.exceptions.IrisSaveWarning, + category=iris.warnings.IrisSaveWarning, ) demote_attrs -= blocked_attrs if demote_attrs: @@ -2971,7 +2974,7 @@ def is_valid_packspec(p): msg = "cf_profile is available but no {} defined.".format( "cf_patch_conventions" ) - warnings.warn(msg, category=iris.exceptions.IrisCfSaveWarning) + warnings.warn(msg, category=iris.warnings.IrisCfSaveWarning) # Add conventions attribute. if iris.FUTURE.save_split_attrs: diff --git a/lib/iris/fileformats/nimrod.py b/lib/iris/fileformats/nimrod.py index 3aea8b8b81..55927df3ef 100644 --- a/lib/iris/fileformats/nimrod.py +++ b/lib/iris/fileformats/nimrod.py @@ -169,10 +169,11 @@ class NimrodField: Capable of converting itself into a :class:`~iris.cube.Cube` - References: - Met Office (2003): Met Office Rain Radar Data from the NIMROD System. - NCAS British Atmospheric Data Centre, date of citation. - https://catalogue.ceda.ac.uk/uuid/82adec1f896af6169112d09cc1174499 + References + ---------- + Met Office (2003): Met Office Rain Radar Data from the NIMROD System. + NCAS British Atmospheric Data Centre, date of citation. 
+ https://catalogue.ceda.ac.uk/uuid/82adec1f896af6169112d09cc1174499 """ @@ -203,7 +204,6 @@ def _read_header_subset(self, infile, names, dtype): def _read_header(self, infile): """Load the 512 byte header (surrounded by 4-byte length).""" - leading_length = struct.unpack(">L", infile.read(4))[0] if leading_length != 512: raise TranslationError("Expected header leading_length of 512") @@ -290,17 +290,17 @@ def _read_data(self, infile): def load_cubes(filenames, callback=None): - """Loads cubes from a list of NIMROD filenames. - - Args: - - * filenames - list of NIMROD filenames to load - - Kwargs: + """Load cubes from a list of NIMROD filenames. - * callback - a function which can be passed on to - :func:`iris.io.run_callback` + Parameters + ---------- + filenames : + List of NIMROD filenames to load + callback : optional + A function which can be passed on to :func:`iris.io.run_callback` + Notes + ----- .. note:: The resultant cubes may not be in the same order as in the files. diff --git a/lib/iris/fileformats/nimrod_load_rules.py b/lib/iris/fileformats/nimrod_load_rules.py index bb6d13f50a..16f23c8a6f 100644 --- a/lib/iris/fileformats/nimrod_load_rules.py +++ b/lib/iris/fileformats/nimrod_load_rules.py @@ -17,9 +17,9 @@ from iris.coords import DimCoord from iris.exceptions import ( CoordinateNotFoundError, - IrisNimrodTranslationWarning, TranslationError, ) +from iris.warnings import IrisNimrodTranslationWarning __all__ = ["run"] @@ -32,7 +32,7 @@ class TranslationWarning(IrisNimrodTranslationWarning): - """Backwards compatible form of :class:`iris.exceptions.IrisNimrodTranslationWarning`.""" + """Backwards compatible form of :class:`iris.warnings.IrisNimrodTranslationWarning`.""" # TODO: remove at the next major release. pass @@ -45,9 +45,12 @@ def is_missing(field, value): def name(cube, field, handle_metadata_errors): """Set the cube's name from the field. + Modifies the Nimrod object title based on other meta-data in the Nimrod field and known use cases. 
+ Adds "mean_of" or "standard_deviation_of_" to the cube name if appropriate. + """ title_from_field_code = { 12: "air_pressure", @@ -237,8 +240,7 @@ def reference_time(cube, field): def forecast_period(cube): - """Add a forecast_period coord based on existing time and - forecast_reference_time coords. + """Add forecast_period coord based on existing time and forecast_reference_time coords. Must be run after time() and reference_time() @@ -293,8 +295,7 @@ def experiment(cube, field): def proj_biaxial_ellipsoid(field, handle_metadata_errors): - """Return the correct dictionary of arguments needed to define an - iris.coord_systems.GeogCS. + """Return correct dict of arguments needed to define an iris.coord_systems.GeogCS. Based firstly on the value given by ellipsoid, then by grid if ellipsoid is missing, select the right pre-defined ellipsoid dictionary (Airy_1830 or @@ -339,7 +340,9 @@ def proj_biaxial_ellipsoid(field, handle_metadata_errors): def set_british_national_grid_defaults(field, handle_metadata_errors): - """Check for missing coord-system meta-data and set default values for + """Check for missing coord-system meta-data and set default values. + + Check for missing coord-system meta-data and set default values for the Ordnance Survey GB Transverse Mercator projection. Some Radarnet files are missing these. @@ -374,6 +377,7 @@ def set_british_national_grid_defaults(field, handle_metadata_errors): def coord_system(field, handle_metadata_errors): """Define the coordinate system for the field. + Handles Transverse Mercator, Universal Transverse Mercator and Plate Carree. Transverse Mercator projections will default to the British National Grid if any @@ -411,6 +415,7 @@ def coord_system(field, handle_metadata_errors): def horizontal_grid(cube, field, handle_metadata_errors): """Add X and Y coordinates to the cube. + Handles Transverse Mercator, Universal Transverse Mercator and Plate Carree. 
coordinate reference system is supplied by coord_system(field) @@ -455,7 +460,9 @@ def horizontal_grid(cube, field, handle_metadata_errors): def vertical_coord(cube, field): - """Add a vertical coord to the cube, with bounds, if appropriate. + """Add a vertical coord to the cube, with bounds. + + Add a vertical coord to the cube, with bounds, if appropriate. Handles special numbers for "at-sea-level" (8888) and "at-ground-level" (9999). @@ -695,7 +702,9 @@ def known_threshold_coord(field): def probability_coord(cube, field, handle_metadata_errors): - """Add a coord relating to probability meta-data from the header to the + """Add a coord relating to probability meta-data from the header to the cube. + + Add a coord relating to probability meta-data from the header to the cube if appropriate. Must be run after the name method. @@ -881,11 +890,10 @@ def time_averaging(cube, field): def run(field, handle_metadata_errors=True): """Convert a NIMROD field to an Iris cube. - Args - ---- - field: :class:`~iris.fileformats.nimrod.NimrodField` - - handle_metadata_errors + Parameters + ---------- + field : :class:`~iris.fileformats.nimrod.NimrodField` + handle_metadata_errors : bool, default=True Set to False to omit handling of known meta-data deficiencies in Nimrod-format data @@ -893,6 +901,7 @@ def run(field, handle_metadata_errors=True): ------- :class:`~iris.cube.Cube` A new :class:`~iris.cube.Cube`, created from the NimrodField. + """ cube = iris.cube.Cube(field.data) diff --git a/lib/iris/fileformats/pp.py b/lib/iris/fileformats/pp.py index a654bfde6f..c39c1a53a7 100644 --- a/lib/iris/fileformats/pp.py +++ b/lib/iris/fileformats/pp.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Provides UK Met Office Post Process (PP) format specific capabilities. 
- -""" +"""Provides UK Met Office Post Process (PP) format specific capabilities.""" from abc import ABCMeta, abstractmethod import collections @@ -34,6 +32,7 @@ import iris.fileformats.pp_load_rules from iris.fileformats.pp_save_rules import verify import iris.fileformats.rules +import iris.warnings try: import mo_pack @@ -218,8 +217,8 @@ class _WarnComboLoadingMask( - iris.exceptions.IrisLoadWarning, - iris.exceptions.IrisMaskValueMatchWarning, + iris.warnings.IrisLoadWarning, + iris.warnings.IrisMaskValueMatchWarning, ): """One-off combination of warning classes - enhances user filtering.""" @@ -227,8 +226,8 @@ class _WarnComboLoadingMask( class _WarnComboLoadingDefaulting( - iris.exceptions.IrisDefaultingWarning, - iris.exceptions.IrisLoadWarning, + iris.warnings.IrisDefaultingWarning, + iris.warnings.IrisLoadWarning, ): """One-off combination of warning classes - enhances user filtering.""" @@ -236,8 +235,8 @@ class _WarnComboLoadingDefaulting( class _WarnComboIgnoringLoad( - iris.exceptions.IrisIgnoringWarning, - iris.exceptions.IrisLoadWarning, + iris.warnings.IrisIgnoringWarning, + iris.warnings.IrisLoadWarning, ): """One-off combination of warning classes - enhances user filtering.""" @@ -275,13 +274,15 @@ class STASH(collections.namedtuple("STASH", "model section item")): __slots__ = () def __new__(cls, model, section, item): - """Args - ---- - model + """Create namedtuple STASH instance. + + Parameters + ---------- + model : A positive integer less than 100, or None. - section + section : A non-negative integer less than 100, or None. - item + item : A positive integer less than 1000, or None. """ @@ -358,8 +359,7 @@ def __ne__(self, other): class SplittableInt: - """A class to hold integers which can easily get each decimal digit - individually. + """A class to hold integers which can easily get each decimal digit individually. 
>>> three_six_two = SplittableInt(362) >>> print(three_six_two) @@ -371,28 +371,27 @@ class SplittableInt: Notes ----- - No support for negative numbers + No support for negative numbers. """ def __init__(self, value, name_mapping_dict=None): """Build a SplittableInt given the positive integer value provided. - Args - ---- - **kwargs - * name_mapping_dict - (dict) - A special mapping to provide name based access to specific - integer positions: + Parameters + ---------- + name_mapping_dict : dict + A special mapping to provide name based access to specific + integer positions: - >>> a = SplittableInt(1234, {'hundreds': 2}) - >>> print(a.hundreds) - 2 - >>> a.hundreds = 9 - >>> print(a.hundreds) - 9 - >>> print(a) - 1934 + >>> a = SplittableInt(1234, {'hundreds': 2}) + >>> print(a.hundreds) + 2 + >>> a.hundreds = 9 + >>> print(a.hundreds) + 9 + >>> print(a) + 1934 """ @@ -659,9 +658,10 @@ def __ne__(self, other): def _data_bytes_to_shaped_array( data_bytes, lbpack, boundary_packing, data_shape, data_type, mdi, mask=None ): - """Convert the already read binary data payload into a numpy array, unpacking - and decompressing as per the F3 specification. + """Convert binary payload into a numpy array. + Convert the already read binary data payload into a numpy array, unpacking + and decompressing as per the F3 specification. """ if lbpack.n1 in (0, 2): data = np.frombuffer(data_bytes, dtype=data_type) @@ -811,10 +811,7 @@ def _data_bytes_to_shaped_array( def _header_defn(release_number): - """Return the zero-indexed header definition for a particular release of - a PPField. - - """ + """Return zero-indexed header definition for a particular release of a PPField.""" um_header = UM_HEADERS[release_number] offset = UM_TO_PP_HEADER_OFFSET return [ @@ -824,7 +821,9 @@ def _header_defn(release_number): def _pp_attribute_names(header_defn): - """Return the allowed attributes of a PPField: + """Return the allowed attributes of a PPField. 
+ + Return the allowed attributes of a PPField: all of the normal headers (i.e. not the _SPECIAL_HEADERS), the _SPECIAL_HEADERS with '_' prefixed, the possible extra data headers. @@ -846,7 +845,9 @@ def _pp_attribute_names(header_defn): class PPField(metaclass=ABCMeta): - """A generic class for PP fields - not specific to a particular + """Base class for PP fields. + + A generic class for PP fields - not specific to a particular header release number. A PPField instance can easily access the PP header "words" as attributes @@ -879,7 +880,9 @@ def __init__(self, header=None): self.raw_lbpack = header[self.HEADER_DICT["lbpack"][0]] def __getattr__(self, key): - """Method supports deferred attribute creation, which offers a + """Return the value of the key. + + Method supports deferred attribute creation, which offers a significant loading optimisation, particularly when not all attributes are referenced and therefore created on the instance. @@ -972,8 +975,9 @@ def __repr__(self): @property def stash(self): - """Stash property giving access to the associated STASH object, - now supporting __eq__. + """Stash property giving access to the associated STASH object. + + Now supporting __eq__. """ if ( @@ -1051,10 +1055,7 @@ def lbproc(self, value): @property def data(self): - """:class:`numpy.ndarray` representing the multidimensional data - of the pp file. - - """ + """:class:`numpy.ndarray` representing multidimensional data of the pp file.""" if is_lazy_data(self._data): # Replace with real data on the first access. self._data = as_concrete_data(self._data) @@ -1354,7 +1355,7 @@ def coord_system(self): Returns ------- - :class:`~iris.coord_systems.GeogCS` or class:`~iris.coord_systems.RotatedGeogCS`. + :class:`~iris.coord_systems.GeogCS` or :class:`~iris.coord_systems.RotatedGeogCS`. 
""" geog_cs = iris.coord_systems.GeogCS(EARTH_RADIUS) @@ -1447,10 +1448,7 @@ def __ne__(self, other): class PPField2(PPField): - """A class to hold a single field from a PP file, with a - header release number of 2. - - """ + """Hold a single field from a PP file, with a header release number of 2.""" HEADER_DEFN = _header_defn(2) HEADER_DICT = dict(HEADER_DEFN) @@ -1459,7 +1457,9 @@ class PPField2(PPField): @property def t1(self): - """cftime.datetime object consisting of the lbyr, lbmon, lbdat, lbhr, + """cftime.datetime object. + + cftime.datetime object consisting of the lbyr, lbmon, lbdat, lbhr, and lbmin attributes. """ @@ -1490,7 +1490,9 @@ def t1(self, dt): @property def t2(self): - """cftime.datetime object consisting of the lbyrd, lbmond, lbdatd, + """cftime.datetime object. + + cftime.datetime object consisting of the lbyrd, lbmond, lbdatd, lbhrd, and lbmind attributes. """ @@ -1521,10 +1523,7 @@ def t2(self, dt): class PPField3(PPField): - """A class to hold a single field from a PP file, with a - header release number of 3. - - """ + """Hold a single field from a PP file, with a header release number of 3.""" HEADER_DEFN = _header_defn(3) HEADER_DICT = dict(HEADER_DEFN) @@ -1533,7 +1532,9 @@ class PPField3(PPField): @property def t1(self): - """cftime.datetime object consisting of the lbyr, lbmon, lbdat, lbhr, + """cftime.datetime object. + + cftime.datetime object consisting of the lbyr, lbmon, lbdat, lbhr, lbmin, and lbsec attributes. """ @@ -1565,7 +1566,9 @@ def t1(self, dt): @property def t2(self): - """cftime.datetime object consisting of the lbyrd, lbmond, lbdatd, + """cftime.datetime object. + + cftime.datetime object consisting of the lbyrd, lbmond, lbdatd, lbhrd, lbmind, and lbsecd attributes. """ @@ -1614,17 +1617,16 @@ def make_pp_field(header): def load(filename, read_data=False, little_ended=False): """Return an iterator of PPFields given a filename. 
- Args - ---- - filename + Parameters + ---------- + filename : str string of the filename to load. - **kwargs - * read_data - boolean - Flag whether or not the data should be read, if False an empty - data manager will be provided which can subsequently load the data - on demand. Default False. - * little_ended - boolean - If True, file contains all little-ended words (header and data). + read_data : bool, default=False + Flag whether or not the data should be read, if False an empty + data manager will be provided which can subsequently load the data + on demand. Default False. + little_ended : bool, default=False + If True, file contains all little-ended words (header and data). Notes ----- @@ -1640,7 +1642,9 @@ def load(filename, read_data=False, little_ended=False): def _interpret_fields(fields): - """Turn the fields read with load and FF2PP._extract_field into usable + """Turn the fields read with load and FF2PP._extract_field into usable fields. + + Turn the fields read with load and FF2PP._extract_field into usable fields. One of the primary purposes of this function is to either convert "deferred bytes" into "deferred arrays" or "loaded bytes" into actual numpy arrays (via the _create_field_data) function. @@ -1695,7 +1699,7 @@ def _interpret_fields(fields): "Landmask compressed fields existed without a " "landmask to decompress with. The data will have " "a shape of (0, 0) and will not read.", - category=iris.exceptions.IrisLoadWarning, + category=iris.warnings.IrisLoadWarning, ) mask_shape = (0, 0) else: @@ -1708,9 +1712,12 @@ def _interpret_fields(fields): def _create_field_data(field, data_shape, land_mask_field=None): - """Modify a field's ``_data`` attribute either by: - * converting a 'deferred array bytes' tuple into a lazy array, - * converting LoadedArrayBytes into an actual numpy array. + """Modify a field's ``_data`` attribute. 
+ + Modify a field's ``_data`` attribute either by: + + * converting a 'deferred array bytes' tuple into a lazy array, + * converting LoadedArrayBytes into an actual numpy array. If 'land_mask_field' is passed (not None), then it contains the associated landmask, which is also a field : Its data array is used as a template for @@ -1805,8 +1812,7 @@ def calc_array(mask, values): def _field_gen(filename, read_data_bytes, little_ended=False): - """Return a generator of "half-formed" PPField instances derived from - the given filename. + """Return generator of "half-formed" PPField instances derived from given filename. A field returned by the generator is only "half-formed" because its `_data` attribute represents a simple one-dimensional stream of @@ -1933,7 +1939,9 @@ def _field_gen(filename, read_data_bytes, little_ended=False): def _convert_constraints(constraints): - """Convert known constraints from Iris semantics to PP semantics + """Convert known constraints from Iris semantics to PP semantics. + + Convert known constraints from Iris semantics to PP semantics ignoring all unknown constraints. """ @@ -1942,10 +1950,7 @@ def _convert_constraints(constraints): unhandled_constraints = False def _make_func(stashobj): - """Provide unique name-space for each lambda function's stashobj - variable. - - """ + """Provide unique name-space for each lambda function's stashobj variable.""" return lambda stash: stash == stashobj for con in constraints: @@ -1976,10 +1981,7 @@ def _make_func(stashobj): unhandled_constraints = True def pp_filter(field): - """Return True if field is to be kept, - False if field does not match filter. - - """ + """Return True if field is to be kept, False if field does not match filter.""" res = True if field.stash not in _STASH_ALLOW: if pp_constraints.get("stash"): @@ -2000,15 +2002,14 @@ def pp_filter(field): def load_cubes(filenames, callback=None, constraints=None): """Load cubes from a list of pp filenames. 
- Args - ---- - filenames + Parameters + ---------- + filenames : list of pp filenames to load - **kwargs - * constraints - a list of Iris constraints - * callback - a function which can be passed on to :func:`iris.io.run_callback` + constraints : optional + A list of Iris constraints + callback : optional + A function which can be passed on to :func:`iris.io.run_callback` Notes ----- @@ -2025,21 +2026,20 @@ def load_cubes(filenames, callback=None, constraints=None): def load_cubes_little_endian(filenames, callback=None, constraints=None): """Load cubes from a list of pp filenames containing little-endian data. - Args - ---- - filenames + Parameters + ---------- + filenames : list of pp filenames to load - **kwargs - * constraints - a list of Iris constraints - * callback - a function which can be passed on to :func:`iris.io.run_callback` + constraints : optional + a list of Iris constraints + callback : optional + a function which can be passed on to :func:`iris.io.run_callback` Notes ----- The resultant cubes may not be in the order that they are in the file (order is not preserved when there is a field with orography - references) + references). """ return _load_cubes_variable_loader( @@ -2052,12 +2052,11 @@ def load_cubes_little_endian(filenames, callback=None, constraints=None): def load_pairs_from_fields(pp_fields): - r"""Convert an iterable of PP fields into an iterable of tuples of - (Cubes, PPField). + r"""Convert an iterable of PP fields into an iterable of tuples of (Cubes, PPField). - Args - ---- - pp_fields: + Parameters + ---------- + pp_fields : An iterable of :class:`iris.fileformats.pp.PPField`. Returns @@ -2150,27 +2149,24 @@ def _load_cubes_variable_loader( def save(cube, target, append=False, field_coords=None): """Use the PP saving rules (and any user rules) to save a cube to a PP file. - Args - ---- - cube: :class:`iris.cube.Cube` - + Parameters + ---------- + cube : :class:`iris.cube.Cube` target A filename or open file handle. 
- - **kwargs - * append - Whether to start a new file afresh or add the cube(s) - to the end of the file. - Only applicable when target is a filename, not a file - handle. - Default is False. - * field_coords - list of 2 coords or coord names which are to be used - for reducing the given cube into 2d slices, - which will ultimately determine the x and y - coordinates of the resulting fields. - If None, the final two dimensions are chosen - for slicing. + append : bool, default=False + Whether to start a new file afresh or add the cube(s) + to the end of the file. + Only applicable when target is a filename, not a file + handle. + Default is False. + field_coords : optional + list of 2 coords or coord names which are to be used + for reducing the given cube into 2d slices, + which will ultimately determine the x and y + coordinates of the resulting fields. + If None, the final two dimensions are chosen + for slicing. Notes ----- @@ -2184,21 +2180,22 @@ def save(cube, target, append=False, field_coords=None): def save_pairs_from_cube(cube, field_coords=None, target=None): - """Use the PP saving rules to convert a cube or + """Use the PP saving rules to convert a cube. + + Use the PP saving rules to convert a cube or iterable of cubes to an iterable of (2D cube, PP field) pairs. - Args - ---- - cube: + Parameters + ---------- + cube : A :class:`iris.cube.Cube` - **kwargs - * field_coords: - List of 2 coords or coord names which are to be used for - reducing the given cube into 2d slices, which will ultimately - determine the x and y coordinates of the resulting fields. - If None, the final two dimensions are chosen for slicing. - * target: - A filename or open file handle. + field_coords : optional + List of 2 coords or coord names which are to be used for + reducing the given cube into 2d slices, which will ultimately + determine the x and y coordinates of the resulting fields. + If None, the final two dimensions are chosen for slicing. 
+ target : optional + A filename or open file handle. """ # Open issues @@ -2300,21 +2297,21 @@ def save_pairs_from_cube(cube, field_coords=None, target=None): def as_fields(cube, field_coords=None, target=None): - """Use the PP saving rules (and any user rules) to convert a cube to + """Use the PP saving rules to convert a cube to an iterable of PP fields. + + Use the PP saving rules (and any user rules) to convert a cube to an iterable of PP fields. - Args - ---- - cube - A :class:`iris.cube.Cube` - **kwargs - * field_coords: - List of 2 coords or coord names which are to be used for - reducing the given cube into 2d slices, which will ultimately - determine the x and y coordinates of the resulting fields. - If None, the final two dimensions are chosen for slicing. - * target: - A filename or open file handle. + Parameters + ---------- + cube : :class:`iris.cube.Cube` + field_coords : optional + List of 2 coords or coord names which are to be used for + reducing the given cube into 2d slices, which will ultimately + determine the x and y coordinates of the resulting fields. + If None, the final two dimensions are chosen for slicing. + target : optional + A filename or open file handle. """ return ( @@ -2325,21 +2322,20 @@ def as_fields(cube, field_coords=None, target=None): ) -def save_fields(fields, target, append=False): +def save_fields(fields, target, append: bool = False): """Save an iterable of PP fields to a PP file. - Args - ---- - fields: + Parameters + ---------- + fields : An iterable of PP fields. - target: + target : A filename or open file handle. - **kwargs - * append: - Whether to start a new file afresh or add the cube(s) to the end - of the file. - Only applicable when target is a filename, not a file handle. - Default is False. + append : bool, default=False + Whether to start a new file afresh or add the cube(s) to the end + of the file. + Only applicable when target is a filename, not a file handle. + Default is False. 
See Also -------- diff --git a/lib/iris/fileformats/pp_load_rules.py b/lib/iris/fileformats/pp_load_rules.py index 10da402520..8343afab40 100644 --- a/lib/iris/fileformats/pp_load_rules.py +++ b/lib/iris/fileformats/pp_load_rules.py @@ -3,10 +3,11 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. - # Historically this was auto-generated from # SciTools/iris-code-generators:tools/gen_rules.py +"""PP Load Rules.""" + import calendar from functools import wraps @@ -58,48 +59,36 @@ def _convert_vertical_coords( brlev, dim=None, ): - """Encode scalar or vector vertical level values from PP headers as CM data - components. - - Args: + """Encode scalar or vector vertical level values from PP headers as CM data components. - * lbcode: + Parameters + ---------- + lbcode : :class:`iris.fileformats.pp.SplittableInt` Scalar field :class:`iris.fileformats.pp.SplittableInt` value. - - * lbvc: + lbvc : Scalar field value. - - * blev: + blev : Scalar field value or :class:`numpy.ndarray` vector of field values. - - * lblev: + lblev : Scalar field value or :class:`numpy.ndarray` vector of field values. - - * stash: + stash : Scalar field :class:`iris.fileformats.pp.STASH` value. - - * bhlev: + bhlev : Scalar field value or :class:`numpy.ndarray` vector of field values. - - * bhrlev: + bhrlev : Scalar field value or :class:`numpy.ndarray` vector of field values. - - * brsvd1: + brsvd1 : Scalar field value or :class:`numpy.ndarray` vector of field values. - - * brsvd2: + brsvd2 : Scalar field value or :class:`numpy.ndarray` vector of field values. - - * brlev: + brlev : Scalar field value or :class:`numpy.ndarray` vector of field values. - - Kwargs: - - * dim: + dim : optional Associated dimension of the vertical coordinate. Defaults to None. - Returns: - A tuple containing a list of coords_and_dims, and a list of factories. 
+ Returns + ------- + A tuple containing a list of coords_and_dims, and a list of factories. """ factories = [] @@ -311,9 +300,9 @@ def _reshape_vector_args(values_and_dims): can combine without broadcasting errors (provided that all inputs mapping to a dimension define the same associated length). - Args: - - * values_and_dims (iterable of (array-like, iterable of int)): + Parameters + ---------- + values_and_dims : iterable of (array-like, iterable of int) Input arrays with associated mapping dimension numbers. The length of each 'dims' must match the ndims of the 'value'. @@ -350,7 +339,9 @@ def _reshape_vector_args(values_and_dims): def _collapse_degenerate_points_and_bounds(points, bounds=None, rtol=1.0e-7): - """Collapse points (and optionally bounds) in any dimensions over which all + """Collapse points (and optionally bounds) in any dimensions. + + Collapse points (and optionally bounds) in any dimensions over which all values are the same. All dimensions are tested, and if degenerate are reduced to length 1. @@ -359,17 +350,15 @@ def _collapse_degenerate_points_and_bounds(points, bounds=None, rtol=1.0e-7): numbers from cftime.date2num, which has limited precision because of the way it calculates with floats of days. - Args: - - * points (:class:`numpy.ndarray`)): + Parameters + ---------- + points : :class:`numpy.ndarray` Array of points values. - - Kwargs: - - * bounds (:class:`numpy.ndarray`) + bounds : :class:`numpy.ndarray`, optional Array of bounds values. This array should have an additional vertex dimension (typically of length 2) when compared to the points array i.e. bounds.shape = points.shape + (nvertex,) + rtol : optional, default=1.0e-7 Returns ------- @@ -397,8 +386,7 @@ def _collapse_degenerate_points_and_bounds(points, bounds=None, rtol=1.0e-7): def _reduce_points_and_bounds(points, lower_and_upper_bounds=None): - """Reduce the dimensionality of arrays of coordinate points (and optionally - bounds). 
+ """Reduce the dimensionality of arrays of coordinate points (and optionally bounds). Dimensions over which all values are the same are reduced to size 1, using :func:`_collapse_degenerate_points_and_bounds`. @@ -406,19 +394,16 @@ def _reduce_points_and_bounds(points, lower_and_upper_bounds=None): If the bounds arrays are also passed in, then all three arrays must have the same shape or be capable of being broadcast to match. - Args: - - * points (array-like): + Parameters + ---------- + points : array-like Coordinate point values. - - Kwargs: - - * lower_and_upper_bounds (pair of array-like, or None): + lower_and_upper_bounds : pair of array-like or None, optional Corresponding bounds values (lower, upper), if any. - Returns: - dims (iterable of ints), points(array), bounds(array) - + Returns + ------- + dims (iterable of ints), points(array), bounds(array) * 'dims' is the mapping from the result array dimensions to the original dimensions. However, when 'array' is scalar, 'dims' will be None (rather than an empty tuple). @@ -456,7 +441,9 @@ def _reduce_points_and_bounds(points, lower_and_upper_bounds=None): def _new_coord_and_dims( is_vector_operation, name, units, points, lower_and_upper_bounds=None ): - """Make a new (coordinate, cube_dims) pair with the given points, name, units + """Make a new (coordinate, cube_dims) pair. + + Make a new (coordinate, cube_dims) pair with the given points, name, units and optional bounds. In 'vector' style operation, the data arrays must have same number of @@ -466,27 +453,22 @@ def _new_coord_and_dims( * the result coordinate may be an AuxCoord if a DimCoord cannot be made (e.g. if values are non-monotonic). - Args: - - * is_vector_operation (bool): + Parameters + ---------- + is_vector_operation : bool If True, perform 'vector' style operation. - - * points (array-like): + points : array-like Coordinate point values. - - * name (string): + name : str Standard name of coordinate. 
- - * units (string or cf_unit.Unit): + units : str or cf_unit.Unit Units of coordinate. - - Kwargs: - - * lower_and_upper_bounds (pair of array-like, or None): + lower_and_upper_bounds : pair of array-like or None, optional Corresponding bounds values (lower, upper), if any. - Returns: - a new (coordinate, dims) pair. + Returns + ------- + A new (coordinate, dims) pair. """ bounds = lower_and_upper_bounds @@ -504,12 +486,15 @@ def _new_coord_and_dims( def _epoch_date_hours_internals(epoch_hours_unit, datetime): """Return an 'hours since epoch' number for a date. - Args: - * epoch_hours_unit (:class:`cf_unit.Unit'): + Parameters + ---------- + epoch_hours_unit : :class:`cf_unit.Unit' Unit defining the calendar and zero-time of conversion. - * datetime (:class:`datetime.datetime`-like): + datetime : :class:`datetime.datetime`-like Date object containing year / month / day attributes. + Notes + ----- This routine can also handle dates with a zero year, month or day : such dates were valid inputs to 'date2num' up to cftime version 1.0.1, but are now illegal : This routine interprets any zeros as being "1 year/month/day @@ -612,24 +597,21 @@ def _convert_time_coords( ): """Make time coordinates from the time metadata. - Args: - - * lbcode(:class:`iris.fileformats.pp.SplittableInt`): + Parameters + ---------- + lbcode : :class:`iris.fileformats.pp.SplittableInt` Scalar field value. - * lbtim (:class:`iris.fileformats.pp.SplittableInt`): + lbtim : :class:`iris.fileformats.pp.SplittableInt` Scalar field value. - * epoch_hours_unit (:class:`cf_units.Unit`): + epoch_hours_unit : :class:`cf_units.Unit` Epoch time reference unit. - * t1 (array-like or scalar): + t1 : array-like or scalar Scalar field value or an array of values. - * t2 (array-like or scalar): + t2 : array-like or scalar Scalar field value or an array of values. - * lbft (array-like or scalar): + lbft : array-like or scalar Scalar field value or an array of values. 
- - Kwargs: - - * t1_dims, t2_dims, lbft_dims (tuples of int): + t1_dims, t2_dims, lbft_dims : tuples of int, optional Cube dimension mappings for the array metadata. Each default to to (). The length of each dims tuple should equal the dimensionality of the corresponding array of values. @@ -824,13 +806,14 @@ def date2year(t_in): def _model_level_number(lblev): """Return model level number for an LBLEV value. - Args: - - * lblev (int): + Parameters + ---------- + lblev : int PP field LBLEV value. - Returns: - Model level number (integer). + Returns + ------- + Model level number (int). """ # See Word no. 33 (LBLEV) in section 4 of UM Model Docs (F3). @@ -874,15 +857,15 @@ def _convert_scalar_pseudo_level_coords(lbuser5): def convert(f): - """Converts a PP field into the corresponding items of Cube metadata. - - Args: + """Convert a PP field into the corresponding items of Cube metadata. - * f: - A :class:`iris.fileformats.pp.PPField` object. + Parameters + ---------- + f : :class:`iris.fileformats.pp.PPField` - Returns: - A :class:`iris.fileformats.rules.ConversionMetadata` object. + Returns + ------- + :class:`iris.fileformats.rules.ConversionMetadata` object. """ factories = [] @@ -949,7 +932,9 @@ def convert(f): def _all_other_rules(f): - """This deals with all the other rules that have not been factored into any of + """Deals with all the other rules. + + Deals with all the other rules that have not been factored into any of the other convert_scalar_coordinate functions above. """ diff --git a/lib/iris/fileformats/pp_save_rules.py b/lib/iris/fileformats/pp_save_rules.py index 60eef7ad96..73331268e4 100644 --- a/lib/iris/fileformats/pp_save_rules.py +++ b/lib/iris/fileformats/pp_save_rules.py @@ -3,13 +3,14 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. 
+"""PP Save Rules.""" + import warnings import cftime import iris from iris.aux_factory import HybridHeightFactory, HybridPressureFactory -from iris.exceptions import IrisPpClimModifiedWarning from iris.fileformats._ff_cross_references import STASH_TRANS from iris.fileformats._pp_lbproc_pairs import LBPROC_MAP from iris.fileformats.rules import ( @@ -21,17 +22,22 @@ ) from iris.fileformats.um_cf_map import CF_TO_LBFC from iris.util import is_regular, regular_step +from iris.warnings import IrisPpClimModifiedWarning def _basic_coord_system_rules(cube, pp): """Rules for setting the coord system of the PP field. - Args: - cube: the cube being saved as a series of PP fields. + Parameters + ---------- + cube : + The cube being saved as a series of PP fields. pp: the current PP field having save rules applied. + pp : - Returns: - The PP field with updated metadata. + Returns + ------- + The PP field with updated metadata. """ if cube.coord_system("GeogCS") is not None or cube.coord_system(None) is None: @@ -76,12 +82,16 @@ def _um_version_rules(cube, pp): def _stash_rules(cube, pp): """Attributes rules for setting the STASH attribute of the PP field. - Args: - cube: the cube being saved as a series of PP fields. - pp: the current PP field having save rules applied. + Parameters + ---------- + cube : + The cube being saved as a series of PP fields. + pp : + The current PP field having save rules applied. - Returns: - The PP field with updated metadata. + Returns + ------- + The PP field with updated metadata. """ if "STASH" in cube.attributes: @@ -95,12 +105,16 @@ def _stash_rules(cube, pp): def _general_time_rules(cube, pp): """Rules for setting time metadata of the PP field. - Args: - cube: the cube being saved as a series of PP fields. - pp: the current PP field having save rules applied. + Parameters + ---------- + cube : + The cube being saved as a series of PP fields. + pp : + The current PP field having save rules applied. 
- Returns: - The PP field with updated metadata. + Returns + ------- + The PP field with updated metadata. """ time_coord = scalar_coord(cube, "time") @@ -365,12 +379,16 @@ def _general_time_rules(cube, pp): def _calendar_rules(cube, pp): """Rules for setting the calendar of the PP field. - Args: - cube: the cube being saved as a series of PP fields. - pp: the current PP field having save rules applied. + Parameters + ---------- + cube : + The cube being saved as a series of PP fields. + pp : + The current PP field having save rules applied. - Returns: - The PP field with updated metadata. + Returns + ------- + The PP field with updated metadata. """ time_coord = scalar_coord(cube, "time") @@ -387,12 +405,16 @@ def _calendar_rules(cube, pp): def _grid_and_pole_rules(cube, pp): """Rules for setting the horizontal grid and pole location of the PP field. - Args: - cube: the cube being saved as a series of PP fields. - pp: the current PP field having save rules applied. + Parameters + ---------- + cube : + The cube being saved as a series of PP fields. + pp : + The current PP field having save rules applied. - Returns: - The PP field with updated metadata. + Returns + ------- + The PP field with updated metadata. """ lon_coord = vector_coord(cube, "longitude") @@ -465,12 +487,16 @@ def _grid_and_pole_rules(cube, pp): def _non_std_cross_section_rules(cube, pp): """Rules for applying non-standard cross-sections to the PP field. - Args: - cube: the cube being saved as a series of PP fields. - pp: the current PP field having save rules applied. + Parameters + ---------- + cube : + The cube being saved as a series of PP fields. + pp : + The current PP field having save rules applied. - Returns: - The PP field with updated metadata. + Returns + ------- + The PP field with updated metadata. """ # Define commonly-used coords. @@ -594,12 +620,16 @@ def _lbproc_rules(cube, pp): Note: `pp.lbproc` must be set to 0 before these rules are run. 
- Args: - cube: the cube being saved as a series of PP fields. - pp: the current PP field having save rules applied. + Parameters + ---------- + cube : + The cube being saved as a series of PP fields. + pp : + The current PP field having save rules applied. - Returns: - The PP field with updated metadata. + Returns + ------- + The PP field with updated metadata. """ # Basic setting (this may be overridden by subsequent rules). @@ -636,12 +666,16 @@ def _lbproc_rules(cube, pp): def _vertical_rules(cube, pp): """Rules for setting vertical levels for the PP field. - Args: - cube: the cube being saved as a series of PP fields. - pp: the current PP field having save rules applied. + Parameters + ---------- + cube : + The cube being saved as a series of PP fields. + pp : + The current PP field having save rules applied. - Returns: - The PP field with updated metadata. + Returns + ------- + The PP field with updated metadata. """ # Define commonly-used coords. @@ -820,12 +854,16 @@ def _all_other_rules(cube, pp): * lbfc (field code) * lbrsvd[3] (ensemble member number) - Args: - cube: the cube being saved as a series of PP fields. - pp: the current PP field having save rules applied. + Parameters + ---------- + cube : + The cube being saved as a series of PP fields. + pp : + The current PP field having save rules applied. - Returns: - The PP field with updated metadata. + Returns + ------- + The PP field with updated metadata. """ # "CFNAME mega-rule." diff --git a/lib/iris/fileformats/rules.py b/lib/iris/fileformats/rules.py index e2bfd250fd..be04f0bb5d 100644 --- a/lib/iris/fileformats/rules.py +++ b/lib/iris/fileformats/rules.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Generalised mechanisms for metadata translation and cube construction. 
- -""" +"""Generalised mechanisms for metadata translation and cube construction.""" import collections import warnings @@ -15,6 +13,7 @@ import iris.cube import iris.exceptions import iris.fileformats.um_cf_map +import iris.warnings Factory = collections.namedtuple("Factory", ["factory_class", "args"]) ReferenceTarget = collections.namedtuple("ReferenceTarget", ("name", "transform")) @@ -44,7 +43,7 @@ def as_cube(self): if len(src_cubes) > 1: warnings.warn( "Multiple reference cubes for {}".format(self.name), - category=iris.exceptions.IrisUserWarning, + category=iris.warnings.IrisUserWarning, ) src_cube = src_cubes[-1] @@ -102,7 +101,9 @@ def scalar_cell_method(cube, method, coord_name): def has_aux_factory(cube, aux_factory_class): - """Try to find an class:`~iris.aux_factory.AuxCoordFactory` instance of the + """Determine :class:`~iris.aux_factory.AuxCoordFactory` availability within cube. + + Try to find an :class:`~iris.aux_factory.AuxCoordFactory` instance of the specified type on the cube. """ @@ -113,7 +114,9 @@ def has_aux_factory(cube, aux_factory_class): def aux_factory(cube, aux_factory_class): - """Return the class:`~iris.aux_factory.AuxCoordFactory` instance of the + """Retrieve :class:`~iris.aux_factory.AuxCoordFactory` instance from cube. + + Return the :class:`~iris.aux_factory.AuxCoordFactory` instance of the specified type from a cube. """ @@ -140,7 +143,7 @@ class _ReferenceError(Exception): def _dereference_args(factory, reference_targets, regrid_cache, cube): - """Converts all the arguments for a factory into concrete coordinates.""" + """Convert all the arguments for a factory into concrete coordinates.""" args = [] for arg in factory.args: if isinstance(arg, Reference): @@ -197,7 +200,9 @@ def _regrid_to_target(src_cube, target_coords, target_cube): def _ensure_aligned(regrid_cache, src_cube, target_cube): - """Returns a version of `src_cube` suitable for use as an AuxCoord + """Ensure dimension compatible cubes are spatially aligned. 
+ + Returns a version of `src_cube` suitable for use as an AuxCoord on `target_cube`, or None if no version can be made. """ @@ -253,16 +258,14 @@ class Loader(collections.namedtuple("Loader", _loader_attrs)): def __new__(cls, field_generator, field_generator_kwargs, converter): """Create a definition of a field-based Cube loader. - Args: - - * field_generator + Parameters + ---------- + field_generator : A callable that accepts a filename as its first argument and returns an iterable of field objects. - - * field_generator_kwargs + field_generator_kwargs : Additional arguments to be passed to the field_generator. - - * converter + converter : A callable that converts a field object into a Cube. """ @@ -311,7 +314,7 @@ def _make_cube(field, converter): cube.units = metadata.units except ValueError: msg = "Ignoring PP invalid units {!r}".format(metadata.units) - warnings.warn(msg, category=iris.exceptions.IrisIgnoringWarning) + warnings.warn(msg, category=iris.warnings.IrisIgnoringWarning) cube.attributes["invalid_units"] = metadata.units cube.units = cf_units._UNKNOWN_UNIT_STRING @@ -334,7 +337,7 @@ def _resolve_factory_references( factory_name = factory.factory_class.__name__ warnings.warn( msg.format(factory=factory_name), - category=iris.exceptions.IrisUserWarning, + category=iris.warnings.IrisUserWarning, ) else: aux_factory = factory.factory_class(*args) @@ -385,20 +388,19 @@ def _load_pairs_from_fields_and_filenames( def load_pairs_from_fields(fields, converter): - """Convert an iterable of fields into an iterable of Cubes using the - provided converter. + """Convert iterable of fields into iterable of Cubes using the provided converter. - Args: - - * fields: + Parameters + ---------- + fields : An iterable of fields. - - * converter: + converter : An Iris converter function, suitable for use with the supplied fields. See the description in :class:`iris.fileformats.rules.Loader`. - Returns: - An iterable of (:class:`iris.cube.Cube`, field) pairs. 
+ Returns + ------- + An iterable of (:class:`iris.cube.Cube`, field) pairs. """ return _load_pairs_from_fields_and_filenames( diff --git a/lib/iris/fileformats/um/_fast_load.py b/lib/iris/fileformats/um/_fast_load.py index 477a221727..12441acdcc 100644 --- a/lib/iris/fileformats/um/_fast_load.py +++ b/lib/iris/fileformats/um/_fast_load.py @@ -2,7 +2,8 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Support for "fast" loading of structured UM files in iris load functions, +"""Support for "fast" loading of structured UM files in iris load functions. + i.e. :meth:`iris.load` and its associates. This provides a context manager to enable structured loading via all the iris @@ -49,12 +50,11 @@ class FieldCollation(BasicFieldCollation): def __init__(self, fields, filepath): """FieldCollation initialise. - Args: - - * fields (iterable of :class:`iris.fileformats.pp.PPField`): + Parameters + ---------- + fields : iterable of :class:`iris.fileformats.pp.PPField` The fields in the collation. - - * filepath (string): + filepath : str The path of the file the collation is loaded from. """ @@ -72,8 +72,9 @@ def data_field_indices(self): This records the original file location of the individual data fields contained, within the input datafile. - Returns: - An integer array of shape `self.vector_dims_shape`. + Returns + ------- + An integer array of shape `self.vector_dims_shape`. """ # Get shape : N.B. this calculates (and caches) the structure. @@ -154,18 +155,21 @@ def iter_fields_decorated_with_load_indices(fields_iter): def _convert_collation(collation): - """Converts a FieldCollation into the corresponding items of Cube - metadata. + """Convert a FieldCollation into the corresponding items of Cube metadata. - Args: - - * collation: + Parameters + ---------- + collation : A FieldCollation object. - Returns: - A :class:`iris.fileformats.rules.ConversionMetadata` object. 
+ Returns + ------- + A :class:`iris.fileformats.rules.ConversionMetadata` object. + Notes + ----- .. note: + This is the 'loader.converter', in the control structure passed to the generic rules code, :meth:`iris.fileformats.rules.load_cubes`. @@ -538,7 +542,9 @@ def structured_um_loading(): @contextmanager def _raw_structured_loading(): - """Private context manager called by :func:`iris.load_raw` to prevent + """Prevent structured loading from concatenating its result cubes. + + Private context manager called by :func:`iris.load_raw` to prevent structured loading from concatenating its result cubes in that case. """ diff --git a/lib/iris/fileformats/um/_fast_load_structured_fields.py b/lib/iris/fileformats/um/_fast_load_structured_fields.py index 41ec8720bc..976819ffd5 100644 --- a/lib/iris/fileformats/um/_fast_load_structured_fields.py +++ b/lib/iris/fileformats/um/_fast_load_structured_fields.py @@ -20,7 +20,9 @@ class BasicFieldCollation: - """An object representing a group of UM fields with array structure that can + """An object representing a group of UM fields with array structure. + + An object representing a group of UM fields with array structure that can be vectorized into a single cube. For example: @@ -47,9 +49,9 @@ class BasicFieldCollation: def __init__(self, fields): """BasicFieldCollation initialise. - Args: - - * fields (iterable of :class:`iris.fileformats.pp.PPField`): + Parameters + ---------- + fields : iterable of :class:`iris.fileformats.pp.PPField` The fields in the collation. """ @@ -234,7 +236,7 @@ def _calculate_structure(self): def _um_collation_key_function(field): - """Standard collation key definition for fast structured field loading. + """Collation key definition for fast structured field loading. The elements used here are the minimum sufficient to define the 'phenomenon', as described for :meth:`group_structured_fields`. 
@@ -264,40 +266,44 @@ def _um_collation_key_function(field): def group_structured_fields( field_iterator, collation_class=BasicFieldCollation, **collation_kwargs ): - """Collect structured fields into identified groups whose fields can be - combined to form a single cube. + """Collect structured fields into identified groups. - Args: + Collect structured fields into identified groups whose fields can be + combined to form a single cube. - * field_iterator (iterator of :class:`iris.fileformats.pp.PPField`): + Parameters + ---------- + field_iterator : iterator of :class:`iris.fileformats.pp.PPField` A source of PP or FF fields. N.B. order is significant. - - Kwargs: - - * collation_class (class): + collation_class : class, optional, default=BasicFieldCollation Type of collation wrapper to create from each group of fields. - * collation_kwargs (dict): + **collation_kwargs : dict Additional constructor keywords for collation creation. + Returns + ------- + Generator of 'collation_class' objects + A generator of 'collation_class' objects, each of which contains a + single collated group from the input fields. + + Notes + ----- + Implicitly, within each result group, *all* other metadata components + should be either: + + * the same for all fields, + * completely irrelevant, or + * used by a vectorised rule function (such as + :func:`iris.fileformats.pp_load_rules._convert_time_coords`). + The function sorts and collates on phenomenon-relevant metadata only, defined as the field components: 'lbuser[3]' (stash), 'lbproc' (statistic), 'lbuser[6]' (model). + Each distinct combination of these defines a specific phenomenon (or statistical aggregation of one), and those fields appear as a single iteration result. - Implicitly, within each result group, *all* other metadata components - should be either: - - * the same for all fields, - * completely irrelevant, or - * used by a vectorised rule function (such as - :func:`iris.fileformats.pp_load_rules._convert_time_coords`). 
- - Returns: - A generator of 'collation_class' objects, each of which contains a - single collated group from the input fields. - .. note:: At present, fields with different values of 'lbuser[4]' (pseudo-level) diff --git a/lib/iris/fileformats/um/_ff_replacement.py b/lib/iris/fileformats/um/_ff_replacement.py index d726f63a10..52afe343c3 100644 --- a/lib/iris/fileformats/um/_ff_replacement.py +++ b/lib/iris/fileformats/um/_ff_replacement.py @@ -19,21 +19,22 @@ def um_to_pp(filename, read_data=False, word_depth=None): Returns an iterator over the fields contained within the FieldsFile, returned as :class:`iris.fileformats.pp.PPField` instances. - Args: - - * filename (string): + Parameters + ---------- + filename : str Specify the name of the FieldsFile. - - Kwargs: - - * read_data (boolean): + read_data : bool, default=False Specify whether to read the associated PPField data within the FieldsFile. Default value is False. + word_depth : optional - Returns: - Iteration of :class:`iris.fileformats.pp.PPField`. + Returns + ------- + Iteration of :class:`iris.fileformats.pp.PPField`. - For example:: + Examples + -------- + :: >>> for field in um.um_to_pp(filename): ... print(field) @@ -50,17 +51,19 @@ def um_to_pp(filename, read_data=False, word_depth=None): def load_cubes(filenames, callback, constraints=None, _loader_kwargs=None): - """Loads cubes from filenames of UM fieldsfile-like files. - - Args: - - * filenames - list of filenames to load - - Kwargs: - - * callback - a function which can be passed on to - :func:`iris.io.run_callback` - + """Load cubes from filenames of UM fieldsfile-like files. + + Parameters + ---------- + filenames : + list of filenames to load + callback : optional + A function which can be passed on to :func:`iris.io.run_callback` + constraints : optional + _loader_kwargs : optional + + Notes + ----- .. 
note:: The resultant cubes may not be in the order that they are in the @@ -78,12 +81,12 @@ def load_cubes(filenames, callback, constraints=None, _loader_kwargs=None): def load_cubes_32bit_ieee(filenames, callback, constraints=None): - """Loads cubes from filenames of 32bit ieee converted UM fieldsfile-like - files. - - .. seealso:: + """Load cubes from filenames of 32bit ieee converted UM fieldsfile-like files. - :func:`load_cubes` for keyword details + See Also + -------- + :func:`load_cubes` + For keyword details """ return load_cubes( diff --git a/lib/iris/fileformats/um/_optimal_array_structuring.py b/lib/iris/fileformats/um/_optimal_array_structuring.py index ce2cba7d5c..7d006ebeff 100644 --- a/lib/iris/fileformats/um/_optimal_array_structuring.py +++ b/lib/iris/fileformats/um/_optimal_array_structuring.py @@ -9,7 +9,9 @@ def _optimal_dimensioning_structure(structure, element_priorities): - """Uses the structure options provided by the + """Determine the optimal array structure for the :class:`FieldCollation`. + + Uses the structure options provided by the :class:`~iris.fileformats._structured_array_identification.GroupStructure` to determine the optimal array structure for the :class:`FieldCollation`. @@ -18,13 +20,12 @@ def _optimal_dimensioning_structure(structure, element_priorities): in more than one structure options then dimension priorities as specified by `element_priorities` are used to determine optimal structure. - Args: - - * structure: + Parameters + ---------- + structure : A set of structure options, as provided by :class:\ `~iris.fileformats._structured_array_identification.GroupStructure`. - - * element_priorities: + element_priorities : A dictionary mapping structure element names to their priority as defined by their input order to :func:`~optimal_array_structure`. 
@@ -52,37 +53,39 @@ def _optimal_dimensioning_structure(structure, element_priorities): def optimal_array_structure(ordering_elements, actual_values_elements=None): """Calculate an optimal array replication structure for a set of vectors. - Args: - - * ordering_elements (iterable of (name, 1-d array)): + Parameters + ---------- + ordering_elements : iterable of (name, 1-d array) Input element names and value-vectors. Must all be the same length (but not necessarily type). Must have at least one. - Kwargs: + .. note:: - * actual_values_elements (iterable of (name, 1-d array)): + The 'ordering_elements' arg contains the pattern used to deduce a + structure. The order of this is significant, in that earlier + elements get priority when associating dimensions with specific + elements. + actual_values_elements : iterable of (name, 1-d array), optional The 'real' values used to construct the result arrays, if different from 'ordering_elements'. Must contain all the same names (but not necessarily in the same order). - The 'ordering_elements' arg contains the pattern used to deduce a - structure. The order of this is significant, in that earlier elements get - priority when associating dimensions with specific elements. - - Returns: - dims_shape, primary_elements, element_arrays_and_dims, where: - - * 'dims_shape' is the shape of the vector dimensions chosen. - - * 'primary_elements' is a set of dimension names; the names of input - elements that are identified as dimensions. At most one for each - dimension. - - * 'element_arrays_and_dims' is a dictionary [name: (array, dims)], - for all elements that are not dimensionless. Each array is reduced - to the shape of its mapped dimension. - - For example:: + Returns + ------- + dims_shape + Shape of the vector dimensions chosen. + primary_elements + Set of dimension names; the names of input + elements that are identified as dimensions. At most one for each + dimension. 
+ element_arrays_and_dims + A dictionary [name: (array, dims)], + for all elements that are not dimensionless. Each array is reduced + to the shape of its mapped dimension. + + Examples + -------- + :: >>> import iris.fileformats.um._optimal_array_structuring as optdims >>> elements_structure = [('a', np.array([1, 1, 1, 2, 2, 2])), diff --git a/lib/iris/io/__init__.py b/lib/iris/io/__init__.py index b944b51978..d5896f25a4 100644 --- a/lib/iris/io/__init__.py +++ b/lib/iris/io/__init__.py @@ -147,7 +147,7 @@ def expand_filespecs(file_specs, files_expected=True): ---------- file_specs : iterable of str File paths which may contain ``~`` elements or wildcards. - files_expected : bool, optional, default=True + files_expected : bool, default=True Whether file is expected to exist (i.e. for load). Returns @@ -381,12 +381,12 @@ def save(source, target, saver=None, **kwargs): Iris currently supports three file formats for saving, which it can recognise by filename extension: - * netCDF - the Unidata network Common Data Format: - * see :func:`iris.fileformats.netcdf.save` - * GRIB2 - the WMO GRIdded Binary data format: - * see :func:`iris_grib.save_grib2`. - * PP - the Met Office UM Post Processing Format: - * see :func:`iris.fileformats.pp.save` + * **netCDF** - the Unidata network Common Data Format, + see :func:`iris.fileformats.netcdf.save` + * **GRIB2** - the WMO GRIdded Binary data format, + see :func:`iris_grib.save_grib2`. + * **PP** - the Met Office UM Post Processing Format, + see :func:`iris.fileformats.pp.save` A custom saver can be provided to the function to write to a different file format. diff --git a/lib/iris/io/format_picker.py b/lib/iris/io/format_picker.py index 2a056c788c..d5e54d231a 100644 --- a/lib/iris/io/format_picker.py +++ b/lib/iris/io/format_picker.py @@ -189,7 +189,7 @@ def __init__( priority: int Integer giving a priority for considering this specification where higher priority means sooner consideration. 
- constraint_aware_handler: optional, default=False + constraint_aware_handler: default=False """ if not isinstance(file_element, FileElement): raise ValueError( @@ -279,7 +279,7 @@ def __init__(self, requires_fh=True): Parameters ---------- - requires_fh : optional + requires_fh : bool, default=True Whether this FileElement needs a file buffer. """ diff --git a/lib/iris/iterate.py b/lib/iris/iterate.py index 41b3929464..0cf7a035be 100644 --- a/lib/iris/iterate.py +++ b/lib/iris/iterate.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Cube functions for iteration in step. - -""" +"""Cube functions for iteration in step.""" from collections.abc import Iterator import itertools @@ -12,7 +10,7 @@ import numpy as np -from iris.exceptions import IrisUserWarning +from iris.warnings import IrisUserWarning __all__ = ["izip"] @@ -25,30 +23,28 @@ def izip(*cubes, **kwargs): resulting iterator will step through combinations of the associated coordinates. - Args: - - * cubes (:class:`iris.cube.Cube`): + Parameters + ---------- + cubes : :class:`iris.cube.Cube` One or more :class:`iris.cube.Cube` instances over which to iterate in step. Each cube should be provided as a separate argument e.g. ``iris.iterate.izip(cube_a, cube_b, cube_c, ...)``. - - Kwargs: - - * coords (string, coord or a list of strings/coords): + coords : str, coord or a list of strings/coords Coordinate names/coordinates of the desired subcubes (i.e. those that are not iterated over). They must all be orthogonal (i.e. point to different dimensions). - * ordered (Boolean): + ordered : bool, optional If True (default), the order of the coordinates in the resulting subcubes will match the order of the coordinates in the coords keyword argument. If False, the order of the coordinates will be preserved and will match that of the input cubes. 
- Returns: - An iterator over a collection of tuples that contain the resulting - subcubes. + Returns + ------- + An iterator over a collection of tuples that contain the resulting subcubes. - For example: + Examples + -------- >>> e_content, e_density = iris.load_cubes( ... iris.sample_data_path('space_weather.nc'), ... ['total electron content', 'electron density']) @@ -61,6 +57,7 @@ def izip(*cubes, **kwargs): ----- This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. + """ if not cubes: raise TypeError("Expected one or more cubes.") @@ -169,7 +166,9 @@ def izip(*cubes, **kwargs): class _ZipSlicesIterator(Iterator): - """Extension to _SlicesIterator (see cube.py) to support iteration over a + """Support iteration over a collection of cubes. + + Extension to _SlicesIterator (see cube.py) to support iteration over a collection of cubes in step. """ @@ -281,7 +280,9 @@ def __next__(self): class _CoordWrapper: - """Class for creating a coordinate wrapper that allows the use of an + """Create a coordinate wrapper. + + Class for creating a coordinate wrapper that allows the use of an alternative equality function based on metadata rather than metadata + points/bounds. diff --git a/lib/iris/palette.py b/lib/iris/palette.py index 3180f1e02a..e180b649a8 100644 --- a/lib/iris/palette.py +++ b/lib/iris/palette.py @@ -2,9 +2,10 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Load, configure and register color map palettes and initialise -color map meta-data mappings. +"""Color map pallettes management. +Load, configure and register color map palettes and initialise +color map meta-data mappings. """ from functools import wraps @@ -37,13 +38,14 @@ def is_brewer(cmap): """Determine whether the color map is a Cynthia Brewer color map. 
- Args: - - * cmap: + Parameters + ---------- + cmap : The color map instance. - Returns: - Boolean. + Returns + ------- + bool """ result = False @@ -53,8 +55,11 @@ def is_brewer(cmap): def _default_cmap_norm(args, kwargs): - """This function injects default cmap and norm behaviour into the keyword + """Injects default cmap and norm behaviour into the keyword arguments. + + This function injects default cmap and norm behaviour into the keyword arguments, based on the cube referenced within the positional arguments. + """ cube = None @@ -103,16 +108,20 @@ def _default_cmap_norm(args, kwargs): def cmap_norm(cube): - """Determine the default :class:`matplotlib.colors.LinearSegmentedColormap` + """Determine the default. + + Determine the default :class:`matplotlib.colors.LinearSegmentedColormap` and :class:`iris.palette.SymmetricNormalize` instances associated with the cube. - Args: - - * cube (:class:`iris.cube.Cube`): + Parameters + ---------- + cube : :class:`iris.cube.Cube` Source cube to generate default palette from. - Returns: + Returns + ------- + tuple Tuple of :class:`matplotlib.colors.LinearSegmentedColormap` and :class:`iris.palette.SymmetricNormalize` @@ -127,25 +136,25 @@ def cmap_norm(cube): def auto_palette(func): - """Decorator wrapper function to control the default behaviour of the - matplotlib cmap and norm keyword arguments. + """Auto palette decorator wrapper function to control the default behaviour. - Args: + Decorator wrapper function to control the default behaviour of the + matplotlib cmap and norm keyword arguments. - * func (callable): + Parameters + ---------- + func : callable Callable function to be wrapped by the decorator. - Returns: - Closure wrapper function. + Returns + ------- + Closure wrapper function. """ @wraps(func) def wrapper_func(*args, **kwargs): - """Closure wrapper function to provide default keyword argument - behaviour. 
- - """ + """Closure wrapper function to provide default keyword argument behaviour.""" # Update the keyword arguments with defaults. args, kwargs = _default_cmap_norm(args, kwargs) # Call the wrapped function and return its result. @@ -211,7 +220,9 @@ def vmax(self, val): def _load_palette(): - """Load, configure and register color map palettes and initialise + """Load palette. + + Load, configure and register color map palettes and initialise color map metadata mappings. """ diff --git a/lib/iris/pandas.py b/lib/iris/pandas.py index dd7d0d31fc..1e79e1b31e 100644 --- a/lib/iris/pandas.py +++ b/lib/iris/pandas.py @@ -27,7 +27,7 @@ from iris._deprecation import warn_deprecated from iris.coords import AncillaryVariable, AuxCoord, CellMeasure, DimCoord from iris.cube import Cube, CubeList -from iris.exceptions import IrisIgnoringWarning +from iris.warnings import IrisIgnoringWarning def _get_dimensional_metadata(name, values, calendar=None, dm_class=None): @@ -147,8 +147,8 @@ def as_cube( Example usage:: - as_cube(series, calendars={0: cf_units.CALENDAR_360_DAY}) - as_cube(data_frame, calendars={1: cf_units.CALENDAR_STANDARD}) + as_cube(series, calendars={0: cf_units.CALENDAR_360_DAY}) + as_cube(data_frame, calendars={1: cf_units.CALENDAR_STANDARD}) Since this function converts to/from a Pandas object, laziness will not be preserved. @@ -195,7 +195,7 @@ def as_cubes( r"""Convert a Pandas Series/DataFrame into n-dimensional Iris Cubes, including dimensional metadata. The index of `pandas_structure` will be used for generating the - :class:`~iris.cube.Cube` dimension(s) and :class:`~iris.coords.DimCoord`\\ s. + :class:`~iris.cube.Cube` dimension(s) and :class:`~iris.coords.DimCoord`. Other dimensional metadata may span multiple dimensions - based on how the column values vary with the index values. @@ -227,7 +227,7 @@ def as_cubes( A :class:`~pandas.DataFrame` using columns as a second data dimension will need to be 'melted' before conversion. See the Examples for how. 
- :class:`dask.dataframe.DataFrame`\\ s are not supported. + :class:`dask.dataframe.DataFrame` are not supported. Since this function converts to/from a Pandas object, laziness will not be preserved. @@ -303,7 +303,7 @@ def as_cubes( in_region x x - Pandas uses ``NaN`` rather than masking data. Converted - :class:`~iris.cube.Cube`\\s can be masked in downstream user code : + :class:`~iris.cube.Cube` can be masked in downstream user code : >>> my_series = Series([300, np.NaN, 302], name="air_temperature") >>> converted_cube = as_cubes(my_series)[0] @@ -583,8 +583,6 @@ def as_series(cube, copy=True): If you have a large array that cannot be copied, make sure it is not masked and use copy=False. - Notes - ----- Since this function converts to/from a Pandas object, laziness will not be preserved. """ @@ -626,7 +624,7 @@ def as_data_frame( Parameters ---------- - cube: :class:`~iris.cube.Cube` + cube : :class:`~iris.cube.Cube` The :class:`~iris.cube.Cube` to be converted to a :class:`pandas.DataFrame`. copy : bool, default=True Whether the :class:`pandas.DataFrame` is a copy of the the Cube @@ -638,7 +636,7 @@ def as_data_frame( add_cell_measures : bool, default=False If True, add :attr:`~iris.cube.Cube.cell_measures` to the returned :class:`pandas.DataFrame`. - add_ancillary_variables: bool, default=False + add_ancillary_variables : bool, default=False If True, add :attr:`~iris.cube.Cube.ancillary_variables` to the returned :class:`pandas.DataFrame`. @@ -653,7 +651,7 @@ def as_data_frame( #. This documentation is for the new ``as_data_frame()`` behaviour, which is **currently opt-in** to preserve backwards compatibility. The default legacy behaviour is documented in pre-``v3.4`` documentation (summary: - limited to 2-dimensional :class:`~iris.cube.Cube`\\ s, with only the + limited to 2-dimensional :class:`~iris.cube.Cube`, with only the :attr:`~iris.cube.Cube.data` and :attr:`~iris.cube.Cube.dim_coords` being added). 
The legacy behaviour will be removed in a future version of Iris, so please opt-in to the new behaviour at your earliest @@ -673,7 +671,7 @@ def as_data_frame( Notes ----- - :class:`dask.dataframe.DataFrame`\\ s are not supported. + :class:`dask.dataframe.DataFrame` are not supported. A :class:`~pandas.MultiIndex` :class:`~pandas.DataFrame` is returned by default. Use the :meth:`~pandas.DataFrame.reset_index` to return a diff --git a/lib/iris/plot.py b/lib/iris/plot.py index 06dfe79aa9..e9f73bd86b 100644 --- a/lib/iris/plot.py +++ b/lib/iris/plot.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Iris-specific extensions to matplotlib, mimicking the :mod:`matplotlib.pyplot` -interface. +"""Iris-specific extensions to matplotlib, mimicking the :mod:`matplotlib.pyplot` interface. See also: :ref:`matplotlib `. @@ -31,11 +30,12 @@ import iris.coord_systems import iris.coords import iris.cube -from iris.exceptions import IrisError, IrisUnsupportedPlottingWarning +from iris.exceptions import IrisError # Importing iris.palette to register the brewer palettes. import iris.palette from iris.util import _meshgrid +from iris.warnings import IrisUnsupportedPlottingWarning # Cynthia Brewer citation text. BREWER_CITE = "Colours based on ColorBrewer.org" @@ -133,7 +133,9 @@ def _valid_bound_dim_coord(coord): def _get_plot_defn(cube, mode, ndims=2): - """Return data and plot-axis coords given a cube & a mode of either + """Return data and plot-axis coords. + + Return data and plot-axis coords given a cube & a mode of either POINT_MODE or BOUND_MODE. """ @@ -234,7 +236,6 @@ def _broadcast_2d(u, v): def _string_coord_axis_tick_labels(string_axes, axes=None): """Apply tick labels for string coordinates.""" - ax = axes if axes else plt.gca() for axis, ticks in string_axes.items(): # Define a tick formatter. 
This will assign a label to all ticks @@ -257,14 +258,16 @@ def ticker_func(tick_location, _): def _invert_yaxis(v_coord, axes=None): """Inverts the y-axis of the current plot based on conditions. - * If the y-axis is already inverted we don't want to re-invert it. - * If v_coord is None then it will not have any attributes. - * If neither of the above are true then invert y if v_coord has - attribute 'positive' set to 'down'. - - Args: + * If the y-axis is already inverted we don't want to re-invert it. + * If v_coord is None then it will not have any attributes. + * If neither of the above are true then invert y if v_coord has + attribute 'positive' set to 'down'. - * v_coord - the coord to be plotted on the y-axis + Parameters + ---------- + v_coord : + The coord to be plotted on the y-axis + axes : optional """ axes = axes if axes else plt.gca() @@ -276,7 +279,9 @@ def _invert_yaxis(v_coord, axes=None): def _check_bounds_contiguity_and_mask(coord, data, atol=None, rtol=None): - """Checks that any discontiguities in the bounds of the given coordinate only + """Check discontiguities in the bounds of the given coordinate. + + Checks that any discontiguities in the bounds of the given coordinate only occur where the data is masked. Where a discontinuity occurs the grid created for plotting will not be @@ -286,18 +291,20 @@ def _check_bounds_contiguity_and_mask(coord, data, atol=None, rtol=None): If a discontiguity occurs where the data is *not* masked, an error is raised. - Args: - coord: (iris.coord.Coord) - Coordinate the bounds of which will be checked for contiguity - data: (array) - Data of the the cube we are plotting - atol: - Absolute tolerance when checking the contiguity. Defaults to None. - If an absolute tolerance is not set, 1D coords are not checked (so - as to not introduce a breaking change without a major release) but - 2D coords are always checked, by calling - :meth:`iris.coords.Coord._discontiguity_in_bounds` with its default - tolerance. 
+ Parameters + ---------- + coord : iris.coord.Coord + Coordinate the bounds of which will be checked for contiguity + data : array + Data of the the cube we are plotting + atol : optional + Absolute tolerance when checking the contiguity. Defaults to None. + If an absolute tolerance is not set, 1D coords are not checked (so + as to not introduce a breaking change without a major release) but + 2D coords are always checked, by calling + :meth:`iris.coords.Coord._discontiguity_in_bounds` with its default + tolerance. + rtol : optional """ kwargs = {} @@ -699,7 +706,9 @@ def _get_geodesic_params(globe): def _shift_plot_sections(u_object, u, v): - """Shifts subsections of u by multiples of 360 degrees within ranges + """Shifts subsections of u by multiples of 360 degrees. + + Shifts subsections of u by multiples of 360 degrees within ranges defined by the points where the line should cross over the 0/360 degree longitude boundary. @@ -810,7 +819,9 @@ def _draw_1d_from_points(draw_method_name, arg_func, *args, **kwargs): def _draw_two_1d_from_points(draw_method_name, arg_func, *args, **kwargs): - """This function is equivalend to _draw_two_1d_from_points but expects two + """Draw two 1d frompoints (expects two y-axis variables rather than one). + + This function is equivalend to _draw_two_1d_from_points but expects two y-axis variables rather than one (such as is required for .fill_between). It can't be used where the y-axis variables are string coordinates. The y-axis variable provided first has precedence where the two differ on whether the @@ -853,12 +864,13 @@ def _draw_two_1d_from_points(draw_method_name, arg_func, *args, **kwargs): def _replace_axes_with_cartopy_axes(cartopy_proj): - """Replace non-cartopy subplot/axes with a cartopy alternative + """Replace non-cartopy subplot/axes with a cartopy alternative. + + Replace non-cartopy subplot/axes with a cartopy alternative based on the provided projection. 
If the current axes are already an instance of :class:`cartopy.mpl.geoaxes.GeoAxes` then no action is taken. """ - ax = plt.gca() if not isinstance(ax, cartopy.mpl.geoaxes.GeoAxes): fig = ax.get_figure() @@ -885,7 +897,9 @@ def _replace_axes_with_cartopy_axes(cartopy_proj): def _ensure_cartopy_axes_and_determine_kwargs(x_coord, y_coord, kwargs): - """Replace the current non-cartopy axes with + """Replace the current non-cartopy axes with :class:`cartopy.mpl.geoaxes.GeoAxes`. + + Replace the current non-cartopy axes with :class:`cartopy.mpl.geoaxes.GeoAxes` and return the appropriate kwargs dict based on the provided coordinates and kwargs. @@ -1040,21 +1054,20 @@ def _map_common(draw_method_name, arg_func, mode, cube, plot_defn, *args, **kwar def contour(cube, *args, **kwargs): """Draws contour lines based on the given Cube. - Kwargs: - - * coords: list of :class:`~iris.coords.Coord` objects or coordinate names + Parameters + ---------- + coords : list of :class:`~iris.coords.Coord` objects or coordinate names, optional Use the given coordinates as the axes for the plot. The order of the given coordinates indicates which axis to use for each, where the first element is the horizontal axis of the plot and the second element is the vertical axis of the plot. - - * axes: :class:`matplotlib.axes.Axes` + axes : :class:`matplotlib.axes.Axes`, optional The axes to use for drawing. Defaults to the current axes if none provided. - - See :func:`matplotlib.pyplot.contour` for details of other valid - keyword arguments. + **kwargs : dict, optional + See :func:`matplotlib.pyplot.contour` for details of other valid + keyword arguments. Notes ----- @@ -1069,20 +1082,19 @@ def contour(cube, *args, **kwargs): def contourf(cube, *args, **kwargs): """Draws filled contours based on the given Cube. 
- Kwargs: - - * coords: list of :class:`~iris.coords.Coord` objects or coordinate names + Parameters + ---------- + coords : list of :class:`~iris.coords.Coord` objects or coordinate names, optional Use the given coordinates as the axes for the plot. The order of the given coordinates indicates which axis to use for each, where the first element is the horizontal axis of the plot and the second element is the vertical axis of the plot. - - * axes: :class:`matplotlib.axes.Axes` + axes : :class:`matplotlib.axes.Axes`, optional The axes to use for drawing. Defaults to the current axes if none provided. - - See :func:`matplotlib.pyplot.contourf` for details of other valid - keyword arguments. + **kwargs : dict, optional + See :func:`matplotlib.pyplot.contourf` for details of other valid + keyword arguments. Notes ----- @@ -1173,12 +1185,13 @@ def default_projection(cube): def default_projection_extent(cube, mode=iris.coords.POINT_MODE): """Return the cube's extents ``(x0, x1, y0, y1)`` in its default projection. - Keyword arguments: - - * mode: Either ``iris.coords.POINT_MODE`` or ``iris.coords.BOUND_MODE`` - Triggers whether the extent should be representative of the cell - points, or the limits of the cell's bounds. - The default is iris.coords.POINT_MODE. + Parameters + ---------- + mode : + Either ``iris.coords.POINT_MODE`` or ``iris.coords.BOUND_MODE`` + Triggers whether the extent should be representative of the cell + points, or the limits of the cell's bounds. + The default is iris.coords.POINT_MODE. Notes ----- @@ -1220,14 +1233,13 @@ def _fill_orography(cube, coords, mode, vert_plot, horiz_plot, style_args): def orography_at_bounds(cube, facecolor="#888888", coords=None, axes=None): - """Plots orography defined at cell boundaries from the given Cube. + """Plot orography defined at cell boundaries from the given Cube. Notes ----- This function does not maintain laziness when called; it realises data. See more at :doc:`/userguide/real_and_lazy_data`. 
""" - # XXX Needs contiguous orography corners to work. raise NotImplementedError( "This operation is temporarily not provided " @@ -1259,14 +1271,13 @@ def horiz_plot(v_coord, orography, style_args): def orography_at_points(cube, facecolor="#888888", coords=None, axes=None): - """Plots orography defined at sample points from the given Cube. + """Plot orography defined at sample points from the given Cube. Notes ----- This function does not maintain laziness when called; it realises data. See more at :doc:`/userguide/real_and_lazy_data`. """ - style_args = {"facecolor": facecolor} def vert_plot(u_coord, orography, style_args): @@ -1289,24 +1300,21 @@ def horiz_plot(v_coord, orography, style_args): def outline(cube, coords=None, color="k", linewidth=None, axes=None): """Draws cell outlines based on the given Cube. - Kwargs: - - * coords: list of :class:`~iris.coords.Coord` objects or coordinate names + Parameters + ---------- + coords : list of :class:`~iris.coords.Coord` objects or coordinate names Use the given coordinates as the axes for the plot. The order of the given coordinates indicates which axis to use for each, where the first element is the horizontal axis of the plot and the second element is the vertical axis of the plot. - - * color: None or mpl color + color : mpl color, default="k" The color of the cell outlines. If None, the matplotlibrc setting patch.edgecolor is used by default. - - * linewidth: None or number + linewidth : number, optional The width of the lines showing the cell outlines. If None, the default width in patch.linewidth in matplotlibrc is used. - - * axes: :class:`matplotlib.axes.Axes` + axes : :class:`matplotlib.axes.Axes`,voptional The axes to use for drawing. Defaults to the current axes if none provided. @@ -1336,25 +1344,23 @@ def pcolor(cube, *args, **kwargs): The cube must have either two 1-dimensional coordinates or two 2-dimensional coordinates with contiguous bounds to plot the cube against. 
- Kwargs: - - * coords: list of :class:`~iris.coords.Coord` objects or coordinate names + Parameters + ---------- + coords : list of :class:`~iris.coords.Coord` objects or coordinate names, optional Use the given coordinates as the axes for the plot. The order of the given coordinates indicates which axis to use for each, where the first element is the horizontal axis of the plot and the second element is the vertical axis of the plot. - - * axes: :class:`matplotlib.axes.Axes` + axes : :class:`matplotlib.axes.Axes`, optional The axes to use for drawing. Defaults to the current axes if none provided. - - * contiguity_tolerance: float + contiguity_tolerance : float, optional The absolute tolerance used when checking for contiguity between the bounds of the cells. Defaults to None. - - See :func:`matplotlib.pyplot.pcolor` for details of other valid - keyword arguments. + **kwargs : dict, optional + See :func:`matplotlib.pyplot.pcolor` for details of other valid + keyword arguments. Notes ----- @@ -1375,24 +1381,22 @@ def pcolormesh(cube, *args, **kwargs): 2-dimensional coordinates with contiguous bounds to plot against each other. - Kwargs: - - * coords: list of :class:`~iris.coords.Coord` objects or coordinate names + Parameters + ---------- + coords : list of :class:`~iris.coords.Coord` objects or coordinate names, optional Use the given coordinates as the axes for the plot. The order of the given coordinates indicates which axis to use for each, where the first element is the horizontal axis of the plot and the second element is the vertical axis of the plot. - - * axes: :class:`matplotlib.axes.Axes` + axes : :class:`matplotlib.axes.Axes`, optional The axes to use for drawing. Defaults to the current axes if none provided. - - * contiguity_tolerance: float + contiguity_tolerance : float, optional The absolute tolerance used when checking for contiguity between the bounds of the cells. Defaults to None. 
- - See :func:`matplotlib.pyplot.pcolormesh` for details of other - valid keyword arguments. + **kwargs : dict, optional + See :func:`matplotlib.pyplot.pcolormesh` for details of other + valid keyword arguments. Notes ----- @@ -1407,21 +1411,20 @@ def pcolormesh(cube, *args, **kwargs): def points(cube, *args, **kwargs): """Draws sample point positions based on the given Cube. - Kwargs: - - * coords: list of :class:`~iris.coords.Coord` objects or coordinate names + Parameters + ---------- + coords : list of :class:`~iris.coords.Coord` objects or coordinate names, optional Use the given coordinates as the axes for the plot. The order of the given coordinates indicates which axis to use for each, where the first element is the horizontal axis of the plot and the second element is the vertical axis of the plot. - - * axes: :class:`matplotlib.axes.Axes` + axes : :class:`matplotlib.axes.Axes`, optional The axes to use for drawing. Defaults to the current axes if none provided. - - See :func:`matplotlib.pyplot.scatter` for details of other valid - keyword arguments. + **kwargs : dict, optional + See :func:`matplotlib.pyplot.scatter` for details of other valid + keyword arguments. Notes ----- @@ -1437,7 +1440,7 @@ def _scatter_args(u, v, data, *args, **kwargs): def _vector_component_args(x_points, y_points, u_data, *args, **kwargs): - """Callback from _draw_2d_from_points for 'quiver' and 'streamlines'. + """Vector component callback from _draw_2d_from_points for 'quiver' and 'streamlines'. Returns arguments (x, y, u, v), to be passed to the underlying matplotlib call. @@ -1477,19 +1480,34 @@ def _vector_component_args(x_points, y_points, u_data, *args, **kwargs): def barbs(u_cube, v_cube, *args, **kwargs): - """Draws a barb plot from two vector component cubes. Triangles, full-lines - and half-lines represent increments of 50, 10 and 5 respectively. + """Draw a barb plot from two vector component cubes. - Args: + Draws a barb plot from two vector component cubes. 
Triangles, full-lines + and half-lines represent increments of 50, 10 and 5 respectively. - * u_cube, v_cube : (:class:`~iris.cube.Cube`) + Parameters + ---------- + u_cube, v_cube : :class:`~iris.cube.Cube` u and v vector components. Must have same shape and units. If the cubes have geographic coordinates, the values are treated as true distance differentials, e.g. windspeeds, and *not* map coordinate vectors. The components are aligned with the North and East of the cube coordinate system. + coords : list of :class:`~iris.coords.Coord` or str, optional + Coordinates or coordinate names. Use the given coordinates as the axes + for the plot. The order of the given coordinates indicates which axis + to use for each, where the first element is the horizontal + axis of the plot and the second element is the vertical axis + of the plot. + axes : :class:`matplotlib.axes.Axes`, optional + Defaults to the current axes if none provided. + **kwargs : dict, optional + See :func:`matplotlib.pyplot.barbs` for details of other valid + keyword arguments. - .. Note:: + Notes + ----- + .. note:: At present, if u_cube and v_cube have geographic coordinates, then they must be in a lat-lon coordinate system, though it may be a rotated one. @@ -1500,23 +1518,6 @@ def barbs(u_cube, v_cube, *args, **kwargs): the :meth:`~cartopy.crs.CRS.transform_points` method of :class:`cartopy.crs.CRS`. - Kwargs: - - * coords: (list of :class:`~iris.coords.Coord` or string) - Coordinates or coordinate names. Use the given coordinates as the axes - for the plot. The order of the given coordinates indicates which axis - to use for each, where the first element is the horizontal - axis of the plot and the second element is the vertical axis - of the plot. - - * axes: the :class:`matplotlib.axes.Axes` to use for drawing. - Defaults to the current axes if none provided. - - See :func:`matplotlib.pyplot.barbs` for details of other valid - keyword arguments. 
- - Notes - ----- This function does not maintain laziness when called; it realises data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -1533,16 +1534,30 @@ def barbs(u_cube, v_cube, *args, **kwargs): def quiver(u_cube, v_cube, *args, **kwargs): """Draws an arrow plot from two vector component cubes. - Args: - - * u_cube, v_cube : :class:`~iris.cube.Cube` + Parameters + ---------- + u_cube, v_cube : :class:`~iris.cube.Cube` u and v vector components. Must have same shape and units. If the cubes have geographic coordinates, the values are treated as true distance differentials, e.g. windspeeds, and *not* map coordinate vectors. The components are aligned with the North and East of the cube coordinate system. + coords : list of :class:`~iris.coords.Coord` or str, optional + Coordinates or coordinate names. Use the given coordinates as the axes + for the plot. The order of the given coordinates indicates which axis + to use for each, where the first element is the horizontal + axis of the plot and the second element is the vertical axis + of the plot. + axes : :class:`matplotlib.axes.Axes`, optional + The axes to use for drawing. Defaults to the current axes if none + provided. + **kwargs : dict, optional + See :func:`matplotlib.pyplot.quiver` for details of other valid + keyword arguments. - .. Note:: + Notes + ----- + .. note:: At present, if u_cube and v_cube have geographic coordinates, then they must be in a lat-lon coordinate system, though it may be a rotated one. @@ -1553,24 +1568,6 @@ def quiver(u_cube, v_cube, *args, **kwargs): the :meth:`~cartopy.crs.CRS.transform_points` method of :class:`cartopy.crs.CRS`. - Kwargs: - - * coords: list of :class:`~iris.coords.Coord` or string - Coordinates or coordinate names. Use the given coordinates as the axes - for the plot. 
The order of the given coordinates indicates which axis - to use for each, where the first element is the horizontal - axis of the plot and the second element is the vertical axis - of the plot. - - * axes: :class:`matplotlib.axes.Axes` - The axes to use for drawing. Defaults to the current axes if none - provided. - - See :func:`matplotlib.pyplot.quiver` for details of other valid - keyword arguments. - - Notes - ----- This function does not maintain laziness when called; it realises data. See more at :doc:`/userguide/real_and_lazy_data`. @@ -1587,6 +1584,22 @@ def quiver(u_cube, v_cube, *args, **kwargs): def plot(*args, **kwargs): """Draws a line plot based on the given cube(s) or coordinate(s). + Parameters + ---------- + axes : :class:`matplotlib.axes.Axes`, optional + The axes to use for drawing. Defaults to the current axes if none + provided. + **kwargs : dict, optional + See :func:`matplotlib.pyplot.plot` for details of additional valid + keyword arguments. + + Notes + ----- + This function does not maintain laziness when called; it realises data. + See more at :doc:`/userguide/real_and_lazy_data`. + + Examples + -------- The first one or two arguments may be cubes or coordinates to plot. Each of the following is valid:: @@ -1610,20 +1623,6 @@ def plot(*args, **kwargs): # plot two 1d cubes against one-another plot(cube1, cube2) - Kwargs: - - * axes: :class:`matplotlib.axes.Axes` - The axes to use for drawing. Defaults to the current axes if none - provided. - - See :func:`matplotlib.pyplot.plot` for details of additional valid - keyword arguments. - - Notes - ----- - This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. - """ if "coords" in kwargs: raise TypeError( @@ -1638,22 +1637,18 @@ def plot(*args, **kwargs): def scatter(x, y, *args, **kwargs): """Draws a scatter plot based on the given cube(s) or coordinate(s). 
- Args: - - * x: :class:`~iris.cube.Cube` or :class:`~iris.coords.Coord` + Parameters + ---------- + x : :class:`~iris.cube.Cube` or :class:`~iris.coords.Coord` A cube or a coordinate to plot on the x-axis. - - * y: :class:`~iris.cube.Cube` or :class:`~iris.coords.Coord` + y : :class:`~iris.cube.Cube` or :class:`~iris.coords.Coord` A cube or a coordinate to plot on the y-axis. - - Kwargs: - - * axes: :class:`matplotlib.axes.Axes` + axes : :class:`matplotlib.axes.Axes`, optional The axes to use for drawing. Defaults to the current axes if none provided. - - See :func:`matplotlib.pyplot.scatter` for details of additional - valid keyword arguments. + **kwargs : dict, optional + See :func:`matplotlib.pyplot.scatter` for details of additional + valid keyword arguments. Notes ----- @@ -1672,27 +1667,22 @@ def scatter(x, y, *args, **kwargs): def fill_between(x, y1, y2, *args, **kwargs): - """Plots y1 and y2 against x, and fills the space between them. - - Args: + """Plot y1 and y2 against x, and fills the space between them. - * x: :class:`~iris.cube.Cube` or :class:`~iris.coords.Coord` + Parameters + ---------- + x : :class:`~iris.cube.Cube` or :class:`~iris.coords.Coord` A cube or a coordinate to plot on the x-axis. - - * y1: :class:`~iris.cube.Cube` or :class:`~iris.coords.Coord` + y1 : :class:`~iris.cube.Cube` or :class:`~iris.coords.Coord` First cube or a coordinate to plot on the y-axis. - - * y2: :class:`~iris.cube.Cube` or :class:`~iris.coords.Coord` + y2 : :class:`~iris.cube.Cube` or :class:`~iris.coords.Coord` Second cube or a coordinate to plot on the y-axis. - - Kwargs: - - * axes: :class:`matplotlib.axes.Axes` + axes : :class:`matplotlib.axes.Axes`, optional The axes to use for drawing. Defaults to the current axes if none provided. - - See :func:`matplotlib.pyplot.fill_between` for details of additional valid - keyword arguments. + **kwargs : dict, optional + See :func:`matplotlib.pyplot.fill_between` for details of additional valid + keyword arguments. 
Notes ----- @@ -1715,18 +1705,18 @@ def fill_between(x, y1, y2, *args, **kwargs): def hist(x, *args, **kwargs): """Compute and plot a histogram. - Args: - - * x: + Parameters + ---------- + x : A :class:`~iris.cube.Cube`, :class:`~iris.coords.Coord`, :class:`~iris.coords.CellMeasure`, or :class:`~iris.coords.AncillaryVariable` that will be used as the values that will be used to create the histogram. Note that if a coordinate is given, the points are used, ignoring the bounds. - - See :func:`matplotlib.pyplot.hist` for details of additional valid - keyword arguments. + **kwargs : dict, optional + See :func:`matplotlib.pyplot.hist` for details of additional valid + keyword arguments. Notes ----- @@ -1754,27 +1744,20 @@ def symbols(x, y, symbols, size, axes=None, units="inches"): See :mod:`iris.symbols` for available symbols. - Args: - - * x: iterable + Parameters + ---------- + x : iterable The x coordinates where the symbols will be plotted. - - * y: iterable + y : iterable The y coordinates where the symbols will be plotted. - - * symbols: iterable + symbols : iterable The symbols (from :mod:`iris.symbols`) to plot. - - * size: float + size : float The symbol size in `units`. - - Kwargs: - - * axes: :class:`matplotlib.axes.Axes` + axes : :class:`matplotlib.axes.Axes`, optional The axes to use for drawing. Defaults to the current axes if none provided. - - * units: ['inches', 'points'] + units : ['inches', 'points'], default="inches" The unit for the symbol size. Notes @@ -1826,22 +1809,17 @@ def citation(text, figure=None, axes=None): Places an anchored text citation in the bottom right hand corner of the plot. - Args: - - * text: str + Parameters + ---------- + text : str Citation text to be plotted. - - Kwargs: - - * figure::class:`matplotlib.figure.Figure` + figure : :class:`matplotlib.figure.Figure`, optional Target figure instance. Defaults to the current figure if none provided. 
- - * axes: :class:`matplotlib.axes.Axes` + axes : :class:`matplotlib.axes.Axes`, optional The axes to use for drawing. Defaults to the current axes if none provided. """ - if text is not None and len(text): if figure is None and not axes: figure = plt.gcf() @@ -1868,29 +1846,25 @@ def animate(cube_iterator, plot_func, fig=None, **kwargs): fig : :class:`matplotlib.figure.Figure` instance, optional By default, the current figure will be used or a new figure instance created if no figure is available. See :func:`matplotlib.pyplot.gcf`. + coords: list of :class:`~iris.coords.Coord` objects or coordinate names, optional + Use the given coordinates as the axes for the plot. The order of the + given coordinates indicates which axis to use for each, where the first + element is the horizontal axis of the plot and the second element is + the vertical axis of the plot. + interval: int, float or long, optional + Defines the time interval in milliseconds between successive frames. + A default interval of 100ms is set. + vmin, vmax: int, float or long, optional + Color scaling values, see :class:`matplotlib.colors.Normalize` for + further details. Default values are determined by the min-max across + the data set over the entire sequence. **kwargs : dict, optional - Valid keyword arguments: - - coords: list of :class:`~iris.coords.Coord` objects or coordinate names - Use the given coordinates as the axes for the plot. The order of the - given coordinates indicates which axis to use for each, where the first - element is the horizontal axis of the plot and the second element is - the vertical axis of the plot. - interval: int, float or long - Defines the time interval in milliseconds between successive frames. - A default interval of 100ms is set. - vmin, vmax: int, float or long - Color scaling values, see :class:`matplotlib.colors.Normalize` for - further details. Default values are determined by the min-max across - the data set over the entire sequence. 
- See :class:`matplotlib.animation.FuncAnimation` for details of other valid keyword arguments. Returns ------- - :class:`~matplotlib.animation.FuncAnimation` object suitable for - saving and or plotting. + :class:`~matplotlib.animation.FuncAnimation` object suitable for saving and or plotting. Examples -------- diff --git a/lib/iris/quickplot.py b/lib/iris/quickplot.py index fcb0b0d5b7..14380019f3 100644 --- a/lib/iris/quickplot.py +++ b/lib/iris/quickplot.py @@ -49,8 +49,7 @@ def _title(cube_or_coord, with_units): def _label(cube, mode, result=None, ndims=2, coords=None, axes=None): - """Puts labels on the current plot using the given cube.""" - + """Put labels on the current plot using the given cube.""" if axes is None: axes = plt.gca() @@ -199,19 +198,17 @@ def contourf(cube, *args, **kwargs): def outline(cube, coords=None, color="k", linewidth=None, axes=None): """Draws cell outlines on a labelled plot based on the given Cube. - Kwargs: - - * coords: list of :class:`~iris.coords.Coord` objects or coordinate names + Parameters + ---------- + coords : list of :class:`~iris.coords.Coord` objects or coordinate names, optional Use the given coordinates as the axes for the plot. The order of the given coordinates indicates which axis to use for each, where the first element is the horizontal axis of the plot and the second element is the vertical axis of the plot. - - * color: None or mpl color + color : str, default="k" The color of the cell outlines. If None, the matplotlibrc setting patch.edgecolor is used by default. - - * linewidth: None or number + linewidth : number, optional The width of the lines showing the cell outlines. If None, the default width in patch.linewidth in matplotlibrc is used. @@ -283,8 +280,7 @@ def points(cube, *args, **kwargs): def plot(*args, **kwargs): - """Draws a labelled line plot based on the given cube(s) or - coordinate(s). + """Draws a labelled line plot based on the given cube(s) or coordinate(s). 
See :func:`iris.plot.plot` for details of valid arguments and keyword arguments. @@ -302,8 +298,7 @@ def plot(*args, **kwargs): def scatter(x, y, *args, **kwargs): - """Draws a labelled scatter plot based on the given cubes or - coordinates. + """Draws a labelled scatter plot based on the given cubes or coordinates. See :func:`iris.plot.scatter` for details of valid arguments and keyword arguments. @@ -330,6 +325,7 @@ def fill_between(x, y1, y2, *args, **kwargs): ----- This function does not maintain laziness when called; it realises data. See more at :doc:`/userguide/real_and_lazy_data`. + """ axes = kwargs.get("axes") result = iplt.fill_between(x, y1, y2, *args, **kwargs) @@ -347,6 +343,7 @@ def hist(x, *args, **kwargs): ----- This function does not maintain laziness when called; it realises data. See more at :doc:`/userguide/real_and_lazy_data`. + """ axes = kwargs.get("axes") result = iplt.hist(x, *args, **kwargs) diff --git a/lib/iris/symbols.py b/lib/iris/symbols.py index b55471dadd..e52266b2fe 100644 --- a/lib/iris/symbols.py +++ b/lib/iris/symbols.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Contains symbol definitions for use with :func:`iris.plot.symbols`. - -""" +"""Contains symbol definitions for use with :func:`iris.plot.symbols`.""" import itertools import math @@ -195,8 +193,11 @@ def _backslash_path(): def _wedge_fix(wedge_path): - """Fixes the problem with Path.wedge where it doesn't initialise the first, + """Fix the problem with Path.wedge. + + Fixes the problem with Path.wedge where it doesn't initialise the first, and last two vertices. + This fix should not have any side-effects once Path.wedge has been fixed, but will then be redundant and should be removed. @@ -226,8 +227,8 @@ def _wedge_fix(wedge_path): A dictionary mapping WMO cloud cover codes to their corresponding symbol. 
See https://www.wmo.int/pages/prog/www/DPFS/documents/485_Vol_I_en_colour.pdf - Part II, Appendix II.4, Graphical Representation of Data, Analyses - and Forecasts +Part II, Appendix II.4, Graphical Representation of Data, Analyses +and Forecasts """ diff --git a/lib/iris/tests/__init__.py b/lib/iris/tests/__init__.py index 94c26ef45f..77a9fcdd67 100644 --- a/lib/iris/tests/__init__.py +++ b/lib/iris/tests/__init__.py @@ -42,6 +42,7 @@ import iris.config import iris.cube +import iris.fileformats import iris.tests.graphics as graphics import iris.util @@ -155,14 +156,11 @@ def assert_masked_array_equal(a, b, strict=False): """Check that masked arrays are equal. This requires the unmasked values and masks to be identical. - Args: - - * a, b (array-like): + Parameters + ---------- + a, b : array-like Two arrays to compare. - - Kwargs: - - * strict (bool): + strict : bool, optional If True, perform a complete mask and data array equality check. If False (default), the data array equality considers only unmasked elements. @@ -176,19 +174,15 @@ def assert_masked_array_almost_equal(a, b, decimal=6, strict=False): masks to be identical, and the unmasked values to be almost equal. - Args: - - * a, b (array-like): + Parameters + ---------- + a, b : array-like Two arrays to compare. - - Kwargs: - - * strict (bool): + strict : bool, optional If True, perform a complete mask and data array equality check. If False (default), the data array equality considers only unmasked elements. - - * decimal (int): + decimal : int, optional, default=6 Equality tolerance level for :meth:`numpy.testing.assert_array_almost_equal`, with the meaning 'abs(desired-actual) < 0.5 * 10**(-decimal)' @@ -278,11 +272,12 @@ def result_path(self, basename=None, ext=""): """Return the full path to a test result, generated from the \ calling file, class and, optionally, method. - Optional kwargs : - - * basename - File basename. If omitted, this is \ - generated from the calling method. 
- * ext - Appended file extension. + Parameters + ---------- + basename : optional, default=None + File basename. If omitted, this is generated from the calling method. + ext : str, optional, default="" + Appended file extension. """ if ext and not ext.startswith("."): @@ -333,20 +328,16 @@ def assertCDL(self, netcdf_filename, reference_filename=None, flags="-h"): If the environment variable IRIS_TEST_CREATE_MISSING is non-empty, the reference file is created if it doesn't exist. - Args: - - * netcdf_filename: + Parameters + ---------- + netcdf_filename : The path to the netCDF file. - - Kwargs: - - * reference_filename: + reference_filename : optional, default=None The relative path (relative to the test results directory). If omitted, the result is generated from the calling method's name, class, and module using :meth:`iris.tests.IrisTest.result_path`. - - * flags: + flags : str, optional Command-line flags for `ncdump`, as either a whitespace separated string or an iterable. Defaults to '-h'. @@ -401,20 +392,16 @@ def assertCML(self, cubes, reference_filename=None, checksum=True): If the environment variable IRIS_TEST_CREATE_MISSING is non-empty, the reference file is created if it doesn't exist. - Args: - - * cubes: + Parameters + ---------- + cubes : Either a Cube or a sequence of Cubes. - - Kwargs: - - * reference_filename: + reference_filename : optional, default=None The relative path (relative to the test results directory). If omitted, the result is generated from the calling method's name, class, and module using :meth:`iris.tests.IrisTest.result_path`. - - * checksum: + checksum : bool, optional When True, causes the CML to include a checksum for each Cube's data. Defaults to True. @@ -514,14 +501,11 @@ def assertString(self, string, reference_filename=None): If the environment variable IRIS_TEST_CREATE_MISSING is non-empty, the reference file is created if it doesn't exist. 
- Args: - - * string: + Parameters + ---------- + string : str The string to check. - - Kwargs: - - * reference_filename: + reference_filename : optional, default=None The relative path (relative to the test results directory). If omitted, the result is generated from the calling method's name, class, and module using @@ -630,16 +614,15 @@ def assertArrayAlmostEqual(self, a, b, decimal=6): def assertArrayAllClose(self, a, b, rtol=1.0e-7, atol=1.0e-8, **kwargs): """Check arrays are equal, within given relative + absolute tolerances. - Args: - - * a, b (array-like): + Parameters + ---------- + a, b : array-like Two arrays to compare. - - Kwargs: - - * rtol, atol (float): + rtol, atol : float, optional Relative and absolute tolerances to apply. + Other Parameters + ---------------- Any additional kwargs are passed to numpy.testing.assert_allclose. Performs pointwise toleranced comparison, and raises an assertion if @@ -776,10 +759,13 @@ def patch(self, *args, **kwargs): The patch is created with mock.patch(*args, **kwargs). - Returns: - The substitute object returned by patch.start(). + Returns + ------- + The substitute object returned by patch.start(). - For example:: + Examples + -------- + :: mock_call = self.patch('module.Class.call', return_value=1) module_Class_instance.call(3, 4) @@ -818,7 +804,9 @@ def assertArrayShapeStats(self, result, shape, mean, std_dev, rtol=1e-6): self.assertArrayAllClose(result.data.std(), std_dev, rtol=rtol) def assertDictEqual(self, lhs, rhs, msg=None): - """This method overrides unittest.TestCase.assertDictEqual (new in Python3.1) + """Dictionary Comparison. + + This method overrides unittest.TestCase.assertDictEqual (new in Python3.1) in order to cope with dictionary comparison where the value of a key may be a numpy array. 
@@ -896,6 +884,93 @@ class GraphicsTest(graphics.GraphicsTestMixin, IrisTest): pass +class PPTest: + """A mixin class to provide PP-specific utilities to subclasses of tests.IrisTest.""" + + @contextlib.contextmanager + def cube_save_test( + self, + reference_txt_path, + reference_cubes=None, + reference_pp_path=None, + **kwargs, + ): + """A context manager for testing the saving of Cubes to PP files. + + Args: + + * reference_txt_path: + The path of the file containing the textual PP reference data. + + Kwargs: + + * reference_cubes: + The cube(s) from which the textual PP reference can be re-built if necessary. + * reference_pp_path: + The location of a PP file from which the textual PP reference can be re-built if necessary. + NB. The "reference_cubes" argument takes precedence over this argument. + + The return value from the context manager is the name of a temporary file + into which the PP data to be tested should be saved. + + Example:: + with self.cube_save_test(reference_txt_path, reference_cubes=cubes) as temp_pp_path: + iris.save(cubes, temp_pp_path) + + """ + # Watch out for a missing reference text file + if not os.path.isfile(reference_txt_path): + if reference_cubes: + temp_pp_path = iris.util.create_temp_filename(".pp") + try: + iris.save(reference_cubes, temp_pp_path, **kwargs) + self._create_reference_txt(reference_txt_path, temp_pp_path) + finally: + os.remove(temp_pp_path) + elif reference_pp_path: + self._create_reference_txt(reference_txt_path, reference_pp_path) + else: + raise ValueError( + "Missing all of reference txt file, cubes, and PP path." + ) + + temp_pp_path = iris.util.create_temp_filename(".pp") + try: + # This value is returned to the target of the "with" statement's "as" clause. 
+ yield temp_pp_path + + # Load deferred data for all of the fields (but don't do anything with it) + pp_fields = list(iris.fileformats.pp.load(temp_pp_path)) + for pp_field in pp_fields: + pp_field.data + with open(reference_txt_path, "r") as reference_fh: + reference = "".join(reference_fh) + self._assert_str_same( + reference + "\n", + str(pp_fields) + "\n", + reference_txt_path, + type_comparison_name="PP files", + ) + finally: + os.remove(temp_pp_path) + + def _create_reference_txt(self, txt_path, pp_path): + # Load the reference data + pp_fields = list(iris.fileformats.pp.load(pp_path)) + for pp_field in pp_fields: + pp_field.data + + # Clear any header words we don't use + unused = ("lbexp", "lbegin", "lbnrec", "lbproj", "lbtyp") + for pp_field in pp_fields: + for word_name in unused: + setattr(pp_field, word_name, 0) + + # Save the textual representation of the PP fields + with open(txt_path, "w") as txt_file: + txt_file.writelines(str(pp_fields)) + + def skip_data(fn): """Decorator to choose whether to run tests, based on the availability of external data. diff --git a/lib/iris/tests/experimental/__init__.py b/lib/iris/tests/experimental/__init__.py index 951fca1eae..01a9ee6bdc 100644 --- a/lib/iris/tests/experimental/__init__.py +++ b/lib/iris/tests/experimental/__init__.py @@ -2,6 +2,4 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Experimental code is tested in this package. - -""" +"""Experimental code is tested in this package.""" diff --git a/lib/iris/tests/experimental/regrid/__init__.py b/lib/iris/tests/experimental/regrid/__init__.py index be5871a5a6..e3983bc695 100644 --- a/lib/iris/tests/experimental/regrid/__init__.py +++ b/lib/iris/tests/experimental/regrid/__init__.py @@ -2,6 +2,4 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. 
-"""Regridding code is tested in this package. - -""" +"""Regridding code is tested in this package.""" diff --git a/lib/iris/tests/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py b/lib/iris/tests/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py index b68c8f1625..68fa47f25c 100644 --- a/lib/iris/tests/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py +++ b/lib/iris/tests/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Test area weighted regridding. - -""" +"""Test area weighted regridding.""" # import iris tests first so that some things can be initialised # before importing anything else. diff --git a/lib/iris/tests/experimental/regrid/test_regrid_conservative_via_esmpy.py b/lib/iris/tests/experimental/regrid/test_regrid_conservative_via_esmpy.py index c74bab37fb..e06809f3c0 100644 --- a/lib/iris/tests/experimental/regrid/test_regrid_conservative_via_esmpy.py +++ b/lib/iris/tests/experimental/regrid/test_regrid_conservative_via_esmpy.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Tests for :func:`iris.experimental.regrid.regrid_conservative_via_esmpy`. - -""" +"""Tests for :func:`iris.experimental.regrid.regrid_conservative_via_esmpy`.""" # import iris tests first so that some things can be initialised # before importing anything else. 
diff --git a/lib/iris/tests/graphics/idiff.py b/lib/iris/tests/graphics/idiff.py index 2e2ef75776..64d690e55d 100755 --- a/lib/iris/tests/graphics/idiff.py +++ b/lib/iris/tests/graphics/idiff.py @@ -26,7 +26,7 @@ from matplotlib.testing.exceptions import ImageComparisonFailure # noqa import matplotlib.widgets as mwidget # noqa -from iris.exceptions import IrisIgnoringWarning # noqa +from iris.warnings import IrisIgnoringWarning # noqa import iris.tests # noqa import iris.tests.graphics as graphics # noqa diff --git a/lib/iris/tests/graphics/recreate_imagerepo.py b/lib/iris/tests/graphics/recreate_imagerepo.py index ae12eb447d..5261f0cc29 100755 --- a/lib/iris/tests/graphics/recreate_imagerepo.py +++ b/lib/iris/tests/graphics/recreate_imagerepo.py @@ -3,9 +3,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Updates imagerepo.json based on the baseline images. - -""" +"""Updates imagerepo.json based on the baseline images.""" import argparse from pathlib import Path diff --git a/lib/iris/tests/integration/experimental/test_ugrid_load.py b/lib/iris/tests/integration/experimental/test_ugrid_load.py index 63406f1ba0..d513d02497 100644 --- a/lib/iris/tests/integration/experimental/test_ugrid_load.py +++ b/lib/iris/tests/integration/experimental/test_ugrid_load.py @@ -17,13 +17,13 @@ import pytest from iris import Constraint, load -from iris.exceptions import IrisCfWarning from iris.experimental.ugrid.load import PARSE_UGRID_ON_LOAD, load_mesh, load_meshes from iris.experimental.ugrid.mesh import Mesh from iris.tests.stock.netcdf import ( _file_from_cdl_template as create_file_from_cdl_template, ) from iris.tests.unit.tests.stock.test_netcdf import XIOSFileMixin +from iris.warnings import IrisCfWarning def ugrid_load(uris, constraints=None, callback=None): diff --git a/lib/iris/tests/integration/experimental/test_ugrid_save.py 
b/lib/iris/tests/integration/experimental/test_ugrid_save.py index cbff1d767f..8350a2004f 100644 --- a/lib/iris/tests/integration/experimental/test_ugrid_save.py +++ b/lib/iris/tests/integration/experimental/test_ugrid_save.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Integration tests for NetCDF-UGRID file saving. - -""" +"""Integration tests for NetCDF-UGRID file saving.""" # Import iris.tests first so that some things can be initialised before # importing anything else. import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/integration/merge/test_merge.py b/lib/iris/tests/integration/merge/test_merge.py index 4e8562cb60..7e1acd4ad6 100644 --- a/lib/iris/tests/integration/merge/test_merge.py +++ b/lib/iris/tests/integration/merge/test_merge.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Integration tests for merging cubes. - -""" +"""Integration tests for merging cubes.""" # import iris tests first so that some things can be initialised # before importing anything else. diff --git a/lib/iris/tests/integration/netcdf/test_delayed_save.py b/lib/iris/tests/integration/netcdf/test_delayed_save.py index d76d57c3e1..cb375cc592 100644 --- a/lib/iris/tests/integration/netcdf/test_delayed_save.py +++ b/lib/iris/tests/integration/netcdf/test_delayed_save.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Integration tests for delayed saving. 
-""" +"""Integration tests for delayed saving.""" import re import warnings @@ -16,10 +15,10 @@ import pytest import iris -from iris.exceptions import IrisSaverFillValueWarning from iris.fileformats.netcdf._thread_safe_nc import default_fillvals import iris.tests from iris.tests.stock import realistic_4d +from iris.warnings import IrisSaverFillValueWarning class Test__lazy_stream_data: diff --git a/lib/iris/tests/integration/netcdf/test_general.py b/lib/iris/tests/integration/netcdf/test_general.py index 8c27742185..1020ddbb96 100644 --- a/lib/iris/tests/integration/netcdf/test_general.py +++ b/lib/iris/tests/integration/netcdf/test_general.py @@ -29,6 +29,7 @@ # Get the netCDF4 module, but in a sneaky way that avoids triggering the "do not import # netCDF4" check in "iris.tests.test_coding_standards.test_netcdf4_import()". import iris.fileformats.netcdf._thread_safe_nc as threadsafe_nc +import iris.warnings nc = threadsafe_nc.netCDF4 @@ -138,7 +139,7 @@ def test_unknown_method(self): warning_messages = [ warn for warn in warning_messages - if isinstance(warn, iris.exceptions.IrisUnknownCellMethodWarning) + if isinstance(warn, iris.warnings.IrisUnknownCellMethodWarning) ] self.assertEqual(len(warning_messages), 1) message = warning_messages[0].args[0] @@ -484,5 +485,40 @@ def test_path_string_save_same(self): self.assertCDL(tempfile_frompath) +@tests.skip_data +class TestWarningRepeats(tests.IrisTest): + def test_datum_once(self): + """Tests for warnings being duplicated. + + Notes + ----- + This test relies on `iris.load` throwing a warning. This warning might + be removed in the future, in which case `assert len(record) == 2 should` + be change to `assert len(record) == 1`. + + toa_brightness_temperature.nc has an AuxCoord with lazy data, and triggers a + specific part of dask which contains a `catch_warnings()` call which + causes warnings to be repeated, and so has been removed from the + `fnames` list until a solution is found for such a file. 
+ + """ + # + fnames = [ + "false_east_north_merc.nc", + "non_unit_scale_factor_merc.nc", + # toa_brightness_temperature.nc, + ] + fpaths = [ + tests.get_data_path(("NetCDF", "mercator", fname)) for fname in fnames + ] + + with warnings.catch_warnings(record=True) as record: + warnings.simplefilter("default") + for fpath in fpaths: + iris.load(fpath) + warnings.warn("Dummy warning", category=iris.warnings.IrisUserWarning) + assert len(record) == 2 + + if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/integration/netcdf/test_self_referencing.py b/lib/iris/tests/integration/netcdf/test_self_referencing.py index 7f52f722ae..b2b9b6d4e1 100644 --- a/lib/iris/tests/integration/netcdf/test_self_referencing.py +++ b/lib/iris/tests/integration/netcdf/test_self_referencing.py @@ -15,8 +15,8 @@ import numpy as np import iris -from iris.exceptions import IrisCfMissingVarWarning from iris.fileformats.netcdf import _thread_safe_nc +from iris.warnings import IrisCfMissingVarWarning @tests.skip_data diff --git a/lib/iris/tests/integration/plot/test_animate.py b/lib/iris/tests/integration/plot/test_animate.py index 53a6b38797..4afee0c463 100644 --- a/lib/iris/tests/integration/plot/test_animate.py +++ b/lib/iris/tests/integration/plot/test_animate.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Integration tests for :func:`iris.plot.animate`. - -""" +"""Integration tests for :func:`iris.plot.animate`.""" # import iris tests first so that some things can be initialised before # importing anything else diff --git a/lib/iris/tests/integration/plot/test_netcdftime.py b/lib/iris/tests/integration/plot/test_netcdftime.py index dbe67efd35..750de9fdf3 100644 --- a/lib/iris/tests/integration/plot/test_netcdftime.py +++ b/lib/iris/tests/integration/plot/test_netcdftime.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. 
# See LICENSE in the root of the repository for full licensing details. -"""Test plot of time coord with non-standard calendar. - -""" +"""Test plot of time coord with non-standard calendar.""" # import iris tests first so that some things can be initialised before # importing anything else diff --git a/lib/iris/tests/integration/plot/test_nzdateline.py b/lib/iris/tests/integration/plot/test_nzdateline.py index 5a83ac5d89..cb119f5b27 100644 --- a/lib/iris/tests/integration/plot/test_nzdateline.py +++ b/lib/iris/tests/integration/plot/test_nzdateline.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Test set up of limited area map extents which bridge the date line. - -""" +"""Test set up of limited area map extents which bridge the date line.""" # import iris tests first so that some things can be initialised before # importing anything else diff --git a/lib/iris/tests/integration/plot/test_plot_2d_coords.py b/lib/iris/tests/integration/plot/test_plot_2d_coords.py index dafddd064d..43cd051f46 100644 --- a/lib/iris/tests/integration/plot/test_plot_2d_coords.py +++ b/lib/iris/tests/integration/plot/test_plot_2d_coords.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Test plots with two dimensional coordinates. - -""" +"""Test plots with two dimensional coordinates.""" # import iris tests first so that some things can be initialised before # importing anything else diff --git a/lib/iris/tests/integration/plot/test_vector_plots.py b/lib/iris/tests/integration/plot/test_vector_plots.py index 08170f6f89..5419dc182f 100644 --- a/lib/iris/tests/integration/plot/test_vector_plots.py +++ b/lib/iris/tests/integration/plot/test_vector_plots.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. 
# See LICENSE in the root of the repository for full licensing details. -"""Test some key usages of :func:`iris.plot.quiver`. - -""" +"""Test some key usages of :func:`iris.plot.quiver`.""" # import iris tests first so that some things can be initialised before # importing anything else diff --git a/lib/iris/tests/integration/test_mask_cube_from_shapefile.py b/lib/iris/tests/integration/test_mask_cube_from_shapefile.py new file mode 100644 index 0000000000..59f3e3a72a --- /dev/null +++ b/lib/iris/tests/integration/test_mask_cube_from_shapefile.py @@ -0,0 +1,109 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. +"""Integration tests for :func:`iris.util.mask_cube_from_shapefile`.""" + +import math + +import cartopy.io.shapereader as shpreader +import numpy as np + +import iris +import iris.tests as tests +from iris.util import mask_cube_from_shapefile + + +@tests.skip_data +class TestCubeMasking(tests.IrisTest): + """integration tests of mask_cube_from_shapefile + using different projections in iris_test_data - + values are the KGO calculated using ASCEND. 
+ """ + + def setUp(self): + ne_countries = shpreader.natural_earth( + resolution="10m", category="cultural", name="admin_0_countries" + ) + self.reader = shpreader.Reader(ne_countries) + + def test_global_proj_russia(self): + path = tests.get_data_path( + ["NetCDF", "global", "xyt", "SMALL_hires_wind_u_for_ipcc4.nc"] + ) + test_global = iris.load_cube(path) + ne_russia = [ + country.geometry + for country in self.reader.records() + if "Russia" in country.attributes["NAME_LONG"] + ][0] + masked_test = mask_cube_from_shapefile(test_global, ne_russia) + print(np.sum(masked_test.data)) + assert math.isclose( + np.sum(masked_test.data), 76845.37, rel_tol=0.001 + ), "Global data with Russia mask failed test" + + def test_rotated_pole_proj_germany(self): + path = tests.get_data_path( + ["NetCDF", "rotated", "xy", "rotPole_landAreaFraction.nc"] + ) + test_rotated = iris.load_cube(path) + ne_germany = [ + country.geometry + for country in self.reader.records() + if "Germany" in country.attributes["NAME_LONG"] + ][0] + masked_test = mask_cube_from_shapefile(test_rotated, ne_germany) + assert math.isclose( + np.sum(masked_test.data), 179.46872, rel_tol=0.001 + ), "rotated europe data with German mask failed test" + + def test_transverse_mercator_proj_uk(self): + path = tests.get_data_path( + ["NetCDF", "transverse_mercator", "tmean_1910_1910.nc"] + ) + test_transverse = iris.load_cube(path) + ne_uk = [ + country.geometry + for country in self.reader.records() + if "United Kingdom" in country.attributes["NAME_LONG"] + ][0] + masked_test = mask_cube_from_shapefile(test_transverse, ne_uk) + assert math.isclose( + np.sum(masked_test.data), 90740.25, rel_tol=0.001 + ), "transverse mercator UK data with UK mask failed test" + + def test_rotated_pole_proj_germany_weighted_area(self): + path = tests.get_data_path( + ["NetCDF", "rotated", "xy", "rotPole_landAreaFraction.nc"] + ) + test_rotated = iris.load_cube(path) + ne_germany = [ + country.geometry + for country in 
self.reader.records() + if "Germany" in country.attributes["NAME_LONG"] + ][0] + masked_test = mask_cube_from_shapefile( + test_rotated, ne_germany, minimum_weight=0.9 + ) + assert math.isclose( + np.sum(masked_test.data), 125.60199, rel_tol=0.001 + ), "rotated europe data with 0.9 weight germany mask failed test" + + def test_4d_global_proj_brazil(self): + path = tests.get_data_path(["NetCDF", "global", "xyz_t", "GEMS_CO2_Apr2006.nc"]) + test_4d_brazil = iris.load_cube(path, "Carbon Dioxide") + ne_brazil = [ + country.geometry + for country in self.reader.records() + if "Brazil" in country.attributes["NAME_LONG"] + ][0] + masked_test = mask_cube_from_shapefile( + test_4d_brazil, + ne_brazil, + ) + print(np.sum(masked_test.data)) + # breakpoint() + assert math.isclose( + np.sum(masked_test.data), 18616921.2, rel_tol=0.001 + ), "4d data with brazil mask failed test" diff --git a/lib/iris/tests/integration/test_netcdf__loadsaveattrs.py b/lib/iris/tests/integration/test_netcdf__loadsaveattrs.py index dcf61a947f..226babc9fb 100644 --- a/lib/iris/tests/integration/test_netcdf__loadsaveattrs.py +++ b/lib/iris/tests/integration/test_netcdf__loadsaveattrs.py @@ -4,7 +4,8 @@ # See LICENSE in the root of the repository for full licensing details. """Integration tests for loading and saving netcdf file attributes. -Notes: +Notes +----- (1) attributes in netCDF files can be either "global attributes", or variable ("local") type. @@ -178,6 +179,7 @@ def make_tempdir(self, tmp_path_factory): N.B. "tmp_path_factory" is a standard PyTest fixture, which provides a dirpath *shared* by all tests. This is a bit quicker and more debuggable than having a directory per-testcase. + """ # Store the temporary directory path on the test instance self.tmpdir = str(tmp_path_factory.getbasetemp()) @@ -220,6 +222,7 @@ def create_testcase_files_or_cubes( Note: 'var_values_file' args are dictionaries. 
The named variables are created, with an attribute = the dictionary value, *except* that a dictionary value of None means that a local attribute is _not_ created on the variable. + """ # save attribute on the instance self.attrname = attr_name @@ -307,6 +310,8 @@ def run_testcase( create_cubes_or_files : str, default "files" create either cubes or testfiles. + Notes + ----- If ``create_cubes_or_files`` == "files", create one temporary netCDF file per values-list, and record in ``self.input_filepaths``. Else if ``create_cubes_or_files`` == "cubes", create sets of cubes with common diff --git a/lib/iris/tests/integration/test_pp.py b/lib/iris/tests/integration/test_pp.py index e8dd367187..1ed9dca853 100644 --- a/lib/iris/tests/integration/test_pp.py +++ b/lib/iris/tests/integration/test_pp.py @@ -17,12 +17,13 @@ from iris.aux_factory import HybridHeightFactory, HybridPressureFactory from iris.coords import AuxCoord, CellMethod, DimCoord from iris.cube import Cube -from iris.exceptions import IgnoreCubeException, IrisUserWarning +from iris.exceptions import IgnoreCubeException import iris.fileformats.pp from iris.fileformats.pp import load_pairs_from_fields import iris.fileformats.pp_load_rules from iris.fileformats.pp_save_rules import verify import iris.util +from iris.warnings import IrisUserWarning class TestVertical(tests.IrisTest): diff --git a/lib/iris/tests/integration/um/test_fieldsfile.py b/lib/iris/tests/integration/um/test_fieldsfile.py index 18c28ee1c7..969a343c4f 100644 --- a/lib/iris/tests/integration/um/test_fieldsfile.py +++ b/lib/iris/tests/integration/um/test_fieldsfile.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Test the fast loading of structured Fieldsfiles. 
- -""" +"""Test the fast loading of structured Fieldsfiles.""" # import iris tests first so that some things can be initialised before # importing anything else diff --git a/lib/iris/tests/pp.py b/lib/iris/tests/pp.py deleted file mode 100644 index b0af72d5ee..0000000000 --- a/lib/iris/tests/pp.py +++ /dev/null @@ -1,96 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. - -import contextlib -import os.path - -import iris - - -class PPTest: - """A mixin class to provide PP-specific utilities to subclasses of tests.IrisTest.""" - - @contextlib.contextmanager - def cube_save_test( - self, - reference_txt_path, - reference_cubes=None, - reference_pp_path=None, - **kwargs, - ): - """A context manager for testing the saving of Cubes to PP files. - - Args: - - * reference_txt_path: - The path of the file containing the textual PP reference data. - - Kwargs: - - * reference_cubes: - The cube(s) from which the textual PP reference can be re-built if necessary. - * reference_pp_path: - The location of a PP file from which the textual PP reference can be re-built if necessary. - NB. The "reference_cubes" argument takes precedence over this argument. - - The return value from the context manager is the name of a temporary file - into which the PP data to be tested should be saved. 
- - Example:: - with self.cube_save_test(reference_txt_path, reference_cubes=cubes) as temp_pp_path: - iris.save(cubes, temp_pp_path) - - """ - # Watch out for a missing reference text file - if not os.path.isfile(reference_txt_path): - if reference_cubes: - temp_pp_path = iris.util.create_temp_filename(".pp") - try: - iris.save(reference_cubes, temp_pp_path, **kwargs) - self._create_reference_txt(reference_txt_path, temp_pp_path) - finally: - os.remove(temp_pp_path) - elif reference_pp_path: - self._create_reference_txt(reference_txt_path, reference_pp_path) - else: - raise ValueError( - "Missing all of reference txt file, cubes, and PP path." - ) - - temp_pp_path = iris.util.create_temp_filename(".pp") - try: - # This value is returned to the target of the "with" statement's "as" clause. - yield temp_pp_path - - # Load deferred data for all of the fields (but don't do anything with it) - pp_fields = list(iris.fileformats.pp.load(temp_pp_path)) - for pp_field in pp_fields: - pp_field.data - with open(reference_txt_path, "r") as reference_fh: - reference = "".join(reference_fh) - self._assert_str_same( - reference + "\n", - str(pp_fields) + "\n", - reference_txt_path, - type_comparison_name="PP files", - ) - finally: - os.remove(temp_pp_path) - - def _create_reference_txt(self, txt_path, pp_path): - # Load the reference data - pp_fields = list(iris.fileformats.pp.load(pp_path)) - for pp_field in pp_fields: - pp_field.data - - # Clear any header words we don't use - unused = ("lbexp", "lbegin", "lbnrec", "lbproj", "lbtyp") - for pp_field in pp_fields: - for word_name in unused: - setattr(pp_field, word_name, 0) - - # Save the textual representation of the PP fields - with open(txt_path, "w") as txt_file: - txt_file.writelines(str(pp_fields)) diff --git a/lib/iris/tests/stock/__init__.py b/lib/iris/tests/stock/__init__.py index 894cc1d02c..8c1154af72 100644 --- a/lib/iris/tests/stock/__init__.py +++ b/lib/iris/tests/stock/__init__.py @@ -2,9 +2,7 @@ # # This file is 
part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""A collection of routines which create standard Cubes/files for test purposes. - -""" +"""A collection of routines which create standard Cubes/files for test purposes.""" import iris.tests as tests # isort:skip from datetime import datetime diff --git a/lib/iris/tests/stock/mesh.py b/lib/iris/tests/stock/mesh.py index aafd8b6c20..3520436f66 100644 --- a/lib/iris/tests/stock/mesh.py +++ b/lib/iris/tests/stock/mesh.py @@ -27,16 +27,17 @@ def sample_mesh(n_nodes=None, n_faces=None, n_edges=None, lazy_values=False): Mesh has nodes, plus faces and/or edges, with face-coords and edge-coords, numbers of which can be controlled. - Args: - * n_nodes (int or None): + Parameters + ---------- + n_nodes : int or None Number of nodes in mesh. Default is 15. Cannot be 0. - * n_edges (int or None): + n_edges : int or None Number of edges in mesh. Default is 5. If not 0, edge coords and an 'edge_node_connectivity' are included. - * n_faces (int or None): + n_faces : int or None Number of faces in mesh. Default is 3. If not 0, face coords and a 'face_node_connectivity' are included. - * lazy_values (bool): + lazy_values : bool, default=False If True, all content values of coords and connectivities are lazy. """ @@ -124,25 +125,34 @@ def sample_meshcoord(mesh=None, location="face", axis="x", **extra_kwargs): def sample_mesh_cube(nomesh_faces=None, n_z=2, with_parts=False, **meshcoord_kwargs): """Create a 2d test cube with 1 'normal' and 1 unstructured dimension (with a Mesh). - Result contains : dimcoords for both dims; an auxcoord on the unstructured dim; 2 mesh-coords. - By default, the mesh is provided by :func:`sample_mesh`, so coordinates and connectivity are not realistic. + Result contains : dimcoords for both dims; an auxcoord on the unstructured + dim; 2 mesh-coords. 
- Kwargs: - * nomesh_faces (int or None): + By default, the mesh is provided by :func:`sample_mesh`, so coordinates + and connectivity are not realistic. + + Parameters + ---------- + nomesh_faces : int or None, optional, default=None If set, don't add MeshCoords, so dim 1 is just a plain anonymous dim. Set its length to the given value. - * n_z (int): + n_z : int, optional, default=2 Length of the 'normal' dim. If 0, it is *omitted*. - * with_parts (bool): + with_parts : bool, optional, default=False If set, return all the constituent component coords - * meshcoord_kwargs (dict): - Extra controls passed to :func:`sample_meshcoord` for MeshCoord creation, to allow user-specified - location/mesh. The 'axis' key is not available, as we always add both an 'x' and 'y' MeshCOord. - - Returns: - * cube : if with_parts not set - * (cube, parts) : if with_parts is set - 'parts' is (mesh, dim0-dimcoord, dim1-dimcoord, dim1-auxcoord, x-meshcoord [or None], y-meshcoord [or None]). + **meshcoord_kwargs : dict, optional + Extra controls passed to :func:`sample_meshcoord` for MeshCoord + creation, to allow user-specified location/mesh. The 'axis' key is + not available, as we always add both an 'x' and 'y' MeshCOord. + + Returns + ------- + cube + if with_parts not set + (cube, parts) + if with_parts is set + 'parts' is (mesh, dim0-dimcoord, dim1-dimcoord, dim1-auxcoord, + x-meshcoord [or None], y-meshcoord [or None]). """ nomesh = nomesh_faces is not None diff --git a/lib/iris/tests/stock/netcdf.py b/lib/iris/tests/stock/netcdf.py index c1dd5cb631..5721433103 100644 --- a/lib/iris/tests/stock/netcdf.py +++ b/lib/iris/tests/stock/netcdf.py @@ -89,7 +89,6 @@ def _add_standard_data(nc_path, unlimited_dim_size=0): dimension size, 'dimension coordinates' and a possible unlimited dimension. 
""" - ds = _thread_safe_nc.DatasetWrapper(nc_path, "r+") unlimited_dim_names = [ diff --git a/lib/iris/tests/system_test.py b/lib/iris/tests/system_test.py index 9df43bf8f9..176da91ecb 100644 --- a/lib/iris/tests/system_test.py +++ b/lib/iris/tests/system_test.py @@ -4,7 +4,10 @@ # See LICENSE in the root of the repository for full licensing details. -"""This system test module is useful to identify if some of the key components required for Iris are available. +"""System test module. + +System test module is useful to identify if some of the key components required for +Iris are available. The system tests can be run with ``python setup.py test --system-tests``. diff --git a/lib/iris/tests/test_cartography.py b/lib/iris/tests/test_cartography.py index 0d4845757b..56ac3d52c7 100644 --- a/lib/iris/tests/test_cartography.py +++ b/lib/iris/tests/test_cartography.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Tests elements of the cartography module. - -""" +"""Tests elements of the cartography module.""" # import iris tests first so that some things can be initialised before importing anything else import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/test_cdm.py b/lib/iris/tests/test_cdm.py index 5f8b247b79..937df0bc2a 100644 --- a/lib/iris/tests/test_cdm.py +++ b/lib/iris/tests/test_cdm.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Test cube indexing, slicing, and extracting, and also the dot graphs. 
- -""" +"""Test cube indexing, slicing, and extracting, and also the dot graphs.""" # import iris tests first so that some things can be initialised before importing anything else import iris.tests as tests # isort:skip @@ -23,7 +21,6 @@ import iris.cube import iris.fileformats import iris.fileformats.dot -import iris.tests.pp as pp import iris.tests.stock @@ -1287,7 +1284,7 @@ def test_non_string_attributes(self): @tests.skip_data -class TestMaskedData(tests.IrisTest, pp.PPTest): +class TestMaskedData(tests.IrisTest, tests.PPTest): def _load_3d_cube(self): # This 3D data set has a missing a slice with SOME missing values. # The missing data is in the pressure = 1000 hPa, forcast_period = 0, diff --git a/lib/iris/tests/test_cell.py b/lib/iris/tests/test_cell.py index d5d11b8ee5..9271f47670 100644 --- a/lib/iris/tests/test_cell.py +++ b/lib/iris/tests/test_cell.py @@ -131,7 +131,7 @@ def test_numpy_int_equality(self): def test_numpy_float_equality(self): dtypes = ( - np.float_, + np.float64, np.float16, np.float32, np.float64, diff --git a/lib/iris/tests/test_cf.py b/lib/iris/tests/test_cf.py index 99b39af4f1..89ad0e3f2b 100644 --- a/lib/iris/tests/test_cf.py +++ b/lib/iris/tests/test_cf.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Test the cf module. - -""" +"""Test the cf module.""" # import iris tests first so that some things can be initialised before importing anything else import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/test_coding_standards.py b/lib/iris/tests/test_coding_standards.py index 44bd2dc868..2f14ea703e 100644 --- a/lib/iris/tests/test_coding_standards.py +++ b/lib/iris/tests/test_coding_standards.py @@ -64,7 +64,9 @@ def test_netcdf4_import(): def test_python_versions(): - """This test is designed to fail whenever Iris' supported Python versions are + """Test Python Versions. 
+ + Test is designed to fail whenever Iris' supported Python versions are updated, insisting that versions are updated EVERYWHERE in-sync. """ latest_supported = "3.11" @@ -128,7 +130,7 @@ def test_categorised_warnings(): r"""To ensure that all UserWarnings raised by Iris are categorised, for ease of use. No obvious category? Use the parent: - :class:`iris.exceptions.IrisUserWarning`. + :class:`iris.warnings.IrisUserWarning`. Warning matches multiple categories? Create a one-off combo class. For example: diff --git a/lib/iris/tests/test_concatenate.py b/lib/iris/tests/test_concatenate.py index f08680c698..de324e6dd3 100644 --- a/lib/iris/tests/test_concatenate.py +++ b/lib/iris/tests/test_concatenate.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Test the cube concatenate mechanism. - -""" +"""Test the cube concatenate mechanism.""" # import iris tests first so that some things can be initialised # before importing anything else. @@ -18,8 +16,8 @@ from iris.aux_factory import HybridHeightFactory from iris.coords import AncillaryVariable, AuxCoord, CellMeasure, DimCoord import iris.cube -from iris.exceptions import IrisUserWarning import iris.tests.stock as stock +from iris.warnings import IrisUserWarning def _make_cube( @@ -35,48 +33,37 @@ def _make_cube( ): """A convenience test function that creates a custom 2D cube. - Args: - - * x: - A (start, stop, step) tuple for specifying the - x-axis dimensional coordinate points. Bounds are - automatically guessed. - - * y: - A (start, stop, step) tuple for specifying the - y-axis dimensional coordinate points. Bounds are - automatically guessed. - - * data: + Parameters + ---------- + x : + A (start, stop, step) tuple for specifying the x-axis dimensional + coordinate points. Bounds are automatically guessed. + y : + A (start, stop, step) tuple for specifying the y-axis dimensional + coordinate points. 
Bounds are automatically guessed. + data : The data payload for the cube. - - Kwargs: - - * aux: + aux : optional, default=None A CSV string specifying which points only auxiliary coordinates to create. Accepts either of 'x', 'y', 'xy'. - - * cell_measure: + cell_measure : optional, default=None A CSV string specifying which points only cell measures coordinates to create. Accepts either of 'x', 'y', 'xy'. - - * ancil: + ancil : optional, default=None A CSV string specifying which points only ancillary variables coordinates to create. Accepts either of 'x', 'y', 'xy'. - - * derived: + derived : optional, default=None A CSV string specifying which points only derived coordinates coordinates to create. Accepts either of 'x', 'y', 'xy'. - - * offset: + offset : int, optional, default=0 Offset value to be added to the 'xy' auxiliary coordinate points. - - * scalar: + scalar : optional, default=None Create a 'height' scalar coordinate with the given value. - Returns: - The newly created 2D :class:`iris.cube.Cube`. + Returns + ------- + The newly created 2D :class:`iris.cube.Cube`. """ x_range = np.arange(*x, dtype=np.float32) @@ -170,39 +157,30 @@ def _make_cube( def _make_cube_3d(x, y, z, data, aux=None, offset=0): """A convenience test function that creates a custom 3D cube. - Args: - - * x: - A (start, stop, step) tuple for specifying the - x-axis dimensional coordinate points. Bounds are - automatically guessed. - - * y: - A (start, stop, step) tuple for specifying the - y-axis dimensional coordinate points. Bounds are - automatically guessed. - - * z: - A (start, stop, step) tuple for specifying the - z-axis dimensional coordinate points. Bounds are - automatically guessed. - - * data: + Parameters + ---------- + x : + A (start, stop, step) tuple for specifying the x-axis dimensional + coordinate points. Bounds are automatically guessed. + y : + A (start, stop, step) tuple for specifying the y-axis dimensional + coordinate points. Bounds are automatically guessed. 
+ z : + A (start, stop, step) tuple for specifying the z-axis dimensional + coordinate points. Bounds are automatically guessed. + data : The data payload for the cube. - - Kwargs: - - * aux: + aux : optional, default=None A CSV string specifying which points only auxiliary coordinates to create. Accepts either of 'x', 'y', 'z', 'xy', 'xz', 'yz', 'xyz'. - - * offset: + offset : int, optional, default=0 Offset value to be added to non-1D auxiliary coordinate points. - Returns: - The newly created 3D :class:`iris.cube.Cube`. + Returns + ------- + The newly created 3D :class:`iris.cube.Cube`. """ x_range = np.arange(*x, dtype=np.float32) diff --git a/lib/iris/tests/test_constraints.py b/lib/iris/tests/test_constraints.py index 4fa34ee63d..f7dab288b6 100644 --- a/lib/iris/tests/test_constraints.py +++ b/lib/iris/tests/test_constraints.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Test the constrained cube loading mechanism. 
- -""" +"""Test the constrained cube loading mechanism.""" # import iris tests first so that some things can be initialised before importing anything else import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/test_coord_api.py b/lib/iris/tests/test_coord_api.py index 70cbd15899..50ae3b2696 100644 --- a/lib/iris/tests/test_coord_api.py +++ b/lib/iris/tests/test_coord_api.py @@ -16,7 +16,6 @@ import iris.aux_factory import iris.coord_systems import iris.coords -import iris.exceptions import iris.tests.stock diff --git a/lib/iris/tests/test_coordsystem.py b/lib/iris/tests/test_coordsystem.py index 69aeeaa1b1..f7ee34d0fc 100644 --- a/lib/iris/tests/test_coordsystem.py +++ b/lib/iris/tests/test_coordsystem.py @@ -17,8 +17,8 @@ ) import iris.coords import iris.cube -from iris.exceptions import IrisUserWarning import iris.tests.stock +from iris.warnings import IrisUserWarning def osgb(): diff --git a/lib/iris/tests/test_cube_to_pp.py b/lib/iris/tests/test_cube_to_pp.py index 704e82539b..6ae4567f49 100644 --- a/lib/iris/tests/test_cube_to_pp.py +++ b/lib/iris/tests/test_cube_to_pp.py @@ -17,7 +17,6 @@ import iris.coords import iris.fileformats.pp from iris.fileformats.pp import PPField3 -import iris.tests.pp as pp import iris.tests.stock as stock import iris.util @@ -36,7 +35,7 @@ def itab_callback(cube, field, filename): @tests.skip_data -class TestPPSave(tests.IrisTest, pp.PPTest): +class TestPPSave(tests.IrisTest, tests.PPTest): def test_no_forecast_time(self): cube = stock.lat_lon_cube() coord = iris.coords.DimCoord( @@ -268,13 +267,15 @@ class FakePPEnvironment: def geog_cs(self): """Return a GeogCS for this PPField. - Returns: - A GeogCS with the appropriate earth shape, meridian and pole position. + Returns + ------- + A GeogCS with the appropriate earth shape, meridian and pole position. 
+ """ return iris.coord_systems.GeogCS(6371229.0) -class TestPPSaveRules(tests.IrisTest, pp.PPTest): +class TestPPSaveRules(tests.IrisTest, tests.PPTest): def test_default_coord_system(self): GeogCS = iris.coord_systems.GeogCS cube = iris.tests.stock.lat_lon_cube() diff --git a/lib/iris/tests/test_ff.py b/lib/iris/tests/test_ff.py index c4868c8d15..fdfa274bdf 100644 --- a/lib/iris/tests/test_ff.py +++ b/lib/iris/tests/test_ff.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Test the Fieldsfile file loading plugin and FFHeader. - -""" +"""Test the Fieldsfile file loading plugin and FFHeader.""" # import iris tests first so that some things can be initialised before # importing anything else diff --git a/lib/iris/tests/test_file_load.py b/lib/iris/tests/test_file_load.py index 65bd6e4025..e1510532a1 100644 --- a/lib/iris/tests/test_file_load.py +++ b/lib/iris/tests/test_file_load.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Test the file loading mechanism. - -""" +"""Test the file loading mechanism.""" # import iris tests first so that some things can be initialised before importing anything else import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/test_file_save.py b/lib/iris/tests/test_file_save.py index f3fe60549f..ab0b74c1c2 100644 --- a/lib/iris/tests/test_file_save.py +++ b/lib/iris/tests/test_file_save.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Test the file saving mechanism. 
- -""" +"""Test the file saving mechanism.""" # import iris tests first so that some things can be initialised before importing anything else import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/test_hybrid.py b/lib/iris/tests/test_hybrid.py index f8913f4fbd..1bc8f2e70e 100644 --- a/lib/iris/tests/test_hybrid.py +++ b/lib/iris/tests/test_hybrid.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Test the hybrid vertical coordinate representations. - -""" +"""Test the hybrid vertical coordinate representations.""" # import iris tests first so that some things can be initialised before # importing anything else @@ -16,8 +14,8 @@ import iris from iris.aux_factory import HybridHeightFactory, HybridPressureFactory -from iris.exceptions import IrisIgnoringBoundsWarning import iris.tests.stock +from iris.warnings import IrisIgnoringBoundsWarning @tests.skip_plot diff --git a/lib/iris/tests/test_intersect.py b/lib/iris/tests/test_intersect.py index f6287ef1f7..e28c99dea4 100644 --- a/lib/iris/tests/test_intersect.py +++ b/lib/iris/tests/test_intersect.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Test the intersection of Coords. - -""" +"""Test the intersection of Coords.""" # import iris tests first so that some things can be initialised before importing anything else import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/test_io_init.py b/lib/iris/tests/test_io_init.py index 10654e5f64..f8cfaee2bf 100644 --- a/lib/iris/tests/test_io_init.py +++ b/lib/iris/tests/test_io_init.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Test the io/__init__.py module. 
- -""" +"""Test the io/__init__.py module.""" # import iris tests first so that some things can be initialised before importing anything else import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/test_iterate.py b/lib/iris/tests/test_iterate.py index 627d90e0bc..749e8650db 100644 --- a/lib/iris/tests/test_iterate.py +++ b/lib/iris/tests/test_iterate.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Test the iteration of cubes in step. - -""" +"""Test the iteration of cubes in step.""" # import iris tests first so that some things can be initialised before # importing anything else @@ -20,9 +18,9 @@ import iris import iris.analysis -from iris.exceptions import IrisUserWarning import iris.iterate import iris.tests.stock +from iris.warnings import IrisUserWarning @tests.skip_data diff --git a/lib/iris/tests/test_load.py b/lib/iris/tests/test_load.py index c77854b541..b84ab32add 100644 --- a/lib/iris/tests/test_load.py +++ b/lib/iris/tests/test_load.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Test the main loading API. - -""" +"""Test the main loading API.""" # import iris tests first so that some things can be initialised before importing anything else import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/test_mapping.py b/lib/iris/tests/test_mapping.py index 53563dd193..4f59bf8d31 100644 --- a/lib/iris/tests/test_mapping.py +++ b/lib/iris/tests/test_mapping.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Tests map creation. 
- -""" +"""Tests map creation.""" # import iris tests first so that some things can be initialised before # importing anything else diff --git a/lib/iris/tests/test_merge.py b/lib/iris/tests/test_merge.py index a9b1069e95..1fc6fd8b10 100644 --- a/lib/iris/tests/test_merge.py +++ b/lib/iris/tests/test_merge.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Test the cube merging mechanism. - -""" +"""Test the cube merging mechanism.""" # import iris tests first so that some things can be initialised before importing anything else import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/test_netcdf.py b/lib/iris/tests/test_netcdf.py index 8509f4edca..3cdac260b3 100644 --- a/lib/iris/tests/test_netcdf.py +++ b/lib/iris/tests/test_netcdf.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Test CF-NetCDF file loading and saving. - -""" +"""Test CF-NetCDF file loading and saving.""" # Import iris tests first so that some things can be initialised before # importing anything else. 
@@ -24,7 +22,6 @@ from iris._lazy_data import is_lazy_data import iris.analysis.trajectory import iris.coord_systems as icoord_systems -from iris.exceptions import IrisCfSaveWarning from iris.fileformats._nc_load_rules import helpers as ncload_helpers import iris.fileformats.netcdf from iris.fileformats.netcdf import _thread_safe_nc @@ -33,6 +30,7 @@ import iris.tests.stock as stock from iris.tests.stock.netcdf import ncgen_from_cdl import iris.util +from iris.warnings import IrisCfSaveWarning @tests.skip_data diff --git a/lib/iris/tests/test_pickling.py b/lib/iris/tests/test_pickling.py index 7bea687231..311e6ec9aa 100644 --- a/lib/iris/tests/test_pickling.py +++ b/lib/iris/tests/test_pickling.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Test pickling of Iris objects. - -""" +"""Test pickling of Iris objects.""" # Import iris tests first so that some things can be initialised # before importing anything else. 
diff --git a/lib/iris/tests/test_pp_cf.py b/lib/iris/tests/test_pp_cf.py index 59bfe28965..8b0af5a5c3 100644 --- a/lib/iris/tests/test_pp_cf.py +++ b/lib/iris/tests/test_pp_cf.py @@ -13,7 +13,6 @@ import iris.coords from iris.fileformats.netcdf import _thread_safe_nc from iris.fileformats.pp import STASH -import iris.tests.pp as pp import iris.util @@ -64,11 +63,11 @@ def callback_aaxzc_n10r13xy_b_pp(cube, field, filename): @tests.skip_data -class TestAll(tests.IrisTest, pp.PPTest): +class TestAll(tests.IrisTest, tests.PPTest): _ref_dir = ("usecases", "pp_to_cf_conversion") def _test_file(self, name): - """This is the main test routine that is called for each of the files listed below.""" + """Main test routine that is called for each of the files listed below.""" pp_path = self._src_pp_path(name) # 1) Load the PP and check the Cube diff --git a/lib/iris/tests/test_quickplot.py b/lib/iris/tests/test_quickplot.py index f6a5d210e0..fdd534a2c5 100644 --- a/lib/iris/tests/test_quickplot.py +++ b/lib/iris/tests/test_quickplot.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Tests the high-level plotting interface. - -""" +"""Tests the high-level plotting interface.""" # import iris tests first so that some things can be initialised before importing anything else import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/test_util.py b/lib/iris/tests/test_util.py index d9cebf224f..56774f89f8 100644 --- a/lib/iris/tests/test_util.py +++ b/lib/iris/tests/test_util.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Test iris.util. 
- -""" +"""Test iris.util.""" # import iris tests first so that some things can be initialised before # importing anything else diff --git a/lib/iris/tests/unit/analysis/area_weighted/test_AreaWeightedRegridder.py b/lib/iris/tests/unit/analysis/area_weighted/test_AreaWeightedRegridder.py index 93354d4278..82f6fe693d 100644 --- a/lib/iris/tests/unit/analysis/area_weighted/test_AreaWeightedRegridder.py +++ b/lib/iris/tests/unit/analysis/area_weighted/test_AreaWeightedRegridder.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for :class:`iris.analysis._area_weighted.AreaWeightedRegridder`. - -""" +"""Unit tests for :class:`iris.analysis._area_weighted.AreaWeightedRegridder`.""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/unit/analysis/cartography/test_project.py b/lib/iris/tests/unit/analysis/cartography/test_project.py index 35c22af363..65796b5611 100644 --- a/lib/iris/tests/unit/analysis/cartography/test_project.py +++ b/lib/iris/tests/unit/analysis/cartography/test_project.py @@ -15,9 +15,9 @@ import iris.coord_systems import iris.coords import iris.cube -from iris.exceptions import IrisDefaultingWarning import iris.tests import iris.tests.stock +from iris.warnings import IrisDefaultingWarning ROBINSON = ccrs.Robinson() diff --git a/lib/iris/tests/unit/analysis/geometry/test__extract_relevant_cube_slice.py b/lib/iris/tests/unit/analysis/geometry/test__extract_relevant_cube_slice.py index 44695b81c3..7cb4d80f49 100644 --- a/lib/iris/tests/unit/analysis/geometry/test__extract_relevant_cube_slice.py +++ b/lib/iris/tests/unit/analysis/geometry/test__extract_relevant_cube_slice.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. 
-"""Unit tests for :func:`iris.analysis.geometry._extract_relevant_cube_slice`. - -""" +"""Unit tests for :func:`iris.analysis.geometry._extract_relevant_cube_slice`.""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/unit/analysis/geometry/test_geometry_area_weights.py b/lib/iris/tests/unit/analysis/geometry/test_geometry_area_weights.py index 5d7d39dfc4..d98f47975d 100644 --- a/lib/iris/tests/unit/analysis/geometry/test_geometry_area_weights.py +++ b/lib/iris/tests/unit/analysis/geometry/test_geometry_area_weights.py @@ -16,8 +16,8 @@ from iris.analysis.geometry import geometry_area_weights from iris.coords import DimCoord from iris.cube import Cube -from iris.exceptions import IrisGeometryExceedWarning import iris.tests.stock as stock +from iris.warnings import IrisGeometryExceedWarning class Test(tests.IrisTest): diff --git a/lib/iris/tests/unit/analysis/interpolation/test_RectilinearInterpolator.py b/lib/iris/tests/unit/analysis/interpolation/test_RectilinearInterpolator.py index b37fa1de62..ed6e230840 100644 --- a/lib/iris/tests/unit/analysis/interpolation/test_RectilinearInterpolator.py +++ b/lib/iris/tests/unit/analysis/interpolation/test_RectilinearInterpolator.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for :class:`iris.analysis._interpolation.RectilinearInterpolator`. - -""" +"""Unit tests for :class:`iris.analysis._interpolation.RectilinearInterpolator`.""" # Import iris.tests first so that some things can be initialised before # importing anything else. 
diff --git a/lib/iris/tests/unit/analysis/interpolation/test_get_xy_dim_coords.py b/lib/iris/tests/unit/analysis/interpolation/test_get_xy_dim_coords.py index 6300b81d1a..ae47159df3 100644 --- a/lib/iris/tests/unit/analysis/interpolation/test_get_xy_dim_coords.py +++ b/lib/iris/tests/unit/analysis/interpolation/test_get_xy_dim_coords.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for :func:`iris.analysis._interpolation.get_xy_dim_coords`. - -""" +"""Unit tests for :func:`iris.analysis._interpolation.get_xy_dim_coords`.""" # import iris tests first so that some things can be initialised # before importing anything else. diff --git a/lib/iris/tests/unit/analysis/maths/test__get_dtype.py b/lib/iris/tests/unit/analysis/maths/test__get_dtype.py index 4388913d54..643d7e43d7 100644 --- a/lib/iris/tests/unit/analysis/maths/test__get_dtype.py +++ b/lib/iris/tests/unit/analysis/maths/test__get_dtype.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the function :func:`iris.analysis.maths._get_dtype`. - -""" +"""Unit tests for the function :func:`iris.analysis.maths._get_dtype`.""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/unit/analysis/maths/test__inplace_common_checks.py b/lib/iris/tests/unit/analysis/maths/test__inplace_common_checks.py index 42317b7efd..7ce6bc79c1 100644 --- a/lib/iris/tests/unit/analysis/maths/test__inplace_common_checks.py +++ b/lib/iris/tests/unit/analysis/maths/test__inplace_common_checks.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. 
-"""Unit tests for the function :func:`iris.analysis.maths._inplace_common_checks`. - -""" +"""Unit tests for the function :func:`iris.analysis.maths._inplace_common_checks`.""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/unit/analysis/maths/test__output_dtype.py b/lib/iris/tests/unit/analysis/maths/test__output_dtype.py index b14db4120a..4e5845b73f 100644 --- a/lib/iris/tests/unit/analysis/maths/test__output_dtype.py +++ b/lib/iris/tests/unit/analysis/maths/test__output_dtype.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the function :func:`iris.analysis.maths._output_dtype`. - -""" +"""Unit tests for the function :func:`iris.analysis.maths._output_dtype`.""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/unit/analysis/stats/test_pearsonr.py b/lib/iris/tests/unit/analysis/stats/test_pearsonr.py index 50387e1418..d46bcd21ba 100644 --- a/lib/iris/tests/unit/analysis/stats/test_pearsonr.py +++ b/lib/iris/tests/unit/analysis/stats/test_pearsonr.py @@ -8,17 +8,22 @@ # importing anything else. 
import iris.tests as tests # isort:skip +from unittest import mock + +import dask +import dask.array import numpy as np import numpy.ma as ma +import pytest import iris +import iris._lazy_data import iris.analysis.stats as stats from iris.exceptions import CoordinateNotFoundError -@tests.skip_data -class Test(tests.IrisTest): - def setUp(self): +class Mixin: + def setup_method(self): # 3D cubes: cube_temp = iris.load_cube( tests.get_data_path( @@ -33,21 +38,36 @@ def setUp(self): cube_temp.coord("longitude").guess_bounds() self.weights = iris.analysis.cartography.area_weights(cube_temp) - def test_perfect_corr(self): + +@tests.skip_data +class TestLazy(Mixin): + @pytest.fixture + def mocked_compute(self, monkeypatch): + m_compute = mock.Mock(wraps=dask.base.compute) + + # The three dask compute functions are all the same function but monkeypatch + # does not automatically know that. + # https://stackoverflow.com/questions/77820437 + monkeypatch.setattr(dask.base, dask.base.compute.__name__, m_compute) + monkeypatch.setattr(dask, dask.compute.__name__, m_compute) + monkeypatch.setattr(dask.array, dask.array.compute.__name__, m_compute) + + return m_compute + + def test_perfect_corr(self, mocked_compute): r = stats.pearsonr(self.cube_a, self.cube_a, ["latitude", "longitude"]) - self.assertArrayEqual(r.data, np.array([1.0] * 6)) + mocked_compute.assert_not_called() + np.testing.assert_array_equal(r.data, np.array([1.0] * 6)) - def test_perfect_corr_all_dims(self): + def test_perfect_corr_all_dims(self, mocked_compute): r = stats.pearsonr(self.cube_a, self.cube_a) - self.assertArrayEqual(r.data, np.array([1.0])) + mocked_compute.assert_not_called() + np.testing.assert_array_equal(r.data, np.array([1.0])) - def test_incompatible_cubes(self): - with self.assertRaises(ValueError): - stats.pearsonr(self.cube_a[:, 0, :], self.cube_b[0, :, :], "longitude") - - def test_compatible_cubes(self): + def test_compatible_cubes(self, mocked_compute): r = stats.pearsonr(self.cube_a, 
self.cube_b, ["latitude", "longitude"]) - self.assertArrayAlmostEqual( + mocked_compute.assert_not_called() + np.testing.assert_array_almost_equal( r.data, [ 0.81114936, @@ -59,13 +79,15 @@ def test_compatible_cubes(self): ], ) - def test_broadcast_cubes(self): + def test_broadcast_cubes(self, mocked_compute): r1 = stats.pearsonr( self.cube_a, self.cube_b[0, :, :], ["latitude", "longitude"] ) r2 = stats.pearsonr( self.cube_b[0, :, :], self.cube_a, ["latitude", "longitude"] ) + + mocked_compute.assert_not_called() r_by_slice = [ stats.pearsonr( self.cube_a[i, :, :], @@ -74,14 +96,16 @@ def test_broadcast_cubes(self): ).data for i in range(6) ] - self.assertArrayEqual(r1.data, np.array(r_by_slice)) - self.assertArrayEqual(r2.data, np.array(r_by_slice)) + np.testing.assert_array_equal(r1.data, np.array(r_by_slice)) + np.testing.assert_array_equal(r2.data, np.array(r_by_slice)) - def test_compatible_cubes_weighted(self): + def test_compatible_cubes_weighted(self, mocked_compute): r = stats.pearsonr( self.cube_a, self.cube_b, ["latitude", "longitude"], self.weights ) - self.assertArrayAlmostEqual( + + mocked_compute.assert_not_called() + np.testing.assert_array_almost_equal( r.data, [ 0.79105429, @@ -93,13 +117,15 @@ def test_compatible_cubes_weighted(self): ], ) - def test_broadcast_cubes_weighted(self): + def test_broadcast_cubes_weighted(self, mocked_compute): r = stats.pearsonr( self.cube_a, self.cube_b[0, :, :], ["latitude", "longitude"], weights=self.weights[0, :, :], ) + + mocked_compute.assert_not_called() r_by_slice = [ stats.pearsonr( self.cube_a[i, :, :], @@ -109,10 +135,31 @@ def test_broadcast_cubes_weighted(self): ).data for i in range(6) ] - self.assertArrayAlmostEqual(r.data, np.array(r_by_slice)) + np.testing.assert_array_almost_equal(r.data, np.array(r_by_slice)) + + def test_broadcast_transpose_cubes_weighted(self, mocked_compute): + # Reference is calculated with no transposition. 
+ r_ref = stats.pearsonr( + self.cube_a, + self.cube_b[0, :, :], + ["latitude", "longitude"], + weights=self.weights[0, :, :], + ) + + self.cube_a.transpose() + r_test = stats.pearsonr( + self.cube_a, + self.cube_b[0, :, :], + ["latitude", "longitude"], + weights=self.weights[0, :, :], + ) + + mocked_compute.assert_not_called() + # Should get the same result, but transposed. + np.testing.assert_array_almost_equal(r_test.data, r_ref.data.T) def test_weight_error(self): - with self.assertRaises(ValueError): + with pytest.raises(ValueError): stats.pearsonr( self.cube_a, self.cube_b[0, :, :], @@ -120,54 +167,74 @@ def test_weight_error(self): weights=self.weights, ) - def test_non_existent_coord(self): - with self.assertRaises(CoordinateNotFoundError): - stats.pearsonr(self.cube_a, self.cube_b, "bad_coord") - - def test_mdtol(self): + def test_mdtol(self, mocked_compute): cube_small = self.cube_a[:, 0, 0] - cube_small_masked = cube_small.copy() - cube_small_masked.data = ma.array( - cube_small.data, mask=np.array([0, 0, 0, 1, 1, 1], dtype=bool) - ) + cube_small_masked = iris.util.mask_cube(cube_small, [0, 0, 0, 1, 1, 1]) r1 = stats.pearsonr(cube_small, cube_small_masked) r2 = stats.pearsonr(cube_small, cube_small_masked, mdtol=0.49) - self.assertArrayAlmostEqual(r1.data, np.array([0.74586593])) - self.assertMaskedArrayEqual(r2.data, ma.array([0], mask=[True])) - def test_common_mask_simple(self): + mocked_compute.assert_not_called() + np.testing.assert_array_almost_equal(r1.data, np.array([0.74586593])) + tests.assert_masked_array_equal(r2.data, ma.array([0], mask=[True])) + + def test_common_mask_simple(self, mocked_compute): cube_small = self.cube_a[:, 0, 0] - cube_small_masked = cube_small.copy() - cube_small_masked.data = ma.array( - cube_small.data, mask=np.array([0, 0, 0, 1, 1, 1], dtype=bool) - ) + cube_small_masked = iris.util.mask_cube(cube_small, [0, 0, 0, 1, 1, 1]) r = stats.pearsonr(cube_small, cube_small_masked, common_mask=True) - 
self.assertArrayAlmostEqual(r.data, np.array([1.0])) - def test_common_mask_broadcast(self): - cube_small = self.cube_a[:, 0, 0] + mocked_compute.assert_not_called() + np.testing.assert_array_almost_equal(r.data, np.array([1.0])) + + def test_common_mask_broadcast(self, mocked_compute): + cube_small = iris.util.mask_cube(self.cube_a[:, 0, 0], [0, 0, 0, 0, 0, 1]) + mask_2d = np.zeros((6, 2), dtype=bool) + # 2d mask varies on unshared coord: + mask_2d[0, 1] = 1 + + # Make a (6, 2) cube. cube_small_2d = self.cube_a[:, 0:2, 0] - cube_small.data = ma.array( - cube_small.data, mask=np.array([0, 0, 0, 0, 0, 1], dtype=bool) - ) - cube_small_2d.data = ma.array( - np.tile(cube_small.data[:, np.newaxis], 2), - mask=np.zeros((6, 2), dtype=bool), + # Duplicate data along unshared coord's dimension. + new_data = iris.util.broadcast_to_shape( + cube_small.core_data(), (6, 2), dim_map=[0] ) - # 2d mask varies on unshared coord: - cube_small_2d.data.mask[0, 1] = 1 + cube_small_2d.data = iris.util._mask_array(new_data, mask_2d) + r = stats.pearsonr( cube_small, cube_small_2d, weights=self.weights[:, 0, 0], common_mask=True, ) - self.assertArrayAlmostEqual(r.data, np.array([1.0, 1.0])) + + mocked_compute.assert_not_called() + np.testing.assert_array_almost_equal(r.data, np.array([1.0, 1.0])) # 2d mask does not vary on unshared coord: cube_small_2d.data.mask[0, 0] = 1 r = stats.pearsonr(cube_small, cube_small_2d, common_mask=True) - self.assertArrayAlmostEqual(r.data, np.array([1.0, 1.0])) + np.testing.assert_array_almost_equal(r.data, np.array([1.0, 1.0])) + + +class TestReal(TestLazy): + def setup_method(self): + super().setup_method() + for cube in [self.cube_a, self.cube_b]: + _ = cube.data + +class TestCoordHandling(Mixin): + def test_lenient_handling(self): + # Smoke test that mismatched var_name does not prevent operation. 
+ self.cube_a.coord("time").var_name = "wibble" + stats.pearsonr(self.cube_a, self.cube_b) -if __name__ == "__main__": - tests.main() + def test_incompatible_cubes(self): + with pytest.raises(ValueError): + stats.pearsonr(self.cube_a[:, 0, :], self.cube_b[0, :, :], "longitude") + + def test_single_coord(self): + # Smoke test that single coord can be passed as single string. + stats.pearsonr(self.cube_a, self.cube_b, "latitude") + + def test_non_existent_coord(self): + with pytest.raises(CoordinateNotFoundError): + stats.pearsonr(self.cube_a, self.cube_b, "bad_coord") diff --git a/lib/iris/tests/unit/analysis/test_MAX_RUN.py b/lib/iris/tests/unit/analysis/test_MAX_RUN.py index d7cc714609..9e314aad7e 100644 --- a/lib/iris/tests/unit/analysis/test_MAX_RUN.py +++ b/lib/iris/tests/unit/analysis/test_MAX_RUN.py @@ -26,7 +26,6 @@ def setUp(self): Uses 1 and 3 rather than 1 and 0 to check that lambda is being applied. """ - self.data_1ds = [ (np.array([3, 1, 1, 3, 3, 3]), 2), # One run (np.array([3, 1, 1, 3, 1, 3]), 2), # Two runs @@ -60,7 +59,6 @@ def setUp(self): Uses 1 and 3 rather than 1 and 0 to check that lambda is being applied. """ - self.data_1ds = [ ( ma.masked_array( diff --git a/lib/iris/tests/unit/analysis/test_PercentileAggregator.py b/lib/iris/tests/unit/analysis/test_PercentileAggregator.py index 84628c9ae5..9099ac42fe 100644 --- a/lib/iris/tests/unit/analysis/test_PercentileAggregator.py +++ b/lib/iris/tests/unit/analysis/test_PercentileAggregator.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the :class:`iris.analysis.PercentileAggregator` class instance. - -""" +"""Unit tests for the :class:`iris.analysis.PercentileAggregator` class instance.""" # Import iris.tests first so that some things can be initialised before # importing anything else. 
diff --git a/lib/iris/tests/unit/analysis/test_WeightedPercentileAggregator.py b/lib/iris/tests/unit/analysis/test_WeightedPercentileAggregator.py index 0531a5aa3d..1d047db6b6 100644 --- a/lib/iris/tests/unit/analysis/test_WeightedPercentileAggregator.py +++ b/lib/iris/tests/unit/analysis/test_WeightedPercentileAggregator.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the :class:`iris.analysis.PercentileAggregator` class instance. - -""" +"""Unit tests for the :class:`iris.analysis.PercentileAggregator` class instance.""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/unit/analysis/trajectory/test_Trajectory.py b/lib/iris/tests/unit/analysis/trajectory/test_Trajectory.py index 7f01a816fa..abf0e86513 100644 --- a/lib/iris/tests/unit/analysis/trajectory/test_Trajectory.py +++ b/lib/iris/tests/unit/analysis/trajectory/test_Trajectory.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for :class:`iris.analysis.trajectory.Trajectory`. - -""" +"""Unit tests for :class:`iris.analysis.trajectory.Trajectory`.""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/unit/analysis/trajectory/test_interpolate.py b/lib/iris/tests/unit/analysis/trajectory/test_interpolate.py index 05a2d666cc..c171b18f6e 100644 --- a/lib/iris/tests/unit/analysis/trajectory/test_interpolate.py +++ b/lib/iris/tests/unit/analysis/trajectory/test_interpolate.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for :meth:`iris.analysis.trajectory.interpolate`. 
- -""" +"""Unit tests for :meth:`iris.analysis.trajectory.interpolate`.""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/unit/aux_factory/test_AuxCoordFactory.py b/lib/iris/tests/unit/aux_factory/test_AuxCoordFactory.py index 6029f2c518..5e136395b5 100644 --- a/lib/iris/tests/unit/aux_factory/test_AuxCoordFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_AuxCoordFactory.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for `iris.aux_factory.AuxCoordFactory`. - -""" +"""Unit tests for `iris.aux_factory.AuxCoordFactory`.""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/unit/common/lenient/test_Lenient.py b/lib/iris/tests/unit/common/lenient/test_Lenient.py index abafcc1a3e..375a745ce8 100644 --- a/lib/iris/tests/unit/common/lenient/test_Lenient.py +++ b/lib/iris/tests/unit/common/lenient/test_Lenient.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the :class:`iris.common.lenient.Lenient`. - -""" +"""Unit tests for the :class:`iris.common.lenient.Lenient`.""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/unit/common/lenient/test__Lenient.py b/lib/iris/tests/unit/common/lenient/test__Lenient.py index 0ed947e513..814359fbaf 100644 --- a/lib/iris/tests/unit/common/lenient/test__Lenient.py +++ b/lib/iris/tests/unit/common/lenient/test__Lenient.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the :class:`iris.common.lenient._Lenient`. 
- -""" +"""Unit tests for the :class:`iris.common.lenient._Lenient`.""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/unit/common/lenient/test__lenient_client.py b/lib/iris/tests/unit/common/lenient/test__lenient_client.py index 54e2aca185..509b183003 100644 --- a/lib/iris/tests/unit/common/lenient/test__lenient_client.py +++ b/lib/iris/tests/unit/common/lenient/test__lenient_client.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the :func:`iris.common.lenient._lenient_client`. - -""" +"""Unit tests for the :func:`iris.common.lenient._lenient_client`.""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/unit/common/lenient/test__lenient_service.py b/lib/iris/tests/unit/common/lenient/test__lenient_service.py index a916779c79..c0ed8df403 100644 --- a/lib/iris/tests/unit/common/lenient/test__lenient_service.py +++ b/lib/iris/tests/unit/common/lenient/test__lenient_service.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the :func:`iris.common.lenient._lenient_service`. - -""" +"""Unit tests for the :func:`iris.common.lenient._lenient_service`.""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/unit/common/lenient/test__qualname.py b/lib/iris/tests/unit/common/lenient/test__qualname.py index 47b779f6da..49576814d4 100644 --- a/lib/iris/tests/unit/common/lenient/test__qualname.py +++ b/lib/iris/tests/unit/common/lenient/test__qualname.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. 
# See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the :func:`iris.common.lenient._qualname`. - -""" +"""Unit tests for the :func:`iris.common.lenient._qualname`.""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/unit/common/metadata/test_AncillaryVariableMetadata.py b/lib/iris/tests/unit/common/metadata/test_AncillaryVariableMetadata.py index 2bc9cd9191..b7304f4301 100644 --- a/lib/iris/tests/unit/common/metadata/test_AncillaryVariableMetadata.py +++ b/lib/iris/tests/unit/common/metadata/test_AncillaryVariableMetadata.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the :class:`iris.common.metadata.AncillaryVariableMetadata`. - -""" +"""Unit tests for the :class:`iris.common.metadata.AncillaryVariableMetadata`.""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/unit/common/metadata/test_BaseMetadata.py b/lib/iris/tests/unit/common/metadata/test_BaseMetadata.py index 60bde320a5..73886882de 100644 --- a/lib/iris/tests/unit/common/metadata/test_BaseMetadata.py +++ b/lib/iris/tests/unit/common/metadata/test_BaseMetadata.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the :class:`iris.common.metadata.BaseMetadata`. - -""" +"""Unit tests for the :class:`iris.common.metadata.BaseMetadata`.""" # Import iris.tests first so that some things can be initialised before # importing anything else. 
diff --git a/lib/iris/tests/unit/common/metadata/test_CellMeasureMetadata.py b/lib/iris/tests/unit/common/metadata/test_CellMeasureMetadata.py index 4801f99385..3618d2ace5 100644 --- a/lib/iris/tests/unit/common/metadata/test_CellMeasureMetadata.py +++ b/lib/iris/tests/unit/common/metadata/test_CellMeasureMetadata.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the :class:`iris.common.metadata.CellMeasureMetadata`. - -""" +"""Unit tests for the :class:`iris.common.metadata.CellMeasureMetadata`.""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/unit/common/metadata/test_CoordMetadata.py b/lib/iris/tests/unit/common/metadata/test_CoordMetadata.py index b5cdeaca47..010838b7fc 100644 --- a/lib/iris/tests/unit/common/metadata/test_CoordMetadata.py +++ b/lib/iris/tests/unit/common/metadata/test_CoordMetadata.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the :class:`iris.common.metadata.CoordMetadata`. - -""" +"""Unit tests for the :class:`iris.common.metadata.CoordMetadata`.""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py b/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py index 0bcb553729..92af65da5c 100644 --- a/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py +++ b/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the :class:`iris.common.metadata.CubeMetadata`. 
- -""" +"""Unit tests for the :class:`iris.common.metadata.CubeMetadata`.""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/unit/common/metadata/test__NamedTupleMeta.py b/lib/iris/tests/unit/common/metadata/test__NamedTupleMeta.py index bdf48029e4..5099645082 100644 --- a/lib/iris/tests/unit/common/metadata/test__NamedTupleMeta.py +++ b/lib/iris/tests/unit/common/metadata/test__NamedTupleMeta.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the :class:`iris.common.metadata._NamedTupleMeta`. - -""" +"""Unit tests for the :class:`iris.common.metadata._NamedTupleMeta`.""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/unit/common/metadata/test_hexdigest.py b/lib/iris/tests/unit/common/metadata/test_hexdigest.py index 035e051440..1a0a0e0120 100644 --- a/lib/iris/tests/unit/common/metadata/test_hexdigest.py +++ b/lib/iris/tests/unit/common/metadata/test_hexdigest.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the :func:`iris.common.metadata.hexdigest`. - -""" +"""Unit tests for the :func:`iris.common.metadata.hexdigest`.""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/unit/common/metadata/test_metadata_filter.py b/lib/iris/tests/unit/common/metadata/test_metadata_filter.py index c77d0dc357..586a5fe5f8 100644 --- a/lib/iris/tests/unit/common/metadata/test_metadata_filter.py +++ b/lib/iris/tests/unit/common/metadata/test_metadata_filter.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. 
# See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the :func:`iris.common.metadata_filter`. - -""" +"""Unit tests for the :func:`iris.common.metadata_filter`.""" import numpy as np diff --git a/lib/iris/tests/unit/common/metadata/test_metadata_manager_factory.py b/lib/iris/tests/unit/common/metadata/test_metadata_manager_factory.py index 3eda14e635..e9ec42e04b 100644 --- a/lib/iris/tests/unit/common/metadata/test_metadata_manager_factory.py +++ b/lib/iris/tests/unit/common/metadata/test_metadata_manager_factory.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the :func:`iris.common.metadata.metadata_manager_factory`. - -""" +"""Unit tests for the :func:`iris.common.metadata.metadata_manager_factory`.""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/unit/common/mixin/test_CFVariableMixin.py b/lib/iris/tests/unit/common/mixin/test_CFVariableMixin.py index 2e858a74bf..020f18a358 100644 --- a/lib/iris/tests/unit/common/mixin/test_CFVariableMixin.py +++ b/lib/iris/tests/unit/common/mixin/test_CFVariableMixin.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the :class:`iris.common.mixin.CFVariableMixin`. - -""" +"""Unit tests for the :class:`iris.common.mixin.CFVariableMixin`.""" # Import iris.tests first so that some things can be initialised before # importing anything else. 
diff --git a/lib/iris/tests/unit/common/mixin/test_LimitedAttributeDict.py b/lib/iris/tests/unit/common/mixin/test_LimitedAttributeDict.py index 85d4cfe9a3..f538279bb1 100644 --- a/lib/iris/tests/unit/common/mixin/test_LimitedAttributeDict.py +++ b/lib/iris/tests/unit/common/mixin/test_LimitedAttributeDict.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the :class:`iris.common.mixin.LimitedAttributeDict`. - -""" +"""Unit tests for the :class:`iris.common.mixin.LimitedAttributeDict`.""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/unit/common/mixin/test__get_valid_standard_name.py b/lib/iris/tests/unit/common/mixin/test__get_valid_standard_name.py index d7b929eeb3..67ba108333 100644 --- a/lib/iris/tests/unit/common/mixin/test__get_valid_standard_name.py +++ b/lib/iris/tests/unit/common/mixin/test__get_valid_standard_name.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the :func:`iris.common.mixin._get_valid_standard_name`. - -""" +"""Unit tests for the :func:`iris.common.mixin._get_valid_standard_name`.""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/unit/common/resolve/test_Resolve.py b/lib/iris/tests/unit/common/resolve/test_Resolve.py index 96b12f149a..0bad967acb 100644 --- a/lib/iris/tests/unit/common/resolve/test_Resolve.py +++ b/lib/iris/tests/unit/common/resolve/test_Resolve.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the :class:`iris.common.resolve.Resolve`. 
- -""" +"""Unit tests for the :class:`iris.common.resolve.Resolve`.""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/unit/coord_categorisation/test_add_hour.py b/lib/iris/tests/unit/coord_categorisation/test_add_hour.py index 05a0f3e474..c6b0a53656 100644 --- a/lib/iris/tests/unit/coord_categorisation/test_add_hour.py +++ b/lib/iris/tests/unit/coord_categorisation/test_add_hour.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Test coordinate categorisation function add_hour. -""" +"""Test coordinate categorisation function add_hour.""" # import iris tests first so that some things can be initialised before # importing anything else diff --git a/lib/iris/tests/unit/coord_categorisation/test_coord_categorisation.py b/lib/iris/tests/unit/coord_categorisation/test_coord_categorisation.py index 2fc3db9b05..daf4b7e3f2 100644 --- a/lib/iris/tests/unit/coord_categorisation/test_coord_categorisation.py +++ b/lib/iris/tests/unit/coord_categorisation/test_coord_categorisation.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Test the coordinate categorisation functions. 
-""" +"""Test the coordinate categorisation functions.""" import warnings @@ -162,7 +161,6 @@ def test_add_season_nonstandard(cube, time_coord): ) def test_add_season_year(cube, time_coord, backwards, nonstandard): """Specific test to account for the extra use_year_at_season_start argument.""" - kwargs = dict( cube=cube, coord=time_coord, diff --git a/lib/iris/tests/unit/coord_systems/test_AlbersEqualArea.py b/lib/iris/tests/unit/coord_systems/test_AlbersEqualArea.py index 768cca70b7..6c28562133 100644 --- a/lib/iris/tests/unit/coord_systems/test_AlbersEqualArea.py +++ b/lib/iris/tests/unit/coord_systems/test_AlbersEqualArea.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the :class:`iris.coord_systems.AlbersEqualArea` class. - -""" +"""Unit tests for the :class:`iris.coord_systems.AlbersEqualArea` class.""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/unit/coord_systems/test_LambertAzimuthalEqualArea.py b/lib/iris/tests/unit/coord_systems/test_LambertAzimuthalEqualArea.py index a1d978811d..06b6aad5d3 100644 --- a/lib/iris/tests/unit/coord_systems/test_LambertAzimuthalEqualArea.py +++ b/lib/iris/tests/unit/coord_systems/test_LambertAzimuthalEqualArea.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the :class:`iris.coord_systems.LambertAzimuthalEqualArea` class. - -""" +"""Unit tests for the :class:`iris.coord_systems.LambertAzimuthalEqualArea` class.""" # Import iris.tests first so that some things can be initialised before # importing anything else. 
diff --git a/lib/iris/tests/unit/coords/test_CellMethod.py b/lib/iris/tests/unit/coords/test_CellMethod.py index 58a10aff50..f2a468ecf1 100644 --- a/lib/iris/tests/unit/coords/test_CellMethod.py +++ b/lib/iris/tests/unit/coords/test_CellMethod.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the :class:`iris.coords.CellMethod`. -""" +"""Unit tests for the :class:`iris.coords.CellMethod`.""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/unit/coords/test_Coord.py b/lib/iris/tests/unit/coords/test_Coord.py index 5745c870ce..c0accfe071 100644 --- a/lib/iris/tests/unit/coords/test_Coord.py +++ b/lib/iris/tests/unit/coords/test_Coord.py @@ -19,8 +19,9 @@ import iris from iris.coords import AuxCoord, Coord, DimCoord from iris.cube import Cube -from iris.exceptions import IrisVagueMetadataWarning, UnitConversionError +from iris.exceptions import UnitConversionError from iris.tests.unit.coords import CoordTestMixin +from iris.warnings import IrisVagueMetadataWarning Pair = collections.namedtuple("Pair", "points bounds") @@ -240,9 +241,12 @@ def _mock_coord(self): coord = mock.Mock( spec=Coord, ndim=1, - points=np.array([mock.sentinel.time]), - bounds=np.array([[mock.sentinel.lower, mock.sentinel.upper]]), ) + coord.core_points = lambda: np.array([mock.sentinel.time]) + coord.core_bounds = lambda: np.array( + [[mock.sentinel.lower, mock.sentinel.upper]] + ) + return coord def test_time_as_object(self): diff --git a/lib/iris/tests/unit/cube/test_Cube.py b/lib/iris/tests/unit/cube/test_Cube.py index 27f1756770..ec94e346b2 100644 --- a/lib/iris/tests/unit/cube/test_Cube.py +++ b/lib/iris/tests/unit/cube/test_Cube.py @@ -33,12 +33,11 @@ AncillaryVariableNotFoundError, CellMeasureNotFoundError, CoordinateNotFoundError, - IrisUserWarning, - IrisVagueMetadataWarning, 
UnitConversionError, ) import iris.tests.stock as stock from iris.tests.stock.mesh import sample_mesh, sample_mesh_cube, sample_meshcoord +from iris.warnings import IrisUserWarning, IrisVagueMetadataWarning class Test___init___data(tests.IrisTest): @@ -956,7 +955,9 @@ def test_weights_dim_coord(self): class Test_slices_dim_order(tests.IrisTest): - """This class tests the capability of iris.cube.Cube.slices(), including its + """Test the capability of iris.cube.Cube.slices(). + + Test the capability of iris.cube.Cube.slices(), including its ability to correctly re-order the dimensions. """ @@ -2839,11 +2840,20 @@ def test_unit_multiply(self): class Test__eq__data(tests.IrisTest): """Partial cube equality testing, for data type only.""" + def test_cube_identical_to_itself(self): + cube = Cube([1.0]) + self.assertTrue(cube == cube) + def test_data_float_eq(self): cube1 = Cube([1.0]) cube2 = Cube([1.0]) self.assertTrue(cube1 == cube2) + def test_data_float_nan_eq(self): + cube1 = Cube([np.nan, 1.0]) + cube2 = Cube([np.nan, 1.0]) + self.assertTrue(cube1 == cube2) + def test_data_float_eqtol(self): val1 = np.array(1.0, dtype=np.float32) # NOTE: Since v2.3, Iris uses "allclose". 
Prior to that we used diff --git a/lib/iris/tests/unit/cube/test_Cube__aggregated_by.py b/lib/iris/tests/unit/cube/test_Cube__aggregated_by.py index 67d66373ff..64c99ebd4b 100644 --- a/lib/iris/tests/unit/cube/test_Cube__aggregated_by.py +++ b/lib/iris/tests/unit/cube/test_Cube__aggregated_by.py @@ -20,7 +20,6 @@ import iris.coords from iris.coords import AncillaryVariable, AuxCoord, CellMeasure, DimCoord from iris.cube import Cube -import iris.exceptions from iris.tests.stock import realistic_4d diff --git a/lib/iris/tests/unit/data_manager/test_DataManager.py b/lib/iris/tests/unit/data_manager/test_DataManager.py index f35c2fcfcb..b419e556a7 100644 --- a/lib/iris/tests/unit/data_manager/test_DataManager.py +++ b/lib/iris/tests/unit/data_manager/test_DataManager.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the :class:`iris._data_manager.DataManager`. - -""" +"""Unit tests for the :class:`iris._data_manager.DataManager`.""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/unit/experimental/stratify/test_relevel.py b/lib/iris/tests/unit/experimental/stratify/test_relevel.py index f587019f3a..1f0a5618aa 100644 --- a/lib/iris/tests/unit/experimental/stratify/test_relevel.py +++ b/lib/iris/tests/unit/experimental/stratify/test_relevel.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the :func:`iris.experimental.stratify.relevel` function. - -""" +"""Unit tests for the :func:`iris.experimental.stratify.relevel` function.""" # Import iris.tests first so that some things can be initialised before # importing anything else. 
diff --git a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridAuxiliaryCoordinateVariable.py b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridAuxiliaryCoordinateVariable.py index 5f613840a3..4a45e9a4df 100644 --- a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridAuxiliaryCoordinateVariable.py +++ b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridAuxiliaryCoordinateVariable.py @@ -18,11 +18,11 @@ import numpy as np import pytest -import iris.exceptions from iris.experimental.ugrid.cf import CFUGridAuxiliaryCoordinateVariable from iris.tests.unit.experimental.ugrid.cf.test_CFUGridReader import ( netcdf_ugrid_variable, ) +import iris.warnings def named_variable(name): @@ -201,7 +201,7 @@ def test_warn(self): def operation(warn: bool): warnings.warn( "emit at least 1 warning", - category=iris.exceptions.IrisUserWarning, + category=iris.warnings.IrisUserWarning, ) result = CFUGridAuxiliaryCoordinateVariable.identify(vars_all, warn=warn) self.assertDictEqual({}, result) @@ -210,7 +210,7 @@ def operation(warn: bool): warn_regex = ( rf"Missing CF-netCDF auxiliary coordinate variable {subject_name}.*" ) - with pytest.warns(iris.exceptions.IrisCfMissingVarWarning, match=warn_regex): + with pytest.warns(iris.warnings.IrisCfMissingVarWarning, match=warn_regex): operation(warn=True) with pytest.warns() as record: operation(warn=False) @@ -220,7 +220,7 @@ def operation(warn: bool): # String variable warning. 
warn_regex = r".*is a CF-netCDF label variable.*" vars_all[subject_name] = netcdf_ugrid_variable(subject_name, "", np.bytes_) - with pytest.warns(iris.exceptions.IrisCfLabelVarWarning, match=warn_regex): + with pytest.warns(iris.warnings.IrisCfLabelVarWarning, match=warn_regex): operation(warn=True) with pytest.warns() as record: operation(warn=False) diff --git a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridConnectivityVariable.py b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridConnectivityVariable.py index dcddfa08b8..5144729c7f 100644 --- a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridConnectivityVariable.py +++ b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridConnectivityVariable.py @@ -18,12 +18,12 @@ import numpy as np import pytest -import iris.exceptions from iris.experimental.ugrid.cf import CFUGridConnectivityVariable from iris.experimental.ugrid.mesh import Connectivity from iris.tests.unit.experimental.ugrid.cf.test_CFUGridReader import ( netcdf_ugrid_variable, ) +import iris.warnings def named_variable(name): @@ -186,14 +186,14 @@ def test_warn(self): def operation(warn: bool): warnings.warn( "emit at least 1 warning", - category=iris.exceptions.IrisUserWarning, + category=iris.warnings.IrisUserWarning, ) result = CFUGridConnectivityVariable.identify(vars_all, warn=warn) self.assertDictEqual({}, result) # Missing warning. warn_regex = rf"Missing CF-UGRID connectivity variable {subject_name}.*" - with pytest.warns(iris.exceptions.IrisCfMissingVarWarning, match=warn_regex): + with pytest.warns(iris.warnings.IrisCfMissingVarWarning, match=warn_regex): operation(warn=True) with pytest.warns() as record: operation(warn=False) @@ -203,7 +203,7 @@ def operation(warn: bool): # String variable warning. 
warn_regex = r".*is a CF-netCDF label variable.*" vars_all[subject_name] = netcdf_ugrid_variable(subject_name, "", np.bytes_) - with pytest.warns(iris.exceptions.IrisCfLabelVarWarning, match=warn_regex): + with pytest.warns(iris.warnings.IrisCfLabelVarWarning, match=warn_regex): operation(warn=True) with pytest.warns() as record: operation(warn=False) diff --git a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridMeshVariable.py b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridMeshVariable.py index ccefe01b3c..ef5447382a 100644 --- a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridMeshVariable.py +++ b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridMeshVariable.py @@ -18,11 +18,11 @@ import numpy as np import pytest -import iris.exceptions from iris.experimental.ugrid.cf import CFUGridMeshVariable from iris.tests.unit.experimental.ugrid.cf.test_CFUGridReader import ( netcdf_ugrid_variable, ) +import iris.warnings def named_variable(name): @@ -233,14 +233,14 @@ def test_warn(self): def operation(warn: bool): warnings.warn( "emit at least 1 warning", - category=iris.exceptions.IrisUserWarning, + category=iris.warnings.IrisUserWarning, ) result = CFUGridMeshVariable.identify(vars_all, warn=warn) self.assertDictEqual({}, result) # Missing warning. warn_regex = rf"Missing CF-UGRID mesh variable {subject_name}.*" - with pytest.warns(iris.exceptions.IrisCfMissingVarWarning, match=warn_regex): + with pytest.warns(iris.warnings.IrisCfMissingVarWarning, match=warn_regex): operation(warn=True) with pytest.warns() as record: operation(warn=False) @@ -250,7 +250,7 @@ def operation(warn: bool): # String variable warning. 
warn_regex = r".*is a CF-netCDF label variable.*" vars_all[subject_name] = netcdf_ugrid_variable(subject_name, "", np.bytes_) - with pytest.warns(iris.exceptions.IrisCfLabelVarWarning, match=warn_regex): + with pytest.warns(iris.warnings.IrisCfLabelVarWarning, match=warn_regex): operation(warn=True) with pytest.warns() as record: operation(warn=False) diff --git a/lib/iris/tests/unit/experimental/ugrid/load/test_load_mesh.py b/lib/iris/tests/unit/experimental/ugrid/load/test_load_mesh.py index 09e15915db..382a36fa71 100644 --- a/lib/iris/tests/unit/experimental/ugrid/load/test_load_mesh.py +++ b/lib/iris/tests/unit/experimental/ugrid/load/test_load_mesh.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the :func:`iris.experimental.ugrid.load.load_mesh` function. - -""" +"""Unit tests for the :func:`iris.experimental.ugrid.load.load_mesh` function.""" # Import iris.tests first so that some things can be initialised before # importing anything else. import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/unit/experimental/ugrid/load/test_load_meshes.py b/lib/iris/tests/unit/experimental/ugrid/load/test_load_meshes.py index d0cfdd4309..8932989252 100644 --- a/lib/iris/tests/unit/experimental/ugrid/load/test_load_meshes.py +++ b/lib/iris/tests/unit/experimental/ugrid/load/test_load_meshes.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the :func:`iris.experimental.ugrid.load.load_meshes` function. - -""" +"""Unit tests for the :func:`iris.experimental.ugrid.load.load_meshes` function.""" # Import iris.tests first so that some things can be initialised before # importing anything else. 
import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py index bc0daf14bc..48f9910099 100644 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py +++ b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py @@ -1181,7 +1181,6 @@ def test_face_dimension_set(self): def test_remove_connectivities(self): """Do what 1D test could not - test removal of optional connectivity.""" - # Add an optional connectivity. self.mesh.add_connectivities(self.FACE_FACE) # Attempt to remove a non-existent connectivity. diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py index bf5500c7ed..2282bc07b9 100644 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py +++ b/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the :class:`iris.experimental.ugrid.mesh.MeshCoord`. - -""" +"""Unit tests for the :class:`iris.experimental.ugrid.mesh.MeshCoord`.""" # Import iris.tests first so that some things can be initialised before # importing anything else. import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh__from_coords.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh__from_coords.py index 4d12a73a9e..31c5dbfcc0 100644 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh__from_coords.py +++ b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh__from_coords.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the :meth:`iris.experimental.ugrid.mesh.Mesh.from_coords`. 
- -""" +"""Unit tests for the :meth:`iris.experimental.ugrid.mesh.Mesh.from_coords`.""" # Import iris.tests first so that some things can be initialised before # importing anything else. import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/unit/experimental/ugrid/metadata/test_ConnectivityMetadata.py b/lib/iris/tests/unit/experimental/ugrid/metadata/test_ConnectivityMetadata.py index b0d4d70cbe..fa62a9f7e2 100644 --- a/lib/iris/tests/unit/experimental/ugrid/metadata/test_ConnectivityMetadata.py +++ b/lib/iris/tests/unit/experimental/ugrid/metadata/test_ConnectivityMetadata.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the :class:`iris.experimental.ugrid.metadata.ConnectivityMetadata`. - -""" +"""Unit tests for the :class:`iris.experimental.ugrid.metadata.ConnectivityMetadata`.""" # Import iris.tests first so that some things can be initialised before # importing anything else. import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshCoordMetadata.py b/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshCoordMetadata.py index dbf1446b52..fc9242a8f9 100644 --- a/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshCoordMetadata.py +++ b/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshCoordMetadata.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the :class:`iris.experimental.ugrid.metadata.MeshCoordMetadata`. - -""" +"""Unit tests for the :class:`iris.experimental.ugrid.metadata.MeshCoordMetadata`.""" # Import iris.tests first so that some things can be initialised before # importing anything else. 
import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshMetadata.py b/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshMetadata.py index 98e918c342..080d94c188 100644 --- a/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshMetadata.py +++ b/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshMetadata.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the :class:`iris.experimental.ugrid.metadata.MeshMetadata`. - -""" +"""Unit tests for the :class:`iris.experimental.ugrid.metadata.MeshMetadata`.""" # Import iris.tests first so that some things can be initialised before # importing anything else. import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/unit/experimental/ugrid/utils/test_recombine_submeshes.py b/lib/iris/tests/unit/experimental/ugrid/utils/test_recombine_submeshes.py index 9fcb775433..f01dc345fa 100644 --- a/lib/iris/tests/unit/experimental/ugrid/utils/test_recombine_submeshes.py +++ b/lib/iris/tests/unit/experimental/ugrid/utils/test_recombine_submeshes.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for :func:`iris.experimental.ugrid.utils.recombine_submeshes`. - -""" +"""Unit tests for :func:`iris.experimental.ugrid.utils.recombine_submeshes`.""" # Import iris.tests first so that some things can be initialised before # importing anything else. 
import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/unit/fileformats/cf/test_CFReader.py b/lib/iris/tests/unit/fileformats/cf/test_CFReader.py index 0e6805d104..667c679bfb 100644 --- a/lib/iris/tests/unit/fileformats/cf/test_CFReader.py +++ b/lib/iris/tests/unit/fileformats/cf/test_CFReader.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the `iris.fileformats.cf.CFReader` class. - -""" +"""Unit tests for the `iris.fileformats.cf.CFReader` class.""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/unit/fileformats/ff/test_FF2PP.py b/lib/iris/tests/unit/fileformats/ff/test_FF2PP.py index 2c19bdc12e..4d031ac4a6 100644 --- a/lib/iris/tests/unit/fileformats/ff/test_FF2PP.py +++ b/lib/iris/tests/unit/fileformats/ff/test_FF2PP.py @@ -14,10 +14,11 @@ import numpy as np -from iris.exceptions import IrisLoadWarning, NotYetImplementedError +from iris.exceptions import NotYetImplementedError import iris.fileformats._ff as ff from iris.fileformats._ff import FF2PP import iris.fileformats.pp as pp +from iris.warnings import IrisLoadWarning # PP-field: LBPACK N1 values. _UNPACKED = 0 diff --git a/lib/iris/tests/unit/fileformats/name_loaders/test__build_cell_methods.py b/lib/iris/tests/unit/fileformats/name_loaders/test__build_cell_methods.py index 2d9faa90e5..ff80acf95b 100644 --- a/lib/iris/tests/unit/fileformats/name_loaders/test__build_cell_methods.py +++ b/lib/iris/tests/unit/fileformats/name_loaders/test__build_cell_methods.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for :func:`iris.fileformats.name_loaders._build_cell_methods`. 
- -""" +"""Unit tests for :func:`iris.fileformats.name_loaders._build_cell_methods`.""" # Import iris.tests first so that some things can be initialised before # importing anything else. @@ -13,8 +11,8 @@ from unittest import mock import iris.coords -from iris.exceptions import IrisLoadWarning from iris.fileformats.name_loaders import _build_cell_methods +from iris.warnings import IrisLoadWarning class Tests(tests.IrisTest): diff --git a/lib/iris/tests/unit/fileformats/name_loaders/test__build_lat_lon_for_NAME_timeseries.py b/lib/iris/tests/unit/fileformats/name_loaders/test__build_lat_lon_for_NAME_timeseries.py index 0a020e6142..9cc7ec356a 100644 --- a/lib/iris/tests/unit/fileformats/name_loaders/test__build_lat_lon_for_NAME_timeseries.py +++ b/lib/iris/tests/unit/fileformats/name_loaders/test__build_lat_lon_for_NAME_timeseries.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for :func:`iris.analysis.name_loaders._build_lat_lon_for_NAME_timeseries`. - -""" +"""Unit tests for :func:`iris.analysis.name_loaders._build_lat_lon_for_NAME_timeseries`.""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/unit/fileformats/name_loaders/test__calc_integration_period.py b/lib/iris/tests/unit/fileformats/name_loaders/test__calc_integration_period.py index fb28ad911b..35ca2760b8 100644 --- a/lib/iris/tests/unit/fileformats/name_loaders/test__calc_integration_period.py +++ b/lib/iris/tests/unit/fileformats/name_loaders/test__calc_integration_period.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for :func:`iris.fileformats.name_loaders.__calc_integration_period`. 
- -""" +"""Unit tests for :func:`iris.fileformats.name_loaders.__calc_integration_period`.""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/unit/fileformats/name_loaders/test__generate_cubes.py b/lib/iris/tests/unit/fileformats/name_loaders/test__generate_cubes.py index f41c52c105..fc00db9663 100644 --- a/lib/iris/tests/unit/fileformats/name_loaders/test__generate_cubes.py +++ b/lib/iris/tests/unit/fileformats/name_loaders/test__generate_cubes.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for :func:`iris.analysis.name_loaders._generate_cubes`. - -""" +"""Unit tests for :func:`iris.analysis.name_loaders._generate_cubes`.""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py index 8107a869f4..845b88536a 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py @@ -2,20 +2,18 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the module :mod:`iris.fileformats._nc_load_rules.actions`. 
- -""" +"""Unit tests for the module :mod:`iris.fileformats._nc_load_rules.actions`.""" from pathlib import Path import shutil import tempfile import warnings -from iris.exceptions import IrisLoadWarning import iris.fileformats._nc_load_rules.engine from iris.fileformats.cf import CFReader import iris.fileformats.netcdf from iris.fileformats.netcdf.loader import _load_cube from iris.tests.stock.netcdf import ncgen_from_cdl +from iris.warnings import IrisLoadWarning """ Notes on testing method. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/engine/test_engine.py b/lib/iris/tests/unit/fileformats/nc_load_rules/engine/test_engine.py index e54c6938bc..7aaca67326 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/engine/test_engine.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/engine/test_engine.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the :mod:`iris.fileformats._nc_load_rules.engine` module. - -""" +"""Unit tests for the :mod:`iris.fileformats._nc_load_rules.engine` module.""" from unittest import mock from iris.fileformats._nc_load_rules.engine import Engine, FactEntity diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test__normalise_bounds_units.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test__normalise_bounds_units.py new file mode 100644 index 0000000000..337279426e --- /dev/null +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test__normalise_bounds_units.py @@ -0,0 +1,102 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
+"""Test function :func:`iris.fileformats._nc_load_rules.helpers._normalise_bounds_units`.""" + +# import iris tests first so that some things can be initialised before +# importing anything else +from typing import Optional +from unittest import mock + +import numpy as np +import pytest + +from iris.fileformats._nc_load_rules.helpers import ( + _normalise_bounds_units, + _WarnComboIgnoringCfLoad, +) +from iris.warnings import IrisCfLoadWarning + +BOUNDS = mock.sentinel.bounds +CF_NAME = "dummy_bnds" + + +def _make_cf_bounds_var( + units: Optional[str] = None, + unitless: bool = False, +) -> mock.MagicMock: + """Construct a mock CF bounds variable.""" + if units is None: + units = "days since 1970-01-01" + + cf_data = mock.Mock(spec=[]) + # we want to mock the absence of flag attributes to helpers.get_attr_units + # see https://docs.python.org/3/library/unittest.mock.html#deleting-attributes + del cf_data.flag_values + del cf_data.flag_masks + del cf_data.flag_meanings + + cf_var = mock.MagicMock( + cf_name=CF_NAME, + cf_data=cf_data, + units=units, + calendar=None, + dtype=float, + ) + + if unitless: + del cf_var.units + + return cf_var + + +def test_unitless() -> None: + """Test bounds variable with no units.""" + cf_bounds_var = _make_cf_bounds_var(unitless=True) + result = _normalise_bounds_units(None, cf_bounds_var, BOUNDS) + assert result == BOUNDS + + +def test_invalid_units__pass_through() -> None: + """Test bounds variable with invalid units.""" + units = "invalid" + cf_bounds_var = _make_cf_bounds_var(units=units) + wmsg = f"Ignoring invalid units {units!r} on netCDF variable {CF_NAME!r}" + with pytest.warns(_WarnComboIgnoringCfLoad, match=wmsg): + result = _normalise_bounds_units(None, cf_bounds_var, BOUNDS) + assert result == BOUNDS + + +@pytest.mark.parametrize("units", ["unknown", "no_unit", "1", "kelvin"]) +def test_ignore_bounds(units) -> None: + """Test bounds variable with incompatible units compared to points.""" + points_units = "km" + 
cf_bounds_var = _make_cf_bounds_var(units=units) + wmsg = ( + f"Ignoring bounds on NetCDF variable {CF_NAME!r}. " + f"Expected units compatible with {points_units!r}" + ) + with pytest.warns(IrisCfLoadWarning, match=wmsg): + result = _normalise_bounds_units(points_units, cf_bounds_var, BOUNDS) + assert result is None + + +def test_compatible() -> None: + """Test bounds variable with compatible units requiring conversion.""" + points_units, bounds_units = "days since 1970-01-01", "hours since 1970-01-01" + cf_bounds_var = _make_cf_bounds_var(units=bounds_units) + bounds = np.arange(10, dtype=float) * 24 + result = _normalise_bounds_units(points_units, cf_bounds_var, bounds) + expected = bounds / 24 + np.testing.assert_array_equal(result, expected) + + +def test_same_units() -> None: + """Test bounds variable with same units as points.""" + units = "days since 1970-01-01" + cf_bounds_var = _make_cf_bounds_var(units=units) + bounds = np.arange(10, dtype=float) + result = _normalise_bounds_units(units, cf_bounds_var, bounds) + np.testing.assert_array_equal(result, bounds) + assert result is bounds diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_ancil_var.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_ancil_var.py index 5ee0c2d992..0193ef68d5 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_ancil_var.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_ancil_var.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Test function :func:`iris.fileformats._nc_load_rules.helpers.build_ancil_var`. 
- -""" +"""Test function :func:`iris.fileformats._nc_load_rules.helpers.build_ancil_var`.""" from unittest import mock diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_auxiliary_coordinate.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_auxiliary_coordinate.py index e2335d2ee6..73533a9c33 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_auxiliary_coordinate.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_auxiliary_coordinate.py @@ -45,7 +45,7 @@ def setUp(self): cf_data=cf_data, standard_name=None, long_name="wibble", - units="m", + units="km", shape=points.shape, size=np.prod(points.shape), dtype=points.dtype, @@ -96,31 +96,42 @@ def _get_per_test_bounds_var(_coord_unused): ) @classmethod - def _make_array_and_cf_data(cls, dimension_names): + def _make_array_and_cf_data(cls, dimension_names, rollaxis=False): shape = tuple(cls.dim_names_lens[name] for name in dimension_names) cf_data = mock.MagicMock(_FillValue=None, spec=[]) cf_data.chunking = mock.MagicMock(return_value=shape) - return np.zeros(shape), cf_data - - def _make_cf_bounds_var(self, dimension_names): + data = np.arange(np.prod(shape), dtype=float) + if rollaxis: + shape = shape[1:] + (shape[0],) + data = data.reshape(shape) + data = np.rollaxis(data, -1) + else: + data = data.reshape(shape) + return data, cf_data + + def _make_cf_bounds_var(self, dimension_names, rollaxis=False): # Create the bounds cf variable. - bounds, cf_data = self._make_array_and_cf_data(dimension_names) + bounds, cf_data = self._make_array_and_cf_data( + dimension_names, rollaxis=rollaxis + ) + bounds *= 1000 # Convert to metres. 
cf_bounds_var = mock.Mock( spec=CFVariable, dimensions=dimension_names, cf_name="wibble_bnds", cf_data=cf_data, + units="m", shape=bounds.shape, size=np.prod(bounds.shape), dtype=bounds.dtype, __getitem__=lambda self, key: bounds[key], ) - return bounds, cf_bounds_var + return cf_bounds_var - def _check_case(self, dimension_names): - bounds, self.cf_bounds_var = self._make_cf_bounds_var( - dimension_names=dimension_names + def _check_case(self, dimension_names, rollaxis=False): + self.cf_bounds_var = self._make_cf_bounds_var( + dimension_names, rollaxis=rollaxis ) # Asserts must lie within context manager because of deferred loading. @@ -133,15 +144,15 @@ def _check_case(self, dimension_names): expected_list = [(self.expected_coord, self.cf_coord_var.cf_name)] self.assertEqual(self.engine.cube_parts["coordinates"], expected_list) - def test_fastest_varying_vertex_dim(self): + def test_fastest_varying_vertex_dim__normalise_bounds(self): # The usual order. self._check_case(dimension_names=("foo", "bar", "nv")) - def test_slowest_varying_vertex_dim(self): + def test_slowest_varying_vertex_dim__normalise_bounds(self): # Bounds in the first (slowest varying) dimension. - self._check_case(dimension_names=("nv", "foo", "bar")) + self._check_case(dimension_names=("nv", "foo", "bar"), rollaxis=True) - def test_fastest_with_different_dim_names(self): + def test_fastest_with_different_dim_names__normalise_bounds(self): # Despite the dimension names ('x', and 'y') differing from the coord's # which are 'foo' and 'bar' (as permitted by the cf spec), # this should still work because the vertex dim is the fastest varying. 
@@ -232,6 +243,7 @@ def setUp(self): ) points = np.arange(6) + units = "days since 1970-01-01" self.cf_coord_var = mock.Mock( spec=threadsafe_nc.VariableWrapper, dimensions=("foo",), @@ -241,7 +253,7 @@ def setUp(self): cf_data=mock.MagicMock(chunking=mock.Mock(return_value=None), spec=[]), standard_name=None, long_name="wibble", - units="days since 1970-01-01", + units=units, calendar=None, shape=points.shape, size=np.prod(points.shape), @@ -250,13 +262,20 @@ def setUp(self): ) bounds = np.arange(12).reshape(6, 2) + cf_data = mock.MagicMock(chunking=mock.Mock(return_value=None)) + # we want to mock the absence of flag attributes to helpers.get_attr_units + # see https://docs.python.org/3/library/unittest.mock.html#deleting-attributes + del cf_data.flag_values + del cf_data.flag_masks + del cf_data.flag_meanings self.cf_bounds_var = mock.Mock( spec=threadsafe_nc.VariableWrapper, dimensions=("x", "nv"), scale_factor=1, add_offset=0, cf_name="wibble_bnds", - cf_data=mock.MagicMock(chunking=mock.Mock(return_value=None)), + cf_data=cf_data, + units=units, shape=bounds.shape, size=np.prod(bounds.shape), dtype=bounds.dtype, diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cell_measure.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cell_measure.py index 74e7d5117d..bdd057b537 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cell_measure.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cell_measure.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Test function :func:`iris.fileformats._nc_load_rules.helpers.build_cell_measure`. 
- -""" +"""Test function :func:`iris.fileformats._nc_load_rules.helpers.build_cell_measure`.""" from unittest import mock diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_dimension_coordinate.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_dimension_coordinate.py index b2c7d4f4d6..28d710d6b8 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_dimension_coordinate.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_dimension_coordinate.py @@ -22,6 +22,26 @@ from iris.fileformats._nc_load_rules.helpers import build_dimension_coordinate +def _make_bounds_var(bounds, dimensions, units): + bounds = np.array(bounds) + cf_data = mock.Mock(spec=[]) + # we want to mock the absence of flag attributes to helpers.get_attr_units + # see https://docs.python.org/3/library/unittest.mock.html#deleting-attributes + del cf_data.flag_values + del cf_data.flag_masks + del cf_data.flag_meanings + return mock.Mock( + dimensions=dimensions, + cf_name="wibble_bnds", + cf_data=cf_data, + units=units, + calendar=None, + shape=bounds.shape, + dtype=bounds.dtype, + __getitem__=lambda self, key: bounds[key], + ) + + class RulesTestMixin: def setUp(self): # Create dummy pyke engine. 
@@ -65,12 +85,9 @@ def setUp(self): RulesTestMixin.setUp(self) bounds = np.arange(12).reshape(6, 2) - self.cf_bounds_var = mock.Mock( - dimensions=("x", "nv"), - cf_name="wibble_bnds", - shape=bounds.shape, - __getitem__=lambda self, key: bounds[key], - ) + dimensions = ("x", "nv") + units = "days since 1970-01-01" + self.cf_bounds_var = _make_bounds_var(bounds, dimensions, units) self.bounds = bounds # test_dimcoord_not_added() and test_auxcoord_not_added have been @@ -276,25 +293,22 @@ def setUp(self): standard_name=None, long_name="wibble", cf_data=mock.Mock(spec=[]), - units="m", + units="km", shape=points.shape, dtype=points.dtype, __getitem__=lambda self, key: points[key], ) - def test_slowest_varying_vertex_dim(self): + def test_slowest_varying_vertex_dim__normalise_bounds(self): # Create the bounds cf variable. - bounds = np.arange(12).reshape(2, 6) - self.cf_bounds_var = mock.Mock( - dimensions=("nv", "foo"), - cf_name="wibble_bnds", - shape=bounds.shape, - __getitem__=lambda self, key: bounds[key], - ) + bounds = np.arange(12).reshape(2, 6) * 1000 + dimensions = ("nv", "foo") + units = "m" + self.cf_bounds_var = _make_bounds_var(bounds, dimensions, units) # Expected bounds on the resulting coordinate should be rolled so that # the vertex dimension is at the end. 
- expected_bounds = bounds.transpose() + expected_bounds = bounds.transpose() / 1000 expected_coord = DimCoord( self.cf_coord_var[:], long_name=self.cf_coord_var.long_name, @@ -314,21 +328,18 @@ def test_slowest_varying_vertex_dim(self): expected_list = [(expected_coord, self.cf_coord_var.cf_name)] self.assertEqual(self.engine.cube_parts["coordinates"], expected_list) - def test_fastest_varying_vertex_dim(self): - bounds = np.arange(12).reshape(6, 2) - self.cf_bounds_var = mock.Mock( - dimensions=("foo", "nv"), - cf_name="wibble_bnds", - shape=bounds.shape, - __getitem__=lambda self, key: bounds[key], - ) + def test_fastest_varying_vertex_dim__normalise_bounds(self): + bounds = np.arange(12).reshape(6, 2) * 1000 + dimensions = ("foo", "nv") + units = "m" + self.cf_bounds_var = _make_bounds_var(bounds, dimensions, units) expected_coord = DimCoord( self.cf_coord_var[:], long_name=self.cf_coord_var.long_name, var_name=self.cf_coord_var.cf_name, units=self.cf_coord_var.units, - bounds=bounds, + bounds=bounds / 1000, ) # Asserts must lie within context manager because of deferred loading. @@ -342,24 +353,21 @@ def test_fastest_varying_vertex_dim(self): expected_list = [(expected_coord, self.cf_coord_var.cf_name)] self.assertEqual(self.engine.cube_parts["coordinates"], expected_list) - def test_fastest_with_different_dim_names(self): + def test_fastest_with_different_dim_names__normalise_bounds(self): # Despite the dimension names 'x' differing from the coord's # which is 'foo' (as permitted by the cf spec), # this should still work because the vertex dim is the fastest varying. 
- bounds = np.arange(12).reshape(6, 2) - self.cf_bounds_var = mock.Mock( - dimensions=("x", "nv"), - cf_name="wibble_bnds", - shape=bounds.shape, - __getitem__=lambda self, key: bounds[key], - ) + bounds = np.arange(12).reshape(6, 2) * 1000 + dimensions = ("x", "nv") + units = "m" + self.cf_bounds_var = _make_bounds_var(bounds, dimensions, units) expected_coord = DimCoord( self.cf_coord_var[:], long_name=self.cf_coord_var.long_name, var_name=self.cf_coord_var.cf_name, units=self.cf_coord_var.units, - bounds=bounds, + bounds=bounds / 1000, ) # Asserts must lie within context manager because of deferred loading. @@ -396,12 +404,8 @@ def _make_vars(self, points, bounds=None, units="degrees"): ) if bounds: bounds = np.array(bounds).reshape(self.cf_coord_var.shape + (2,)) - self.cf_bounds_var = mock.Mock( - dimensions=("x", "nv"), - cf_name="wibble_bnds", - shape=bounds.shape, - __getitem__=lambda self, key: bounds[key], - ) + dimensions = ("x", "nv") + self.cf_bounds_var = _make_bounds_var(bounds, dimensions, units) def _check_circular(self, circular, *args, **kwargs): if "coord_name" in kwargs: @@ -483,12 +487,13 @@ def _make_vars(self, bounds): # Note that for a scalar the shape of the array from # the cf var is (), rather than (1,). 
points = np.array([0.0]) + units = "degrees" self.cf_coord_var = mock.Mock( dimensions=(), cf_name="wibble", standard_name=None, long_name="wibble", - units="degrees", + units=units, cf_data=mock.Mock(spec=[]), shape=(), dtype=points.dtype, @@ -496,12 +501,8 @@ def _make_vars(self, bounds): ) bounds = np.array(bounds) - self.cf_bounds_var = mock.Mock( - dimensions=("bnds"), - cf_name="wibble_bnds", - shape=bounds.shape, - __getitem__=lambda self, key: bounds[key], - ) + dimensions = ("bnds",) + self.cf_bounds_var = _make_bounds_var(bounds, dimensions, units) def _assert_circular(self, value): with self.deferred_load_patch, self.get_cf_bounds_var_patch: diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_oblique_mercator_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_oblique_mercator_coordinate_system.py index 4554ef601d..3e12e33762 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_oblique_mercator_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_oblique_mercator_coordinate_system.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Test function :func:`iris.fileformats._nc_load_rules.helpers.build_oblique_mercator_coordinate_system`. 
- -""" +"""Test function :func:`iris.fileformats._nc_load_rules.helpers.build_oblique_mercator_coordinate_system`.""" from typing import List, NamedTuple, Type from unittest import mock diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_parse_cell_methods.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_parse_cell_methods.py index abbe71012d..0f8fd0152f 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_parse_cell_methods.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_parse_cell_methods.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for :func:`iris.fileformats.netcdf.parse_cell_methods`. - -""" +"""Unit tests for :func:`iris.fileformats.netcdf.parse_cell_methods`.""" # import iris tests first so that some things can be initialised before # importing anything else @@ -13,8 +11,8 @@ from unittest import mock from iris.coords import CellMethod -from iris.exceptions import IrisCfLoadWarning from iris.fileformats._nc_load_rules.helpers import parse_cell_methods +from iris.warnings import IrisCfLoadWarning class Test(tests.IrisTest): diff --git a/lib/iris/tests/unit/fileformats/netcdf/loader/test__chunk_control.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test__chunk_control.py index d032e2e576..bc7911578a 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/loader/test__chunk_control.py +++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test__chunk_control.py @@ -178,7 +178,9 @@ def test_no_chunks_from_file(tmp_filepath, save_cubelist_with_sigma): def test_as_dask(tmp_filepath, save_cubelist_with_sigma): - """This does not test return values, as we can't be sure + """Test as dask. + + No return values, as we can't be sure dask chunking behaviour won't change, or that it will differ from our own chunking behaviour. 
""" diff --git a/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_aux_factory.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_aux_factory.py index eacdee2782..5aafeaf0fc 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_aux_factory.py +++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_aux_factory.py @@ -15,8 +15,8 @@ from iris.coords import DimCoord from iris.cube import Cube -from iris.exceptions import IrisFactoryCoordNotFoundWarning from iris.fileformats.netcdf.loader import _load_aux_factory +from iris.warnings import IrisFactoryCoordNotFoundWarning class TestAtmosphereHybridSigmaPressureCoordinate(tests.IrisTest): diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py index 28ef972c8c..744051f02d 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py @@ -32,9 +32,9 @@ ) from iris.coords import AuxCoord, DimCoord from iris.cube import Cube -from iris.exceptions import IrisMaskValueMatchWarning from iris.fileformats.netcdf import Saver, _thread_safe_nc import iris.tests.stock as stock +from iris.warnings import IrisMaskValueMatchWarning class Test_write(tests.IrisTest): diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py index 69eabac5f5..c1bc411564 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py @@ -16,9 +16,9 @@ import numpy as np import pytest -from iris.exceptions import IrisMaskValueMatchWarning import iris.fileformats.netcdf._thread_safe_nc as threadsafe_nc from iris.fileformats.netcdf.saver import Saver, _FillvalueCheckInfo +from iris.warnings import IrisMaskValueMatchWarning class Test__lazy_stream_data: 
diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py index 070cdcaf8b..241ff5a5cc 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py @@ -45,17 +45,17 @@ def build_mesh( Mesh has faces edges, face-coords and edge-coords, numbers of which can be controlled. - Args: - - * n_nodes, n_faces, n_edges (int): + Parameters + ---------- + n_nodes, n_faces, n_edges : int Basic dimensions of mesh components. Zero means no such location. - * nodecoord_xyargs, edgecoord_xyargs, facecoord_xyargs (pair of dict): + nodecoord_xyargs, edgecoord_xyargs, facecoord_xyargs : pair of dict Pairs (x,y) of settings kwargs, applied after initial creation the relevant location coordinates. - * conn_role_kwargs (dict of string:dict): + conn_role_kwargs : dict of str Mapping from cf_role name to settings kwargs for connectivities, applied after initially creating them. - * mesh_kwargs (dict): + mesh_kwargs : dict Dictionary of key settings to apply to the Mesh, after creating it. """ @@ -129,11 +129,11 @@ def apply_xyargs(coords, xyargs): def make_mesh(basic=True, **kwargs): """Create a test mesh, with some built-in 'standard' settings. - Kwargs: - - * basic (bool): + Parameters + ---------- + basic : bool If true (default), create with 'standard' set of test properties. - * kwargs (dict): + **kwargs : dict Additional kwargs, passed through to 'build_mesh'. Items here override the 'standard' settings. @@ -193,13 +193,13 @@ def default_mesh(): def make_cube(mesh=None, location="face", **kwargs): """Create a test cube, based on a given mesh + location. 
- Kwargs: - - * mesh (:class:`iris.experimental.ugrid.mesh.Mesh` or None): + Parameters + ---------- + mesh : :class:`iris.experimental.ugrid.mesh.Mesh` or None, optional If None, use 'default_mesh()' - * location (string): + location : str, optional, default="face" Which mesh element to map the cube to. - * kwargs (dict): + **kwargs : dict, optional Additional property settings to apply to the cube (after creation). """ @@ -234,15 +234,15 @@ def add_height_dim(cube): def scan_dataset(filepath): """Snapshot a netcdf dataset (the key metadata). - Returns: - dimsdict, varsdict - * dimsdict (dict): - A map of dimension-name: length. - * varsdict (dict): - A map of each variable's properties, {var_name: propsdict} - Each propsdict is {attribute-name: value} over the var's ncattrs(). - Each propsdict ALSO contains a [_VAR_DIMS] entry listing the - variable's dims. + Returns + ------- + dimsdict : dict + A map of dimension-name: length. + varsdict : dict + A map of each variable's properties, {var_name: propsdict} + Each propsdict is {attribute-name: value} over the var's ncattrs(). + Each propsdict ALSO contains a [_VAR_DIMS] entry listing the + variable's dims. """ ds = _thread_safe_nc.DatasetWrapper(filepath) @@ -299,19 +299,23 @@ def vars_meshnames(vars): def vars_meshdim(vars, location, mesh_name=None): """Extract a dim-name for a given element location. - Args: - * vars (varsdict): - file varsdict, as returned from 'snapshot_dataset'. - * location (string): - a mesh location : 'node' / 'edge' / 'face' - * mesh_name (string or None): - If given, identifies the mesh var. - Otherwise, find a unique mesh var (i.e. there must be exactly 1). - - Returns: - dim_name (string) - The dim-name of the mesh dim for the given location. - + Parameters + ---------- + vars : varsdict + file varsdict, as returned from 'snapshot_dataset'. 
+ location : string + a mesh location : 'node' / 'edge' / 'face' + mesh_name : str or None, optional, default=None + If given, identifies the mesh var. + Otherwise, find a unique mesh var (i.e. there must be exactly 1). + + Returns + ------- + dim_name : str + The dim-name of the mesh dim for the given location. + + Notes + ----- TODO: relies on the element having coordinates, which in future will not always be the case. This can be fixed @@ -650,7 +654,6 @@ def test_alternate_cube_dim_order(self): def test_mixed_aux_coords(self): """``coordinates`` attribute should include mesh location coords and 'normal' coords.""" - cube = make_cube() mesh_dim = cube.mesh_dim() mesh_len = cube.shape[mesh_dim] diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test__fillvalue_report.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test__fillvalue_report.py index 77fd0cb0ca..0b37070241 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test__fillvalue_report.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test__fillvalue_report.py @@ -2,16 +2,15 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for :func:`iris.fileformats.netcdf.saver._fillvalue_report`. 
-""" +"""Unit tests for :func:`iris.fileformats.netcdf.saver._fillvalue_report`.""" import warnings import numpy as np import pytest -from iris.exceptions import IrisSaverFillValueWarning from iris.fileformats.netcdf._thread_safe_nc import default_fillvals from iris.fileformats.netcdf.saver import _fillvalue_report, _FillvalueCheckInfo +from iris.warnings import IrisSaverFillValueWarning class Test__fillvaluereport: diff --git a/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_units.py b/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_units.py index 03ec3f5f65..2767807377 100644 --- a/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_units.py +++ b/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_units.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the `iris.fileformats.nimrod_load_rules.units` function. - -""" +"""Unit tests for the `iris.fileformats.nimrod_load_rules.units` function.""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/unit/fileformats/pp/test_PPField.py b/lib/iris/tests/unit/fileformats/pp/test_PPField.py index de7c2b1ba5..f3aed0bea2 100644 --- a/lib/iris/tests/unit/fileformats/pp/test_PPField.py +++ b/lib/iris/tests/unit/fileformats/pp/test_PPField.py @@ -12,9 +12,9 @@ import numpy as np -from iris.exceptions import IrisDefaultingWarning, IrisMaskValueMatchWarning import iris.fileformats.pp as pp from iris.fileformats.pp import PPField, SplittableInt +from iris.warnings import IrisDefaultingWarning, IrisMaskValueMatchWarning # The PPField class is abstract, so to test we define a minimal, # concrete subclass with the `t1` and `t2` properties. 
diff --git a/lib/iris/tests/unit/fileformats/pp/test__data_bytes_to_shaped_array.py b/lib/iris/tests/unit/fileformats/pp/test__data_bytes_to_shaped_array.py index 1dbb2097fb..3bd8fcb8d7 100644 --- a/lib/iris/tests/unit/fileformats/pp/test__data_bytes_to_shaped_array.py +++ b/lib/iris/tests/unit/fileformats/pp/test__data_bytes_to_shaped_array.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the `iris.fileformats.pp._data_bytes_to_shaped_array` function. - -""" +"""Unit tests for the `iris.fileformats.pp._data_bytes_to_shaped_array` function.""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__all_other_rules.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__all_other_rules.py index 58b7c1f384..aa6b79e9a0 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__all_other_rules.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__all_other_rules.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the `iris.fileformats.pp_load_rules._all_other_rules` function. - -""" +"""Unit tests for the `iris.fileformats.pp_load_rules._all_other_rules` function.""" # Import iris.tests first so that some things can be initialised before # importing anything else. 
diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__model_level_number.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__model_level_number.py index fd3d236625..65c6bc8442 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__model_level_number.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__model_level_number.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for :func:`iris.fileformats.pp_load_rules._model_level_number`. - -""" +"""Unit tests for :func:`iris.fileformats.pp_load_rules._model_level_number`.""" # Import iris.tests first so that some things can be initialised before # importing anything else. diff --git a/lib/iris/tests/unit/fileformats/test_rules.py b/lib/iris/tests/unit/fileformats/test_rules.py index f8be75f8fc..d39b6a997d 100644 --- a/lib/iris/tests/unit/fileformats/test_rules.py +++ b/lib/iris/tests/unit/fileformats/test_rules.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Test iris.fileformats.rules.py - metadata translation rules. - -""" +"""Test iris.fileformats.rules.py - metadata translation rules.""" # import iris tests first so that some things can be initialised before # importing anything else diff --git a/lib/iris/tests/unit/fileformats/um/fast_load/__init__.py b/lib/iris/tests/unit/fileformats/um/fast_load/__init__.py index 89897d173b..1737fcb6cd 100644 --- a/lib/iris/tests/unit/fileformats/um/fast_load/__init__.py +++ b/lib/iris/tests/unit/fileformats/um/fast_load/__init__.py @@ -2,6 +2,4 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the module :mod:`iris.fileformats.um._fast_load`. 
- -""" +"""Unit tests for the module :mod:`iris.fileformats.um._fast_load`.""" diff --git a/lib/iris/tests/unit/plot/test_contourf.py b/lib/iris/tests/unit/plot/test_contourf.py index 64ab87f879..59fe631b67 100644 --- a/lib/iris/tests/unit/plot/test_contourf.py +++ b/lib/iris/tests/unit/plot/test_contourf.py @@ -93,7 +93,7 @@ def test_skip_contour(self): def test_apply_contour_nans(self): # Presence of nans should not prevent contours being added. cube = simple_2d() - cube.data = cube.data.astype(np.float_) + cube.data = cube.data.astype(np.float64) cube.data[0, 0] = np.nan levels = [2, 4, 6, 8] diff --git a/lib/iris/tests/unit/plot/test_plot.py b/lib/iris/tests/unit/plot/test_plot.py index c4f65fb1a6..6adf1c4cf5 100644 --- a/lib/iris/tests/unit/plot/test_plot.py +++ b/lib/iris/tests/unit/plot/test_plot.py @@ -103,7 +103,6 @@ def check_paths(self, expected_path, expected_path_crs, lines, axes): plotted on the given geoaxes. """ - self.assertEqual( 1, len(lines), "Expected a single line, got {}".format(len(lines)) ) diff --git a/lib/iris/tests/unit/util/test_array_equal.py b/lib/iris/tests/unit/util/test_array_equal.py index 38b9652443..f63092587c 100644 --- a/lib/iris/tests/unit/util/test_array_equal.py +++ b/lib/iris/tests/unit/util/test_array_equal.py @@ -101,12 +101,15 @@ def test_string_arrays_0d_and_scalar(self): self.assertFalse(array_equal(array_a, "foobar.")) def test_nan_equality_nan_ne_nan(self): - array = np.array([1.0, np.nan, 2.0, np.nan, 3.0]) - self.assertFalse(array_equal(array, array)) + array_a = np.array([1.0, np.nan, 2.0, np.nan, 3.0]) + array_b = array_a.copy() + self.assertFalse(array_equal(array_a, array_a)) + self.assertFalse(array_equal(array_a, array_b)) def test_nan_equality_nan_naneq_nan(self): array_a = np.array([1.0, np.nan, 2.0, np.nan, 3.0]) array_b = np.array([1.0, np.nan, 2.0, np.nan, 3.0]) + self.assertTrue(array_equal(array_a, array_a, withnans=True)) self.assertTrue(array_equal(array_a, array_b, withnans=True)) def 
test_nan_equality_nan_nanne_a(self): diff --git a/lib/iris/tests/unit/util/test_equalise_attributes.py b/lib/iris/tests/unit/util/test_equalise_attributes.py index 4a900d2cbb..9b09c84dd4 100644 --- a/lib/iris/tests/unit/util/test_equalise_attributes.py +++ b/lib/iris/tests/unit/util/test_equalise_attributes.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the :func:`iris.util.equalise_attributes` function. - -""" +"""Unit tests for the :func:`iris.util.equalise_attributes` function.""" # import iris tests first so that some things can be initialised # before importing anything else. diff --git a/lib/iris/tests/unit/util/test_file_is_newer_than.py b/lib/iris/tests/unit/util/test_file_is_newer_than.py index 93385ed0e0..567b2a1439 100644 --- a/lib/iris/tests/unit/util/test_file_is_newer_than.py +++ b/lib/iris/tests/unit/util/test_file_is_newer_than.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Test function :func:`iris.util.test_file_is_newer`. - -""" +"""Test function :func:`iris.util.test_file_is_newer`.""" # import iris tests first so that some things can be initialised before # importing anything else diff --git a/lib/iris/tests/unit/util/test_mask_cube_from_shapefile.py b/lib/iris/tests/unit/util/test_mask_cube_from_shapefile.py new file mode 100644 index 0000000000..7a03ea91aa --- /dev/null +++ b/lib/iris/tests/unit/util/test_mask_cube_from_shapefile.py @@ -0,0 +1,121 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
+"""Unit tests for :func:`iris.util.mask_cube_from_shapefile`.""" + +import numpy as np +import pytest +import shapely + +from iris.coord_systems import RotatedGeogCS +from iris.coords import DimCoord +import iris.cube +import iris.tests as tests +from iris.util import mask_cube_from_shapefile +from iris.warnings import IrisUserWarning + + +class TestBasicCubeMasking(tests.IrisTest): + """Unit tests for mask_cube_from_shapefile function.""" + + def setUp(self): + basic_data = np.array([[1, 2, 3], [4, 8, 12]]) + self.basic_cube = iris.cube.Cube(basic_data) + coord = DimCoord( + np.array([0, 1.0]), + standard_name="projection_y_coordinate", + bounds=[[0, 0.5], [0.5, 1]], + units="1", + ) + self.basic_cube.add_dim_coord(coord, 0) + coord = DimCoord( + np.array([0, 1.0, 1.5]), + standard_name="projection_x_coordinate", + bounds=[[0, 0.5], [0.5, 1], [1, 1.5]], + units="1", + ) + self.basic_cube.add_dim_coord(coord, 1) + + def test_basic_cube_intersect(self): + shape = shapely.geometry.box(0.6, 0.6, 0.9, 0.9) + masked_cube = mask_cube_from_shapefile(self.basic_cube, shape) + assert ( + np.sum(masked_cube.data) == 8 + ), f"basic cube masking failed test - expected 8 got {np.sum(masked_cube.data)}" + + def test_basic_cube_intersect_in_place(self): + shape = shapely.geometry.box(0.6, 0.6, 0.9, 0.9) + cube = self.basic_cube.copy() + mask_cube_from_shapefile(cube, shape, in_place=True) + assert ( + np.sum(cube.data) == 8 + ), f"basic cube masking failed test - expected 8 got {np.sum(cube.data)}" + + def test_basic_cube_intersect_low_weight(self): + shape = shapely.geometry.box(0.1, 0.6, 1, 1) + masked_cube = mask_cube_from_shapefile( + self.basic_cube, shape, minimum_weight=0.2 + ) + assert ( + np.sum(masked_cube.data) == 12 + ), f"basic cube masking weighting failed test - expected 12 got {np.sum(masked_cube.data)}" + + def test_basic_cube_intersect_high_weight(self): + shape = shapely.geometry.box(0.1, 0.6, 1, 1) + masked_cube = mask_cube_from_shapefile( + self.basic_cube, 
shape, minimum_weight=0.7 + ) + assert ( + np.sum(masked_cube.data) == 8 + ), f"basic cube masking weighting failed test- expected 8 got {np.sum(masked_cube.data)}" + + def test_cube_list_error(self): + cubelist = iris.cube.CubeList([self.basic_cube]) + shape = shapely.geometry.box(1, 1, 2, 2) + with pytest.raises(TypeError, match="CubeList object rather than Cube"): + mask_cube_from_shapefile(cubelist, shape) + + def test_non_cube_error(self): + fake = None + shape = shapely.geometry.box(1, 1, 2, 2) + with pytest.raises(TypeError, match="Received non-Cube object"): + mask_cube_from_shapefile(fake, shape) + + def test_line_shape_warning(self): + shape = shapely.geometry.LineString([(0, 0.75), (2, 0.75)]) + with pytest.warns(IrisUserWarning, match="invalid type"): + masked_cube = mask_cube_from_shapefile( + self.basic_cube, shape, minimum_weight=0.1 + ) + assert ( + np.sum(masked_cube.data) == 24 + ), f"basic cube masking against line failed test - expected 24 got {np.sum(masked_cube.data)}" + + def test_cube_coord_mismatch_warning(self): + shape = shapely.geometry.box(0.6, 0.6, 0.9, 0.9) + cube = self.basic_cube + cube.coord("projection_x_coordinate").points = [180, 360, 540] + cube.coord("projection_x_coordinate").coord_system = RotatedGeogCS(30, 30) + with pytest.warns(IrisUserWarning, match="masking"): + mask_cube_from_shapefile( + cube, + shape, + ) + + def test_missing_xy_coord(self): + shape = shapely.geometry.box(0.6, 0.6, 0.9, 0.9) + cube = self.basic_cube + cube.remove_coord("projection_x_coordinate") + with pytest.raises(ValueError, match="1d xy coordinates"): + mask_cube_from_shapefile(cube, shape) + + def test_shape_not_shape(self): + shape = [5, 6, 7, 8] # random array + with pytest.raises(TypeError, match="valid Shapely"): + mask_cube_from_shapefile(self.basic_cube, shape) + + def test_shape_invalid(self): + shape = shapely.box(0, 1, 1, 1) + with pytest.raises(TypeError, match="valid Shapely"): + mask_cube_from_shapefile(self.basic_cube, shape) diff 
--git a/lib/iris/util.py b/lib/iris/util.py index 59a171fa04..020b67783a 100644 --- a/lib/iris/util.py +++ b/lib/iris/util.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Miscellaneous utility functions. - -""" +"""Miscellaneous utility functions.""" from abc import ABCMeta, abstractmethod from collections.abc import Hashable, Iterable @@ -22,6 +20,7 @@ from iris._deprecation import warn_deprecated from iris._lazy_data import is_lazy_data, is_lazy_masked_data +from iris._shapefiles import create_shapefile_mask from iris.common import SERVICES from iris.common.lenient import _lenient_client import iris.exceptions @@ -34,15 +33,13 @@ def broadcast_to_shape(array, shape, dim_map): given shape. The result is a read-only view (see :func:`numpy.broadcast_to`). If you need to write to the resulting array, make a copy first. - Args: - - * array (:class:`numpy.ndarray`-like) + Parameters + ---------- + array : :class:`numpy.ndarray`-like An array to broadcast. - - * shape (:class:`list`, :class:`tuple` etc.): + shape : :class:`list`, :class:`tuple` etc The shape the array should be broadcast to. - - * dim_map (:class:`list`, :class:`tuple` etc.): + dim_map : :class:`list`, :class:`tuple` etc A mapping of the dimensions of *array* to their corresponding element in *shape*. *dim_map* must be the same length as the number of dimensions in *array*. Each element of *dim_map* @@ -99,17 +96,15 @@ def broadcast_to_shape(array, shape, dim_map): def delta(ndarray, dimension, circular=False): - """Calculates the difference between values along a given dimension. + """Calculate the difference between values along a given dimension. - Args: - - * ndarray: + Parameters + ---------- + ndarray : The array over which to do the difference. - - * dimension: + dimension : The dimension over which to do the difference on ndarray. 
- - * circular: + circular : bool, default=False If not False then return n results in the requested dimension with the delta between the last and first element included in the result otherwise the result will be of length n-1 (where n @@ -125,6 +120,8 @@ def delta(ndarray, dimension, circular=False): original array -180, -90, 0, 90 delta (with circular=360): 90, 90, 90, -270+360 + Notes + ----- .. note:: The difference algorithm implemented is forward difference: @@ -137,10 +134,10 @@ def delta(ndarray, dimension, circular=False): >>> iris.util.delta(original, 0, circular=360) array([90, 90, 90, 90]) - Notes - ----- - This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + .. note:: + + This function maintains laziness when called; it does not realise data. + See more at :doc:`/userguide/real_and_lazy_data`. """ if circular is not False: @@ -162,26 +159,27 @@ def delta(ndarray, dimension, circular=False): def describe_diff(cube_a, cube_b, output_file=None): - """Prints the differences that prevent compatibility between two cubes, as - defined by :meth:`iris.cube.Cube.is_compatible()`. + """Print the differences that prevent compatibility between two cubes. - Args: + Print the differences that prevent compatibility between two cubes, as + defined by :meth:`iris.cube.Cube.is_compatible()`. - * cube_a: + Parameters + ---------- + cube_a : An instance of :class:`iris.cube.Cube` or :class:`iris.cube.CubeMetadata`. - - * cube_b: + cube_b : An instance of :class:`iris.cube.Cube` or :class:`iris.cube.CubeMetadata`. - - * output_file: + output_file : optional A :class:`file` or file-like object to receive output. Defaults to sys.stdout. - .. seealso:: - - :meth:`iris.cube.Cube.is_compatible()` + Notes + ----- + This function maintains laziness when called; it does not realise data. + See more at :doc:`/userguide/real_and_lazy_data`. .. 
note:: @@ -191,13 +189,11 @@ def describe_diff(cube_a, cube_b, output_file=None): two cubes will merge requires additional logic that is beyond the scope of this function. - Notes - ----- - This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See Also + -------- + :meth:`iris.cube.Cube.is_compatible()` """ - if output_file is None: output_file = sys.stdout @@ -234,29 +230,29 @@ def describe_diff(cube_a, cube_b, output_file=None): def guess_coord_axis(coord): - """Returns a "best guess" axis name of the coordinate. + """Return a "best guess" axis name of the coordinate. Heuristic categorisation of the coordinate into either label 'T', 'Z', 'Y', 'X' or None. - Args: - - * coord: + Parameters + ---------- + coord : The :class:`iris.coords.Coord`. - Returns: - 'T', 'Z', 'Y', 'X', or None. + Returns + ------- + {'T', 'Z', 'Y', 'X'} or None. Notes ----- This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. - The ``guess_coord_axis`` behaviour can be skipped by setting the coordinate property ``ignore_axis`` - to ``False``. + The ``guess_coord_axis`` behaviour can be skipped by setting the + :attr:`~iris.coords.Coord.ignore_axis` property on `coord` to ``False``. """ - axis = None if hasattr(coord, "ignore_axis") and coord.ignore_axis is True: @@ -288,25 +284,26 @@ def guess_coord_axis(coord): def rolling_window(a, window=1, step=1, axis=-1): """Make an ndarray with a rolling window of the last dimension. 
- Args: - - * a : array_like + Parameters + ---------- + a : array_like Array to add rolling window to - - Kwargs: - - * window : int + window : int, default=1 Size of rolling window - * step : int + step : int, default=1 Size of step between rolling windows - * axis : int + axis : int, default=-1 Axis to take the rolling window over - Returns: + Returns + ------- + array Array that is a view of the original array with an added dimension of the size of the given window at axis + 1. - Examples:: + Examples + -------- + :: >>> x = np.arange(10).reshape((2, 5)) >>> rolling_window(x, 3) @@ -357,29 +354,28 @@ def rolling_window(a, window=1, step=1, axis=-1): def array_equal(array1, array2, withnans=False): - """Returns whether two arrays have the same shape and elements. + """Return whether two arrays have the same shape and elements. - Args: - - * array1, array2 (arraylike): + Parameters + ---------- + array1, array2 : arraylike args to be compared, normalised if necessary with :func:`np.asarray`. - - Kwargs: - - * withnans (bool): + withnans : bool, default=False When unset (default), the result is False if either input contains NaN points. This is the normal floating-point arithmetic result. When set, return True if inputs contain the same value in all elements, _including_ any NaN values. + Notes + ----- This provides much the same functionality as :func:`numpy.array_equal`, but with additional support for arrays of strings and NaN-tolerant operation. - Notes - ----- This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. """ + if withnans and (array1 is array2): + return True def normalise_array(array): if not is_lazy_data(array): @@ -399,7 +395,9 @@ def normalise_array(array): def approx_equal(a, b, max_absolute_error=1e-10, max_relative_error=1e-10): - """Returns whether two numbers are almost equal, allowing for the finite + """Check if two numbers are almost equal. 
+ + Returns whether two numbers are almost equal, allowing for the finite precision of floating point numbers. .. deprecated:: 3.2.0 @@ -433,26 +431,25 @@ def approx_equal(a, b, max_absolute_error=1e-10, max_relative_error=1e-10): def between(lh, rh, lh_inclusive=True, rh_inclusive=True): - """Provides a convenient way of defining a 3 element inequality such as - ``a < number < b``. + """Provide convenient way of defining a 3 element inequality. - Arguments: + Such as ``a < number < b``. - * lh + Parameters + ---------- + lh : The left hand element of the inequality - * rh + rh : The right hand element of the inequality - - Keywords: - - * lh_inclusive - boolean + lh_inclusive : bool, default=True Affects the left hand comparison operator to use in the inequality. True for ``<=`` false for ``<``. Defaults to True. - * rh_inclusive - boolean + rh_inclusive : bool, default=True Same as lh_inclusive but for right hand operator. - - For example:: + Examples + -------- + :: between_3_and_6 = between(3, 6) for i in range(10): @@ -482,16 +479,18 @@ def between(lh, rh, lh_inclusive=True, rh_inclusive=True): def reverse(cube_or_array, coords_or_dims): """Reverse the cube or array along the given dimensions. - Args: - - * cube_or_array: :class:`iris.cube.Cube` or :class:`numpy.ndarray` + Parameters + ---------- + cube_or_array : :class:`iris.cube.Cube` or :class:`numpy.ndarray` The cube or array to reverse. - * coords_or_dims: int, str, :class:`iris.coords.Coord` or sequence of these + coords_or_dims : int, str, :class:`iris.coords.Coord` or sequence of these Identify one or more dimensions to reverse. If cube_or_array is a numpy array, use int or a sequence of ints, as in the examples below. If cube_or_array is a Cube, a Coord or coordinate name (or sequence of these) may be specified instead. + Examples + -------- :: >>> import numpy as np @@ -579,11 +578,11 @@ def monotonic(array, strict=False, return_direction=False): Note that, the array must not contain missing data. 
- Kwargs: - - * strict (boolean) + Parameters + ---------- + strict : bool, default=False Flag to enable strict monotonic checking - * return_direction (boolean) + return_direction : bool, default=False Flag to change return behaviour to return (monotonic_status, direction). Direction will be 1 for positive or -1 for negative. The direction is meaningless if the array is @@ -639,7 +638,9 @@ def monotonic(array, strict=False, return_direction=False): def column_slices_generator(full_slice, ndims): - """Given a full slice full of tuples, return a dictionary mapping old + """Return a dictionary mapping old data dimensions to new. + + Given a full slice full of tuples, return a dictionary mapping old data dimensions to new and a generator which gives the successive slices needed to index correctly (across columns). @@ -712,7 +713,9 @@ def is_tuple_style_index(key): def _build_full_slice_given_keys(keys, ndim): - """Given the keys passed to a __getitem__ call, build an equivalent + """Build an equivalent tuple of keys which span ndims. + + Given the keys passed to a __getitem__ call, build an equivalent tuple of keys which span ndims. """ @@ -777,27 +780,27 @@ def _build_full_slice_given_keys(keys, ndim): def _slice_data_with_keys(data, keys): """Index an array-like object as "data[keys]", with orthogonal indexing. - Args: - - * data (array-like): + Parameters + ---------- + data : array-like array to index. - - * keys (list): + keys : list list of indexes, as received from a __getitem__ call. - This enforces an orthogonal interpretation of indexing, which means that - both 'real' (numpy) arrays and other array-likes index in the same way, - instead of numpy arrays doing 'fancy indexing'. - - Returns (dim_map, data_region), where : - - * dim_map (dict) : + Returns + ------- + dim_map : dict A dimension map, as returned by :func:`column_slices_generator`. i.e. "dim_map[old_dim_index]" --> "new_dim_index" or None. 
- - * data_region (array-like) : + data_region : array-like The sub-array. + Notes + ----- + This enforces an orthogonal interpretation of indexing, which means that + both 'real' (numpy) arrays and other array-likes index in the same way, + instead of numpy arrays doing 'fancy indexing'. + .. Note:: Avoids copying the data, where possible. @@ -820,8 +823,7 @@ def _slice_data_with_keys(data, keys): def _wrap_function_for_method(function, docstring=None): - """Returns a wrapper function modified to be suitable for use as a - method. + """Return a wrapper function modified to be suitable for use as a method. The wrapper function renames the first argument as "self" and allows an alternative docstring, thus allowing the built-in help(...) @@ -866,7 +868,9 @@ def _wrap_function_for_method(function, docstring=None): class _MetaOrderedHashable(ABCMeta): - """A metaclass that ensures that non-abstract subclasses of _OrderedHashable + """Ensures that non-abstract subclasses are given a default __init__ method. + + A metaclass that ensures that non-abstract subclasses of _OrderedHashable without an explicit __init__ method are given a default __init__ method with the appropriate method signature. @@ -931,7 +935,9 @@ class _OrderedHashable(Hashable, metaclass=_MetaOrderedHashable): @property @abstractmethod def _names(self): - """Override this attribute to declare the names of all the attributes + """Override this attribute to declare the names of all the attributes relevant. + + Override this attribute to declare the names of all the attributes relevant to the hash/comparison semantics. """ @@ -987,9 +993,10 @@ def __lt__(self, other): def create_temp_filename(suffix=""): """Return a temporary file name. - Args: - - * suffix - Optional filename extension. + Parameters + ---------- + suffix : str, optional, default="" + Filename extension. 
""" temp_file = tempfile.mkstemp(suffix) @@ -998,7 +1005,9 @@ def create_temp_filename(suffix=""): def clip_string(the_str, clip_length=70, rider="..."): - """Returns a clipped version of the string based on the specified clip + """Return clipped version of the string based on the specified clip length. + + Return a clipped version of the string based on the specified clip length and whether or not any graceful clip points can be found. If the string to be clipped is shorter than the specified clip @@ -1010,19 +1019,21 @@ def clip_string(the_str, clip_length=70, rider="..."): rider is added. If no graceful point can be found, then the string is clipped exactly where the user requested and the rider is added. - Args: - - * the_str + Parameters + ---------- + the_str : str The string to be clipped - * clip_length + clip_length : int, default=70 The length in characters that the input string should be clipped to. Defaults to a preconfigured value if not specified. - * rider + rider : str, default="..." A series of characters appended at the end of the returned string to show it has been clipped. Defaults to a preconfigured value if not specified. - Returns: + Returns + ------- + str The string clipped to the required length with a rider appended. If the clip length was greater than the original string, the original string is returned unaltered. @@ -1031,8 +1042,8 @@ def clip_string(the_str, clip_length=70, rider="..."): ----- This function does maintain laziness when called; it doesn't realise data. See more at :doc:`/userguide/real_and_lazy_data`. - """ + """ if clip_length >= len(the_str) or clip_length <= 0: return the_str else: @@ -1053,7 +1064,9 @@ def clip_string(the_str, clip_length=70, rider="..."): def format_array(arr): - """Returns the given array as a string, using the python builtin str + """Create a new axis as the leading dimension of the cube. + + Returns the given array as a string, using the python builtin str function on a piecewise basis. 
Useful for xml representation of arrays. @@ -1066,7 +1079,6 @@ def format_array(arr): See more at :doc:`/userguide/real_and_lazy_data`. """ - max_line_len = 50 result = np.array2string( @@ -1080,31 +1092,33 @@ def format_array(arr): def new_axis(src_cube, scalar_coord=None, expand_extras=()): # maybe not lazy - """Create a new axis as the leading dimension of the cube, promoting a scalar - coordinate if specified. + """Create a new axis as the leading dimension of the cube. - Args: + Create a new axis as the leading dimension of the cube, promoting a scalar + coordinate if specified. - * src_cube (:class:`iris.cube.Cube`) + Parameters + ---------- + src_cube : :class:`iris.cube.Cube` Source cube on which to generate a new axis. - - Kwargs: - - * scalar_coord (:class:`iris.coord.Coord` or 'string') + scalar_coord : :class:`iris.coord.Coord` or 'string', optional Scalar coordinate to promote to a dimension coordinate. - - * expand_extras (iterable) + expand_extras : iterable, optional Auxiliary coordinates, ancillary variables and cell measures which will be expanded so that they map to the new dimension as well as the existing dimensions. - Returns: + Returns + ------- + :class:`iris.cube.Cube` A new :class:`iris.cube.Cube` instance with one extra leading dimension (length 1). Chosen auxiliary coordinates, cell measures and ancillary variables will also be given an additional dimension, associated with the leading dimension of the cube. - For example:: + Examples + -------- + :: >>> cube.shape (360, 360) @@ -1116,6 +1130,7 @@ def new_axis(src_cube, scalar_coord=None, expand_extras=()): # maybe not lazy ----- This function does maintain laziness when called; it doesn't realise data. See more at :doc:`/userguide/real_and_lazy_data`. + """ def _reshape_data_array(data_manager): @@ -1204,18 +1219,24 @@ def _handle_dimensional_metadata(cube, dm_item, cube_add_method, expand_extras): def squeeze(cube): - """Removes any dimension of length one. 
If it has an associated DimCoord or - AuxCoord, this becomes a scalar coord. + """Remove any dimension of length one. - Args: + Remove any dimension of length one. If it has an associated DimCoord or + AuxCoord, this becomes a scalar coord. - * cube (:class:`iris.cube.Cube`) + Parameters + ---------- + cube : :class:`iris.cube.Cube` Source cube to remove length 1 dimension(s) from. - Returns: + Returns + ------- + :class:`iris.cube.Cube` A new :class:`iris.cube.Cube` instance without any dimensions of length 1. + Examples + -------- For example:: >>> cube.shape @@ -1230,7 +1251,6 @@ def squeeze(cube): See more at :doc:`/userguide/real_and_lazy_data`. """ - slices = [0 if cube.shape[dim] == 1 else slice(None) for dim in range(cube.ndim)] squeezed = cube[tuple(slices)] @@ -1239,28 +1259,33 @@ def squeeze(cube): def file_is_newer_than(result_path, source_paths): - """Return whether the 'result' file has a later modification time than all of + """Determine if the 'result' file was modified last. + + Return whether the 'result' file has a later modification time than all of the 'source' files. If a stored result depends entirely on known 'sources', it need only be re-built when one of them changes. This function can be used to test that by comparing file timestamps. - Args: - - * result_path (string): + Parameters + ---------- + result_path : str The filepath of a file containing some derived result data. - * source_paths (string or iterable of strings): + source_paths : str or iterable of str The path(s) to the original datafiles used to make the result. May include wildcards and '~' expansions (like Iris load paths), but not URIs. - Returns: + Returns + ------- + bool True if all the sources are older than the result, else False. - If any of the file paths describes no existing files, an exception will be raised. + Notes + ----- .. note:: There are obvious caveats to using file timestamps for this, as correct usage depends on how the sources might change. 
For example, a file @@ -1343,10 +1368,8 @@ def regular_points(zeroth, step, count): ---------- zeroth : number The value *prior* to the first point value. - step : number The numeric difference between successive point values. - count : number The number of point values. @@ -1373,8 +1396,8 @@ def points_step(points): Returns ------- numeric, bool - A tuple containing the average difference between values, and whether the difference is regular. - + A tuple containing the average difference between values, and whether + the difference is regular. Notes ----- @@ -1395,7 +1418,9 @@ def points_step(points): def unify_time_units(cubes): - """Performs an in-place conversion of the time units of all time coords in the + """Perform an in-place conversion of the time units. + + Perform an in-place conversion of the time units of all time coords in the cubes in a given iterable. One common epoch is defined for each calendar found in the cubes to prevent units being defined with inconsistencies between epoch and calendar. During this process, all time coordinates have @@ -1404,9 +1429,9 @@ def unify_time_units(cubes): Each epoch is defined from the first suitable time coordinate found in the input cubes. - Arg: - - * cubes: + Parameters + ---------- + cubes : An iterable containing :class:`iris.cube.Cube` instances. Notes @@ -1431,27 +1456,26 @@ def unify_time_units(cubes): def _is_circular(points, modulus, bounds=None): - """Determine whether the provided points or bounds are circular in nature + """Determine whether the provided points or bounds are circular. + + Determine whether the provided points or bounds are circular in nature relative to the modulus value. If the bounds are provided then these are checked for circularity rather than the points. - Args: - - * points: + Parameters + ---------- + points : :class:`numpy.ndarray` :class:`numpy.ndarray` of point values. - - * modulus: + modulus : Circularity modulus value. 
- - Kwargs: - - * bounds: + bounds : :class:`numpy.ndarray`, optional :class:`numpy.ndarray` of bound values. - Returns: - Boolean. + Returns + ------- + bool """ circular = False @@ -1498,29 +1522,24 @@ def _is_circular(points, modulus, bounds=None): def promote_aux_coord_to_dim_coord(cube, name_or_coord): - """Promotes an AuxCoord on the cube to a DimCoord. This AuxCoord must be - associated with a single cube dimension. If the AuxCoord is associated - with a dimension that already has a DimCoord, that DimCoord gets - demoted to an AuxCoord. + r"""Promote an auxiliary to a dimension coordinate on the cube. - Args: + This AuxCoord must be associated with a single cube dimension. If the + AuxCoord is associated with a dimension that already has a DimCoord, that + DimCoord gets demoted to an AuxCoord. - * cube + Parameters + ---------- + cube : An instance of :class:`iris.cube.Cube` + name_or_coord : + * \(a) An instance of :class:`iris.coords.AuxCoord` + * \(b) the :attr:`standard_name`, :attr:`long_name`, or + :attr:`var_name` of an instance of an instance of + :class:`iris.coords.AuxCoord`. - * name_or_coord: - Either - - (a) An instance of :class:`iris.coords.AuxCoord` - - or - - (b) the :attr:`standard_name`, :attr:`long_name`, or - :attr:`var_name` of an instance of an instance of - :class:`iris.coords.AuxCoord`. - - For example, - + Examples + -------- .. testsetup:: promote import iris @@ -1560,7 +1579,6 @@ def promote_aux_coord_to_dim_coord(cube, name_or_coord): This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. - """ from iris.coords import Coord, DimCoord @@ -1625,31 +1643,25 @@ def promote_aux_coord_to_dim_coord(cube, name_or_coord): def demote_dim_coord_to_aux_coord(cube, name_or_coord): - """Demotes a dimension coordinate on the cube to an auxiliary coordinate. + r"""Demotes a dimension coordinate on the cube to an auxiliary coordinate. 
The DimCoord is demoted to an auxiliary coordinate on the cube. The dimension of the cube that was associated with the DimCoord becomes anonymous. The class of the coordinate is left as DimCoord, it is not recast as an AuxCoord instance. - Args: - - * cube + Parameters + ---------- + cube : An instance of :class:`iris.cube.Cube` + name_or_coord : + * \(a) An instance of :class:`iris.coords.DimCoord` + * \(b) the :attr:`standard_name`, :attr:`long_name`, or + :attr:`var_name` of an instance of an instance of + :class:`iris.coords.DimCoord`. - * name_or_coord: - Either - - (a) An instance of :class:`iris.coords.DimCoord` - - or - - (b) the :attr:`standard_name`, :attr:`long_name`, or - :attr:`var_name` of an instance of an instance of - :class:`iris.coords.DimCoord`. - - For example, - + Examples + -------- .. testsetup:: demote import iris @@ -1689,7 +1701,6 @@ def demote_dim_coord_to_aux_coord(cube, name_or_coord): This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. - """ from iris.coords import Coord @@ -1720,13 +1731,12 @@ def demote_dim_coord_to_aux_coord(cube, name_or_coord): @functools.wraps(np.meshgrid) def _meshgrid(*xi, **kwargs): - """@numpy v1.13, the dtype of each output n-D coordinate is the same as its + """Ensure consistent meshgrid behaviour across numpy versions. + + @numpy v1.13, the dtype of each output n-D coordinate is the same as its associated input 1D coordinate. This is not the case prior to numpy v1.13, where the output dtype is cast up to its highest resolution, regardlessly. - This convenience function ensures consistent meshgrid behaviour across - numpy versions. - Reference: https://github.com/numpy/numpy/pull/5302 """ @@ -1738,23 +1748,21 @@ def _meshgrid(*xi, **kwargs): def find_discontiguities(cube, rel_tol=1e-5, abs_tol=1e-8): - """Searches the 'x' and 'y' coord on the cube for discontiguities in the + """Identify spatial discontiguities. 
+ + Searches the 'x' and 'y' coord on the cube for discontiguities in the bounds array, returned as a boolean array (True for all cells which are discontiguous with the cell immediately above them or to their right). - Args: - - * cube (`iris.cube.Cube`): + Parameters + ---------- + cube : `iris.cube.Cube` The cube to be checked for discontinuities in its 'x' and 'y' coordinates. These coordinates must be 2D. - - Kwargs: - - * rel_tol (float): + rel_tol : float, default=1e-5 The relative equality tolerance to apply in coordinate bounds checking. - - * abs_tol (float): + abs_tol : float, default=1e-8 The absolute value tolerance to apply in coordinate bounds checking. @@ -1787,7 +1795,6 @@ def find_discontiguities(cube, rel_tol=1e-5, abs_tol=1e-8): This function does not maintain laziness when called; it realises data. See more at :doc:`/userguide/real_and_lazy_data`. - """ lats_and_lons = [ "latitude", @@ -1834,10 +1841,11 @@ def find_discontiguities(cube, rel_tol=1e-5, abs_tol=1e-8): def _mask_array(array, points_to_mask, in_place=False): - """Apply masking to array where points_to_mask is True/non-zero. Designed to - work with iris.analysis.maths._binary_op_common so array and points_to_mask - will be broadcastable to each other. array and points_to_mask may be numpy - or dask types (or one of each). + """Apply masking to array where points_to_mask is True/non-zero. + + Designed to work with iris.analysis.maths._binary_op_common so array + and points_to_mask will be broadcastable to each other. + array and points_to_mask may be numpy or dask types (or one of each). 
If array is lazy then in_place is ignored: _math_op_common will use the returned value regardless of in_place, so we do not need to implement it @@ -1887,7 +1895,9 @@ def _mask_array(array, points_to_mask, in_place=False): @_lenient_client(services=SERVICES) def mask_cube(cube, points_to_mask, in_place=False, dim=None): - """Masks any cells in the cube's data array which correspond to cells marked + """Masks any cells in the cube's data array. + + Masks any cells in the cube's data array which correspond to cells marked ``True`` (or non zero) in ``points_to_mask``. ``points_to_mask`` may be specified as a :class:`numpy.ndarray`, :class:`dask.array.Array`, :class:`iris.coords.Coord` or :class:`iris.cube.Cube`, following the same @@ -1897,17 +1907,15 @@ def mask_cube(cube, points_to_mask, in_place=False, dim=None): ---------- cube : iris.cube.Cube Cube containing data that requires masking. - points_to_mask : numpy.ndarray, dask.array.Array, iris.coords.Coord or iris.cube.Cube - Specifies booleans (or ones and zeros) indicating which points will be masked. - + Specifies booleans (or ones and zeros) indicating which points will + be masked. in_place : bool, default=False - If `True`, masking is applied to the input cube. Otherwise a copy is masked - and returned. - + If `True`, masking is applied to the input cube. Otherwise a copy is + masked and returned. dim : int, optional - If `points_to_mask` is a coord which does not exist on the cube, specify the - dimension to which it should be mapped. + If `points_to_mask` is a coord which does not exist on the cube, + specify the dimension to which it should be mapped. Returns ------- @@ -1957,9 +1965,9 @@ def equalise_attributes(cubes): given cubes. The cubes will then have identical attributes, and the removed attributes are returned. The given cubes are modified in-place. 
- Args: - - * cubes (iterable of :class:`iris.cube.Cube`): + Parameters + ---------- + cubes : iterable of :class:`iris.cube.Cube` A collection of cubes to compare and adjust. Returns @@ -2056,10 +2064,13 @@ def is_masked(array): def _strip_metadata_from_dims(cube, dims): """Remove ancillary variables and cell measures that map to specific dimensions. - Returns a cube copy with (possibly) some cell-measures and ancillary variables removed. + Returns a cube copy with (possibly) some cell-measures and ancillary + variables removed. To be used by operations that modify or remove dimensions. - Note: does nothing to (aux)-coordinates. Those would be handled explicitly by the calling operation. + + Note: does nothing to (aux)-coordinates. Those would be handled explicitly + by the calling operation. """ reduced_cube = cube.copy() @@ -2076,3 +2087,63 @@ def _strip_metadata_from_dims(cube, dims): reduced_cube.remove_cell_measure(cm) return reduced_cube + + +def mask_cube_from_shapefile(cube, shape, minimum_weight=0.0, in_place=False): + """Take a shape object and masks all points not touching it in a cube. + + Finds the overlap between the `shape` and the `cube` in 2D xy space and + masks out any cells with less % overlap with shape than set. + Default behaviour is to count any overlap between shape and cell as valid + + Parameters + ---------- + shape : Shapely.Geometry object + A single `shape` of the area to remain unmasked on the `cube`. + If it a line object of some kind then minimum_weight will be ignored, + because you cannot compare the area of a 1D line and 2D Cell + cube : :class:`~iris.cube.Cube` object + The `Cube` object to masked. Must be singular, rather than a `CubeList` + minimum_weight : float , default=0.0 + A number between 0-1 describing what % of a cube cell area must + the shape overlap to include it. + in_place : bool, default=False + Whether to mask the `cube` in-place or return a newly masked `cube`. + Defaults to False. 
+ + Returns + ------- + iris.Cube + A masked version of the input cube, if in_place is False + + + See Also + -------- + :func:`~iris.util.mask_cube` + + Notes + ----- + This function allows masking a cube with any cartopy projection by a shape object, + most commonly from Natural Earth Shapefiles via cartopy. + To mask a cube from a shapefile, both must first be on the same coordinate system. + Shapefiles are mostly on a lat/lon grid with a projection very similar to GeogCS + The shapefile is projected to the coord system of the cube using cartopy, then each cell + is compared to the shapefile to determine overlap and populate a true/false array + This array is then used to mask the cube using the `iris.util.mask_cube` function + This uses numpy arithmetic logic for broadcasting, so you may encounter unexpected + results if your cube has other dimensions the same length as the x/y dimensions + + Examples + -------- + >>> import shapely + >>> from iris.util import mask_cube_from_shapefile + >>> cube = iris.load_cube(iris.sample_data_path("E1_north_america.nc")) + >>> shape = shapely.geometry.box(-100,30, -80,40) # box between 30N-40N 100W-80W + >>> masked_cube = mask_cube_from_shapefile(cube, shape) + + ... + """ + shapefile_mask = create_shapefile_mask(shape, cube, minimum_weight) + masked_cube = mask_cube(cube, shapefile_mask, in_place=in_place) + if not in_place: + return masked_cube diff --git a/lib/iris/warnings.py b/lib/iris/warnings.py new file mode 100644 index 0000000000..1a885f60a3 --- /dev/null +++ b/lib/iris/warnings.py @@ -0,0 +1,180 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. +"""Warnings specific to the :mod:`iris` package. + +PLEASE NAMESPACE ALL WARNING CLASSES (i.e. prefix with Iris...). 
+""" + + +class IrisUserWarning(UserWarning): + r"""Base class for :class:`UserWarning` generated by Iris.""" + + pass + + +class IrisLoadWarning(IrisUserWarning): + """Any warning relating to loading.""" + + pass + + +class IrisSaveWarning(IrisUserWarning): + """Any warning relating to saving.""" + + pass + + +class IrisCfWarning(IrisUserWarning): + """Any warning relating to :term:`CF Conventions` .""" + + pass + + +class IrisIgnoringWarning(IrisUserWarning): + """Any warning that involves an Iris operation not using some information. + + E.g. :class:`~iris.aux_factory.AuxCoordFactory` generation disregarding + bounds. + """ + + pass + + +class IrisDefaultingWarning(IrisUserWarning): + """Any warning that involves Iris changing invalid/missing information. + + E.g. creating a :class:`~iris.coords.AuxCoord` from an invalid + :class:`~iris.coords.DimCoord` definition. + """ + + pass + + +class IrisVagueMetadataWarning(IrisUserWarning): + """Warnings where object metadata may not be fully descriptive.""" + + pass + + +class IrisUnsupportedPlottingWarning(IrisUserWarning): + """Warnings where support for a plotting module/function is not guaranteed.""" + + pass + + +class IrisImpossibleUpdateWarning(IrisUserWarning): + """Warnings where it is not possible to update an object. + + Mainly generated during regridding where the necessary information for + updating an :class:`~iris.aux_factory.AuxCoordFactory` is no longer + present. 
+ """ + + pass + + +class IrisGeometryExceedWarning(IrisUserWarning): + """:mod:`iris.analysis.geometry` warnings about geometry exceeding dimensions.""" + + pass + + +class IrisMaskValueMatchWarning(IrisUserWarning): + """Warnings where the value representing masked data is actually present in data.""" + + pass + + +class IrisCfLoadWarning(IrisCfWarning, IrisLoadWarning): + """Any warning relating to both loading and :term:`CF Conventions` .""" + + pass + + +class IrisCfSaveWarning(IrisCfWarning, IrisSaveWarning): + """Any warning relating to both saving and :term:`CF Conventions` .""" + + pass + + +class IrisCfInvalidCoordParamWarning(IrisCfLoadWarning): + """Warnings where incorrect information for CF coord construction is in a file.""" + + pass + + +class IrisCfMissingVarWarning(IrisCfLoadWarning): + """Warnings where a CF variable references another variable that is not in the file.""" + + pass + + +class IrisCfLabelVarWarning(IrisCfLoadWarning, IrisIgnoringWarning): + """Warnings where a CF string/label variable is being used inappropriately.""" + + pass + + +class IrisCfNonSpanningVarWarning(IrisCfLoadWarning, IrisIgnoringWarning): + """Warnings where a CF variable is ignored because it does not span the required dimension.""" + + pass + + +class IrisIgnoringBoundsWarning(IrisIgnoringWarning): + """Warnings where bounds information has not been used by an Iris operation.""" + + pass + + +class IrisCannotAddWarning(IrisIgnoringWarning): + """Warnings where a member object cannot be added to a :class:`~iris.cube.Cube` .""" + + pass + + +class IrisGuessBoundsWarning(IrisDefaultingWarning): + """Warnings where Iris has filled absent bounds information with a best estimate.""" + + pass + + +class IrisPpClimModifiedWarning(IrisSaveWarning, IrisDefaultingWarning): + """Warnings where a climatology has been modified while saving :term:`Post Processing (PP) Format` .""" + + pass + + +class IrisFactoryCoordNotFoundWarning(IrisLoadWarning): + """Warnings where a 
referenced factory coord can not be found when loading a variable in :term:`NetCDF Format`.""" + + pass + + +class IrisNimrodTranslationWarning(IrisLoadWarning): + """For unsupported vertical coord types in :mod:`iris.file_formats.nimrod_load_rules`. + + (Pre-dates the full categorisation of Iris UserWarnings). + """ + + pass + + +class IrisUnknownCellMethodWarning(IrisCfLoadWarning): + """If a loaded :class:`~iris.coords.CellMethod` is not one the method names known to Iris. + + (Pre-dates the full categorisation of Iris UserWarnings). + """ + + pass + + +class IrisSaverFillValueWarning(IrisMaskValueMatchWarning, IrisSaveWarning): + """For fill value complications during Iris file saving :term:`NetCDF Format`. + + (Pre-dates the full categorisation of Iris UserWarnings). + """ + + pass diff --git a/noxfile.py b/noxfile.py index 4d3bb85f98..a30b6ce784 100644 --- a/noxfile.py +++ b/noxfile.py @@ -39,7 +39,7 @@ def session_lockfile(session: nox.sessions.Session) -> Path: def session_cachefile(session: nox.sessions.Session) -> Path: - """Returns the path of the session lockfile cache.""" + """Return the path of the session lockfile cache.""" lockfile = session_lockfile(session) tmp_dir = Path(session.create_tmp()) cache = tmp_dir / lockfile.name @@ -55,7 +55,7 @@ def venv_populated(session: nox.sessions.Session) -> bool: def venv_changed(session: nox.sessions.Session) -> bool: - """Returns True if the installed session is different. + """Return True if the installed session is different. Compares to that specified in the lockfile. """ @@ -109,8 +109,9 @@ def cache_cartopy(session: nox.sessions.Session) -> None: def prepare_venv(session: nox.sessions.Session) -> None: - """Create and cache the nox session conda environment, and additionally - provide conda environment package details and info. + """Create and cache the nox session conda environment. + + Additionally provide conda environment package details and info. 
Note that, iris is installed into the environment using pip. diff --git a/pyproject.toml b/pyproject.toml index d0c2816e93..4325de0e0e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -103,6 +103,16 @@ ignore = [ force-sort-within-sections = true known-first-party = ["iris"] +[tool.ruff.lint.per-file-ignores] +# All test scripts + +"lib/iris/tests/*.py" = [ + # https://docs.astral.sh/ruff/rules/undocumented-public-module/ + "D100", # Missing docstring in public module + "D205", # 1 blank line required between summary line and description + "D401", # 1 First line of docstring should be in imperative mood +] + [tool.ruff.lint.pydocstyle] convention = "numpy" diff --git a/requirements/locks/py310-linux-64.lock b/requirements/locks/py310-linux-64.lock index 1c9184e51a..3cbf8fb510 100644 --- a/requirements/locks/py310-linux-64.lock +++ b/requirements/locks/py310-linux-64.lock @@ -1,24 +1,24 @@ # Generated by conda-lock. # platform: linux-64 -# input_hash: 261e2a16d1b94dedb72e8d7119ea263c3e0f5a5c4eb2730980eda055cd4683ec +# input_hash: 808932859f70a640fcdd10a6b3c7fb545b14087d3a6db4e60bf79e05a4272c0a @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 -https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.11.17-hbcca054_0.conda#01ffc8d36f9eba0ce0b3c1955fa780ee +https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.2.2-hbcca054_0.conda#2f4327a1cbe7f022401b236e915a5fef https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_1.conda#6185f640c43843e5ad6fd1c5372c3f80 
https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda#7aca3059a1729aa76c597603f10b0dd3 -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_3.conda#937eaed008f6bf2191c5fe76f87755e9 +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_5.conda#f6f6600d18a4047b54f803cf708b868a https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.10-4_cp310.conda#26322ec5d7712c3ded99dd656142b8ce -https://conda.anaconda.org/conda-forge/noarch/tzdata-2023c-h71feb2d_0.conda#939e3e74d8be4dac89ce83b20de2492a +https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h0c530f3_0.conda#161081fc7cec0bfda0d86d7cb595f8d8 https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 -https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.2.0-h807b86a_3.conda#7124cbb46b13d395bdde68f2d215c989 +https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.2.0-h807b86a_5.conda#d211c42b9ce49aee3734fdc828731689 https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_3.conda#23fdf1fef05baeb7eadc2aed5fb0011f +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_5.conda#d4ff227c46917d3b4565302a2bbb276b https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hd590300_5.conda#69b8b6202a07720f448be700e300ccf4 -https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.23.0-hd590300_0.conda#d459949bc10f64dee1595c176c2e6291 +https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.26.0-hd590300_0.conda#a86d90025198fd411845fc245ebc06c8 https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 
https://conda.anaconda.org/conda-forge/linux-64/geos-3.12.1-h59595ed_0.conda#8c0f4f71f5a59ceb0c6fa9f51501066d https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2#14947d8770185e5153fdd04d4673ed37 @@ -33,18 +33,19 @@ https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.19-hd590300_0.conda https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda#172bf1cd1ff8629f2b1179945ed45055 https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda#6305a3dd2752c76335295da4e581f2fd https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_3.conda#c714d905cdfa0e70200f68b80cc04764 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_5.conda#7a6bd7a12a4bd359e2afe6c0fa1acace https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-hd590300_2.conda#d66573916ffcf376178462f1b61c941e https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda#ea25936bb4080d843790b586850f82b8 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda#30fd6e37fe21f86f4bd26d6ee73eeec7 https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda#40b61aab5c7ba9ff276c41cfffe6b80b https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.2-hd590300_0.conda#30de3fd9b3b602f7473f30e684eeea8c +https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda#5aa797f8787fe7a17d1b0821485b5adc https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-hd590300_5.conda#f36c115f1ee199da648e0597ec2047ad https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 
https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4-h59595ed_2.conda#7dbaa197d7ba6032caf7ae7f32c1efa0 -https://conda.anaconda.org/conda-forge/linux-64/openssl-3.2.0-hd590300_1.conda#603827b39ea2b835268adb8c821b8570 -https://conda.anaconda.org/conda-forge/linux-64/pixman-0.42.2-h59595ed_0.conda#700edd63ccd5fc66b70b1c028cea9a68 +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.2.1-hd590300_0.conda#51a753e64a3027bd7e23a189b1f6e91e +https://conda.anaconda.org/conda-forge/linux-64/pixman-0.43.2-h59595ed_0.conda#71004cbf7924e19c02746ccde9fd7123 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 https://conda.anaconda.org/conda-forge/linux-64/snappy-1.1.10-h9fff704_0.conda#e6d228cd0bb74a51dd18f5bfce0b4115 https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2#4b230e8381279d76131116660f5a241a @@ -62,14 +63,14 @@ https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda#bd7 https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hd590300_1.conda#f07002e225d7a60a694d42a7bf5ff53f https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hd590300_1.conda#5fc11c6020d421960607d821310fcd4d https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_3.conda#73031c79546ad06f1fe62e57fdd021bc +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_5.conda#e73e9cfd1191783392131e6238bdb3e9 https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.58.0-h47da74e_1.conda#700ac6ea6d53d5510591c4344d5c989a -https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 -https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.44.2-h2797004_0.conda#3b6a9f225c3dbe0d24f4fedd4625c5bf 
+https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.42-h2797004_0.conda#d67729828dc6ff7ba44a61062ad79880 +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.45.1-h2797004_0.conda#fc4ccadfbf6d4784de88c41704792562 https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c -https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.12.3-h232c23b_0.conda#bc6ac4c0cea148d924f621985bc3892b +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.12.5-h232c23b_0.conda#c442ebfda7a475f5e78f1c8e45f1e919 https://conda.anaconda.org/conda-forge/linux-64/libzip-1.10.1-h2629f0a_3.conda#ac79812548e7e8cf61f7b0abdef01d3b https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.42-hcad00b1_0.conda#679c8961826aa4b50653bce17ee52abe https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 @@ -82,33 +83,33 @@ https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hd590300_1.cond https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda#9ae35c3d96db2c94ce0cef86efdfa2cb https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.2-h659d440_0.conda#cd95826dbd331ed1be26bdf401432844 https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.3-h783c2da_0.conda#9bd06b12bbfa6fd1740fd23af4b0f0c7 -https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.25-pthreads_h413a1c8_0.conda#d172b34a443b95f86089e8229ddc9a17 +https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.26-pthreads_h413a1c8_0.conda#760ae35415f5ba8b15d09df5afe8b23a https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-ha9c0a0a_2.conda#55ed21669b2015f77c180feb1dd41930 
-https://conda.anaconda.org/conda-forge/linux-64/python-3.10.13-hd12c33a_0_cpython.conda#f3a8c32aa764c3e7188b4b810fc9d6ce -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.44.2-h2c6b66d_0.conda#4f2892c672829693fd978d065db4e8be +https://conda.anaconda.org/conda-forge/linux-64/python-3.10.13-hd12c33a_1_cpython.conda#ed38140af93f81319ebc472fbcf16cca +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.45.1-h2c6b66d_0.conda#93acf31b379acebada263b9bce3dc6ed https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_3.conda#6bb8deb138f87c9d48320ac21b87e7a1 https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.7-h8ee46fc_0.conda#49e482d882669206653b095f5206c05b -https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.13-pyhd8ed1ab_0.conda#06006184e203b61d3525f90de394471e +https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.16-pyhd8ed1ab_0.conda#def531a3ac77b7fb8c21d17bb5d0badb https://conda.anaconda.org/conda-forge/noarch/antlr-python-runtime-4.11.1-pyhd8ed1ab_0.tar.bz2#15109c4977d39ad7aa3423f57243e286 https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.1.0-pyhd8ed1ab_0.conda#0e8715bef534217eae333c53f645c9ed https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-hd4edc92_1.tar.bz2#6c72ec3e660a51736913ef6ea68c454b https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hd590300_1.conda#f27a24d46e3ea7b70a1f98e50c62508f https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py310hc6cd4ac_1.conda#1f95722c94f00b69af69a066c7433714 -https://conda.anaconda.org/conda-forge/noarch/certifi-2023.11.17-pyhd8ed1ab_0.conda#2011bcf45376341dd1d690263fdbc789 +https://conda.anaconda.org/conda-forge/noarch/certifi-2024.2.2-pyhd8ed1ab_0.conda#0876280e409658fc6f9e75d035960333 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.2-pyhd8ed1ab_0.conda#7f4a9e3fcff3f6356ae99244a014da6a 
https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda#f3ad426304898027fc619827ff428eca https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.0.0-pyhd8ed1ab_0.conda#753d29fe41bb881e4b9c004f0abf973f https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda#5cd86562580f274031ede6aa6aa24441 -https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.6-py310hc6cd4ac_0.conda#7f987c519edb4df04d21a282678368cf +https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.8-py310hc6cd4ac_0.conda#e533f96945907b2e81c6b604f578b69c https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.8-pyhd8ed1ab_0.conda#db16c66b759a64dc5183d69cc3745a52 https://conda.anaconda.org/conda-forge/linux-64/docutils-0.19-py310hff52083_1.tar.bz2#21b8fa2179290505e607f5ccd65b01b0 -https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.0-pyhd8ed1ab_0.conda#f6c211fee3c98229652b60a9a42ef363 +https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.0-pyhd8ed1ab_2.conda#8d652ea2ee8eaee02ed8dc820bc794aa https://conda.anaconda.org/conda-forge/noarch/execnet-2.0.2-pyhd8ed1ab_0.conda#67de0d8241e1060a479e3c37793e26f9 https://conda.anaconda.org/conda-forge/noarch/filelock-3.13.1-pyhd8ed1ab_0.conda#0c1729b74a8152fde6a38ba0a2ab9f45 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d -https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.12.2-pyhca7485f_0.conda#bf40f2a8835b78b1f91083d306b493d2 +https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.2.0-pyhca7485f_0.conda#fad86b90138cf5d82c6f5a2ed6e683d9 https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h829c605_4.conda#252a696860674caf7a855e16f680d63a https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe 
https://conda.anaconda.org/conda-forge/noarch/idna-3.6-pyhd8ed1ab_0.conda#1a76f09108576397c41c0b0c5bd84134 @@ -117,28 +118,28 @@ https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.5-py310hd41b1e2_1.conda#b8d67603d43b23ce7e988a5d81a7ab79 https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.16-hb7c19ff_0.conda#51bb7010fc86f70eee639b4bb7a894f5 -https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-20_linux64_openblas.conda#2b7bb4f7562c8cf334fc2e20c2d28abc +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-21_linux64_openblas.conda#0ac9f44fc096772b0aa092119b00c3ca https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.5.0-hca28451_0.conda#7144d5a828e2cae218e0e3c98d8a0aeb https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.2-h658648e_1.conda#0ebb65e8d86843865796c7c95a941f34 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 -https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py310h2372a71_1.conda#b74e07a054c479e45a83a83fc5be713c +https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.5-py310h2372a71_0.conda#f6703fa0214a00bf49d1bef6dc7672d0 https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.7-py310hd41b1e2_0.conda#dc5263dcaa1347e5a456ead3537be27d https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-h488ebb8_3.conda#128c25b7fe6a25286a48f3a6a9b5b6f3 https://conda.anaconda.org/conda-forge/noarch/packaging-23.2-pyhd8ed1ab_0.conda#79002079284aa895f883c6b7f3f88fd6 -https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.1.0-pyhd8ed1ab_0.conda#45a5065664da0d1dfa8f8cd2eaf05ab9 
-https://conda.anaconda.org/conda-forge/noarch/pluggy-1.3.0-pyhd8ed1ab_0.conda#2390bd10bed1f3fdc7a537fb5a447d8d -https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py310h2372a71_1.conda#cb25177acf28cc35cfa6c1ac1c679e22 +https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.2.0-pyhd8ed1ab_0.conda#a0bc3eec34b0fab84be6b2da94e98e20 +https://conda.anaconda.org/conda-forge/noarch/pluggy-1.4.0-pyhd8ed1ab_0.conda#139e9feb65187e916162917bb2484976 +https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.8-py310h2372a71_0.conda#bd19b3096442ea342c4a5208379660b1 https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff https://conda.anaconda.org/conda-forge/noarch/pygments-2.17.2-pyhd8ed1ab_0.conda#140a7f159396547e9799aa98f9f0742e https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.1-pyhd8ed1ab_0.conda#176f7d56f0cfe9008bdf1bccd7de02fb https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025 -https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0.conda#2590495f608a63625e165915fb4e2e34 +https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.4-pyhd8ed1ab_0.conda#c79cacf8a06a51552fc651652f170208 https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.4.1-py310h2372a71_0.conda#b631b889b0b4bc2fca7b8b977ca484b2 -https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3.post1-pyhd8ed1ab_0.conda#c93346b446cd08c169d843ae5fc0da97 +https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda#3eeeeb9e4827ace8c0c1419c85d590ad https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.1-py310h2372a71_1.conda#bb010e368de4940771368bc3dc4c63e7 -https://conda.anaconda.org/conda-forge/noarch/setuptools-68.2.2-pyhd8ed1ab_0.conda#fc2166155db840c634a1291a5c35a709 
+https://conda.anaconda.org/conda-forge/noarch/setuptools-69.0.3-pyhd8ed1ab_0.conda#40695fdfd15a92121ed2922900d0308b https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d @@ -147,7 +148,7 @@ https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_0.conda#04eedddeb68ad39871c8127dd1c21f4f https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 -https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 +https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.1-pyhd8ed1ab_0.conda#2fcb582444635e2c402e8569bb94e039 https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.3-py310h2372a71_1.conda#b23e0147fa5f7a9380e06334c7266ad5 https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.9.0-pyha770c72_0.conda#a92a6440c3fe7052d63244f3aba2a4a7 https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.1.0-py310h2372a71_0.conda#72637c58d36d9475fda24700c9796f19 @@ -158,32 +159,32 @@ https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30 https://conda.anaconda.org/conda-forge/noarch/zipp-3.17.0-pyhd8ed1ab_0.conda#2e4d6bc0b14e10f895fc6791a7d9b26a https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.4-pyhd8ed1ab_0.conda#46a2e6e3dfa718ce3492018d5a110dd6 https://conda.anaconda.org/conda-forge/noarch/babel-2.14.0-pyhd8ed1ab_0.conda#9669586875baeced8fc30c0826c3270e 
-https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0.conda#a362ff7d976217f8fa78c0f1c4f59717 +https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_0.conda#332493000404d8411859539a5a630865 https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.0-h3faef2a_0.conda#f907bb958910dc404647326ca80c263e https://conda.anaconda.org/conda-forge/linux-64/cffi-1.16.0-py310h2fee648_0.conda#45846a970e71ac98fd327da5d40a0a2c -https://conda.anaconda.org/conda-forge/linux-64/coverage-7.3.3-py310h2372a71_0.conda#c07e83a9bd8f5053b42be842b9871df9 -https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py310h2372a71_1.conda#a79a93c3912e9e9b0afd3bf58f2c01d7 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.46.0-py310h2372a71_0.conda#3c0109417cbcdabfed289360886b036d +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.4.1-py310h2372a71_0.conda#b2de1af90e44849451c9808312f964ae +https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.3-py310h2372a71_0.conda#21362970a6fea90ca507c253c20465f2 +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.48.1-py310h2372a71_0.conda#480ff621e839c5f80a52975b167500d2 https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.3-nompi_h4f84152_100.conda#d471a5c3abc984b662d9bae3bb7fd8a5 -https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-7.0.0-pyha770c72_0.conda#a941237cd06538837b25cd245fcd25d8 -https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 -https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-20_linux64_openblas.conda#36d486d72ab64ffea932329a1d3729a3 +https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-7.0.1-pyha770c72_0.conda#746623a787e06191d80a2133e5daff17 +https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.3-pyhd8ed1ab_0.conda#e7d8df6509ba635247ff9aea31134262 
+https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-21_linux64_openblas.conda#4a3816d06451c4946e2db26b86472cb6 https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h119a65a_9.conda#cfebc557e54905dadc355c0e9f003004 -https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-20_linux64_openblas.conda#6fabc51f5e647d09cc010c40061557e0 +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-21_linux64_openblas.conda#1a42f305615c3867684e049e85927531 https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.8.0-pyhd8ed1ab_0.conda#2a75b296096adabbabadd5e9782e5fcc https://conda.anaconda.org/conda-forge/noarch/partd-1.4.1-pyhd8ed1ab_0.conda#acf4b7c0bcd5fa3b0e05801c4d2accd6 -https://conda.anaconda.org/conda-forge/linux-64/pillow-10.1.0-py310h01dd4db_0.conda#95d87a906d88b5824d7d36eeef091dba -https://conda.anaconda.org/conda-forge/noarch/pip-23.3.1-pyhd8ed1ab_0.conda#2400c0b86889f43aa52067161e1fb108 +https://conda.anaconda.org/conda-forge/linux-64/pillow-10.2.0-py310h01dd4db_0.conda#9ec32d0d90f7670eb29bbba18299cf29 +https://conda.anaconda.org/conda-forge/noarch/pip-24.0-pyhd8ed1ab_0.conda#f586ac1e56c8638b64f9c8122a7b8a67 https://conda.anaconda.org/conda-forge/linux-64/proj-9.3.1-h1d62c97_0.conda#44ec51d0857d9be26158bb85caa74fdb -https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.3-pyhd8ed1ab_0.conda#5bdca0aca30b0ee62bb84854e027eae0 +https://conda.anaconda.org/conda-forge/noarch/pytest-8.0.0-pyhd8ed1ab_0.conda#5ba1cc5b924226349d4a49fb547b7579 https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.9.0-hd8ed1ab_0.conda#c16524c1b7227dc80b36b4fa6f77cc86 -https://conda.anaconda.org/conda-forge/noarch/urllib3-2.1.0-pyhd8ed1ab_0.conda#f8ced8ee63830dec7ecc1be048d1470a +https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.0-pyhd8ed1ab_0.conda#6a7e0694921f668a030d52f0c47baebd 
https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.25.0-pyhd8ed1ab_0.conda#c119653cba436d8183c27bf6d190e587 https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.3.0-h3d44ed6_0.conda#5a6f6c00ef982a9bc83558d9ac8f64a0 -https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-7.0.0-hd8ed1ab_0.conda#12aff14f84c337be5e5636bf612f4140 +https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-7.0.1-hd8ed1ab_0.conda#4a2f43a20fa404b998859c6a470ba316 https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h9612171_113.conda#b2414908e43c442ddc68e6148774a304 -https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.2-py310hb13e2d6_0.conda#d3147cfbf72d6ae7bba10562208f6def +https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.4-py310hb13e2d6_0.conda#6593de64c935768b6bad3e19b3e978be https://conda.anaconda.org/conda-forge/noarch/pbr-6.0.0-pyhd8ed1ab_0.conda#8dbab5ba746ed14aa32cb232dc437f8f https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py310hd5c30f3_5.conda#dc2ee770a2299307f3c127af79160d25 https://conda.anaconda.org/conda-forge/noarch/pytest-cov-4.1.0-pyhd8ed1ab_0.conda#06eb685a3a0b146347a58dda979485da @@ -193,18 +194,18 @@ https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.0.4-pyhd8ed1ab_0. 
https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py310hd41b1e2_4.conda#35e87277fba9944b8a975113538bb5df https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.3-py310h1f7b6fc_0.conda#31beda75384647959d5792a1a7dc571a https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.2.0-py310hd41b1e2_0.conda#85d2aaa7af046528d339da1e813c3a9f -https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.12.1-pyhd8ed1ab_0.conda#bf6ad72d882bc3f04e6a0fb50fd2cce8 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.1.1-pyhd8ed1ab_0.conda#1a92a5bd77b2430796696e25c3d8dbcb https://conda.anaconda.org/conda-forge/noarch/identify-2.5.33-pyhd8ed1ab_0.conda#93c8f8ceb83827d88deeba796f07fba7 https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.0-py310h2372a71_1.conda#dfcf64f67961eb9686676f96fdb4b4d1 https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hacb5139_103.conda#50f05f98d084805642d24dff910e11e8 -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.4-py310hcc13569_0.conda#410f7e83992a591e492c25049a859254 +https://conda.anaconda.org/conda-forge/linux-64/pandas-2.2.0-py310hcc13569_0.conda#514c836161e8b2e43e7d8fb7a28a92c4 https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-ha41ecd1_2.conda#1a66c10f6a0da3dbd2f3a68127e7f6a0 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py310h1f7b6fc_1.conda#be6f0382440ccbf9fb01bb19ab1f1fc0 -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.4-py310hb13e2d6_0.conda#f0063b2885bfae11324a00a693f88781 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.12.0-py310hb13e2d6_2.conda#cd3baec470071490bc5ab05da64c52b5 https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.2-py310hc3e127f_1.conda#fdaca8d27b3af78d617521eb37b1d055 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 
https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py310h1f7b6fc_4.conda#0ca55ca20891d393846695354b32ebc5 -https://conda.anaconda.org/conda-forge/noarch/distributed-2023.12.1-pyhd8ed1ab_0.conda#6b31b9b627f238a0068926d5650ae128 +https://conda.anaconda.org/conda-forge/noarch/distributed-2024.1.1-pyhd8ed1ab_0.conda#81039f39690f341dcb0a68bf62e812be https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h9e768e6_3.conda#c330e87e698bae8e7381c0315cf25dd0 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h7f000aa_3.conda#0abfa7f9241a0f4fd732bc15773cfb0c https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a @@ -217,13 +218,13 @@ https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py310hcc13569_1.c https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_4.conda#ddcf387719b2e44df0cc4dd467643951 https://conda.anaconda.org/conda-forge/linux-64/graphviz-9.0.0-h78e8752_1.conda#a3f4cd4a512ec5db35ffbf25ba11f537 https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 -https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.14.4-pyhd8ed1ab_0.conda#c79b8443908032263ffb40ee6215e9e4 +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.15.2-pyhd8ed1ab_0.conda#ce99859070b0e17ccc63234ca58f3ed8 https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_0.conda#ac832cc43adc79118cf6e23f1f9b8995 https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.5.0-pyhd8ed1ab_0.conda#264b3c697fa9cdade87eb0abe4440d54 https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.15.0-pyhd8ed1ab_0.conda#1a49ca9515ef9a96edff2eea06143dc6 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.7-pyhd8ed1ab_0.conda#aebfabcb60c33a89c1f9290cab49bc93 
-https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.5-pyhd8ed1ab_0.conda#ebf08f5184d8eaa486697bc060031953 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.4-pyhd8ed1ab_0.conda#a9a89000dfd19656ad004b937eeb6828 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.6-pyhd8ed1ab_0.conda#cf5c9649272c677a964a7313279e3a9b +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.8-pyhd8ed1ab_0.conda#611a35a27914fac3aa37611a6fe40bb5 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.6-pyhd8ed1ab_0.conda#d7e4954df0d3aea2eacc7835ad12671d +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.5-pyhd8ed1ab_0.conda#7e1e7437273682ada2ed5e9e9714b140 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.7-pyhd8ed1ab_0.conda#26acae54b06f178681bfb551760f5dd1 https://conda.anaconda.org/conda-forge/noarch/sphinx-5.3.0-pyhd8ed1ab_0.tar.bz2#f9e1fcfe235d655900bfeb6aee426472 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.9-pyhd8ed1ab_0.conda#0612e497d7860728f2cda421ea2aec09 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_0.conda#e507335cb4ca9cff4c3d0fa9cdab255e diff --git a/requirements/locks/py311-linux-64.lock b/requirements/locks/py311-linux-64.lock index 455cbd7a9b..053251b045 100644 --- a/requirements/locks/py311-linux-64.lock +++ b/requirements/locks/py311-linux-64.lock @@ -1,24 +1,24 @@ # Generated by conda-lock. 
# platform: linux-64 -# input_hash: 51321f928e4883d91354d6e049004532b17675ee9629854a199b34e0854e0bf9 +# input_hash: 9f4d0f3ce6f3f0d7af7672fdc6f449b404e067882f805f0b3c416eb77ae0f4c0 @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 -https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.11.17-hbcca054_0.conda#01ffc8d36f9eba0ce0b3c1955fa780ee +https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.2.2-hbcca054_0.conda#2f4327a1cbe7f022401b236e915a5fef https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_1.conda#6185f640c43843e5ad6fd1c5372c3f80 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda#7aca3059a1729aa76c597603f10b0dd3 -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_3.conda#937eaed008f6bf2191c5fe76f87755e9 +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_5.conda#f6f6600d18a4047b54f803cf708b868a https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.11-4_cp311.conda#d786502c97404c94d7d58d258a445a65 -https://conda.anaconda.org/conda-forge/noarch/tzdata-2023c-h71feb2d_0.conda#939e3e74d8be4dac89ce83b20de2492a +https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h0c530f3_0.conda#161081fc7cec0bfda0d86d7cb595f8d8 https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 -https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.2.0-h807b86a_3.conda#7124cbb46b13d395bdde68f2d215c989 
+https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.2.0-h807b86a_5.conda#d211c42b9ce49aee3734fdc828731689 https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_3.conda#23fdf1fef05baeb7eadc2aed5fb0011f +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_5.conda#d4ff227c46917d3b4565302a2bbb276b https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hd590300_5.conda#69b8b6202a07720f448be700e300ccf4 -https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.23.0-hd590300_0.conda#d459949bc10f64dee1595c176c2e6291 +https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.26.0-hd590300_0.conda#a86d90025198fd411845fc245ebc06c8 https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 https://conda.anaconda.org/conda-forge/linux-64/geos-3.12.1-h59595ed_0.conda#8c0f4f71f5a59ceb0c6fa9f51501066d https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2#14947d8770185e5153fdd04d4673ed37 @@ -33,18 +33,19 @@ https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.19-hd590300_0.conda https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda#172bf1cd1ff8629f2b1179945ed45055 https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda#6305a3dd2752c76335295da4e581f2fd https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_3.conda#c714d905cdfa0e70200f68b80cc04764 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_5.conda#7a6bd7a12a4bd359e2afe6c0fa1acace 
https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-hd590300_2.conda#d66573916ffcf376178462f1b61c941e https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda#ea25936bb4080d843790b586850f82b8 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda#30fd6e37fe21f86f4bd26d6ee73eeec7 https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda#40b61aab5c7ba9ff276c41cfffe6b80b https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.2-hd590300_0.conda#30de3fd9b3b602f7473f30e684eeea8c +https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda#5aa797f8787fe7a17d1b0821485b5adc https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-hd590300_5.conda#f36c115f1ee199da648e0597ec2047ad https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4-h59595ed_2.conda#7dbaa197d7ba6032caf7ae7f32c1efa0 -https://conda.anaconda.org/conda-forge/linux-64/openssl-3.2.0-hd590300_1.conda#603827b39ea2b835268adb8c821b8570 -https://conda.anaconda.org/conda-forge/linux-64/pixman-0.42.2-h59595ed_0.conda#700edd63ccd5fc66b70b1c028cea9a68 +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.2.1-hd590300_0.conda#51a753e64a3027bd7e23a189b1f6e91e +https://conda.anaconda.org/conda-forge/linux-64/pixman-0.43.2-h59595ed_0.conda#71004cbf7924e19c02746ccde9fd7123 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 https://conda.anaconda.org/conda-forge/linux-64/snappy-1.1.10-h9fff704_0.conda#e6d228cd0bb74a51dd18f5bfce0b4115 https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2#4b230e8381279d76131116660f5a241a @@ -62,14 +63,14 @@ 
https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda#bd7 https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hd590300_1.conda#f07002e225d7a60a694d42a7bf5ff53f https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hd590300_1.conda#5fc11c6020d421960607d821310fcd4d https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_3.conda#73031c79546ad06f1fe62e57fdd021bc +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_5.conda#e73e9cfd1191783392131e6238bdb3e9 https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.58.0-h47da74e_1.conda#700ac6ea6d53d5510591c4344d5c989a -https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 -https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.44.2-h2797004_0.conda#3b6a9f225c3dbe0d24f4fedd4625c5bf +https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.42-h2797004_0.conda#d67729828dc6ff7ba44a61062ad79880 +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.45.1-h2797004_0.conda#fc4ccadfbf6d4784de88c41704792562 https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c -https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.12.3-h232c23b_0.conda#bc6ac4c0cea148d924f621985bc3892b +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.12.5-h232c23b_0.conda#c442ebfda7a475f5e78f1c8e45f1e919 https://conda.anaconda.org/conda-forge/linux-64/libzip-1.10.1-h2629f0a_3.conda#ac79812548e7e8cf61f7b0abdef01d3b 
https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.42-hcad00b1_0.conda#679c8961826aa4b50653bce17ee52abe https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 @@ -82,33 +83,33 @@ https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hd590300_1.cond https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda#9ae35c3d96db2c94ce0cef86efdfa2cb https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.2-h659d440_0.conda#cd95826dbd331ed1be26bdf401432844 https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.3-h783c2da_0.conda#9bd06b12bbfa6fd1740fd23af4b0f0c7 -https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.25-pthreads_h413a1c8_0.conda#d172b34a443b95f86089e8229ddc9a17 +https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.26-pthreads_h413a1c8_0.conda#760ae35415f5ba8b15d09df5afe8b23a https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-ha9c0a0a_2.conda#55ed21669b2015f77c180feb1dd41930 -https://conda.anaconda.org/conda-forge/linux-64/python-3.11.7-hab00c5b_0_cpython.conda#bf281a975393266ab95734a8cfd532ec -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.44.2-h2c6b66d_0.conda#4f2892c672829693fd978d065db4e8be +https://conda.anaconda.org/conda-forge/linux-64/python-3.11.7-hab00c5b_1_cpython.conda#27cf681282c11dba7b0b1fd266e8f289 +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.45.1-h2c6b66d_0.conda#93acf31b379acebada263b9bce3dc6ed https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_3.conda#6bb8deb138f87c9d48320ac21b87e7a1 https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.7-h8ee46fc_0.conda#49e482d882669206653b095f5206c05b -https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.13-pyhd8ed1ab_0.conda#06006184e203b61d3525f90de394471e +https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.16-pyhd8ed1ab_0.conda#def531a3ac77b7fb8c21d17bb5d0badb 
https://conda.anaconda.org/conda-forge/noarch/antlr-python-runtime-4.11.1-pyhd8ed1ab_0.tar.bz2#15109c4977d39ad7aa3423f57243e286 https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.1.0-pyhd8ed1ab_0.conda#0e8715bef534217eae333c53f645c9ed https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-hd4edc92_1.tar.bz2#6c72ec3e660a51736913ef6ea68c454b https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hd590300_1.conda#f27a24d46e3ea7b70a1f98e50c62508f https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py311hb755f60_1.conda#cce9e7c3f1c307f2a5fb08a2922d6164 -https://conda.anaconda.org/conda-forge/noarch/certifi-2023.11.17-pyhd8ed1ab_0.conda#2011bcf45376341dd1d690263fdbc789 +https://conda.anaconda.org/conda-forge/noarch/certifi-2024.2.2-pyhd8ed1ab_0.conda#0876280e409658fc6f9e75d035960333 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.2-pyhd8ed1ab_0.conda#7f4a9e3fcff3f6356ae99244a014da6a https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda#f3ad426304898027fc619827ff428eca https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.0.0-pyhd8ed1ab_0.conda#753d29fe41bb881e4b9c004f0abf973f https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda#5cd86562580f274031ede6aa6aa24441 -https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.6-py311hb755f60_0.conda#88cc84238dda72e11285d9cfcbe43e51 +https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.8-py311hb755f60_0.conda#28778bfea41b0f34141208783882649b https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.8-pyhd8ed1ab_0.conda#db16c66b759a64dc5183d69cc3745a52 https://conda.anaconda.org/conda-forge/linux-64/docutils-0.19-py311h38be061_1.tar.bz2#599159b0740e9b82e7eef0e8471be3c2 
-https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.0-pyhd8ed1ab_0.conda#f6c211fee3c98229652b60a9a42ef363 +https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.0-pyhd8ed1ab_2.conda#8d652ea2ee8eaee02ed8dc820bc794aa https://conda.anaconda.org/conda-forge/noarch/execnet-2.0.2-pyhd8ed1ab_0.conda#67de0d8241e1060a479e3c37793e26f9 https://conda.anaconda.org/conda-forge/noarch/filelock-3.13.1-pyhd8ed1ab_0.conda#0c1729b74a8152fde6a38ba0a2ab9f45 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d -https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.12.2-pyhca7485f_0.conda#bf40f2a8835b78b1f91083d306b493d2 +https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.2.0-pyhca7485f_0.conda#fad86b90138cf5d82c6f5a2ed6e683d9 https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h829c605_4.conda#252a696860674caf7a855e16f680d63a https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/noarch/idna-3.6-pyhd8ed1ab_0.conda#1a76f09108576397c41c0b0c5bd84134 @@ -117,28 +118,28 @@ https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.5-py311h9547e67_1.conda#2c65bdf442b0d37aad080c8a4e0d452f https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.16-hb7c19ff_0.conda#51bb7010fc86f70eee639b4bb7a894f5 -https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-20_linux64_openblas.conda#2b7bb4f7562c8cf334fc2e20c2d28abc +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-21_linux64_openblas.conda#0ac9f44fc096772b0aa092119b00c3ca https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.5.0-hca28451_0.conda#7144d5a828e2cae218e0e3c98d8a0aeb 
https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.2-h658648e_1.conda#0ebb65e8d86843865796c7c95a941f34 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 -https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py311h459d7ec_1.conda#71120b5155a0c500826cf81536721a15 +https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.5-py311h459d7ec_0.conda#a322b4185121935c871d201ae00ac143 https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.7-py311h9547e67_0.conda#3ac85c6c226e2a2e4b17864fc2ca88ff https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-h488ebb8_3.conda#128c25b7fe6a25286a48f3a6a9b5b6f3 https://conda.anaconda.org/conda-forge/noarch/packaging-23.2-pyhd8ed1ab_0.conda#79002079284aa895f883c6b7f3f88fd6 -https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.1.0-pyhd8ed1ab_0.conda#45a5065664da0d1dfa8f8cd2eaf05ab9 -https://conda.anaconda.org/conda-forge/noarch/pluggy-1.3.0-pyhd8ed1ab_0.conda#2390bd10bed1f3fdc7a537fb5a447d8d -https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py311h459d7ec_1.conda#490d7fa8675afd1aa6f1b2332d156a45 +https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.2.0-pyhd8ed1ab_0.conda#a0bc3eec34b0fab84be6b2da94e98e20 +https://conda.anaconda.org/conda-forge/noarch/pluggy-1.4.0-pyhd8ed1ab_0.conda#139e9feb65187e916162917bb2484976 +https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.8-py311h459d7ec_0.conda#9bc62d25dcf64eec484974a3123c9d57 https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff https://conda.anaconda.org/conda-forge/noarch/pygments-2.17.2-pyhd8ed1ab_0.conda#140a7f159396547e9799aa98f9f0742e https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.1-pyhd8ed1ab_0.conda#176f7d56f0cfe9008bdf1bccd7de02fb 
https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025 -https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0.conda#2590495f608a63625e165915fb4e2e34 +https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.4-pyhd8ed1ab_0.conda#c79cacf8a06a51552fc651652f170208 https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.4.1-py311h459d7ec_0.conda#60b5332b3989fda37884b92c7afd6a91 -https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3.post1-pyhd8ed1ab_0.conda#c93346b446cd08c169d843ae5fc0da97 +https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda#3eeeeb9e4827ace8c0c1419c85d590ad https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.1-py311h459d7ec_1.conda#52719a74ad130de8fb5d047dc91f247a -https://conda.anaconda.org/conda-forge/noarch/setuptools-68.2.2-pyhd8ed1ab_0.conda#fc2166155db840c634a1291a5c35a709 +https://conda.anaconda.org/conda-forge/noarch/setuptools-69.0.3-pyhd8ed1ab_0.conda#40695fdfd15a92121ed2922900d0308b https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d @@ -147,7 +148,7 @@ https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_0.conda#04eedddeb68ad39871c8127dd1c21f4f https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 
-https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 +https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.1-pyhd8ed1ab_0.conda#2fcb582444635e2c402e8569bb94e039 https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.3-py311h459d7ec_1.conda#a700fcb5cedd3e72d0c75d095c7a6eda https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.9.0-pyha770c72_0.conda#a92a6440c3fe7052d63244f3aba2a4a7 https://conda.anaconda.org/conda-forge/noarch/wheel-0.42.0-pyhd8ed1ab_0.conda#1cdea58981c5cbc17b51973bcaddcea7 @@ -157,32 +158,32 @@ https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30 https://conda.anaconda.org/conda-forge/noarch/zipp-3.17.0-pyhd8ed1ab_0.conda#2e4d6bc0b14e10f895fc6791a7d9b26a https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.4-pyhd8ed1ab_0.conda#46a2e6e3dfa718ce3492018d5a110dd6 https://conda.anaconda.org/conda-forge/noarch/babel-2.14.0-pyhd8ed1ab_0.conda#9669586875baeced8fc30c0826c3270e -https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0.conda#a362ff7d976217f8fa78c0f1c4f59717 +https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_0.conda#332493000404d8411859539a5a630865 https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.0-h3faef2a_0.conda#f907bb958910dc404647326ca80c263e https://conda.anaconda.org/conda-forge/linux-64/cffi-1.16.0-py311hb3a22ac_0.conda#b3469563ac5e808b0cd92810d0697043 -https://conda.anaconda.org/conda-forge/linux-64/coverage-7.3.3-py311h459d7ec_0.conda#9db2c1316e96068c0189beaeb716f3fe -https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py311h459d7ec_1.conda#afe341dbe834ae76d2c23157ff00e633 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.46.0-py311h459d7ec_0.conda#a14114f70e23f7fd5ab9941fec45b095 +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.4.1-py311h459d7ec_0.conda#9caf3270065a2d40fd9a443ba1568e96 
+https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.3-py311h459d7ec_0.conda#13d385f635d7fbe9acc93600f67a6cb4 +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.48.1-py311h459d7ec_0.conda#36363685b6e56682b1b256eb0ad503f6 https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.3-nompi_h4f84152_100.conda#d471a5c3abc984b662d9bae3bb7fd8a5 -https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-7.0.0-pyha770c72_0.conda#a941237cd06538837b25cd245fcd25d8 -https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 -https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-20_linux64_openblas.conda#36d486d72ab64ffea932329a1d3729a3 +https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-7.0.1-pyha770c72_0.conda#746623a787e06191d80a2133e5daff17 +https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.3-pyhd8ed1ab_0.conda#e7d8df6509ba635247ff9aea31134262 +https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-21_linux64_openblas.conda#4a3816d06451c4946e2db26b86472cb6 https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h119a65a_9.conda#cfebc557e54905dadc355c0e9f003004 -https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-20_linux64_openblas.conda#6fabc51f5e647d09cc010c40061557e0 +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-21_linux64_openblas.conda#1a42f305615c3867684e049e85927531 https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.8.0-pyhd8ed1ab_0.conda#2a75b296096adabbabadd5e9782e5fcc https://conda.anaconda.org/conda-forge/noarch/partd-1.4.1-pyhd8ed1ab_0.conda#acf4b7c0bcd5fa3b0e05801c4d2accd6 -https://conda.anaconda.org/conda-forge/linux-64/pillow-10.1.0-py311ha6c5da5_0.conda#83a988daf5c49e57f7d2086fb6781fe8 -https://conda.anaconda.org/conda-forge/noarch/pip-23.3.1-pyhd8ed1ab_0.conda#2400c0b86889f43aa52067161e1fb108 
+https://conda.anaconda.org/conda-forge/linux-64/pillow-10.2.0-py311ha6c5da5_0.conda#a5ccd7f2271f28b7d2de0b02b64e3796 +https://conda.anaconda.org/conda-forge/noarch/pip-24.0-pyhd8ed1ab_0.conda#f586ac1e56c8638b64f9c8122a7b8a67 https://conda.anaconda.org/conda-forge/linux-64/proj-9.3.1-h1d62c97_0.conda#44ec51d0857d9be26158bb85caa74fdb -https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.3-pyhd8ed1ab_0.conda#5bdca0aca30b0ee62bb84854e027eae0 +https://conda.anaconda.org/conda-forge/noarch/pytest-8.0.0-pyhd8ed1ab_0.conda#5ba1cc5b924226349d4a49fb547b7579 https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.9.0-hd8ed1ab_0.conda#c16524c1b7227dc80b36b4fa6f77cc86 -https://conda.anaconda.org/conda-forge/noarch/urllib3-2.1.0-pyhd8ed1ab_0.conda#f8ced8ee63830dec7ecc1be048d1470a +https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.0-pyhd8ed1ab_0.conda#6a7e0694921f668a030d52f0c47baebd https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.25.0-pyhd8ed1ab_0.conda#c119653cba436d8183c27bf6d190e587 https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.3.0-h3d44ed6_0.conda#5a6f6c00ef982a9bc83558d9ac8f64a0 -https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-7.0.0-hd8ed1ab_0.conda#12aff14f84c337be5e5636bf612f4140 +https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-7.0.1-hd8ed1ab_0.conda#4a2f43a20fa404b998859c6a470ba316 https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h9612171_113.conda#b2414908e43c442ddc68e6148774a304 -https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.2-py311h64a7726_0.conda#fd2f142dcd680413b5ede5d0fb799205 +https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.4-py311h64a7726_0.conda#a502d7aad449a1206efb366d6a12c52d https://conda.anaconda.org/conda-forge/noarch/pbr-6.0.0-pyhd8ed1ab_0.conda#8dbab5ba746ed14aa32cb232dc437f8f 
https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py311hca0b8b9_5.conda#cac429fcb9126d5e6f02c8ba61c2a811 https://conda.anaconda.org/conda-forge/noarch/pytest-cov-4.1.0-pyhd8ed1ab_0.conda#06eb685a3a0b146347a58dda979485da @@ -192,18 +193,18 @@ https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.0.4-pyhd8ed1ab_0. https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py311h9547e67_4.conda#586da7df03b68640de14dc3e8bcbf76f https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.3-py311h1f0f07a_0.conda#b7e6d52b39e199238c3400cafaabafb3 https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.2.0-py311h9547e67_0.conda#40828c5b36ef52433e21f89943e09f33 -https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.12.1-pyhd8ed1ab_0.conda#bf6ad72d882bc3f04e6a0fb50fd2cce8 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.1.1-pyhd8ed1ab_0.conda#1a92a5bd77b2430796696e25c3d8dbcb https://conda.anaconda.org/conda-forge/noarch/identify-2.5.33-pyhd8ed1ab_0.conda#93c8f8ceb83827d88deeba796f07fba7 https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.0-py311h459d7ec_1.conda#45b8d355bbcdd27588c2d266bcfdff84 https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hacb5139_103.conda#50f05f98d084805642d24dff910e11e8 -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.4-py311h320fe9a_0.conda#e44ccb61b6621bf3f8053ae66eba7397 +https://conda.anaconda.org/conda-forge/linux-64/pandas-2.2.0-py311h320fe9a_0.conda#b9e7a2cb2c47bbb99c05d1892500be45 https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-ha41ecd1_2.conda#1a66c10f6a0da3dbd2f3a68127e7f6a0 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py311h1f0f07a_1.conda#86b71ff85f3e4c8a98b5bace6d9c4565 -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.4-py311h64a7726_0.conda#9ac5334f1b5ed072d3dbc342503d7868 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.12.0-py311h64a7726_2.conda#24ca5107ab75c5521067b8ba505dfae5 
https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.2-py311h2032efe_1.conda#4ba860ff851768615b1a25b788022750 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py311h1f0f07a_4.conda#1e105c1a8ea2163507726144b401eb1b -https://conda.anaconda.org/conda-forge/noarch/distributed-2023.12.1-pyhd8ed1ab_0.conda#6b31b9b627f238a0068926d5650ae128 +https://conda.anaconda.org/conda-forge/noarch/distributed-2024.1.1-pyhd8ed1ab_0.conda#81039f39690f341dcb0a68bf62e812be https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h9e768e6_3.conda#c330e87e698bae8e7381c0315cf25dd0 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h7f000aa_3.conda#0abfa7f9241a0f4fd732bc15773cfb0c https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a @@ -216,13 +217,13 @@ https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py311h320fe9a_1.c https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_4.conda#ddcf387719b2e44df0cc4dd467643951 https://conda.anaconda.org/conda-forge/linux-64/graphviz-9.0.0-h78e8752_1.conda#a3f4cd4a512ec5db35ffbf25ba11f537 https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 -https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.14.4-pyhd8ed1ab_0.conda#c79b8443908032263ffb40ee6215e9e4 +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.15.2-pyhd8ed1ab_0.conda#ce99859070b0e17ccc63234ca58f3ed8 https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_0.conda#ac832cc43adc79118cf6e23f1f9b8995 https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.5.0-pyhd8ed1ab_0.conda#264b3c697fa9cdade87eb0abe4440d54 https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.15.0-pyhd8ed1ab_0.conda#1a49ca9515ef9a96edff2eea06143dc6 
-https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.7-pyhd8ed1ab_0.conda#aebfabcb60c33a89c1f9290cab49bc93 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.5-pyhd8ed1ab_0.conda#ebf08f5184d8eaa486697bc060031953 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.4-pyhd8ed1ab_0.conda#a9a89000dfd19656ad004b937eeb6828 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.6-pyhd8ed1ab_0.conda#cf5c9649272c677a964a7313279e3a9b +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.8-pyhd8ed1ab_0.conda#611a35a27914fac3aa37611a6fe40bb5 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.6-pyhd8ed1ab_0.conda#d7e4954df0d3aea2eacc7835ad12671d +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.5-pyhd8ed1ab_0.conda#7e1e7437273682ada2ed5e9e9714b140 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.7-pyhd8ed1ab_0.conda#26acae54b06f178681bfb551760f5dd1 https://conda.anaconda.org/conda-forge/noarch/sphinx-5.3.0-pyhd8ed1ab_0.tar.bz2#f9e1fcfe235d655900bfeb6aee426472 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.9-pyhd8ed1ab_0.conda#0612e497d7860728f2cda421ea2aec09 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_0.conda#e507335cb4ca9cff4c3d0fa9cdab255e diff --git a/requirements/locks/py39-linux-64.lock b/requirements/locks/py39-linux-64.lock index 8724473f86..f68198c664 100644 --- a/requirements/locks/py39-linux-64.lock +++ b/requirements/locks/py39-linux-64.lock @@ -1,24 +1,24 @@ # Generated by conda-lock. 
# platform: linux-64 -# input_hash: f50dc073e5fb2216547509366957a7e99607a06a604840563bff4dd4b5daedcb +# input_hash: 86ffb93b06ad756fa46d24f8877077b64c3a7cd8bda0399560525e53fed33f99 @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 -https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.11.17-hbcca054_0.conda#01ffc8d36f9eba0ce0b3c1955fa780ee +https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.2.2-hbcca054_0.conda#2f4327a1cbe7f022401b236e915a5fef https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_1.conda#6185f640c43843e5ad6fd1c5372c3f80 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda#7aca3059a1729aa76c597603f10b0dd3 -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_3.conda#937eaed008f6bf2191c5fe76f87755e9 +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_5.conda#f6f6600d18a4047b54f803cf708b868a https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.9-4_cp39.conda#bfe4b3259a8ac6cdf0037752904da6a7 -https://conda.anaconda.org/conda-forge/noarch/tzdata-2023c-h71feb2d_0.conda#939e3e74d8be4dac89ce83b20de2492a +https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h0c530f3_0.conda#161081fc7cec0bfda0d86d7cb595f8d8 https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 -https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.2.0-h807b86a_3.conda#7124cbb46b13d395bdde68f2d215c989 
+https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.2.0-h807b86a_5.conda#d211c42b9ce49aee3734fdc828731689 https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_3.conda#23fdf1fef05baeb7eadc2aed5fb0011f +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_5.conda#d4ff227c46917d3b4565302a2bbb276b https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hd590300_5.conda#69b8b6202a07720f448be700e300ccf4 -https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.23.0-hd590300_0.conda#d459949bc10f64dee1595c176c2e6291 +https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.26.0-hd590300_0.conda#a86d90025198fd411845fc245ebc06c8 https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 https://conda.anaconda.org/conda-forge/linux-64/geos-3.12.1-h59595ed_0.conda#8c0f4f71f5a59ceb0c6fa9f51501066d https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2#14947d8770185e5153fdd04d4673ed37 @@ -33,18 +33,19 @@ https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.19-hd590300_0.conda https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda#172bf1cd1ff8629f2b1179945ed45055 https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda#6305a3dd2752c76335295da4e581f2fd https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_3.conda#c714d905cdfa0e70200f68b80cc04764 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_5.conda#7a6bd7a12a4bd359e2afe6c0fa1acace 
https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-hd590300_2.conda#d66573916ffcf376178462f1b61c941e https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda#ea25936bb4080d843790b586850f82b8 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda#30fd6e37fe21f86f4bd26d6ee73eeec7 https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda#40b61aab5c7ba9ff276c41cfffe6b80b https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.2-hd590300_0.conda#30de3fd9b3b602f7473f30e684eeea8c +https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda#5aa797f8787fe7a17d1b0821485b5adc https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-hd590300_5.conda#f36c115f1ee199da648e0597ec2047ad https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4-h59595ed_2.conda#7dbaa197d7ba6032caf7ae7f32c1efa0 -https://conda.anaconda.org/conda-forge/linux-64/openssl-3.2.0-hd590300_1.conda#603827b39ea2b835268adb8c821b8570 -https://conda.anaconda.org/conda-forge/linux-64/pixman-0.42.2-h59595ed_0.conda#700edd63ccd5fc66b70b1c028cea9a68 +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.2.1-hd590300_0.conda#51a753e64a3027bd7e23a189b1f6e91e +https://conda.anaconda.org/conda-forge/linux-64/pixman-0.43.2-h59595ed_0.conda#71004cbf7924e19c02746ccde9fd7123 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 https://conda.anaconda.org/conda-forge/linux-64/snappy-1.1.10-h9fff704_0.conda#e6d228cd0bb74a51dd18f5bfce0b4115 https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2#4b230e8381279d76131116660f5a241a @@ -62,14 +63,14 @@ 
https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda#bd7 https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hd590300_1.conda#f07002e225d7a60a694d42a7bf5ff53f https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hd590300_1.conda#5fc11c6020d421960607d821310fcd4d https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_3.conda#73031c79546ad06f1fe62e57fdd021bc +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_5.conda#e73e9cfd1191783392131e6238bdb3e9 https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.58.0-h47da74e_1.conda#700ac6ea6d53d5510591c4344d5c989a -https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 -https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.44.2-h2797004_0.conda#3b6a9f225c3dbe0d24f4fedd4625c5bf +https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.42-h2797004_0.conda#d67729828dc6ff7ba44a61062ad79880 +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.45.1-h2797004_0.conda#fc4ccadfbf6d4784de88c41704792562 https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c -https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.12.3-h232c23b_0.conda#bc6ac4c0cea148d924f621985bc3892b +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.12.5-h232c23b_0.conda#c442ebfda7a475f5e78f1c8e45f1e919 https://conda.anaconda.org/conda-forge/linux-64/libzip-1.10.1-h2629f0a_3.conda#ac79812548e7e8cf61f7b0abdef01d3b 
https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.42-hcad00b1_0.conda#679c8961826aa4b50653bce17ee52abe https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 @@ -82,33 +83,33 @@ https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hd590300_1.cond https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda#9ae35c3d96db2c94ce0cef86efdfa2cb https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.2-h659d440_0.conda#cd95826dbd331ed1be26bdf401432844 https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.3-h783c2da_0.conda#9bd06b12bbfa6fd1740fd23af4b0f0c7 -https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.25-pthreads_h413a1c8_0.conda#d172b34a443b95f86089e8229ddc9a17 +https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.26-pthreads_h413a1c8_0.conda#760ae35415f5ba8b15d09df5afe8b23a https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-ha9c0a0a_2.conda#55ed21669b2015f77c180feb1dd41930 -https://conda.anaconda.org/conda-forge/linux-64/python-3.9.18-h0755675_0_cpython.conda#3ede353bc605068d9677e700b1847382 -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.44.2-h2c6b66d_0.conda#4f2892c672829693fd978d065db4e8be +https://conda.anaconda.org/conda-forge/linux-64/python-3.9.18-h0755675_1_cpython.conda#255a7002aeec7a067ff19b545aca6328 +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.45.1-h2c6b66d_0.conda#93acf31b379acebada263b9bce3dc6ed https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_3.conda#6bb8deb138f87c9d48320ac21b87e7a1 https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.7-h8ee46fc_0.conda#49e482d882669206653b095f5206c05b -https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.13-pyhd8ed1ab_0.conda#06006184e203b61d3525f90de394471e +https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.16-pyhd8ed1ab_0.conda#def531a3ac77b7fb8c21d17bb5d0badb 
https://conda.anaconda.org/conda-forge/noarch/antlr-python-runtime-4.11.1-pyhd8ed1ab_0.tar.bz2#15109c4977d39ad7aa3423f57243e286 https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.1.0-pyhd8ed1ab_0.conda#0e8715bef534217eae333c53f645c9ed https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-hd4edc92_1.tar.bz2#6c72ec3e660a51736913ef6ea68c454b https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hd590300_1.conda#f27a24d46e3ea7b70a1f98e50c62508f https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py39h3d6467e_1.conda#c48418c8b35f1d59ae9ae1174812b40a -https://conda.anaconda.org/conda-forge/noarch/certifi-2023.11.17-pyhd8ed1ab_0.conda#2011bcf45376341dd1d690263fdbc789 +https://conda.anaconda.org/conda-forge/noarch/certifi-2024.2.2-pyhd8ed1ab_0.conda#0876280e409658fc6f9e75d035960333 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.2-pyhd8ed1ab_0.conda#7f4a9e3fcff3f6356ae99244a014da6a https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda#f3ad426304898027fc619827ff428eca https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.0.0-pyhd8ed1ab_0.conda#753d29fe41bb881e4b9c004f0abf973f https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda#5cd86562580f274031ede6aa6aa24441 -https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.6-py39h3d6467e_0.conda#bfde3cf098e298b81d1c1cbc9c79ab59 +https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.8-py39h3d6467e_0.conda#0261e43a0b124d1ced1e1af085e8bc3c https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.8-pyhd8ed1ab_0.conda#db16c66b759a64dc5183d69cc3745a52 https://conda.anaconda.org/conda-forge/linux-64/docutils-0.19-py39hf3d152e_1.tar.bz2#adb733ec2ee669f6d010758d054da60f 
-https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.0-pyhd8ed1ab_0.conda#f6c211fee3c98229652b60a9a42ef363 +https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.0-pyhd8ed1ab_2.conda#8d652ea2ee8eaee02ed8dc820bc794aa https://conda.anaconda.org/conda-forge/noarch/execnet-2.0.2-pyhd8ed1ab_0.conda#67de0d8241e1060a479e3c37793e26f9 https://conda.anaconda.org/conda-forge/noarch/filelock-3.13.1-pyhd8ed1ab_0.conda#0c1729b74a8152fde6a38ba0a2ab9f45 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d -https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.12.2-pyhca7485f_0.conda#bf40f2a8835b78b1f91083d306b493d2 +https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.2.0-pyhca7485f_0.conda#fad86b90138cf5d82c6f5a2ed6e683d9 https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h829c605_4.conda#252a696860674caf7a855e16f680d63a https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/noarch/idna-3.6-pyhd8ed1ab_0.conda#1a76f09108576397c41c0b0c5bd84134 @@ -117,28 +118,28 @@ https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.5-py39h7633fee_1.conda#c9f74d717e5a2847a9f8b779c54130f2 https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.16-hb7c19ff_0.conda#51bb7010fc86f70eee639b4bb7a894f5 -https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-20_linux64_openblas.conda#2b7bb4f7562c8cf334fc2e20c2d28abc +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-21_linux64_openblas.conda#0ac9f44fc096772b0aa092119b00c3ca https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.5.0-hca28451_0.conda#7144d5a828e2cae218e0e3c98d8a0aeb 
https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.2-h658648e_1.conda#0ebb65e8d86843865796c7c95a941f34 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 -https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py39hd1e30aa_1.conda#ee2b4665b852ec6ff2758f3c1b91233d +https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.5-py39hd1e30aa_0.conda#9a9a22eb1f83c44953319ee3b027769f https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.7-py39h7633fee_0.conda#f668e146a2ed03a4e62ffbb98b3115fb https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-h488ebb8_3.conda#128c25b7fe6a25286a48f3a6a9b5b6f3 https://conda.anaconda.org/conda-forge/noarch/packaging-23.2-pyhd8ed1ab_0.conda#79002079284aa895f883c6b7f3f88fd6 -https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.1.0-pyhd8ed1ab_0.conda#45a5065664da0d1dfa8f8cd2eaf05ab9 -https://conda.anaconda.org/conda-forge/noarch/pluggy-1.3.0-pyhd8ed1ab_0.conda#2390bd10bed1f3fdc7a537fb5a447d8d -https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py39hd1e30aa_1.conda#c2e412b0f11e5983bcfc35d9beb91ecb +https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.2.0-pyhd8ed1ab_0.conda#a0bc3eec34b0fab84be6b2da94e98e20 +https://conda.anaconda.org/conda-forge/noarch/pluggy-1.4.0-pyhd8ed1ab_0.conda#139e9feb65187e916162917bb2484976 +https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.8-py39hd1e30aa_0.conda#ec86403fde8793ac1c36f8afa3d15902 https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff https://conda.anaconda.org/conda-forge/noarch/pygments-2.17.2-pyhd8ed1ab_0.conda#140a7f159396547e9799aa98f9f0742e https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.1-pyhd8ed1ab_0.conda#176f7d56f0cfe9008bdf1bccd7de02fb 
https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025 -https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0.conda#2590495f608a63625e165915fb4e2e34 +https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.4-pyhd8ed1ab_0.conda#c79cacf8a06a51552fc651652f170208 https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.4.1-py39hd1e30aa_0.conda#756cb152772a225587a05ca0ec68fc08 -https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3.post1-pyhd8ed1ab_0.conda#c93346b446cd08c169d843ae5fc0da97 +https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda#3eeeeb9e4827ace8c0c1419c85d590ad https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.1-py39hd1e30aa_1.conda#37218233bcdc310e4fde6453bc1b40d8 -https://conda.anaconda.org/conda-forge/noarch/setuptools-68.2.2-pyhd8ed1ab_0.conda#fc2166155db840c634a1291a5c35a709 +https://conda.anaconda.org/conda-forge/noarch/setuptools-69.0.3-pyhd8ed1ab_0.conda#40695fdfd15a92121ed2922900d0308b https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d @@ -146,7 +147,7 @@ https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.5-pyhd8ed1ab_1.conda#3 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_0.conda#da1d979339e2714c30a8e806a33ec087 https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_0.conda#04eedddeb68ad39871c8127dd1c21f4f https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 
-https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 +https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.1-pyhd8ed1ab_0.conda#2fcb582444635e2c402e8569bb94e039 https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.3-py39hd1e30aa_1.conda#cbe186eefb0bcd91e8f47c3908489874 https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.9.0-pyha770c72_0.conda#a92a6440c3fe7052d63244f3aba2a4a7 https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.1.0-py39hd1e30aa_0.conda#1da984bbb6e765743e13388ba7b7b2c8 @@ -157,33 +158,33 @@ https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30 https://conda.anaconda.org/conda-forge/noarch/zipp-3.17.0-pyhd8ed1ab_0.conda#2e4d6bc0b14e10f895fc6791a7d9b26a https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.4-pyhd8ed1ab_0.conda#46a2e6e3dfa718ce3492018d5a110dd6 https://conda.anaconda.org/conda-forge/noarch/babel-2.14.0-pyhd8ed1ab_0.conda#9669586875baeced8fc30c0826c3270e -https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0.conda#a362ff7d976217f8fa78c0f1c4f59717 +https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_0.conda#332493000404d8411859539a5a630865 https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.0-h3faef2a_0.conda#f907bb958910dc404647326ca80c263e https://conda.anaconda.org/conda-forge/linux-64/cffi-1.16.0-py39h7a31438_0.conda#ac992767d7f8ed2cb27e71e78f0fb2d7 -https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py39hd1e30aa_1.conda#e5b62f0c1f96413116f16d33973f1a44 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.46.0-py39hd1e30aa_0.conda#9b58e5973dd3d786253f4ca9534b1aba +https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.3-py39hd1e30aa_0.conda#dc0fb8e157c7caba4c98f1e1f9d2e5f4 +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.48.1-py39hd1e30aa_0.conda#402ef3d9608c7653187a3fd6fd45b445 
https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.3-nompi_h4f84152_100.conda#d471a5c3abc984b662d9bae3bb7fd8a5 -https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-7.0.0-pyha770c72_0.conda#a941237cd06538837b25cd245fcd25d8 +https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-7.0.1-pyha770c72_0.conda#746623a787e06191d80a2133e5daff17 https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.1.1-pyhd8ed1ab_0.conda#3d5fa25cf42f3f32a12b2d874ace8574 -https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 -https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-20_linux64_openblas.conda#36d486d72ab64ffea932329a1d3729a3 +https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.3-pyhd8ed1ab_0.conda#e7d8df6509ba635247ff9aea31134262 +https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-21_linux64_openblas.conda#4a3816d06451c4946e2db26b86472cb6 https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h119a65a_9.conda#cfebc557e54905dadc355c0e9f003004 -https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-20_linux64_openblas.conda#6fabc51f5e647d09cc010c40061557e0 +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-21_linux64_openblas.conda#1a42f305615c3867684e049e85927531 https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.8.0-pyhd8ed1ab_0.conda#2a75b296096adabbabadd5e9782e5fcc https://conda.anaconda.org/conda-forge/noarch/partd-1.4.1-pyhd8ed1ab_0.conda#acf4b7c0bcd5fa3b0e05801c4d2accd6 -https://conda.anaconda.org/conda-forge/linux-64/pillow-10.1.0-py39had0adad_0.conda#eeaa413fddccecb2ab7f747bdb55b07f -https://conda.anaconda.org/conda-forge/noarch/pip-23.3.1-pyhd8ed1ab_0.conda#2400c0b86889f43aa52067161e1fb108 +https://conda.anaconda.org/conda-forge/linux-64/pillow-10.2.0-py39had0adad_0.conda#2972754dc054bb079d1d121918b5126f +https://conda.anaconda.org/conda-forge/noarch/pip-24.0-pyhd8ed1ab_0.conda#f586ac1e56c8638b64f9c8122a7b8a67 
https://conda.anaconda.org/conda-forge/linux-64/proj-9.3.1-h1d62c97_0.conda#44ec51d0857d9be26158bb85caa74fdb -https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.3-pyhd8ed1ab_0.conda#5bdca0aca30b0ee62bb84854e027eae0 +https://conda.anaconda.org/conda-forge/noarch/pytest-8.0.0-pyhd8ed1ab_0.conda#5ba1cc5b924226349d4a49fb547b7579 https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.9.0-hd8ed1ab_0.conda#c16524c1b7227dc80b36b4fa6f77cc86 -https://conda.anaconda.org/conda-forge/noarch/urllib3-2.1.0-pyhd8ed1ab_0.conda#f8ced8ee63830dec7ecc1be048d1470a +https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.0-pyhd8ed1ab_0.conda#6a7e0694921f668a030d52f0c47baebd https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.25.0-pyhd8ed1ab_0.conda#c119653cba436d8183c27bf6d190e587 https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.3.0-h3d44ed6_0.conda#5a6f6c00ef982a9bc83558d9ac8f64a0 https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.1.1-pyhd8ed1ab_0.conda#d04bd1b5bed9177dd7c3cef15e2b6710 -https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-7.0.0-hd8ed1ab_0.conda#12aff14f84c337be5e5636bf612f4140 +https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-7.0.1-hd8ed1ab_0.conda#4a2f43a20fa404b998859c6a470ba316 https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h9612171_113.conda#b2414908e43c442ddc68e6148774a304 -https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.2-py39h474f0d3_0.conda#459a58eda3e74dd5e3d596c618e7f20a +https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.4-py39h474f0d3_0.conda#aa265f5697237aa13cc10f53fa8acc4f https://conda.anaconda.org/conda-forge/noarch/pbr-6.0.0-pyhd8ed1ab_0.conda#8dbab5ba746ed14aa32cb232dc437f8f https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py39h15b0fa6_5.conda#85e186c7ff673b0d0026782ec353fb2a 
https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.5.0-pyhd8ed1ab_0.conda#d5f595da2daead898ca958ac62f0307b @@ -192,18 +193,18 @@ https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.0.4-pyhd8ed1ab_0. https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py39h7633fee_4.conda#b66595fbda99771266f042f42c7457be https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.3-py39h44dd56e_0.conda#baea2f5dfb3ab7b1c836385d2e1daca7 https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.2.0-py39h7633fee_0.conda#ed71ad3e30eb03da363fb797419cce98 -https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.12.1-pyhd8ed1ab_0.conda#bf6ad72d882bc3f04e6a0fb50fd2cce8 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.1.1-pyhd8ed1ab_0.conda#1a92a5bd77b2430796696e25c3d8dbcb https://conda.anaconda.org/conda-forge/noarch/identify-2.5.33-pyhd8ed1ab_0.conda#93c8f8ceb83827d88deeba796f07fba7 https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.0-py39hd1e30aa_1.conda#ca63612907462c8e36edcc9bbacc253e https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hacb5139_103.conda#50f05f98d084805642d24dff910e11e8 -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.4-py39hddac248_0.conda#dcfd2f15c6f8f0bbf234412b18a2a5d0 +https://conda.anaconda.org/conda-forge/linux-64/pandas-2.2.0-py39hddac248_0.conda#95aaa7baa61432a1ce85dedb7b86d2dd https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-ha41ecd1_2.conda#1a66c10f6a0da3dbd2f3a68127e7f6a0 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py39h44dd56e_1.conda#d037c20e3da2e85f03ebd20ad480c359 -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.4-py39h474f0d3_0.conda#4b401c1516417b4b14aa1249d2f7929d +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.12.0-py39h474f0d3_2.conda#6ab241b2023730f6b41712dc1b503afa https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.2-py39h6404dd3_1.conda#05623249055d99c51cde021b525611db 
https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py39h44dd56e_4.conda#81310d21bf9d91754c1220c585bb72d6 -https://conda.anaconda.org/conda-forge/noarch/distributed-2023.12.1-pyhd8ed1ab_0.conda#6b31b9b627f238a0068926d5650ae128 +https://conda.anaconda.org/conda-forge/noarch/distributed-2024.1.1-pyhd8ed1ab_0.conda#81039f39690f341dcb0a68bf62e812be https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h9e768e6_3.conda#c330e87e698bae8e7381c0315cf25dd0 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h7f000aa_3.conda#0abfa7f9241a0f4fd732bc15773cfb0c https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a @@ -216,13 +217,13 @@ https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py39hddac248_1.co https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_4.conda#ddcf387719b2e44df0cc4dd467643951 https://conda.anaconda.org/conda-forge/linux-64/graphviz-9.0.0-h78e8752_1.conda#a3f4cd4a512ec5db35ffbf25ba11f537 https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 -https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.14.4-pyhd8ed1ab_0.conda#c79b8443908032263ffb40ee6215e9e4 +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.15.2-pyhd8ed1ab_0.conda#ce99859070b0e17ccc63234ca58f3ed8 https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_0.conda#ac832cc43adc79118cf6e23f1f9b8995 https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.5.0-pyhd8ed1ab_0.conda#264b3c697fa9cdade87eb0abe4440d54 https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.15.0-pyhd8ed1ab_0.conda#1a49ca9515ef9a96edff2eea06143dc6 
-https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.7-pyhd8ed1ab_0.conda#aebfabcb60c33a89c1f9290cab49bc93 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.5-pyhd8ed1ab_0.conda#ebf08f5184d8eaa486697bc060031953 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.4-pyhd8ed1ab_0.conda#a9a89000dfd19656ad004b937eeb6828 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.6-pyhd8ed1ab_0.conda#cf5c9649272c677a964a7313279e3a9b +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.8-pyhd8ed1ab_0.conda#611a35a27914fac3aa37611a6fe40bb5 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.6-pyhd8ed1ab_0.conda#d7e4954df0d3aea2eacc7835ad12671d +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.5-pyhd8ed1ab_0.conda#7e1e7437273682ada2ed5e9e9714b140 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.7-pyhd8ed1ab_0.conda#26acae54b06f178681bfb551760f5dd1 https://conda.anaconda.org/conda-forge/noarch/sphinx-5.3.0-pyhd8ed1ab_0.tar.bz2#f9e1fcfe235d655900bfeb6aee426472 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.9-pyhd8ed1ab_0.conda#0612e497d7860728f2cda421ea2aec09 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_0.conda#e507335cb4ca9cff4c3d0fa9cdab255e diff --git a/requirements/py310.yml b/requirements/py310.yml index ced05dd987..f0f8f97eea 100644 --- a/requirements/py310.yml +++ b/requirements/py310.yml @@ -18,7 +18,7 @@ dependencies: - libnetcdf !=4.9.1 - matplotlib-base >=3.5 - netcdf4 - - numpy >1.21, !=1.24.3 + - numpy >=1.23, !=1.24.3 - python-xxhash - pyproj - scipy diff --git a/requirements/py311.yml b/requirements/py311.yml index 5f2b23850e..b40e5f75e1 100644 --- a/requirements/py311.yml +++ b/requirements/py311.yml @@ -18,7 +18,7 @@ dependencies: - libnetcdf !=4.9.1 - matplotlib-base >=3.5 - netcdf4 - - numpy >1.21, !=1.24.3 + 
- numpy >=1.23, !=1.24.3 - python-xxhash - pyproj - scipy diff --git a/requirements/py39.yml b/requirements/py39.yml index a5b32748e3..884c568575 100644 --- a/requirements/py39.yml +++ b/requirements/py39.yml @@ -18,7 +18,7 @@ dependencies: - libnetcdf !=4.9.1 - matplotlib-base >=3.5 - netcdf4 - - numpy >1.21, !=1.24.3 + - numpy >=1.23, !=1.24.3 - python-xxhash - pyproj - scipy diff --git a/requirements/pypi-core.txt b/requirements/pypi-core.txt index e286bb97bc..e6dc005883 100644 --- a/requirements/pypi-core.txt +++ b/requirements/pypi-core.txt @@ -5,7 +5,7 @@ dask[array]>=2022.9.0 # libnetcdf!=4.9.1 (not available on PyPI) matplotlib>=3.5 netcdf4 -numpy>1.21,!=1.24.3 +numpy>=1.23,!=1.24.3 pyproj scipy shapely!=1.8.3 diff --git a/setup.py b/setup.py index 6e58a7999d..28e7a003a9 100644 --- a/setup.py +++ b/setup.py @@ -1,3 +1,5 @@ +"""Iris setup.""" + import os import sys @@ -23,8 +25,10 @@ def run(self): def custom_command(cmd, help=""): - """Factory function to generate a custom command that adds additional - behaviour to build the CF standard names module. + """Create custom command with factory function. + + Custom command will add additional behaviour to build the CF + standard names module. """