diff --git a/docs/src/common_links.inc b/docs/src/common_links.inc
index 530ebc4877..81b8c30df9 100644
--- a/docs/src/common_links.inc
+++ b/docs/src/common_links.inc
@@ -40,6 +40,7 @@
.. _CF-UGRID: https://ugrid-conventions.github.io/ugrid-conventions/
.. _issues on GitHub: https://github.com/SciTools/iris/issues?q=is%3Aopen+is%3Aissue+sort%3Areactions-%2B1-desc
.. _python-stratify: https://github.com/SciTools/python-stratify
+.. _netCDF4: https://github.com/Unidata/netcdf4-python
.. comment
diff --git a/docs/src/userguide/glossary.rst b/docs/src/userguide/glossary.rst
index 818ef0c7ad..5c24f03372 100644
--- a/docs/src/userguide/glossary.rst
+++ b/docs/src/userguide/glossary.rst
@@ -1,3 +1,5 @@
+.. include:: ../common_links.inc
+
.. _glossary:
Glossary
@@ -125,7 +127,7 @@ Glossary
of formats.
| **Related:** :term:`CartoPy` **|** :term:`NumPy`
- | **More information:** `Matplotlib <https://matplotlib.org/>`_
+ | **More information:** `matplotlib`_
|
Metadata
@@ -143,9 +145,11 @@ Glossary
When Iris loads this format, it also especially recognises and interprets data
encoded according to the :term:`CF Conventions`.
+ __ `NetCDF4`_
+
| **Related:** :term:`Fields File (FF) Format`
**|** :term:`GRIB Format` **|** :term:`Post Processing (PP) Format`
- | **More information:** `NetCDF-4 Python Git <https://github.com/Unidata/netcdf4-python>`_
+ | **More information:** `NetCDF-4 Python Git`__
|
NumPy
diff --git a/docs/src/whatsnew/2.1.rst b/docs/src/whatsnew/2.1.rst
index 18c562d3da..33f3a013b1 100644
--- a/docs/src/whatsnew/2.1.rst
+++ b/docs/src/whatsnew/2.1.rst
@@ -1,3 +1,5 @@
+.. include:: ../common_links.inc
+
v2.1 (06 Jun 2018)
******************
@@ -67,7 +69,7 @@ Incompatible Changes
as an alternative.
* This release of Iris contains a number of updated metadata translations.
- See this
+ See this
`changelist `_
for further information.
@@ -84,7 +86,7 @@ Internal
calendar.
* Iris updated its time-handling functionality from the
- `netcdf4-python <https://github.com/Unidata/netcdf4-python>`_
+ `netcdf4-python`__
``netcdftime`` implementation to the standalone module
`cftime <https://github.com/Unidata/cftime>`_.
cftime is entirely compatible with netcdftime, but some issues may
@@ -92,6 +94,8 @@ Internal
In this situation, simply replacing ``netcdftime.datetime`` with
``cftime.datetime`` should be sufficient.
+__ `netCDF4`_
+
* Iris now requires version 2 of Matplotlib, and ``>=1.14`` of NumPy.
Full requirements can be seen in the `requirements `_
directory of the Iris source.
diff --git a/lib/iris/experimental/ugrid/load.py b/lib/iris/experimental/ugrid/load.py
index a522d91313..cfa3935991 100644
--- a/lib/iris/experimental/ugrid/load.py
+++ b/lib/iris/experimental/ugrid/load.py
@@ -209,7 +209,8 @@ def load_meshes(uris, var_name=None):
result = {}
for source in valid_sources:
- meshes_dict = _meshes_from_cf(CFUGridReader(source))
+ with CFUGridReader(source) as cf_reader:
+ meshes_dict = _meshes_from_cf(cf_reader)
meshes = list(meshes_dict.values())
if var_name is not None:
meshes = list(filter(lambda m: m.var_name == var_name, meshes))
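The hunk above swaps a bare `CFUGridReader(source)` call for a `with` block. Given the `__exit__`/`_close` methods added to `CFReader` later in this diff (which `CFUGridReader` inherits), the `with` form is roughly equivalent to this try/finally sketch, guaranteeing the underlying dataset is closed even if mesh parsing raises:

```python
# Rough equivalent of the 'with' block above (a sketch, not the actual code):
cf_reader = CFUGridReader(source)       # opens the netCDF dataset
try:
    meshes_dict = _meshes_from_cf(cf_reader)
finally:
    cf_reader._close()                  # always release the file handle
```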
diff --git a/lib/iris/fileformats/cf.py b/lib/iris/fileformats/cf.py
index a3a23dc323..a21e1d975f 100644
--- a/lib/iris/fileformats/cf.py
+++ b/lib/iris/fileformats/cf.py
@@ -20,10 +20,10 @@
import re
import warnings
-import netCDF4
import numpy as np
import numpy.ma as ma
+from iris.fileformats.netcdf import _thread_safe_nc
import iris.util
#
@@ -1050,7 +1050,9 @@ def __init__(self, filename, warn=False, monotonic=False):
#: Collection of CF-netCDF variables associated with this netCDF file
self.cf_group = self.CFGroup()
- self._dataset = netCDF4.Dataset(self._filename, mode="r")
+ self._dataset = _thread_safe_nc.DatasetWrapper(
+ self._filename, mode="r"
+ )
# Issue load optimisation warning.
if warn and self._dataset.file_format in [
@@ -1068,6 +1070,19 @@ def __init__(self, filename, warn=False, monotonic=False):
self._build_cf_groups()
self._reset()
+ def __enter__(self):
+ # Enable use as a context manager
+        # N.B. this **guarantees** closure of the file when the context is exited.
+        # Note: ideally, the class would not do so much work in the __init__ call,
+        # and would do all that here, after acquiring necessary permissions/locks.
+        # But for legacy reasons, we can't do that. So **effectively**, the context
+        # (in terms of access control) already started when we created the object.
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ # When used as a context-manager, **always** close the file on exit.
+ self._close()
+
@property
def filename(self):
"""The file that the CFReader is reading."""
@@ -1294,10 +1309,15 @@ def _reset(self):
for nc_var_name in self._dataset.variables.keys():
self.cf_group[nc_var_name].cf_attrs_reset()
- def __del__(self):
+ def _close(self):
# Explicitly close dataset to prevent file remaining open.
if self._dataset is not None:
self._dataset.close()
+ self._dataset = None
+
+ def __del__(self):
+ # Be sure to close dataset when CFReader is destroyed / garbage-collected.
+ self._close()
def _getncattr(dataset, attr, default=None):
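With the `__enter__`/`__exit__`/`_close` methods added above, closing a `CFReader` is idempotent: `_close()` clears `self._dataset`, so a later `__del__` (or a second explicit close) is a no-op. A small usage sketch, with a hypothetical filename:

```python
from iris.fileformats.cf import CFReader

with CFReader("some_file.nc") as cf:         # hypothetical path
    data_vars = cf.cf_group.data_variables   # use the reader inside the context
# __exit__ has called _close(): cf._dataset is now None.
cf._close()    # safe no-op; the 'is not None' guard prevents a double close
del cf         # __del__ -> _close() is likewise a no-op by this point
```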
diff --git a/lib/iris/fileformats/netcdf/_thread_safe_nc.py b/lib/iris/fileformats/netcdf/_thread_safe_nc.py
new file mode 100644
index 0000000000..decca1535f
--- /dev/null
+++ b/lib/iris/fileformats/netcdf/_thread_safe_nc.py
@@ -0,0 +1,342 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
+"""
+Module to ensure all calls to the netCDF4 library are thread-safe.
+
+Intention is that no other Iris module should import the netCDF4 module.
+
+"""
+from abc import ABC
+from threading import Lock
+import typing
+
+import netCDF4
+import numpy as np
+
+_GLOBAL_NETCDF4_LOCK = Lock()
+
+# Doesn't need thread protection, but this allows all netCDF4 refs to be
+# replaced with thread_safe refs.
+default_fillvals = netCDF4.default_fillvals
+
+
+class _ThreadSafeWrapper(ABC):
+ """
+    Contains a netCDF4 class instance, ensuring that all API calls are
+    wrapped in _GLOBAL_NETCDF4_LOCK.
+
+    Designed to 'gate-keep' all the instance's API calls, while offering the
+    same API as if working directly with the instance itself.
+
+    A contained object is used instead of inheritance because we cannot
+    successfully subclass or monkeypatch netCDF4 classes: they are only thin
+    wrappers for the C layer.
+ """
+
+ CONTAINED_CLASS = NotImplemented
+
+ # Allows easy type checking, avoiding difficulties with isinstance and mocking.
+ THREAD_SAFE_FLAG = True
+
+ @classmethod
+ def _from_existing(cls, instance):
+ """Pass an existing instance to __init__, where it is contained."""
+ assert isinstance(instance, cls.CONTAINED_CLASS)
+ return cls(instance)
+
+ def __init__(self, *args, **kwargs):
+ """Contain an existing instance, or generate a new one from arguments."""
+ if isinstance(args[0], self.CONTAINED_CLASS):
+ instance = args[0]
+ else:
+ with _GLOBAL_NETCDF4_LOCK:
+ instance = self.CONTAINED_CLASS(*args, **kwargs)
+
+ self._contained_instance = instance
+
+ def __getattr__(self, item):
+ if item == "_contained_instance":
+ # Special behaviour when accessing the _contained_instance itself.
+ return object.__getattribute__(self, item)
+ else:
+ with _GLOBAL_NETCDF4_LOCK:
+ return getattr(self._contained_instance, item)
+
+ def __setattr__(self, key, value):
+ if key == "_contained_instance":
+ # Special behaviour when accessing the _contained_instance itself.
+ object.__setattr__(self, key, value)
+ else:
+ with _GLOBAL_NETCDF4_LOCK:
+ return setattr(self._contained_instance, key, value)
+
+ def __getitem__(self, item):
+ with _GLOBAL_NETCDF4_LOCK:
+ return self._contained_instance.__getitem__(item)
+
+ def __setitem__(self, key, value):
+ with _GLOBAL_NETCDF4_LOCK:
+ return self._contained_instance.__setitem__(key, value)
+
+
+class DimensionWrapper(_ThreadSafeWrapper):
+ """
+ Accessor for a netCDF4.Dimension, always acquiring _GLOBAL_NETCDF4_LOCK.
+
+ All API calls should be identical to those for netCDF4.Dimension.
+ """
+
+ CONTAINED_CLASS = netCDF4.Dimension
+
+
+class VariableWrapper(_ThreadSafeWrapper):
+ """
+ Accessor for a netCDF4.Variable, always acquiring _GLOBAL_NETCDF4_LOCK.
+
+ All API calls should be identical to those for netCDF4.Variable.
+ """
+
+ CONTAINED_CLASS = netCDF4.Variable
+
+ def setncattr(self, *args, **kwargs) -> None:
+ """
+ Calls netCDF4.Variable.setncattr within _GLOBAL_NETCDF4_LOCK.
+
+ Only defined explicitly in order to get some mocks to work.
+ """
+ with _GLOBAL_NETCDF4_LOCK:
+ return self._contained_instance.setncattr(*args, **kwargs)
+
+ @property
+ def dimensions(self) -> typing.List[str]:
+ """
+ Calls netCDF4.Variable.dimensions within _GLOBAL_NETCDF4_LOCK.
+
+ Only defined explicitly in order to get some mocks to work.
+ """
+ with _GLOBAL_NETCDF4_LOCK:
+ # Return value is a list of strings so no need for
+ # DimensionWrapper, unlike self.get_dims().
+ return self._contained_instance.dimensions
+
+ # All Variable API that returns Dimension(s) is wrapped to instead return
+ # DimensionWrapper(s).
+
+ def get_dims(self, *args, **kwargs) -> typing.Tuple[DimensionWrapper]:
+ """
+ Calls netCDF4.Variable.get_dims() within _GLOBAL_NETCDF4_LOCK, returning DimensionWrappers.
+
+ The original returned netCDF4.Dimensions are simply replaced with their
+ respective DimensionWrappers, ensuring that downstream calls are
+ also performed within _GLOBAL_NETCDF4_LOCK.
+ """
+ with _GLOBAL_NETCDF4_LOCK:
+ dimensions_ = list(
+ self._contained_instance.get_dims(*args, **kwargs)
+ )
+ return tuple([DimensionWrapper._from_existing(d) for d in dimensions_])
+
+
+class GroupWrapper(_ThreadSafeWrapper):
+ """
+ Accessor for a netCDF4.Group, always acquiring _GLOBAL_NETCDF4_LOCK.
+
+ All API calls should be identical to those for netCDF4.Group.
+ """
+
+ CONTAINED_CLASS = netCDF4.Group
+
+ # All Group API that returns Dimension(s) is wrapped to instead return
+ # DimensionWrapper(s).
+
+ @property
+ def dimensions(self) -> typing.Dict[str, DimensionWrapper]:
+ """
+ Calls dimensions of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning DimensionWrappers.
+
+ The original returned netCDF4.Dimensions are simply replaced with their
+ respective DimensionWrappers, ensuring that downstream calls are
+ also performed within _GLOBAL_NETCDF4_LOCK.
+ """
+ with _GLOBAL_NETCDF4_LOCK:
+ dimensions_ = self._contained_instance.dimensions
+ return {
+ k: DimensionWrapper._from_existing(v)
+ for k, v in dimensions_.items()
+ }
+
+ def createDimension(self, *args, **kwargs) -> DimensionWrapper:
+ """
+ Calls createDimension() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning DimensionWrapper.
+
+ The original returned netCDF4.Dimension is simply replaced with its
+ respective DimensionWrapper, ensuring that downstream calls are
+ also performed within _GLOBAL_NETCDF4_LOCK.
+ """
+ with _GLOBAL_NETCDF4_LOCK:
+ new_dimension = self._contained_instance.createDimension(
+ *args, **kwargs
+ )
+ return DimensionWrapper._from_existing(new_dimension)
+
+ # All Group API that returns Variable(s) is wrapped to instead return
+ # VariableWrapper(s).
+
+ @property
+ def variables(self) -> typing.Dict[str, VariableWrapper]:
+ """
+ Calls variables of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning VariableWrappers.
+
+ The original returned netCDF4.Variables are simply replaced with their
+ respective VariableWrappers, ensuring that downstream calls are
+ also performed within _GLOBAL_NETCDF4_LOCK.
+ """
+ with _GLOBAL_NETCDF4_LOCK:
+ variables_ = self._contained_instance.variables
+ return {
+ k: VariableWrapper._from_existing(v) for k, v in variables_.items()
+ }
+
+ def createVariable(self, *args, **kwargs) -> VariableWrapper:
+ """
+ Calls createVariable() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning VariableWrapper.
+
+ The original returned netCDF4.Variable is simply replaced with its
+ respective VariableWrapper, ensuring that downstream calls are
+ also performed within _GLOBAL_NETCDF4_LOCK.
+ """
+ with _GLOBAL_NETCDF4_LOCK:
+ new_variable = self._contained_instance.createVariable(
+ *args, **kwargs
+ )
+ return VariableWrapper._from_existing(new_variable)
+
+ def get_variables_by_attributes(
+ self, *args, **kwargs
+ ) -> typing.List[VariableWrapper]:
+ """
+ Calls get_variables_by_attributes() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning VariableWrappers.
+
+ The original returned netCDF4.Variables are simply replaced with their
+ respective VariableWrappers, ensuring that downstream calls are
+ also performed within _GLOBAL_NETCDF4_LOCK.
+ """
+ with _GLOBAL_NETCDF4_LOCK:
+ variables_ = list(
+ self._contained_instance.get_variables_by_attributes(
+ *args, **kwargs
+ )
+ )
+ return [VariableWrapper._from_existing(v) for v in variables_]
+
+ # All Group API that returns Group(s) is wrapped to instead return
+ # GroupWrapper(s).
+
+ @property
+ def groups(self):
+ """
+ Calls groups of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning GroupWrappers.
+
+ The original returned netCDF4.Groups are simply replaced with their
+ respective GroupWrappers, ensuring that downstream calls are
+ also performed within _GLOBAL_NETCDF4_LOCK.
+ """
+ with _GLOBAL_NETCDF4_LOCK:
+ groups_ = self._contained_instance.groups
+ return {k: GroupWrapper._from_existing(v) for k, v in groups_.items()}
+
+ @property
+ def parent(self):
+ """
+ Calls parent of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning a GroupWrapper.
+
+ The original returned netCDF4.Group is simply replaced with its
+ respective GroupWrapper, ensuring that downstream calls are
+ also performed within _GLOBAL_NETCDF4_LOCK.
+ """
+ with _GLOBAL_NETCDF4_LOCK:
+ parent_ = self._contained_instance.parent
+ return GroupWrapper._from_existing(parent_)
+
+ def createGroup(self, *args, **kwargs):
+ """
+ Calls createGroup() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning GroupWrapper.
+
+ The original returned netCDF4.Group is simply replaced with its
+ respective GroupWrapper, ensuring that downstream calls are
+ also performed within _GLOBAL_NETCDF4_LOCK.
+ """
+ with _GLOBAL_NETCDF4_LOCK:
+ new_group = self._contained_instance.createGroup(*args, **kwargs)
+ return GroupWrapper._from_existing(new_group)
+
+
+class DatasetWrapper(GroupWrapper):
+ """
+ Accessor for a netCDF4.Dataset, always acquiring _GLOBAL_NETCDF4_LOCK.
+
+ All API calls should be identical to those for netCDF4.Dataset.
+ """
+
+ CONTAINED_CLASS = netCDF4.Dataset
+
+ @classmethod
+ def fromcdl(cls, *args, **kwargs):
+ """
+ Calls netCDF4.Dataset.fromcdl() within _GLOBAL_NETCDF4_LOCK, returning a DatasetWrapper.
+
+ The original returned netCDF4.Dataset is simply replaced with its
+ respective DatasetWrapper, ensuring that downstream calls are
+ also performed within _GLOBAL_NETCDF4_LOCK.
+ """
+ with _GLOBAL_NETCDF4_LOCK:
+ instance = cls.CONTAINED_CLASS.fromcdl(*args, **kwargs)
+ return cls._from_existing(instance)
+
+
+class NetCDFDataProxy:
+ """A reference to the data payload of a single NetCDF file variable."""
+
+ __slots__ = ("shape", "dtype", "path", "variable_name", "fill_value")
+
+ def __init__(self, shape, dtype, path, variable_name, fill_value):
+ self.shape = shape
+ self.dtype = dtype
+ self.path = path
+ self.variable_name = variable_name
+ self.fill_value = fill_value
+
+ @property
+ def ndim(self):
+ return len(self.shape)
+
+ def __getitem__(self, keys):
+        # Using a DatasetWrapper causes invalid-ID errors from the netCDF4
+        # library, presumably because __getitem__ gets called so many times
+        # by Dask. Use _GLOBAL_NETCDF4_LOCK directly instead.
+ with _GLOBAL_NETCDF4_LOCK:
+ dataset = netCDF4.Dataset(self.path)
+ try:
+ variable = dataset.variables[self.variable_name]
+ # Get the NetCDF variable data and slice.
+ var = variable[keys]
+ finally:
+ dataset.close()
+ return np.asanyarray(var)
+
+ def __repr__(self):
+ fmt = (
+ "<{self.__class__.__name__} shape={self.shape}"
+ " dtype={self.dtype!r} path={self.path!r}"
+ " variable_name={self.variable_name!r}>"
+ )
+ return fmt.format(self=self)
+
+ def __getstate__(self):
+ return {attr: getattr(self, attr) for attr in self.__slots__}
+
+ def __setstate__(self, state):
+ for key, value in state.items():
+ setattr(self, key, value)
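For readers unfamiliar with the pattern, here is a minimal, standalone sketch of the delegation approach `_ThreadSafeWrapper` uses: hold the real object privately and route every attribute get/set through a single module-level lock. All names here are illustrative, not part of Iris:

```python
from threading import Lock

_LOCK = Lock()

class LockedProxy:
    """Illustrative stand-in for _ThreadSafeWrapper (not Iris code)."""

    def __init__(self, contained):
        # Bypass our own __setattr__ to store the contained object.
        object.__setattr__(self, "_contained", contained)

    def __getattr__(self, name):
        # Only called for names not found on the proxy itself.
        with _LOCK:
            return getattr(object.__getattribute__(self, "_contained"), name)

    def __setattr__(self, name, value):
        with _LOCK:
            setattr(object.__getattribute__(self, "_contained"), name, value)


class _Plain:
    x = 1

proxy = LockedProxy(_Plain())
assert proxy.x == 1   # the read happened while holding _LOCK
proxy.x = 2           # the write also happened while holding _LOCK
```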
diff --git a/lib/iris/fileformats/netcdf/loader.py b/lib/iris/fileformats/netcdf/loader.py
index 95f394c70d..8fcab61d17 100644
--- a/lib/iris/fileformats/netcdf/loader.py
+++ b/lib/iris/fileformats/netcdf/loader.py
@@ -15,7 +15,6 @@
"""
import warnings
-import netCDF4
import numpy as np
from iris._lazy_data import as_lazy_data
@@ -34,6 +33,7 @@
import iris.coords
import iris.exceptions
import iris.fileformats.cf
+from iris.fileformats.netcdf import _thread_safe_nc
from iris.fileformats.netcdf.saver import _CF_ATTRS
import iris.io
import iris.util
@@ -44,6 +44,10 @@
# Get the logger : shared logger for all in 'iris.fileformats.netcdf'.
from . import logger
+# An expected part of the public loader API, but includes thread safety
+# concerns so is housed in _thread_safe_nc.
+NetCDFDataProxy = _thread_safe_nc.NetCDFDataProxy
+
def _actions_engine():
# Return an 'actions engine', which provides a pyke-rules-like interface to
@@ -55,48 +59,6 @@ def _actions_engine():
return engine
-class NetCDFDataProxy:
- """A reference to the data payload of a single NetCDF file variable."""
-
- __slots__ = ("shape", "dtype", "path", "variable_name", "fill_value")
-
- def __init__(self, shape, dtype, path, variable_name, fill_value):
- self.shape = shape
- self.dtype = dtype
- self.path = path
- self.variable_name = variable_name
- self.fill_value = fill_value
-
- @property
- def ndim(self):
- return len(self.shape)
-
- def __getitem__(self, keys):
- dataset = netCDF4.Dataset(self.path)
- try:
- variable = dataset.variables[self.variable_name]
- # Get the NetCDF variable data and slice.
- var = variable[keys]
- finally:
- dataset.close()
- return np.asanyarray(var)
-
- def __repr__(self):
- fmt = (
- "<{self.__class__.__name__} shape={self.shape}"
- " dtype={self.dtype!r} path={self.path!r}"
- " variable_name={self.variable_name!r}>"
- )
- return fmt.format(self=self)
-
- def __getstate__(self):
- return {attr: getattr(self, attr) for attr in self.__slots__}
-
- def __setstate__(self, state):
- for key, value in state.items():
- setattr(self, key, value)
-
-
def _assert_case_specific_facts(engine, cf, cf_group):
# Initialise a data store for built cube elements.
# This is used to patch element attributes *not* setup by the actions
@@ -219,7 +181,7 @@ def _get_cf_var_data(cf_var, filename):
fill_value = getattr(
cf_var.cf_data,
"_FillValue",
- netCDF4.default_fillvals[cf_var.dtype.str[1:]],
+ _thread_safe_nc.default_fillvals[cf_var.dtype.str[1:]],
)
proxy = NetCDFDataProxy(
cf_var.shape, dtype, filename, cf_var.cf_name, fill_value
@@ -536,59 +498,62 @@ def load_cubes(filenames, callback=None, constraints=None):
# Ingest the netCDF file.
meshes = {}
if PARSE_UGRID_ON_LOAD:
- cf = CFUGridReader(filename)
- meshes = _meshes_from_cf(cf)
+ cf_reader_class = CFUGridReader
else:
- cf = iris.fileformats.cf.CFReader(filename)
+ cf_reader_class = iris.fileformats.cf.CFReader
- # Process each CF data variable.
- data_variables = list(cf.cf_group.data_variables.values()) + list(
- cf.cf_group.promoted.values()
- )
- for cf_var in data_variables:
- if var_callback and not var_callback(cf_var):
- # Deliver only selected results.
- continue
-
- # cf_var-specific mesh handling, if a mesh is present.
- # Build the mesh_coords *before* loading the cube - avoids
- # mesh-related attributes being picked up by
- # _add_unused_attributes().
- mesh_name = None
- mesh = None
- mesh_coords, mesh_dim = [], None
+ with cf_reader_class(filename) as cf:
if PARSE_UGRID_ON_LOAD:
- mesh_name = getattr(cf_var, "mesh", None)
- if mesh_name is not None:
+ meshes = _meshes_from_cf(cf)
+
+ # Process each CF data variable.
+ data_variables = list(cf.cf_group.data_variables.values()) + list(
+ cf.cf_group.promoted.values()
+ )
+ for cf_var in data_variables:
+ if var_callback and not var_callback(cf_var):
+ # Deliver only selected results.
+ continue
+
+ # cf_var-specific mesh handling, if a mesh is present.
+ # Build the mesh_coords *before* loading the cube - avoids
+ # mesh-related attributes being picked up by
+ # _add_unused_attributes().
+ mesh_name = None
+ mesh = None
+ mesh_coords, mesh_dim = [], None
+ if PARSE_UGRID_ON_LOAD:
+ mesh_name = getattr(cf_var, "mesh", None)
+ if mesh_name is not None:
+ try:
+ mesh = meshes[mesh_name]
+ except KeyError:
+ message = (
+ f"File does not contain mesh: '{mesh_name}' - "
+ f"referenced by variable: '{cf_var.cf_name}' ."
+ )
+ logger.debug(message)
+ if mesh is not None:
+ mesh_coords, mesh_dim = _build_mesh_coords(mesh, cf_var)
+
+ cube = _load_cube(engine, cf, cf_var, filename)
+
+ # Attach the mesh (if present) to the cube.
+ for mesh_coord in mesh_coords:
+ cube.add_aux_coord(mesh_coord, mesh_dim)
+
+ # Process any associated formula terms and attach
+ # the corresponding AuxCoordFactory.
try:
- mesh = meshes[mesh_name]
- except KeyError:
- message = (
- f"File does not contain mesh: '{mesh_name}' - "
- f"referenced by variable: '{cf_var.cf_name}' ."
- )
- logger.debug(message)
- if mesh is not None:
- mesh_coords, mesh_dim = _build_mesh_coords(mesh, cf_var)
-
- cube = _load_cube(engine, cf, cf_var, filename)
-
- # Attach the mesh (if present) to the cube.
- for mesh_coord in mesh_coords:
- cube.add_aux_coord(mesh_coord, mesh_dim)
-
- # Process any associated formula terms and attach
- # the corresponding AuxCoordFactory.
- try:
- _load_aux_factory(engine, cube)
- except ValueError as e:
- warnings.warn("{}".format(e))
-
- # Perform any user registered callback function.
- cube = run_callback(callback, cube, cf_var, filename)
-
- # Callback mechanism may return None, which must not be yielded
- if cube is None:
- continue
-
- yield cube
+ _load_aux_factory(engine, cube)
+ except ValueError as e:
+ warnings.warn("{}".format(e))
+
+ # Perform any user registered callback function.
+ cube = run_callback(callback, cube, cf_var, filename)
+
+ # Callback mechanism may return None, which must not be yielded
+ if cube is None:
+ continue
+
+ yield cube
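The relocated `NetCDFDataProxy` is what keeps loaded data lazy: `_get_cf_var_data()` builds one per variable and hands it to `as_lazy_data`, so each Dask chunk read re-opens the file under `_GLOBAL_NETCDF4_LOCK`. A sketch of that wiring, assuming a hypothetical `"file.nc"` containing a float64 variable `"tas"` of shape `(10, 20)`:

```python
import numpy as np

from iris._lazy_data import as_lazy_data
from iris.fileformats.netcdf import _thread_safe_nc

proxy = _thread_safe_nc.NetCDFDataProxy(
    shape=(10, 20),
    dtype=np.dtype("f8"),
    path="file.nc",              # hypothetical file
    variable_name="tas",         # hypothetical variable
    fill_value=_thread_safe_nc.default_fillvals["f8"],
)
lazy = as_lazy_data(proxy)       # Dask array; each chunk access goes through
                                 # proxy.__getitem__, under the global lock
```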
diff --git a/lib/iris/fileformats/netcdf/saver.py b/lib/iris/fileformats/netcdf/saver.py
index 650c5e3338..e5d3bf2cc7 100644
--- a/lib/iris/fileformats/netcdf/saver.py
+++ b/lib/iris/fileformats/netcdf/saver.py
@@ -24,7 +24,6 @@
import cf_units
import dask.array as da
-import netCDF4
import numpy as np
import numpy.ma as ma
@@ -45,6 +44,7 @@
from iris.coords import AncillaryVariable, AuxCoord, CellMeasure, DimCoord
import iris.exceptions
import iris.fileformats.cf
+from iris.fileformats.netcdf import _thread_safe_nc
import iris.io
import iris.util
@@ -459,7 +459,10 @@ def _setncattr(variable, name, attribute):
Put the given attribute on the given netCDF4 Data type, casting
attributes as we go to bytes rather than unicode.
+    NOTE: variable must be an instance of a _thread_safe_nc._ThreadSafeWrapper subclass.
+
"""
+ assert hasattr(variable, "THREAD_SAFE_FLAG")
attribute = _bytes_if_ascii(attribute)
return variable.setncattr(name, attribute)
@@ -470,9 +473,12 @@ class _FillValueMaskCheckAndStoreTarget:
given value and whether it was masked, before passing the chunk to the
given target.
+    NOTE: target must be an instance of a _thread_safe_nc._ThreadSafeWrapper subclass.
+
"""
def __init__(self, target, fill_value=None):
+ assert hasattr(target, "THREAD_SAFE_FLAG")
self.target = target
self.fill_value = fill_value
self.contains_value = False
@@ -544,7 +550,7 @@ def __init__(self, filename, netcdf_format):
self._formula_terms_cache = {}
#: NetCDF dataset
try:
- self._dataset = netCDF4.Dataset(
+ self._dataset = _thread_safe_nc.DatasetWrapper(
filename, mode="w", format=netcdf_format
)
except RuntimeError:
@@ -2331,7 +2337,13 @@ def _create_cf_data_variable(
dtype = data.dtype.newbyteorder("=")
def set_packing_ncattrs(cfvar):
- """Set netCDF packing attributes."""
+ """
+ Set netCDF packing attributes.
+
+        NOTE: cfvar must be an instance of a _thread_safe_nc._ThreadSafeWrapper subclass.
+
+ """
+ assert hasattr(cfvar, "THREAD_SAFE_FLAG")
if packing:
if scale_factor:
_setncattr(cfvar, "scale_factor", scale_factor)
@@ -2478,7 +2490,9 @@ def store(data, cf_var, fill_value):
if fill_value is not None:
fill_value_to_check = fill_value
else:
- fill_value_to_check = netCDF4.default_fillvals[dtype.str[1:]]
+ fill_value_to_check = _thread_safe_nc.default_fillvals[
+ dtype.str[1:]
+ ]
else:
fill_value_to_check = None
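The new `assert hasattr(..., "THREAD_SAFE_FLAG")` guards in this file use duck typing deliberately: as noted on the `THREAD_SAFE_FLAG` attribute in `_thread_safe_nc`, `isinstance` checks are awkward when tests substitute mocks. A short illustration of the difference:

```python
from unittest import mock

from iris.fileformats.netcdf import _thread_safe_nc

fake_var = mock.Mock()   # a test double standing in for a VariableWrapper
# isinstance would reject the mock...
assert not isinstance(fake_var, _thread_safe_nc._ThreadSafeWrapper)
# ...but the hasattr guard passes, because Mock auto-creates attributes.
assert hasattr(fake_var, "THREAD_SAFE_FLAG")

# Real wrappers pass too, via the class attribute:
assert hasattr(_thread_safe_nc.DatasetWrapper, "THREAD_SAFE_FLAG")
```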
diff --git a/lib/iris/tests/integration/netcdf/__init__.py b/lib/iris/tests/integration/netcdf/__init__.py
new file mode 100644
index 0000000000..f500b52520
--- /dev/null
+++ b/lib/iris/tests/integration/netcdf/__init__.py
@@ -0,0 +1,6 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
+"""Integration tests for loading and saving netcdf files."""
diff --git a/lib/iris/tests/integration/netcdf/test_attributes.py b/lib/iris/tests/integration/netcdf/test_attributes.py
new file mode 100644
index 0000000000..a73d6c7d49
--- /dev/null
+++ b/lib/iris/tests/integration/netcdf/test_attributes.py
@@ -0,0 +1,119 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
+"""Integration tests for attribute-related loading and saving netcdf files."""
+
+# Import iris.tests first so that some things can be initialised before
+# importing anything else.
+import iris.tests as tests # isort:skip
+
+from contextlib import contextmanager
+from unittest import mock
+
+import iris
+from iris.cube import Cube, CubeList
+from iris.fileformats.netcdf import CF_CONVENTIONS_VERSION
+
+
+class TestUmVersionAttribute(tests.IrisTest):
+ def test_single_saves_as_global(self):
+ cube = Cube(
+ [1.0],
+ standard_name="air_temperature",
+ units="K",
+ attributes={"um_version": "4.3"},
+ )
+ with self.temp_filename(".nc") as nc_path:
+ iris.save(cube, nc_path)
+ self.assertCDL(nc_path)
+
+ def test_multiple_same_saves_as_global(self):
+ cube_a = Cube(
+ [1.0],
+ standard_name="air_temperature",
+ units="K",
+ attributes={"um_version": "4.3"},
+ )
+ cube_b = Cube(
+ [1.0],
+ standard_name="air_pressure",
+ units="hPa",
+ attributes={"um_version": "4.3"},
+ )
+ with self.temp_filename(".nc") as nc_path:
+ iris.save(CubeList([cube_a, cube_b]), nc_path)
+ self.assertCDL(nc_path)
+
+ def test_multiple_different_saves_on_variables(self):
+ cube_a = Cube(
+ [1.0],
+ standard_name="air_temperature",
+ units="K",
+ attributes={"um_version": "4.3"},
+ )
+ cube_b = Cube(
+ [1.0],
+ standard_name="air_pressure",
+ units="hPa",
+ attributes={"um_version": "4.4"},
+ )
+ with self.temp_filename(".nc") as nc_path:
+ iris.save(CubeList([cube_a, cube_b]), nc_path)
+ self.assertCDL(nc_path)
+
+
+@contextmanager
+def _patch_site_configuration():
+ def cf_patch_conventions(conventions):
+ return ", ".join([conventions, "convention1, convention2"])
+
+ def update(config):
+ config["cf_profile"] = mock.Mock(name="cf_profile")
+ config["cf_patch"] = mock.Mock(name="cf_patch")
+ config["cf_patch_conventions"] = cf_patch_conventions
+
+ orig_site_config = iris.site_configuration.copy()
+ update(iris.site_configuration)
+ yield
+ iris.site_configuration = orig_site_config
+
+
+class TestConventionsAttributes(tests.IrisTest):
+ def test_patching_conventions_attribute(self):
+ # Ensure that user defined conventions are wiped and those which are
+ # saved patched through site_config can be loaded without an exception
+ # being raised.
+ cube = Cube(
+ [1.0],
+ standard_name="air_temperature",
+ units="K",
+ attributes={"Conventions": "some user defined conventions"},
+ )
+
+ # Patch the site configuration dictionary.
+ with _patch_site_configuration(), self.temp_filename(".nc") as nc_path:
+ iris.save(cube, nc_path)
+ res = iris.load_cube(nc_path)
+
+ self.assertEqual(
+ res.attributes["Conventions"],
+ "{}, {}, {}".format(
+ CF_CONVENTIONS_VERSION, "convention1", "convention2"
+ ),
+ )
+
+
+class TestStandardName(tests.IrisTest):
+ def test_standard_name_roundtrip(self):
+ standard_name = "air_temperature detection_minimum"
+ cube = iris.cube.Cube(1, standard_name=standard_name)
+ with self.temp_filename(suffix=".nc") as fout:
+ iris.save(cube, fout)
+ detection_limit_cube = iris.load_cube(fout)
+ self.assertEqual(detection_limit_cube.standard_name, standard_name)
+
+
+if __name__ == "__main__":
+ tests.main()
diff --git a/lib/iris/tests/integration/netcdf/test_aux_factories.py b/lib/iris/tests/integration/netcdf/test_aux_factories.py
new file mode 100644
index 0000000000..d89f275336
--- /dev/null
+++ b/lib/iris/tests/integration/netcdf/test_aux_factories.py
@@ -0,0 +1,160 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
+"""Integration tests for aux-factory-related loading and saving netcdf files."""
+
+# Import iris.tests first so that some things can be initialised before
+# importing anything else.
+import iris.tests as tests # isort:skip
+
+import iris
+from iris.tests import stock as stock
+
+
+@tests.skip_data
+class TestAtmosphereSigma(tests.IrisTest):
+ def setUp(self):
+ # Modify stock cube so it is suitable to have a atmosphere sigma
+ # factory added to it.
+ cube = stock.realistic_4d_no_derived()
+ cube.coord("surface_altitude").rename("surface_air_pressure")
+ cube.coord("surface_air_pressure").units = "Pa"
+ cube.coord("sigma").units = "1"
+ ptop_coord = iris.coords.AuxCoord(1000.0, var_name="ptop", units="Pa")
+ cube.add_aux_coord(ptop_coord, ())
+ cube.remove_coord("level_height")
+ # Construct and add atmosphere sigma factory.
+ factory = iris.aux_factory.AtmosphereSigmaFactory(
+ cube.coord("ptop"),
+ cube.coord("sigma"),
+ cube.coord("surface_air_pressure"),
+ )
+ cube.add_aux_factory(factory)
+ self.cube = cube
+
+ def test_save(self):
+ with self.temp_filename(suffix=".nc") as filename:
+ iris.save(self.cube, filename)
+ self.assertCDL(filename)
+
+ def test_save_load_loop(self):
+ # Ensure that the AtmosphereSigmaFactory is automatically loaded
+ # when loading the file.
+ with self.temp_filename(suffix=".nc") as filename:
+ iris.save(self.cube, filename)
+ cube = iris.load_cube(filename, "air_potential_temperature")
+ assert cube.coords("air_pressure")
+
+
+@tests.skip_data
+class TestHybridPressure(tests.IrisTest):
+ def setUp(self):
+ # Modify stock cube so it is suitable to have a
+ # hybrid pressure factory added to it.
+ cube = stock.realistic_4d_no_derived()
+ cube.coord("surface_altitude").rename("surface_air_pressure")
+ cube.coord("surface_air_pressure").units = "Pa"
+ cube.coord("level_height").rename("level_pressure")
+ cube.coord("level_pressure").units = "Pa"
+ # Construct and add hybrid pressure factory.
+ factory = iris.aux_factory.HybridPressureFactory(
+ cube.coord("level_pressure"),
+ cube.coord("sigma"),
+ cube.coord("surface_air_pressure"),
+ )
+ cube.add_aux_factory(factory)
+ self.cube = cube
+
+ def test_save(self):
+ with self.temp_filename(suffix=".nc") as filename:
+ iris.save(self.cube, filename)
+ self.assertCDL(filename)
+
+ def test_save_load_loop(self):
+ # Tests an issue where the variable names in the formula
+ # terms changed to the standard_names instead of the variable names
+ # when loading a previously saved cube.
+ with self.temp_filename(suffix=".nc") as filename, self.temp_filename(
+ suffix=".nc"
+ ) as other_filename:
+ iris.save(self.cube, filename)
+ cube = iris.load_cube(filename, "air_potential_temperature")
+ iris.save(cube, other_filename)
+ other_cube = iris.load_cube(
+ other_filename, "air_potential_temperature"
+ )
+ self.assertEqual(cube, other_cube)
+
+
+@tests.skip_data
+class TestSaveMultipleAuxFactories(tests.IrisTest):
+ def test_hybrid_height_and_pressure(self):
+ cube = stock.realistic_4d()
+ cube.add_aux_coord(
+ iris.coords.DimCoord(
+ 1200.0, long_name="level_pressure", units="hPa"
+ )
+ )
+ cube.add_aux_coord(
+ iris.coords.DimCoord(0.5, long_name="other sigma", units="1")
+ )
+ cube.add_aux_coord(
+ iris.coords.DimCoord(
+ 1000.0, long_name="surface_air_pressure", units="hPa"
+ )
+ )
+ factory = iris.aux_factory.HybridPressureFactory(
+ cube.coord("level_pressure"),
+ cube.coord("other sigma"),
+ cube.coord("surface_air_pressure"),
+ )
+ cube.add_aux_factory(factory)
+ with self.temp_filename(suffix=".nc") as filename:
+ iris.save(cube, filename)
+ self.assertCDL(filename)
+
+ def test_shared_primary(self):
+ cube = stock.realistic_4d()
+ factory = iris.aux_factory.HybridHeightFactory(
+ cube.coord("level_height"),
+ cube.coord("sigma"),
+ cube.coord("surface_altitude"),
+ )
+ factory.rename("another altitude")
+ cube.add_aux_factory(factory)
+ with self.temp_filename(
+ suffix=".nc"
+ ) as filename, self.assertRaisesRegex(
+ ValueError, "multiple aux factories"
+ ):
+ iris.save(cube, filename)
+
+ def test_hybrid_height_cubes(self):
+ hh1 = stock.simple_4d_with_hybrid_height()
+ hh1.attributes["cube"] = "hh1"
+ hh2 = stock.simple_4d_with_hybrid_height()
+ hh2.attributes["cube"] = "hh2"
+ sa = hh2.coord("surface_altitude")
+ sa.points = sa.points * 10
+ with self.temp_filename(".nc") as fname:
+ iris.save([hh1, hh2], fname)
+ cubes = iris.load(fname, "air_temperature")
+ cubes = sorted(cubes, key=lambda cube: cube.attributes["cube"])
+ self.assertCML(cubes)
+
+ def test_hybrid_height_cubes_on_dimension_coordinate(self):
+ hh1 = stock.hybrid_height()
+ hh2 = stock.hybrid_height()
+ sa = hh2.coord("surface_altitude")
+ sa.points = sa.points * 10
+ emsg = "Unable to create dimensonless vertical coordinate."
+ with self.temp_filename(".nc") as fname, self.assertRaisesRegex(
+ ValueError, emsg
+ ):
+ iris.save([hh1, hh2], fname)
+
+
+if __name__ == "__main__":
+ tests.main()
diff --git a/lib/iris/tests/integration/netcdf/test_coord_systems.py b/lib/iris/tests/integration/netcdf/test_coord_systems.py
new file mode 100644
index 0000000000..8576f5ffe8
--- /dev/null
+++ b/lib/iris/tests/integration/netcdf/test_coord_systems.py
@@ -0,0 +1,281 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
+"""Integration tests for coord-system-related loading and saving netcdf files."""
+
+# Import iris.tests first so that some things can be initialised before
+# importing anything else.
+import iris.tests as tests # isort:skip
+
+from os.path import join as path_join
+import shutil
+import tempfile
+
+import iris
+from iris.coords import DimCoord
+from iris.cube import Cube
+from iris.tests import stock as stock
+from iris.tests.stock.netcdf import ncgen_from_cdl
+from iris.tests.unit.fileformats.netcdf import test_load_cubes as tlc
+
+
+@tests.skip_data
+class TestCoordSystem(tests.IrisTest):
+ def setUp(self):
+ tlc.setUpModule()
+
+ def tearDown(self):
+ tlc.tearDownModule()
+
+ def test_load_laea_grid(self):
+ cube = iris.load_cube(
+ tests.get_data_path(
+ ("NetCDF", "lambert_azimuthal_equal_area", "euro_air_temp.nc")
+ )
+ )
+ self.assertCML(cube, ("netcdf", "netcdf_laea.cml"))
+
+ datum_cf_var_cdl = """
+ netcdf output {
+ dimensions:
+ y = 4 ;
+ x = 3 ;
+ variables:
+ float data(y, x) ;
+ data :standard_name = "toa_brightness_temperature" ;
+ data :units = "K" ;
+ data :grid_mapping = "mercator" ;
+ int mercator ;
+ mercator:grid_mapping_name = "mercator" ;
+ mercator:longitude_of_prime_meridian = 0. ;
+ mercator:earth_radius = 6378169. ;
+ mercator:horizontal_datum_name = "OSGB36" ;
+ float y(y) ;
+ y:axis = "Y" ;
+ y:units = "m" ;
+ y:standard_name = "projection_y_coordinate" ;
+ float x(x) ;
+ x:axis = "X" ;
+ x:units = "m" ;
+ x:standard_name = "projection_x_coordinate" ;
+
+ // global attributes:
+ :Conventions = "CF-1.7" ;
+ :standard_name_vocabulary = "CF Standard Name Table v27" ;
+
+ data:
+
+ data =
+ 0, 1, 2,
+ 3, 4, 5,
+ 6, 7, 8,
+ 9, 10, 11 ;
+
+ mercator = _ ;
+
+ y = 1, 2, 3, 5 ;
+
+ x = -6, -4, -2 ;
+
+ }
+ """
+
+ datum_wkt_cdl = """
+netcdf output5 {
+dimensions:
+ y = 4 ;
+ x = 3 ;
+variables:
+ float data(y, x) ;
+ data :standard_name = "toa_brightness_temperature" ;
+ data :units = "K" ;
+ data :grid_mapping = "mercator" ;
+ int mercator ;
+ mercator:grid_mapping_name = "mercator" ;
+ mercator:longitude_of_prime_meridian = 0. ;
+ mercator:earth_radius = 6378169. ;
+ mercator:longitude_of_projection_origin = 0. ;
+ mercator:false_easting = 0. ;
+ mercator:false_northing = 0. ;
+ mercator:scale_factor_at_projection_origin = 1. ;
+ mercator:crs_wkt = "PROJCRS[\\"unknown\\",BASEGEOGCRS[\\"unknown\\",DATUM[\\"OSGB36\\",ELLIPSOID[\\"unknown\\",6378169,0,LENGTHUNIT[\\"metre\\",1,ID[\\"EPSG\\",9001]]]],PRIMEM[\\"Greenwich\\",0,ANGLEUNIT[\\"degree\\",0.0174532925199433],ID[\\"EPSG\\",8901]]],CONVERSION[\\"unknown\\",METHOD[\\"Mercator (variant B)\\",ID[\\"EPSG\\",9805]],PARAMETER[\\"Latitude of 1st standard parallel\\",0,ANGLEUNIT[\\"degree\\",0.0174532925199433],ID[\\"EPSG\\",8823]],PARAMETER[\\"Longitude of natural origin\\",0,ANGLEUNIT[\\"degree\\",0.0174532925199433],ID[\\"EPSG\\",8802]],PARAMETER[\\"False easting\\",0,LENGTHUNIT[\\"metre\\",1],ID[\\"EPSG\\",8806]],PARAMETER[\\"False northing\\",0,LENGTHUNIT[\\"metre\\",1],ID[\\"EPSG\\",8807]]],CS[Cartesian,2],AXIS[\\"(E)\\",east,ORDER[1],LENGTHUNIT[\\"metre\\",1,ID[\\"EPSG\\",9001]]],AXIS[\\"(N)\\",north,ORDER[2],LENGTHUNIT[\\"metre\\",1,ID[\\"EPSG\\",9001]]]]" ;
+ float y(y) ;
+ y:axis = "Y" ;
+ y:units = "m" ;
+ y:standard_name = "projection_y_coordinate" ;
+ float x(x) ;
+ x:axis = "X" ;
+ x:units = "m" ;
+ x:standard_name = "projection_x_coordinate" ;
+
+// global attributes:
+ :standard_name_vocabulary = "CF Standard Name Table v27" ;
+ :Conventions = "CF-1.7" ;
+data:
+
+ data =
+ 0, 1, 2,
+ 3, 4, 5,
+ 6, 7, 8,
+ 9, 10, 11 ;
+
+ mercator = _ ;
+
+ y = 1, 2, 3, 5 ;
+
+ x = -6, -4, -2 ;
+}
+ """
+
+ def test_load_datum_wkt(self):
+ expected = "OSGB 1936"
+ nc_path = tlc.cdl_to_nc(self.datum_wkt_cdl)
+ with iris.FUTURE.context(datum_support=True):
+ cube = iris.load_cube(nc_path)
+ test_crs = cube.coord("projection_y_coordinate").coord_system
+ actual = str(test_crs.as_cartopy_crs().datum)
+ self.assertMultiLineEqual(expected, actual)
+
+ def test_no_load_datum_wkt(self):
+ nc_path = tlc.cdl_to_nc(self.datum_wkt_cdl)
+ with self.assertWarnsRegex(FutureWarning, "iris.FUTURE.datum_support"):
+ cube = iris.load_cube(nc_path)
+ test_crs = cube.coord("projection_y_coordinate").coord_system
+ actual = str(test_crs.as_cartopy_crs().datum)
+ self.assertMultiLineEqual(actual, "unknown")
+
+ def test_load_datum_cf_var(self):
+ expected = "OSGB 1936"
+ nc_path = tlc.cdl_to_nc(self.datum_cf_var_cdl)
+ with iris.FUTURE.context(datum_support=True):
+ cube = iris.load_cube(nc_path)
+ test_crs = cube.coord("projection_y_coordinate").coord_system
+ actual = str(test_crs.as_cartopy_crs().datum)
+ self.assertMultiLineEqual(expected, actual)
+
+ def test_no_load_datum_cf_var(self):
+ nc_path = tlc.cdl_to_nc(self.datum_cf_var_cdl)
+ with self.assertWarnsRegex(FutureWarning, "iris.FUTURE.datum_support"):
+ cube = iris.load_cube(nc_path)
+ test_crs = cube.coord("projection_y_coordinate").coord_system
+ actual = str(test_crs.as_cartopy_crs().datum)
+ self.assertMultiLineEqual(actual, "unknown")
+
+ def test_save_datum(self):
+ expected = "OSGB 1936"
+ saved_crs = iris.coord_systems.Mercator(
+ ellipsoid=iris.coord_systems.GeogCS.from_datum("OSGB36")
+ )
+
+ base_cube = stock.realistic_3d()
+ base_lat_coord = base_cube.coord("grid_latitude")
+ test_lat_coord = DimCoord(
+ base_lat_coord.points,
+ standard_name="projection_y_coordinate",
+ coord_system=saved_crs,
+ )
+ base_lon_coord = base_cube.coord("grid_longitude")
+ test_lon_coord = DimCoord(
+ base_lon_coord.points,
+ standard_name="projection_x_coordinate",
+ coord_system=saved_crs,
+ )
+ test_cube = Cube(
+ base_cube.data,
+ standard_name=base_cube.standard_name,
+ units=base_cube.units,
+ dim_coords_and_dims=(
+ (base_cube.coord("time"), 0),
+ (test_lat_coord, 1),
+ (test_lon_coord, 2),
+ ),
+ )
+
+ with self.temp_filename(suffix=".nc") as filename:
+ iris.save(test_cube, filename)
+ with iris.FUTURE.context(datum_support=True):
+ cube = iris.load_cube(filename)
+
+ test_crs = cube.coord("projection_y_coordinate").coord_system
+ actual = str(test_crs.as_cartopy_crs().datum)
+ self.assertMultiLineEqual(expected, actual)
+
+
+class TestLoadMinimalGeostationary(tests.IrisTest):
+ """
+ Check we can load data with a geostationary grid-mapping, even when the
+ 'false-easting' and 'false_northing' properties are missing.
+
+ """
+
+ _geostationary_problem_cdl = """
+netcdf geostationary_problem_case {
+dimensions:
+ y = 2 ;
+ x = 3 ;
+variables:
+ short radiance(y, x) ;
+ radiance:standard_name = "toa_outgoing_radiance_per_unit_wavelength" ;
+ radiance:units = "W m-2 sr-1 um-1" ;
+ radiance:coordinates = "y x" ;
+ radiance:grid_mapping = "imager_grid_mapping" ;
+ short y(y) ;
+ y:units = "rad" ;
+ y:axis = "Y" ;
+ y:long_name = "fixed grid projection y-coordinate" ;
+ y:standard_name = "projection_y_coordinate" ;
+ short x(x) ;
+ x:units = "rad" ;
+ x:axis = "X" ;
+ x:long_name = "fixed grid projection x-coordinate" ;
+ x:standard_name = "projection_x_coordinate" ;
+ int imager_grid_mapping ;
+ imager_grid_mapping:grid_mapping_name = "geostationary" ;
+ imager_grid_mapping:perspective_point_height = 35786023. ;
+ imager_grid_mapping:semi_major_axis = 6378137. ;
+ imager_grid_mapping:semi_minor_axis = 6356752.31414 ;
+ imager_grid_mapping:latitude_of_projection_origin = 0. ;
+ imager_grid_mapping:longitude_of_projection_origin = -75. ;
+ imager_grid_mapping:sweep_angle_axis = "x" ;
+
+data:
+
+ // coord values, just so these can be dim-coords
+ y = 0, 1 ;
+ x = 0, 1, 2 ;
+
+}
+"""
+
+ @classmethod
+ def setUpClass(cls):
+ # Create a temp directory for transient test files.
+ cls.temp_dir = tempfile.mkdtemp()
+ cls.path_test_cdl = path_join(cls.temp_dir, "geos_problem.cdl")
+ cls.path_test_nc = path_join(cls.temp_dir, "geos_problem.nc")
+ # Create reference CDL and netcdf files from the CDL text.
+ ncgen_from_cdl(
+ cdl_str=cls._geostationary_problem_cdl,
+ cdl_path=cls.path_test_cdl,
+ nc_path=cls.path_test_nc,
+ )
+
+ @classmethod
+ def tearDownClass(cls):
+ # Destroy the temp directory.
+ shutil.rmtree(cls.temp_dir)
+
+ def test_geostationary_no_false_offsets(self):
+ # Check we can load the test data and coordinate system properties are correct.
+ cube = iris.load_cube(self.path_test_nc)
+ # Check the coordinate system properties has the correct default properties.
+ cs = cube.coord_system()
+ self.assertIsInstance(cs, iris.coord_systems.Geostationary)
+ self.assertEqual(cs.false_easting, 0.0)
+ self.assertEqual(cs.false_northing, 0.0)
+
+
+if __name__ == "__main__":
+ tests.main()
diff --git a/lib/iris/tests/integration/netcdf/test_general.py b/lib/iris/tests/integration/netcdf/test_general.py
new file mode 100644
index 0000000000..63b977674d
--- /dev/null
+++ b/lib/iris/tests/integration/netcdf/test_general.py
@@ -0,0 +1,360 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
+"""Integration tests for loading and saving netcdf files."""
+
+# Import iris.tests first so that some things can be initialised before
+# importing anything else.
+import iris.tests as tests # isort:skip
+
+from itertools import repeat
+import os.path
+import shutil
+import tempfile
+import warnings
+
+import numpy as np
+import numpy.ma as ma
+import pytest
+
+import iris
+import iris.coord_systems
+from iris.coords import CellMethod
+from iris.cube import Cube, CubeList
+import iris.exceptions
+from iris.fileformats.netcdf import Saver, UnknownCellMethodWarning
+from iris.tests.stock.netcdf import ncgen_from_cdl
+
+
+class TestLazySave(tests.IrisTest):
+ @tests.skip_data
+ def test_lazy_preserved_save(self):
+ fpath = tests.get_data_path(
+ ("NetCDF", "label_and_climate", "small_FC_167_mon_19601101.nc")
+ )
+ acube = iris.load_cube(fpath, "air_temperature")
+ self.assertTrue(acube.has_lazy_data())
+ # Also check a coord with lazy points + bounds.
+ self.assertTrue(acube.coord("forecast_period").has_lazy_points())
+ self.assertTrue(acube.coord("forecast_period").has_lazy_bounds())
+ with self.temp_filename(".nc") as nc_path:
+ with Saver(nc_path, "NETCDF4") as saver:
+ saver.write(acube)
+ # Check that cube data is not realised, also coord points + bounds.
+ self.assertTrue(acube.has_lazy_data())
+ self.assertTrue(acube.coord("forecast_period").has_lazy_points())
+ self.assertTrue(acube.coord("forecast_period").has_lazy_bounds())
+
+
+@tests.skip_data
+class TestCellMeasures(tests.IrisTest):
+ def setUp(self):
+ self.fname = tests.get_data_path(("NetCDF", "ORCA2", "votemper.nc"))
+
+ def test_load_raw(self):
+ (cube,) = iris.load_raw(self.fname)
+ self.assertEqual(len(cube.cell_measures()), 1)
+ self.assertEqual(cube.cell_measures()[0].measure, "area")
+
+ def test_load(self):
+ cube = iris.load_cube(self.fname)
+ self.assertEqual(len(cube.cell_measures()), 1)
+ self.assertEqual(cube.cell_measures()[0].measure, "area")
+
+ def test_merge_cell_measure_aware(self):
+ (cube1,) = iris.load_raw(self.fname)
+ (cube2,) = iris.load_raw(self.fname)
+ cube2._cell_measures_and_dims[0][0].var_name = "not_areat"
+ cubes = CubeList([cube1, cube2]).merge()
+ self.assertEqual(len(cubes), 2)
+
+ def test_concatenate_cell_measure_aware(self):
+ (cube1,) = iris.load_raw(self.fname)
+ cube1 = cube1[:, :, 0, 0]
+ cm_and_dims = cube1._cell_measures_and_dims
+ (cube2,) = iris.load_raw(self.fname)
+ cube2 = cube2[:, :, 0, 0]
+ cube2._cell_measures_and_dims[0][0].var_name = "not_areat"
+ cube2.coord("time").points = cube2.coord("time").points + 1
+ cubes = CubeList([cube1, cube2]).concatenate()
+ self.assertEqual(cubes[0]._cell_measures_and_dims, cm_and_dims)
+ self.assertEqual(len(cubes), 2)
+
+ def test_concatenate_cell_measure_match(self):
+ (cube1,) = iris.load_raw(self.fname)
+ cube1 = cube1[:, :, 0, 0]
+ cm_and_dims = cube1._cell_measures_and_dims
+ (cube2,) = iris.load_raw(self.fname)
+ cube2 = cube2[:, :, 0, 0]
+ cube2.coord("time").points = cube2.coord("time").points + 1
+ cubes = CubeList([cube1, cube2]).concatenate()
+ self.assertEqual(cubes[0]._cell_measures_and_dims, cm_and_dims)
+ self.assertEqual(len(cubes), 1)
+
+ def test_round_trip(self):
+ (cube,) = iris.load(self.fname)
+ with self.temp_filename(suffix=".nc") as filename:
+ iris.save(cube, filename, unlimited_dimensions=[])
+ (round_cube,) = iris.load_raw(filename)
+ self.assertEqual(len(round_cube.cell_measures()), 1)
+ self.assertEqual(round_cube.cell_measures()[0].measure, "area")
+
+ def test_print(self):
+ cube = iris.load_cube(self.fname)
+ printed = cube.__str__()
+ self.assertIn(
+ (
+ "Cell measures:\n"
+ " cell_area - - "
+ " x x"
+ ),
+ printed,
+ )
+
+
+class TestCellMethod_unknown(tests.IrisTest):
+ def test_unknown_method(self):
+ cube = Cube([1, 2], long_name="odd_phenomenon")
+ cube.add_cell_method(CellMethod(method="oddity", coords=("x",)))
+ temp_dirpath = tempfile.mkdtemp()
+ try:
+ temp_filepath = os.path.join(temp_dirpath, "tmp.nc")
+ iris.save(cube, temp_filepath)
+ with warnings.catch_warnings(record=True) as warning_records:
+ iris.load(temp_filepath)
+ # Filter to get the warning we are interested in.
+ warning_messages = [record.message for record in warning_records]
+ warning_messages = [
+ warn
+ for warn in warning_messages
+ if isinstance(warn, UnknownCellMethodWarning)
+ ]
+ self.assertEqual(len(warning_messages), 1)
+ message = warning_messages[0].args[0]
+ msg = (
+ "NetCDF variable 'odd_phenomenon' contains unknown cell "
+ "method 'oddity'"
+ )
+ self.assertIn(msg, message)
+ finally:
+ shutil.rmtree(temp_dirpath)
+
+
+def _get_scale_factor_add_offset(cube, datatype):
+ """Utility function used by netCDF data packing tests."""
+ if isinstance(datatype, dict):
+ dt = np.dtype(datatype["dtype"])
+ else:
+ dt = np.dtype(datatype)
+ cmax = cube.data.max()
+ cmin = cube.data.min()
+ n = dt.itemsize * 8
+ if ma.isMaskedArray(cube.data):
+ masked = True
+ else:
+ masked = False
+ if masked:
+ scale_factor = (cmax - cmin) / (2**n - 2)
+ else:
+ scale_factor = (cmax - cmin) / (2**n - 1)
+ if dt.kind == "u":
+ add_offset = cmin
+ elif dt.kind == "i":
+ if masked:
+ add_offset = (cmax + cmin) / 2
+ else:
+ add_offset = cmin + 2 ** (n - 1) * scale_factor
+ return (scale_factor, add_offset)
+
+
+@tests.skip_data
+class TestPackedData(tests.IrisTest):
+ def _single_test(self, datatype, CDLfilename, manual=False):
+ # Read PP input file.
+ file_in = tests.get_data_path(
+ (
+ "PP",
+ "cf_processing",
+ "000003000000.03.236.000128.1990.12.01.00.00.b.pp",
+ )
+ )
+ cube = iris.load_cube(file_in)
+ scale_factor, offset = _get_scale_factor_add_offset(cube, datatype)
+ if manual:
+ packspec = dict(
+ dtype=datatype, scale_factor=scale_factor, add_offset=offset
+ )
+ else:
+ packspec = datatype
+ # Write Cube to netCDF file.
+ with self.temp_filename(suffix=".nc") as file_out:
+ iris.save(cube, file_out, packing=packspec)
+ decimal = int(-np.log10(scale_factor))
+ packedcube = iris.load_cube(file_out)
+ # Check that packed cube is accurate to expected precision
+ self.assertArrayAlmostEqual(
+ cube.data, packedcube.data, decimal=decimal
+ )
+ # Check the netCDF file against CDL expected output.
+ self.assertCDL(
+ file_out,
+ (
+ "integration",
+ "netcdf",
+ "general",
+ "TestPackedData",
+ CDLfilename,
+ ),
+ )
+
+ def test_single_packed_signed(self):
+ """Test saving a single CF-netCDF file with packing."""
+ self._single_test("i2", "single_packed_signed.cdl")
+
+ def test_single_packed_unsigned(self):
+ """Test saving a single CF-netCDF file with packing into unsigned."""
+ self._single_test("u1", "single_packed_unsigned.cdl")
+
+ def test_single_packed_manual_scale(self):
+ """Test saving a single CF-netCDF file with packing with scale
+ factor and add_offset set manually."""
+ self._single_test("i2", "single_packed_manual.cdl", manual=True)
+
+ def _multi_test(self, CDLfilename, multi_dtype=False):
+ """Test saving multiple packed cubes with pack_dtype list."""
+ # Read PP input file.
+ file_in = tests.get_data_path(
+ ("PP", "cf_processing", "abcza_pa19591997_daily_29.b.pp")
+ )
+ cubes = iris.load(file_in)
+ # ensure cube order is the same:
+ cubes.sort(key=lambda cube: cube.cell_methods[0].method)
+ datatype = "i2"
+ scale_factor, offset = _get_scale_factor_add_offset(cubes[0], datatype)
+ if multi_dtype:
+ packdict = dict(
+ dtype=datatype, scale_factor=scale_factor, add_offset=offset
+ )
+ packspec = [packdict, None, "u2"]
+ dtypes = packspec
+ else:
+ packspec = datatype
+ dtypes = repeat(packspec)
+
+ # Write Cube to netCDF file.
+ with self.temp_filename(suffix=".nc") as file_out:
+ iris.save(cubes, file_out, packing=packspec)
+ # Check the netCDF file against CDL expected output.
+ self.assertCDL(
+ file_out,
+ (
+ "integration",
+ "netcdf",
+ "general",
+ "TestPackedData",
+ CDLfilename,
+ ),
+ )
+ packedcubes = iris.load(file_out)
+ packedcubes.sort(key=lambda cube: cube.cell_methods[0].method)
+ for cube, packedcube, dtype in zip(cubes, packedcubes, dtypes):
+ if dtype:
+ sf, ao = _get_scale_factor_add_offset(cube, dtype)
+ decimal = int(-np.log10(sf))
+ # Check that packed cube is accurate to expected precision
+ self.assertArrayAlmostEqual(
+ cube.data, packedcube.data, decimal=decimal
+ )
+ else:
+ self.assertArrayEqual(cube.data, packedcube.data)
+
+ def test_multi_packed_single_dtype(self):
+ """Test saving multiple packed cubes with the same pack_dtype."""
+ # Read PP input file.
+ self._multi_test("multi_packed_single_dtype.cdl")
+
+ def test_multi_packed_multi_dtype(self):
+ """Test saving multiple packed cubes with pack_dtype list."""
+ # Read PP input file.
+ self._multi_test("multi_packed_multi_dtype.cdl", multi_dtype=True)
+
+
+class TestScalarCube(tests.IrisTest):
+ def test_scalar_cube_save_load(self):
+ cube = iris.cube.Cube(1, long_name="scalar_cube")
+ with self.temp_filename(suffix=".nc") as fout:
+ iris.save(cube, fout)
+ scalar_cube = iris.load_cube(fout)
+ self.assertEqual(scalar_cube.name(), "scalar_cube")
+
+
+@tests.skip_data
+class TestConstrainedLoad(tests.IrisTest):
+ filename = tests.get_data_path(
+ ("NetCDF", "label_and_climate", "A1B-99999a-river-sep-2070-2099.nc")
+ )
+
+ def test_netcdf_with_NameConstraint(self):
+ constr = iris.NameConstraint(var_name="cdf_temp_dmax_tmean_abs")
+ cubes = iris.load(self.filename, constr)
+ self.assertEqual(len(cubes), 1)
+ self.assertEqual(cubes[0].var_name, "cdf_temp_dmax_tmean_abs")
+
+ def test_netcdf_with_no_constraint(self):
+ cubes = iris.load(self.filename)
+ self.assertEqual(len(cubes), 3)
+
+
+class TestSkippedCoord:
+ # If a coord/cell measure/etcetera cannot be added to the loaded Cube, a
+ # Warning is raised and the coord is skipped.
+ # This 'catching' is generic to all CannotAddErrors, but currently the only
+ # such problem that can exist in a NetCDF file is a mismatch of dimensions
+ # between phenomenon and coord.
+
+ cdl_core = """
+dimensions:
+ length_scale = 1 ;
+ lat = 3 ;
+variables:
+ float lat(lat) ;
+ lat:standard_name = "latitude" ;
+ lat:units = "degrees_north" ;
+ short lst_unc_sys(length_scale) ;
+ lst_unc_sys:long_name = "uncertainty from large-scale systematic
+ errors" ;
+ lst_unc_sys:units = "kelvin" ;
+ lst_unc_sys:coordinates = "lat" ;
+
+data:
+ lat = 0, 1, 2;
+ """
+
+ @pytest.fixture(autouse=True)
+ def create_nc_file(self, tmp_path):
+ file_name = "dim_mismatch"
+ cdl = f"netcdf {file_name}" + "{\n" + self.cdl_core + "\n}"
+ self.nc_path = (tmp_path / file_name).with_suffix(".nc")
+ ncgen_from_cdl(
+ cdl_str=cdl,
+ cdl_path=None,
+ nc_path=str(self.nc_path),
+ )
+ yield
+ self.nc_path.unlink()
+
+ def test_lat_not_loaded(self):
+ # iris#5068 includes discussion of possible retention of the skipped
+ # coords in the future.
+ with pytest.warns(
+ match="Missing data dimensions for multi-valued DimCoord"
+ ):
+ cube = iris.load_cube(self.nc_path)
+ with pytest.raises(iris.exceptions.CoordinateNotFoundError):
+ _ = cube.coord("lat")
+
+
+if __name__ == "__main__":
+ tests.main()
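As a sanity check on the packing arithmetic in `_get_scale_factor_add_offset` above, here is the unmasked, signed 16-bit case worked through by hand: with these choices the data extremes map exactly onto the full int16 range.

```python
import numpy as np

n = 16                    # bits in an 'i2' integer
cmin, cmax = 0.0, 100.0   # example unmasked data range

scale_factor = (cmax - cmin) / (2**n - 1)           # ~0.0015259
add_offset = cmin + 2 ** (n - 1) * scale_factor     # ~50.0008

packed_min = np.round((cmin - add_offset) / scale_factor)
packed_max = np.round((cmax - add_offset) / scale_factor)
assert packed_min == -(2 ** (n - 1))      # -32768
assert packed_max == 2 ** (n - 1) - 1     #  32767
```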
diff --git a/lib/iris/tests/integration/netcdf/test_self_referencing.py b/lib/iris/tests/integration/netcdf/test_self_referencing.py
new file mode 100644
index 0000000000..3395296e11
--- /dev/null
+++ b/lib/iris/tests/integration/netcdf/test_self_referencing.py
@@ -0,0 +1,126 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
+"""Integration tests for iris#3367 - loading a self-referencing NetCDF file."""
+
+# Import iris.tests first so that some things can be initialised before
+# importing anything else.
+import iris.tests as tests # isort:skip
+
+import os
+import tempfile
+from unittest import mock
+
+import numpy as np
+
+import iris
+from iris.fileformats.netcdf import _thread_safe_nc
+
+
+@tests.skip_data
+class TestCMIP6VolcelloLoad(tests.IrisTest):
+ def setUp(self):
+ self.fname = tests.get_data_path(
+ (
+ "NetCDF",
+ "volcello",
+ "volcello_Ofx_CESM2_deforest-globe_r1i1p1f1_gn.nc",
+ )
+ )
+
+ def test_cmip6_volcello_load_issue_3367(self):
+ # Ensure that reading a file which references itself in
+ # `cell_measures` can be read. At the same time, ensure that we
+ # still receive a warning about other variables mentioned in
+ # `cell_measures` i.e. a warning should be raised about missing
+ # areacello.
+ areacello_str = "areacello"
+ volcello_str = "volcello"
+ expected_msg = (
+ "Missing CF-netCDF measure variable %r, "
+ "referenced by netCDF variable %r" % (areacello_str, volcello_str)
+ )
+
+ with mock.patch("warnings.warn") as warn:
+ # ensure file loads without failure
+ cube = iris.load_cube(self.fname)
+ warn.assert_has_calls([mock.call(expected_msg)])
+
+ # extra check to ensure correct variable was found
+ assert cube.standard_name == "ocean_volume"
+
+
+class TestSelfReferencingVarLoad(tests.IrisTest):
+ def setUp(self):
+ self.temp_dir_path = os.path.join(
+ tempfile.mkdtemp(), "issue_3367_volcello_test_file.nc"
+ )
+ dataset = _thread_safe_nc.DatasetWrapper(self.temp_dir_path, "w")
+
+ dataset.createDimension("lat", 4)
+ dataset.createDimension("lon", 5)
+ dataset.createDimension("lev", 3)
+
+ latitudes = dataset.createVariable("lat", np.float64, ("lat",))
+ longitudes = dataset.createVariable("lon", np.float64, ("lon",))
+ levels = dataset.createVariable("lev", np.float64, ("lev",))
+ volcello = dataset.createVariable(
+ "volcello", np.float32, ("lat", "lon", "lev")
+ )
+
+ latitudes.standard_name = "latitude"
+ latitudes.units = "degrees_north"
+ latitudes.axis = "Y"
+ latitudes[:] = np.linspace(-90, 90, 4)
+
+ longitudes.standard_name = "longitude"
+ longitudes.units = "degrees_east"
+ longitudes.axis = "X"
+ longitudes[:] = np.linspace(0, 360, 5)
+
+ levels.standard_name = "olevel"
+ levels.units = "centimeters"
+ levels.positive = "down"
+ levels.axis = "Z"
+ levels[:] = np.linspace(0, 10**5, 3)
+
+ volcello.id = "volcello"
+ volcello.out_name = "volcello"
+ volcello.standard_name = "ocean_volume"
+ volcello.units = "m3"
+ volcello.realm = "ocean"
+ volcello.frequency = "fx"
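+        # The crux of issue #3367: `cell_measures` references the variable
+        # itself, as well as the absent `areacello`.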
+ volcello.cell_measures = "area: areacello volume: volcello"
+        volcello[:] = np.arange(4 * 5 * 3).reshape((4, 5, 3))
+
+ dataset.close()
+
+ def test_self_referencing_load_issue_3367(self):
+        # Ensure that a file which references itself in `cell_measures`
+        # can be read. At the same time, ensure that we still receive a
+        # warning about other variables mentioned in `cell_measures`,
+        # i.e. a warning should be raised about the missing areacello.
+ areacello_str = "areacello"
+ volcello_str = "volcello"
+ expected_msg = (
+ "Missing CF-netCDF measure variable %r, "
+ "referenced by netCDF variable %r" % (areacello_str, volcello_str)
+ )
+
+ with mock.patch("warnings.warn") as warn:
+ # ensure file loads without failure
+ cube = iris.load_cube(self.temp_dir_path)
+ warn.assert_called_with(expected_msg)
+
+ # extra check to ensure correct variable was found
+ assert cube.standard_name == "ocean_volume"
+
+ def tearDown(self):
+ os.remove(self.temp_dir_path)
+
+
+if __name__ == "__main__":
+ tests.main()
diff --git a/lib/iris/tests/integration/netcdf/test_thread_safety.py b/lib/iris/tests/integration/netcdf/test_thread_safety.py
new file mode 100644
index 0000000000..280e0f8418
--- /dev/null
+++ b/lib/iris/tests/integration/netcdf/test_thread_safety.py
@@ -0,0 +1,109 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
+"""
+Integration tests covering thread safety during loading/saving netcdf files.
+
+These tests are intended to catch non-thread-safe behaviour by producing CI
+'irregularities' that are noticed and investigated. They cannot reliably
+produce standard pytest failures, since the tools for 'correctly'
+testing non-thread-safe behaviour are not available at the Python layer.
+Thread safety problems can either produce errors (like a normal test) OR
+segfaults (the test doesn't complete, pytest-xdist starts a new group worker,
+and the final exit code is still non-0), and some problems do not occur in
+every test run.
+
+Token assertions are included after the line that is expected to reveal
+a thread safety problem, as this seems to be good testing practice.
+
+"""
+from pathlib import Path
+
+import dask
+from dask import array as da
+import numpy as np
+import pytest
+
+import iris
+from iris.cube import Cube, CubeList
+from iris.tests import get_data_path
+
+
+@pytest.fixture
+def tiny_chunks():
+ """Guarantee that Dask will use >1 thread by guaranteeing >1 chunk."""
+
+ def _check_tiny_loaded_chunks(cube: Cube):
+ assert cube.has_lazy_data()
+ cube_lazy_data = cube.core_data()
+        assert np.prod(cube_lazy_data.chunksize) < cube_lazy_data.size
+
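+    # A 1KiB chunk limit forces even modest arrays to be split into multiple
+    # chunks, so Dask schedules the resulting tasks across >1 thread.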
+ with dask.config.set({"array.chunk-size": "1KiB"}):
+ yield _check_tiny_loaded_chunks
+
+
+@pytest.fixture
+def save_common(tmp_path):
+ save_path = tmp_path / "tmp.nc"
+
+ def _func(cube: Cube):
+ assert not save_path.exists()
+ iris.save(cube, save_path)
+ assert save_path.exists()
+
+ yield _func
+
+
+@pytest.fixture
+def get_cubes_from_netcdf():
+ load_dir_path = Path(get_data_path(["NetCDF", "global", "xyt"]))
+ loaded = iris.load(load_dir_path.glob("*"), "tcco2")
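+    # Indexing to the first slice keeps the data lazy while reducing the
+    # work done per test.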
+ smaller = CubeList([c[0] for c in loaded])
+ yield smaller
+
+
+def test_realise_data(tiny_chunks, get_cubes_from_netcdf):
+ cube = get_cubes_from_netcdf[0]
+ tiny_chunks(cube)
+ _ = cube.data # Any problems are expected here.
+ assert not cube.has_lazy_data()
+
+
+def test_realise_data_multisource(get_cubes_from_netcdf):
+ """Load from multiple sources to force Dask to use multiple threads."""
+ cubes = get_cubes_from_netcdf
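+    # Summing builds a single lazy result that draws from every source file.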
+ final_cube = sum(cubes)
+ _ = final_cube.data # Any problems are expected here.
+ assert not final_cube.has_lazy_data()
+
+
+def test_save(tiny_chunks, save_common):
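+    # A purely lazy cube: saving streams the Dask array into the file.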
+ cube = Cube(da.ones(10000))
+ tiny_chunks(cube)
+ save_common(cube) # Any problems are expected here.
+
+
+def test_stream(tiny_chunks, get_cubes_from_netcdf, save_common):
+ cube = get_cubes_from_netcdf[0]
+ tiny_chunks(cube)
+ save_common(cube) # Any problems are expected here.
+
+
+def test_stream_multisource(get_cubes_from_netcdf, save_common):
+ """Load from multiple sources to force Dask to use multiple threads."""
+ cubes = get_cubes_from_netcdf
+ final_cube = sum(cubes)
+ save_common(final_cube) # Any problems are expected here.
+
+
+def test_comparison(get_cubes_from_netcdf):
+ """
+ Comparing multiple loaded files forces co-realisation.
+
+ See :func:`iris._lazy_data._co_realise_lazy_arrays` .
+ """
+ cubes = get_cubes_from_netcdf
+ _ = cubes[:-1] == cubes[1:] # Any problems are expected here.
+ assert all([c.has_lazy_data() for c in cubes])
diff --git a/lib/iris/tests/integration/test_netcdf.py b/lib/iris/tests/integration/test_netcdf.py
deleted file mode 100644
index 851c539ade..0000000000
--- a/lib/iris/tests/integration/test_netcdf.py
+++ /dev/null
@@ -1,958 +0,0 @@
-# Copyright Iris contributors
-#
-# This file is part of Iris and is released under the LGPL license.
-# See COPYING and COPYING.LESSER in the root of the repository for full
-# licensing details.
-"""Integration tests for loading and saving netcdf files."""
-
-# Import iris.tests first so that some things can be initialised before
-# importing anything else.
-import iris.tests as tests # isort:skip
-
-from contextlib import contextmanager
-from itertools import repeat
-import os.path
-from os.path import join as path_join
-import shutil
-import tempfile
-from unittest import mock
-import warnings
-
-import netCDF4 as nc
-import numpy as np
-import numpy.ma as ma
-import pytest
-
-import iris
-import iris.coord_systems
-from iris.coords import CellMethod, DimCoord
-from iris.cube import Cube, CubeList
-import iris.exceptions
-from iris.fileformats.netcdf import (
- CF_CONVENTIONS_VERSION,
- Saver,
- UnknownCellMethodWarning,
-)
-import iris.tests.stock as stock
-from iris.tests.stock.netcdf import ncgen_from_cdl
-import iris.tests.unit.fileformats.netcdf.test_load_cubes as tlc
-
-
-@tests.skip_data
-class TestAtmosphereSigma(tests.IrisTest):
- def setUp(self):
- # Modify stock cube so it is suitable to have a atmosphere sigma
- # factory added to it.
- cube = stock.realistic_4d_no_derived()
- cube.coord("surface_altitude").rename("surface_air_pressure")
- cube.coord("surface_air_pressure").units = "Pa"
- cube.coord("sigma").units = "1"
- ptop_coord = iris.coords.AuxCoord(1000.0, var_name="ptop", units="Pa")
- cube.add_aux_coord(ptop_coord, ())
- cube.remove_coord("level_height")
- # Construct and add atmosphere sigma factory.
- factory = iris.aux_factory.AtmosphereSigmaFactory(
- cube.coord("ptop"),
- cube.coord("sigma"),
- cube.coord("surface_air_pressure"),
- )
- cube.add_aux_factory(factory)
- self.cube = cube
-
- def test_save(self):
- with self.temp_filename(suffix=".nc") as filename:
- iris.save(self.cube, filename)
- self.assertCDL(filename)
-
- def test_save_load_loop(self):
- # Ensure that the AtmosphereSigmaFactory is automatically loaded
- # when loading the file.
- with self.temp_filename(suffix=".nc") as filename:
- iris.save(self.cube, filename)
- cube = iris.load_cube(filename, "air_potential_temperature")
- assert cube.coords("air_pressure")
-
-
-@tests.skip_data
-class TestHybridPressure(tests.IrisTest):
- def setUp(self):
- # Modify stock cube so it is suitable to have a
- # hybrid pressure factory added to it.
- cube = stock.realistic_4d_no_derived()
- cube.coord("surface_altitude").rename("surface_air_pressure")
- cube.coord("surface_air_pressure").units = "Pa"
- cube.coord("level_height").rename("level_pressure")
- cube.coord("level_pressure").units = "Pa"
- # Construct and add hybrid pressure factory.
- factory = iris.aux_factory.HybridPressureFactory(
- cube.coord("level_pressure"),
- cube.coord("sigma"),
- cube.coord("surface_air_pressure"),
- )
- cube.add_aux_factory(factory)
- self.cube = cube
-
- def test_save(self):
- with self.temp_filename(suffix=".nc") as filename:
- iris.save(self.cube, filename)
- self.assertCDL(filename)
-
- def test_save_load_loop(self):
- # Tests an issue where the variable names in the formula
- # terms changed to the standard_names instead of the variable names
- # when loading a previously saved cube.
- with self.temp_filename(suffix=".nc") as filename, self.temp_filename(
- suffix=".nc"
- ) as other_filename:
- iris.save(self.cube, filename)
- cube = iris.load_cube(filename, "air_potential_temperature")
- iris.save(cube, other_filename)
- other_cube = iris.load_cube(
- other_filename, "air_potential_temperature"
- )
- self.assertEqual(cube, other_cube)
-
-
-@tests.skip_data
-class TestSaveMultipleAuxFactories(tests.IrisTest):
- def test_hybrid_height_and_pressure(self):
- cube = stock.realistic_4d()
- cube.add_aux_coord(
- iris.coords.DimCoord(
- 1200.0, long_name="level_pressure", units="hPa"
- )
- )
- cube.add_aux_coord(
- iris.coords.DimCoord(0.5, long_name="other sigma", units="1")
- )
- cube.add_aux_coord(
- iris.coords.DimCoord(
- 1000.0, long_name="surface_air_pressure", units="hPa"
- )
- )
- factory = iris.aux_factory.HybridPressureFactory(
- cube.coord("level_pressure"),
- cube.coord("other sigma"),
- cube.coord("surface_air_pressure"),
- )
- cube.add_aux_factory(factory)
- with self.temp_filename(suffix=".nc") as filename:
- iris.save(cube, filename)
- self.assertCDL(filename)
-
- def test_shared_primary(self):
- cube = stock.realistic_4d()
- factory = iris.aux_factory.HybridHeightFactory(
- cube.coord("level_height"),
- cube.coord("sigma"),
- cube.coord("surface_altitude"),
- )
- factory.rename("another altitude")
- cube.add_aux_factory(factory)
- with self.temp_filename(
- suffix=".nc"
- ) as filename, self.assertRaisesRegex(
- ValueError, "multiple aux factories"
- ):
- iris.save(cube, filename)
-
- def test_hybrid_height_cubes(self):
- hh1 = stock.simple_4d_with_hybrid_height()
- hh1.attributes["cube"] = "hh1"
- hh2 = stock.simple_4d_with_hybrid_height()
- hh2.attributes["cube"] = "hh2"
- sa = hh2.coord("surface_altitude")
- sa.points = sa.points * 10
- with self.temp_filename(".nc") as fname:
- iris.save([hh1, hh2], fname)
- cubes = iris.load(fname, "air_temperature")
- cubes = sorted(cubes, key=lambda cube: cube.attributes["cube"])
- self.assertCML(cubes)
-
- def test_hybrid_height_cubes_on_dimension_coordinate(self):
- hh1 = stock.hybrid_height()
- hh2 = stock.hybrid_height()
- sa = hh2.coord("surface_altitude")
- sa.points = sa.points * 10
- emsg = "Unable to create dimensonless vertical coordinate."
- with self.temp_filename(".nc") as fname, self.assertRaisesRegex(
- ValueError, emsg
- ):
- iris.save([hh1, hh2], fname)
-
-
-class TestUmVersionAttribute(tests.IrisTest):
- def test_single_saves_as_global(self):
- cube = Cube(
- [1.0],
- standard_name="air_temperature",
- units="K",
- attributes={"um_version": "4.3"},
- )
- with self.temp_filename(".nc") as nc_path:
- iris.save(cube, nc_path)
- self.assertCDL(nc_path)
-
- def test_multiple_same_saves_as_global(self):
- cube_a = Cube(
- [1.0],
- standard_name="air_temperature",
- units="K",
- attributes={"um_version": "4.3"},
- )
- cube_b = Cube(
- [1.0],
- standard_name="air_pressure",
- units="hPa",
- attributes={"um_version": "4.3"},
- )
- with self.temp_filename(".nc") as nc_path:
- iris.save(CubeList([cube_a, cube_b]), nc_path)
- self.assertCDL(nc_path)
-
- def test_multiple_different_saves_on_variables(self):
- cube_a = Cube(
- [1.0],
- standard_name="air_temperature",
- units="K",
- attributes={"um_version": "4.3"},
- )
- cube_b = Cube(
- [1.0],
- standard_name="air_pressure",
- units="hPa",
- attributes={"um_version": "4.4"},
- )
- with self.temp_filename(".nc") as nc_path:
- iris.save(CubeList([cube_a, cube_b]), nc_path)
- self.assertCDL(nc_path)
-
-
-@contextmanager
-def _patch_site_configuration():
- def cf_patch_conventions(conventions):
- return ", ".join([conventions, "convention1, convention2"])
-
- def update(config):
- config["cf_profile"] = mock.Mock(name="cf_profile")
- config["cf_patch"] = mock.Mock(name="cf_patch")
- config["cf_patch_conventions"] = cf_patch_conventions
-
- orig_site_config = iris.site_configuration.copy()
- update(iris.site_configuration)
- yield
- iris.site_configuration = orig_site_config
-
-
-class TestConventionsAttributes(tests.IrisTest):
- def test_patching_conventions_attribute(self):
- # Ensure that user defined conventions are wiped and those which are
- # saved patched through site_config can be loaded without an exception
- # being raised.
- cube = Cube(
- [1.0],
- standard_name="air_temperature",
- units="K",
- attributes={"Conventions": "some user defined conventions"},
- )
-
- # Patch the site configuration dictionary.
- with _patch_site_configuration(), self.temp_filename(".nc") as nc_path:
- iris.save(cube, nc_path)
- res = iris.load_cube(nc_path)
-
- self.assertEqual(
- res.attributes["Conventions"],
- "{}, {}, {}".format(
- CF_CONVENTIONS_VERSION, "convention1", "convention2"
- ),
- )
-
-
-class TestLazySave(tests.IrisTest):
- @tests.skip_data
- def test_lazy_preserved_save(self):
- fpath = tests.get_data_path(
- ("NetCDF", "label_and_climate", "small_FC_167_mon_19601101.nc")
- )
- acube = iris.load_cube(fpath, "air_temperature")
- self.assertTrue(acube.has_lazy_data())
- # Also check a coord with lazy points + bounds.
- self.assertTrue(acube.coord("forecast_period").has_lazy_points())
- self.assertTrue(acube.coord("forecast_period").has_lazy_bounds())
- with self.temp_filename(".nc") as nc_path:
- with Saver(nc_path, "NETCDF4") as saver:
- saver.write(acube)
- # Check that cube data is not realised, also coord points + bounds.
- self.assertTrue(acube.has_lazy_data())
- self.assertTrue(acube.coord("forecast_period").has_lazy_points())
- self.assertTrue(acube.coord("forecast_period").has_lazy_bounds())
-
-
-@tests.skip_data
-class TestCellMeasures(tests.IrisTest):
- def setUp(self):
- self.fname = tests.get_data_path(("NetCDF", "ORCA2", "votemper.nc"))
-
- def test_load_raw(self):
- (cube,) = iris.load_raw(self.fname)
- self.assertEqual(len(cube.cell_measures()), 1)
- self.assertEqual(cube.cell_measures()[0].measure, "area")
-
- def test_load(self):
- cube = iris.load_cube(self.fname)
- self.assertEqual(len(cube.cell_measures()), 1)
- self.assertEqual(cube.cell_measures()[0].measure, "area")
-
- def test_merge_cell_measure_aware(self):
- (cube1,) = iris.load_raw(self.fname)
- (cube2,) = iris.load_raw(self.fname)
- cube2._cell_measures_and_dims[0][0].var_name = "not_areat"
- cubes = CubeList([cube1, cube2]).merge()
- self.assertEqual(len(cubes), 2)
-
- def test_concatenate_cell_measure_aware(self):
- (cube1,) = iris.load_raw(self.fname)
- cube1 = cube1[:, :, 0, 0]
- cm_and_dims = cube1._cell_measures_and_dims
- (cube2,) = iris.load_raw(self.fname)
- cube2 = cube2[:, :, 0, 0]
- cube2._cell_measures_and_dims[0][0].var_name = "not_areat"
- cube2.coord("time").points = cube2.coord("time").points + 1
- cubes = CubeList([cube1, cube2]).concatenate()
- self.assertEqual(cubes[0]._cell_measures_and_dims, cm_and_dims)
- self.assertEqual(len(cubes), 2)
-
- def test_concatenate_cell_measure_match(self):
- (cube1,) = iris.load_raw(self.fname)
- cube1 = cube1[:, :, 0, 0]
- cm_and_dims = cube1._cell_measures_and_dims
- (cube2,) = iris.load_raw(self.fname)
- cube2 = cube2[:, :, 0, 0]
- cube2.coord("time").points = cube2.coord("time").points + 1
- cubes = CubeList([cube1, cube2]).concatenate()
- self.assertEqual(cubes[0]._cell_measures_and_dims, cm_and_dims)
- self.assertEqual(len(cubes), 1)
-
- def test_round_trip(self):
- (cube,) = iris.load(self.fname)
- with self.temp_filename(suffix=".nc") as filename:
- iris.save(cube, filename, unlimited_dimensions=[])
- (round_cube,) = iris.load_raw(filename)
- self.assertEqual(len(round_cube.cell_measures()), 1)
- self.assertEqual(round_cube.cell_measures()[0].measure, "area")
-
- def test_print(self):
- cube = iris.load_cube(self.fname)
- printed = cube.__str__()
- self.assertIn(
- (
- "Cell measures:\n"
- " cell_area - - "
- " x x"
- ),
- printed,
- )
-
-
-@tests.skip_data
-class TestCMIP6VolcelloLoad(tests.IrisTest):
- def setUp(self):
- self.fname = tests.get_data_path(
- (
- "NetCDF",
- "volcello",
- "volcello_Ofx_CESM2_deforest-globe_r1i1p1f1_gn.nc",
- )
- )
-
- def test_cmip6_volcello_load_issue_3367(self):
- # Ensure that reading a file which references itself in
- # `cell_measures` can be read. At the same time, ensure that we
- # still receive a warning about other variables mentioned in
- # `cell_measures` i.e. a warning should be raised about missing
- # areacello.
- areacello_str = "areacello"
- volcello_str = "volcello"
- expected_msg = (
- "Missing CF-netCDF measure variable %r, "
- "referenced by netCDF variable %r" % (areacello_str, volcello_str)
- )
-
- with mock.patch("warnings.warn") as warn:
- # ensure file loads without failure
- cube = iris.load_cube(self.fname)
- warn.assert_has_calls([mock.call(expected_msg)])
-
- # extra check to ensure correct variable was found
- assert cube.standard_name == "ocean_volume"
-
-
-class TestSelfReferencingVarLoad(tests.IrisTest):
- def setUp(self):
- self.temp_dir_path = os.path.join(
- tempfile.mkdtemp(), "issue_3367_volcello_test_file.nc"
- )
- dataset = nc.Dataset(self.temp_dir_path, "w")
-
- dataset.createDimension("lat", 4)
- dataset.createDimension("lon", 5)
- dataset.createDimension("lev", 3)
-
- latitudes = dataset.createVariable("lat", np.float64, ("lat",))
- longitudes = dataset.createVariable("lon", np.float64, ("lon",))
- levels = dataset.createVariable("lev", np.float64, ("lev",))
- volcello = dataset.createVariable(
- "volcello", np.float32, ("lat", "lon", "lev")
- )
-
- latitudes.standard_name = "latitude"
- latitudes.units = "degrees_north"
- latitudes.axis = "Y"
- latitudes[:] = np.linspace(-90, 90, 4)
-
- longitudes.standard_name = "longitude"
- longitudes.units = "degrees_east"
- longitudes.axis = "X"
- longitudes[:] = np.linspace(0, 360, 5)
-
- levels.standard_name = "olevel"
- levels.units = "centimeters"
- levels.positive = "down"
- levels.axis = "Z"
- levels[:] = np.linspace(0, 10**5, 3)
-
- volcello.id = "volcello"
- volcello.out_name = "volcello"
- volcello.standard_name = "ocean_volume"
- volcello.units = "m3"
- volcello.realm = "ocean"
- volcello.frequency = "fx"
- volcello.cell_measures = "area: areacello volume: volcello"
- volcello = np.arange(4 * 5 * 3).reshape((4, 5, 3))
-
- dataset.close()
-
- def test_self_referencing_load_issue_3367(self):
- # Ensure that reading a file which references itself in
- # `cell_measures` can be read. At the same time, ensure that we
- # still receive a warning about other variables mentioned in
- # `cell_measures` i.e. a warning should be raised about missing
- # areacello.
- areacello_str = "areacello"
- volcello_str = "volcello"
- expected_msg = (
- "Missing CF-netCDF measure variable %r, "
- "referenced by netCDF variable %r" % (areacello_str, volcello_str)
- )
-
- with mock.patch("warnings.warn") as warn:
- # ensure file loads without failure
- cube = iris.load_cube(self.temp_dir_path)
- warn.assert_called_with(expected_msg)
-
- # extra check to ensure correct variable was found
- assert cube.standard_name == "ocean_volume"
-
- def tearDown(self):
- os.remove(self.temp_dir_path)
-
-
-class TestCellMethod_unknown(tests.IrisTest):
- def test_unknown_method(self):
- cube = Cube([1, 2], long_name="odd_phenomenon")
- cube.add_cell_method(CellMethod(method="oddity", coords=("x",)))
- temp_dirpath = tempfile.mkdtemp()
- try:
- temp_filepath = os.path.join(temp_dirpath, "tmp.nc")
- iris.save(cube, temp_filepath)
- with warnings.catch_warnings(record=True) as warning_records:
- iris.load(temp_filepath)
- # Filter to get the warning we are interested in.
- warning_messages = [record.message for record in warning_records]
- warning_messages = [
- warn
- for warn in warning_messages
- if isinstance(warn, UnknownCellMethodWarning)
- ]
- self.assertEqual(len(warning_messages), 1)
- message = warning_messages[0].args[0]
- msg = (
- "NetCDF variable 'odd_phenomenon' contains unknown cell "
- "method 'oddity'"
- )
- self.assertIn(msg, message)
- finally:
- shutil.rmtree(temp_dirpath)
-
-
-@tests.skip_data
-class TestCoordSystem(tests.IrisTest):
- def setUp(self):
- tlc.setUpModule()
-
- def tearDown(self):
- tlc.tearDownModule()
-
- def test_load_laea_grid(self):
- cube = iris.load_cube(
- tests.get_data_path(
- ("NetCDF", "lambert_azimuthal_equal_area", "euro_air_temp.nc")
- )
- )
- self.assertCML(cube, ("netcdf", "netcdf_laea.cml"))
-
- datum_cf_var_cdl = """
- netcdf output {
- dimensions:
- y = 4 ;
- x = 3 ;
- variables:
- float data(y, x) ;
- data :standard_name = "toa_brightness_temperature" ;
- data :units = "K" ;
- data :grid_mapping = "mercator" ;
- int mercator ;
- mercator:grid_mapping_name = "mercator" ;
- mercator:longitude_of_prime_meridian = 0. ;
- mercator:earth_radius = 6378169. ;
- mercator:horizontal_datum_name = "OSGB36" ;
- float y(y) ;
- y:axis = "Y" ;
- y:units = "m" ;
- y:standard_name = "projection_y_coordinate" ;
- float x(x) ;
- x:axis = "X" ;
- x:units = "m" ;
- x:standard_name = "projection_x_coordinate" ;
-
- // global attributes:
- :Conventions = "CF-1.7" ;
- :standard_name_vocabulary = "CF Standard Name Table v27" ;
-
- data:
-
- data =
- 0, 1, 2,
- 3, 4, 5,
- 6, 7, 8,
- 9, 10, 11 ;
-
- mercator = _ ;
-
- y = 1, 2, 3, 5 ;
-
- x = -6, -4, -2 ;
-
- }
- """
-
- datum_wkt_cdl = """
-netcdf output5 {
-dimensions:
- y = 4 ;
- x = 3 ;
-variables:
- float data(y, x) ;
- data :standard_name = "toa_brightness_temperature" ;
- data :units = "K" ;
- data :grid_mapping = "mercator" ;
- int mercator ;
- mercator:grid_mapping_name = "mercator" ;
- mercator:longitude_of_prime_meridian = 0. ;
- mercator:earth_radius = 6378169. ;
- mercator:longitude_of_projection_origin = 0. ;
- mercator:false_easting = 0. ;
- mercator:false_northing = 0. ;
- mercator:scale_factor_at_projection_origin = 1. ;
- mercator:crs_wkt = "PROJCRS[\\"unknown\\",BASEGEOGCRS[\\"unknown\\",DATUM[\\"OSGB36\\",ELLIPSOID[\\"unknown\\",6378169,0,LENGTHUNIT[\\"metre\\",1,ID[\\"EPSG\\",9001]]]],PRIMEM[\\"Greenwich\\",0,ANGLEUNIT[\\"degree\\",0.0174532925199433],ID[\\"EPSG\\",8901]]],CONVERSION[\\"unknown\\",METHOD[\\"Mercator (variant B)\\",ID[\\"EPSG\\",9805]],PARAMETER[\\"Latitude of 1st standard parallel\\",0,ANGLEUNIT[\\"degree\\",0.0174532925199433],ID[\\"EPSG\\",8823]],PARAMETER[\\"Longitude of natural origin\\",0,ANGLEUNIT[\\"degree\\",0.0174532925199433],ID[\\"EPSG\\",8802]],PARAMETER[\\"False easting\\",0,LENGTHUNIT[\\"metre\\",1],ID[\\"EPSG\\",8806]],PARAMETER[\\"False northing\\",0,LENGTHUNIT[\\"metre\\",1],ID[\\"EPSG\\",8807]]],CS[Cartesian,2],AXIS[\\"(E)\\",east,ORDER[1],LENGTHUNIT[\\"metre\\",1,ID[\\"EPSG\\",9001]]],AXIS[\\"(N)\\",north,ORDER[2],LENGTHUNIT[\\"metre\\",1,ID[\\"EPSG\\",9001]]]]" ;
- float y(y) ;
- y:axis = "Y" ;
- y:units = "m" ;
- y:standard_name = "projection_y_coordinate" ;
- float x(x) ;
- x:axis = "X" ;
- x:units = "m" ;
- x:standard_name = "projection_x_coordinate" ;
-
-// global attributes:
- :standard_name_vocabulary = "CF Standard Name Table v27" ;
- :Conventions = "CF-1.7" ;
-data:
-
- data =
- 0, 1, 2,
- 3, 4, 5,
- 6, 7, 8,
- 9, 10, 11 ;
-
- mercator = _ ;
-
- y = 1, 2, 3, 5 ;
-
- x = -6, -4, -2 ;
-}
- """
-
- def test_load_datum_wkt(self):
- expected = "OSGB 1936"
- nc_path = tlc.cdl_to_nc(self.datum_wkt_cdl)
- with iris.FUTURE.context(datum_support=True):
- cube = iris.load_cube(nc_path)
- test_crs = cube.coord("projection_y_coordinate").coord_system
- actual = str(test_crs.as_cartopy_crs().datum)
- self.assertMultiLineEqual(expected, actual)
-
- def test_no_load_datum_wkt(self):
- nc_path = tlc.cdl_to_nc(self.datum_wkt_cdl)
- with self.assertWarnsRegex(FutureWarning, "iris.FUTURE.datum_support"):
- cube = iris.load_cube(nc_path)
- test_crs = cube.coord("projection_y_coordinate").coord_system
- actual = str(test_crs.as_cartopy_crs().datum)
- self.assertMultiLineEqual(actual, "unknown")
-
- def test_load_datum_cf_var(self):
- expected = "OSGB 1936"
- nc_path = tlc.cdl_to_nc(self.datum_cf_var_cdl)
- with iris.FUTURE.context(datum_support=True):
- cube = iris.load_cube(nc_path)
- test_crs = cube.coord("projection_y_coordinate").coord_system
- actual = str(test_crs.as_cartopy_crs().datum)
- self.assertMultiLineEqual(expected, actual)
-
- def test_no_load_datum_cf_var(self):
- nc_path = tlc.cdl_to_nc(self.datum_cf_var_cdl)
- with self.assertWarnsRegex(FutureWarning, "iris.FUTURE.datum_support"):
- cube = iris.load_cube(nc_path)
- test_crs = cube.coord("projection_y_coordinate").coord_system
- actual = str(test_crs.as_cartopy_crs().datum)
- self.assertMultiLineEqual(actual, "unknown")
-
- def test_save_datum(self):
- expected = "OSGB 1936"
- saved_crs = iris.coord_systems.Mercator(
- ellipsoid=iris.coord_systems.GeogCS.from_datum("OSGB36")
- )
-
- base_cube = stock.realistic_3d()
- base_lat_coord = base_cube.coord("grid_latitude")
- test_lat_coord = DimCoord(
- base_lat_coord.points,
- standard_name="projection_y_coordinate",
- coord_system=saved_crs,
- )
- base_lon_coord = base_cube.coord("grid_longitude")
- test_lon_coord = DimCoord(
- base_lon_coord.points,
- standard_name="projection_x_coordinate",
- coord_system=saved_crs,
- )
- test_cube = Cube(
- base_cube.data,
- standard_name=base_cube.standard_name,
- units=base_cube.units,
- dim_coords_and_dims=(
- (base_cube.coord("time"), 0),
- (test_lat_coord, 1),
- (test_lon_coord, 2),
- ),
- )
-
- with self.temp_filename(suffix=".nc") as filename:
- iris.save(test_cube, filename)
- with iris.FUTURE.context(datum_support=True):
- cube = iris.load_cube(filename)
-
- test_crs = cube.coord("projection_y_coordinate").coord_system
- actual = str(test_crs.as_cartopy_crs().datum)
- self.assertMultiLineEqual(expected, actual)
-
-
-def _get_scale_factor_add_offset(cube, datatype):
- """Utility function used by netCDF data packing tests."""
- if isinstance(datatype, dict):
- dt = np.dtype(datatype["dtype"])
- else:
- dt = np.dtype(datatype)
- cmax = cube.data.max()
- cmin = cube.data.min()
- n = dt.itemsize * 8
- if ma.isMaskedArray(cube.data):
- masked = True
- else:
- masked = False
- if masked:
- scale_factor = (cmax - cmin) / (2**n - 2)
- else:
- scale_factor = (cmax - cmin) / (2**n - 1)
- if dt.kind == "u":
- add_offset = cmin
- elif dt.kind == "i":
- if masked:
- add_offset = (cmax + cmin) / 2
- else:
- add_offset = cmin + 2 ** (n - 1) * scale_factor
- return (scale_factor, add_offset)
-
-
-@tests.skip_data
-class TestPackedData(tests.IrisTest):
- def _single_test(self, datatype, CDLfilename, manual=False):
- # Read PP input file.
- file_in = tests.get_data_path(
- (
- "PP",
- "cf_processing",
- "000003000000.03.236.000128.1990.12.01.00.00.b.pp",
- )
- )
- cube = iris.load_cube(file_in)
- scale_factor, offset = _get_scale_factor_add_offset(cube, datatype)
- if manual:
- packspec = dict(
- dtype=datatype, scale_factor=scale_factor, add_offset=offset
- )
- else:
- packspec = datatype
- # Write Cube to netCDF file.
- with self.temp_filename(suffix=".nc") as file_out:
- iris.save(cube, file_out, packing=packspec)
- decimal = int(-np.log10(scale_factor))
- packedcube = iris.load_cube(file_out)
- # Check that packed cube is accurate to expected precision
- self.assertArrayAlmostEqual(
- cube.data, packedcube.data, decimal=decimal
- )
- # Check the netCDF file against CDL expected output.
- self.assertCDL(
- file_out,
- ("integration", "netcdf", "TestPackedData", CDLfilename),
- )
-
- def test_single_packed_signed(self):
- """Test saving a single CF-netCDF file with packing."""
- self._single_test("i2", "single_packed_signed.cdl")
-
- def test_single_packed_unsigned(self):
- """Test saving a single CF-netCDF file with packing into unsigned."""
- self._single_test("u1", "single_packed_unsigned.cdl")
-
- def test_single_packed_manual_scale(self):
- """Test saving a single CF-netCDF file with packing with scale
- factor and add_offset set manually."""
- self._single_test("i2", "single_packed_manual.cdl", manual=True)
-
- def _multi_test(self, CDLfilename, multi_dtype=False):
- """Test saving multiple packed cubes with pack_dtype list."""
- # Read PP input file.
- file_in = tests.get_data_path(
- ("PP", "cf_processing", "abcza_pa19591997_daily_29.b.pp")
- )
- cubes = iris.load(file_in)
- # ensure cube order is the same:
- cubes.sort(key=lambda cube: cube.cell_methods[0].method)
- datatype = "i2"
- scale_factor, offset = _get_scale_factor_add_offset(cubes[0], datatype)
- if multi_dtype:
- packdict = dict(
- dtype=datatype, scale_factor=scale_factor, add_offset=offset
- )
- packspec = [packdict, None, "u2"]
- dtypes = packspec
- else:
- packspec = datatype
- dtypes = repeat(packspec)
-
- # Write Cube to netCDF file.
- with self.temp_filename(suffix=".nc") as file_out:
- iris.save(cubes, file_out, packing=packspec)
- # Check the netCDF file against CDL expected output.
- self.assertCDL(
- file_out,
- ("integration", "netcdf", "TestPackedData", CDLfilename),
- )
- packedcubes = iris.load(file_out)
- packedcubes.sort(key=lambda cube: cube.cell_methods[0].method)
- for cube, packedcube, dtype in zip(cubes, packedcubes, dtypes):
- if dtype:
- sf, ao = _get_scale_factor_add_offset(cube, dtype)
- decimal = int(-np.log10(sf))
- # Check that packed cube is accurate to expected precision
- self.assertArrayAlmostEqual(
- cube.data, packedcube.data, decimal=decimal
- )
- else:
- self.assertArrayEqual(cube.data, packedcube.data)
-
- def test_multi_packed_single_dtype(self):
- """Test saving multiple packed cubes with the same pack_dtype."""
- # Read PP input file.
- self._multi_test("multi_packed_single_dtype.cdl")
-
- def test_multi_packed_multi_dtype(self):
- """Test saving multiple packed cubes with pack_dtype list."""
- # Read PP input file.
- self._multi_test("multi_packed_multi_dtype.cdl", multi_dtype=True)
-
-
-class TestScalarCube(tests.IrisTest):
- def test_scalar_cube_save_load(self):
- cube = iris.cube.Cube(1, long_name="scalar_cube")
- with self.temp_filename(suffix=".nc") as fout:
- iris.save(cube, fout)
- scalar_cube = iris.load_cube(fout)
- self.assertEqual(scalar_cube.name(), "scalar_cube")
-
-
-class TestStandardName(tests.IrisTest):
- def test_standard_name_roundtrip(self):
- standard_name = "air_temperature detection_minimum"
- cube = iris.cube.Cube(1, standard_name=standard_name)
- with self.temp_filename(suffix=".nc") as fout:
- iris.save(cube, fout)
- detection_limit_cube = iris.load_cube(fout)
- self.assertEqual(detection_limit_cube.standard_name, standard_name)
-
-
-class TestLoadMinimalGeostationary(tests.IrisTest):
- """
- Check we can load data with a geostationary grid-mapping, even when the
- 'false-easting' and 'false_northing' properties are missing.
-
- """
-
- _geostationary_problem_cdl = """
-netcdf geostationary_problem_case {
-dimensions:
- y = 2 ;
- x = 3 ;
-variables:
- short radiance(y, x) ;
- radiance:standard_name = "toa_outgoing_radiance_per_unit_wavelength" ;
- radiance:units = "W m-2 sr-1 um-1" ;
- radiance:coordinates = "y x" ;
- radiance:grid_mapping = "imager_grid_mapping" ;
- short y(y) ;
- y:units = "rad" ;
- y:axis = "Y" ;
- y:long_name = "fixed grid projection y-coordinate" ;
- y:standard_name = "projection_y_coordinate" ;
- short x(x) ;
- x:units = "rad" ;
- x:axis = "X" ;
- x:long_name = "fixed grid projection x-coordinate" ;
- x:standard_name = "projection_x_coordinate" ;
- int imager_grid_mapping ;
- imager_grid_mapping:grid_mapping_name = "geostationary" ;
- imager_grid_mapping:perspective_point_height = 35786023. ;
- imager_grid_mapping:semi_major_axis = 6378137. ;
- imager_grid_mapping:semi_minor_axis = 6356752.31414 ;
- imager_grid_mapping:latitude_of_projection_origin = 0. ;
- imager_grid_mapping:longitude_of_projection_origin = -75. ;
- imager_grid_mapping:sweep_angle_axis = "x" ;
-
-data:
-
- // coord values, just so these can be dim-coords
- y = 0, 1 ;
- x = 0, 1, 2 ;
-
-}
-"""
-
- @classmethod
- def setUpClass(cls):
- # Create a temp directory for transient test files.
- cls.temp_dir = tempfile.mkdtemp()
- cls.path_test_cdl = path_join(cls.temp_dir, "geos_problem.cdl")
- cls.path_test_nc = path_join(cls.temp_dir, "geos_problem.nc")
- # Create reference CDL and netcdf files from the CDL text.
- ncgen_from_cdl(
- cdl_str=cls._geostationary_problem_cdl,
- cdl_path=cls.path_test_cdl,
- nc_path=cls.path_test_nc,
- )
-
- @classmethod
- def tearDownClass(cls):
- # Destroy the temp directory.
- shutil.rmtree(cls.temp_dir)
-
- def test_geostationary_no_false_offsets(self):
- # Check we can load the test data and coordinate system properties are correct.
- cube = iris.load_cube(self.path_test_nc)
- # Check the coordinate system properties has the correct default properties.
- cs = cube.coord_system()
- self.assertIsInstance(cs, iris.coord_systems.Geostationary)
- self.assertEqual(cs.false_easting, 0.0)
- self.assertEqual(cs.false_northing, 0.0)
-
-
-@tests.skip_data
-class TestConstrainedLoad(tests.IrisTest):
- filename = tests.get_data_path(
- ("NetCDF", "label_and_climate", "A1B-99999a-river-sep-2070-2099.nc")
- )
-
- def test_netcdf_with_NameConstraint(self):
- constr = iris.NameConstraint(var_name="cdf_temp_dmax_tmean_abs")
- cubes = iris.load(self.filename, constr)
- self.assertEqual(len(cubes), 1)
- self.assertEqual(cubes[0].var_name, "cdf_temp_dmax_tmean_abs")
-
- def test_netcdf_with_no_constraint(self):
- cubes = iris.load(self.filename)
- self.assertEqual(len(cubes), 3)
-
-
-class TestSkippedCoord:
- # If a coord/cell measure/etcetera cannot be added to the loaded Cube, a
- # Warning is raised and the coord is skipped.
- # This 'catching' is generic to all CannotAddErrors, but currently the only
- # such problem that can exist in a NetCDF file is a mismatch of dimensions
- # between phenomenon and coord.
-
- cdl_core = """
-dimensions:
- length_scale = 1 ;
- lat = 3 ;
-variables:
- float lat(lat) ;
- lat:standard_name = "latitude" ;
- lat:units = "degrees_north" ;
- short lst_unc_sys(length_scale) ;
- lst_unc_sys:long_name = "uncertainty from large-scale systematic
- errors" ;
- lst_unc_sys:units = "kelvin" ;
- lst_unc_sys:coordinates = "lat" ;
-
-data:
- lat = 0, 1, 2;
- """
-
- @pytest.fixture(autouse=True)
- def create_nc_file(self, tmp_path):
- file_name = "dim_mismatch"
- cdl = f"netcdf {file_name}" + "{\n" + self.cdl_core + "\n}"
- self.nc_path = (tmp_path / file_name).with_suffix(".nc")
- ncgen_from_cdl(
- cdl_str=cdl,
- cdl_path=None,
- nc_path=str(self.nc_path),
- )
- yield
- self.nc_path.unlink()
-
- def test_lat_not_loaded(self):
- # iris#5068 includes discussion of possible retention of the skipped
- # coords in the future.
- with pytest.warns(
- match="Missing data dimensions for multi-valued DimCoord"
- ):
- cube = iris.load_cube(self.nc_path)
- with pytest.raises(iris.exceptions.CoordinateNotFoundError):
- _ = cube.coord("lat")
-
-
-if __name__ == "__main__":
- tests.main()
diff --git a/lib/iris/tests/results/integration/netcdf/TestUmVersionAttribute/multiple_different_saves_on_variables.cdl b/lib/iris/tests/results/integration/netcdf/attributes/TestUmVersionAttribute/multiple_different_saves_on_variables.cdl
similarity index 100%
rename from lib/iris/tests/results/integration/netcdf/TestUmVersionAttribute/multiple_different_saves_on_variables.cdl
rename to lib/iris/tests/results/integration/netcdf/attributes/TestUmVersionAttribute/multiple_different_saves_on_variables.cdl
diff --git a/lib/iris/tests/results/integration/netcdf/TestUmVersionAttribute/multiple_same_saves_as_global.cdl b/lib/iris/tests/results/integration/netcdf/attributes/TestUmVersionAttribute/multiple_same_saves_as_global.cdl
similarity index 100%
rename from lib/iris/tests/results/integration/netcdf/TestUmVersionAttribute/multiple_same_saves_as_global.cdl
rename to lib/iris/tests/results/integration/netcdf/attributes/TestUmVersionAttribute/multiple_same_saves_as_global.cdl
diff --git a/lib/iris/tests/results/integration/netcdf/TestUmVersionAttribute/single_saves_as_global.cdl b/lib/iris/tests/results/integration/netcdf/attributes/TestUmVersionAttribute/single_saves_as_global.cdl
similarity index 100%
rename from lib/iris/tests/results/integration/netcdf/TestUmVersionAttribute/single_saves_as_global.cdl
rename to lib/iris/tests/results/integration/netcdf/attributes/TestUmVersionAttribute/single_saves_as_global.cdl
diff --git a/lib/iris/tests/results/integration/netcdf/TestAtmosphereSigma/save.cdl b/lib/iris/tests/results/integration/netcdf/aux_factories/TestAtmosphereSigma/save.cdl
similarity index 100%
rename from lib/iris/tests/results/integration/netcdf/TestAtmosphereSigma/save.cdl
rename to lib/iris/tests/results/integration/netcdf/aux_factories/TestAtmosphereSigma/save.cdl
diff --git a/lib/iris/tests/results/integration/netcdf/TestHybridPressure/save.cdl b/lib/iris/tests/results/integration/netcdf/aux_factories/TestHybridPressure/save.cdl
similarity index 100%
rename from lib/iris/tests/results/integration/netcdf/TestHybridPressure/save.cdl
rename to lib/iris/tests/results/integration/netcdf/aux_factories/TestHybridPressure/save.cdl
diff --git a/lib/iris/tests/results/integration/netcdf/TestSaveMultipleAuxFactories/hybrid_height_and_pressure.cdl b/lib/iris/tests/results/integration/netcdf/aux_factories/TestSaveMultipleAuxFactories/hybrid_height_and_pressure.cdl
similarity index 100%
rename from lib/iris/tests/results/integration/netcdf/TestSaveMultipleAuxFactories/hybrid_height_and_pressure.cdl
rename to lib/iris/tests/results/integration/netcdf/aux_factories/TestSaveMultipleAuxFactories/hybrid_height_and_pressure.cdl
diff --git a/lib/iris/tests/results/integration/netcdf/TestSaveMultipleAuxFactories/hybrid_height_cubes.cml b/lib/iris/tests/results/integration/netcdf/aux_factories/TestSaveMultipleAuxFactories/hybrid_height_cubes.cml
similarity index 100%
rename from lib/iris/tests/results/integration/netcdf/TestSaveMultipleAuxFactories/hybrid_height_cubes.cml
rename to lib/iris/tests/results/integration/netcdf/aux_factories/TestSaveMultipleAuxFactories/hybrid_height_cubes.cml
diff --git a/lib/iris/tests/results/integration/netcdf/TestPackedData/multi_packed_multi_dtype.cdl b/lib/iris/tests/results/integration/netcdf/general/TestPackedData/multi_packed_multi_dtype.cdl
similarity index 100%
rename from lib/iris/tests/results/integration/netcdf/TestPackedData/multi_packed_multi_dtype.cdl
rename to lib/iris/tests/results/integration/netcdf/general/TestPackedData/multi_packed_multi_dtype.cdl
diff --git a/lib/iris/tests/results/integration/netcdf/TestPackedData/multi_packed_single_dtype.cdl b/lib/iris/tests/results/integration/netcdf/general/TestPackedData/multi_packed_single_dtype.cdl
similarity index 100%
rename from lib/iris/tests/results/integration/netcdf/TestPackedData/multi_packed_single_dtype.cdl
rename to lib/iris/tests/results/integration/netcdf/general/TestPackedData/multi_packed_single_dtype.cdl
diff --git a/lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_manual.cdl b/lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_manual.cdl
similarity index 100%
rename from lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_manual.cdl
rename to lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_manual.cdl
diff --git a/lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_signed.cdl b/lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_signed.cdl
similarity index 100%
rename from lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_signed.cdl
rename to lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_signed.cdl
diff --git a/lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_unsigned.cdl b/lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_unsigned.cdl
similarity index 100%
rename from lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_unsigned.cdl
rename to lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_unsigned.cdl
diff --git a/lib/iris/tests/stock/netcdf.py b/lib/iris/tests/stock/netcdf.py
index 8a448f7d34..a13b9dd269 100644
--- a/lib/iris/tests/stock/netcdf.py
+++ b/lib/iris/tests/stock/netcdf.py
@@ -12,9 +12,9 @@
import dask
from dask import array as da
-import netCDF4
import numpy as np
+from iris.fileformats.netcdf import _thread_safe_nc
from iris.tests import env_bin_path
NCGEN_PATHSTR = str(env_bin_path("ncgen"))
@@ -100,7 +100,7 @@ def _add_standard_data(nc_path, unlimited_dim_size=0):
"""
- ds = netCDF4.Dataset(nc_path, "r+")
+ ds = _thread_safe_nc.DatasetWrapper(nc_path, "r+")
unlimited_dim_names = [
dim for dim in ds.dimensions if ds.dimensions[dim].isunlimited()
diff --git a/lib/iris/tests/test_cf.py b/lib/iris/tests/test_cf.py
index 034fb1dbda..ec4728b697 100644
--- a/lib/iris/tests/test_cf.py
+++ b/lib/iris/tests/test_cf.py
@@ -15,6 +15,8 @@
import io
from unittest import mock
+import pytest
+
import iris
import iris.fileformats.cf as cf
@@ -52,11 +54,14 @@ def test_cached(self):
@tests.skip_data
class TestCFReader(tests.IrisTest):
- def setUp(self):
+ @pytest.fixture(autouse=True)
+ def set_up(self):
filename = tests.get_data_path(
("NetCDF", "rotated", "xyt", "small_rotPole_precipitation.nc")
)
self.cfr = cf.CFReader(filename)
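+        # Yield inside the reader's context so its dataset is closed again
+        # after each test.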
+ with self.cfr:
+ yield
def test_ancillary_variables_pass_0(self):
self.assertEqual(self.cfr.cf_group.ancillary_variables, {})
@@ -350,7 +355,8 @@ def test_cell_methods(self):
@tests.skip_data
class TestClimatology(tests.IrisTest):
- def setUp(self):
+ @pytest.fixture(autouse=True)
+ def set_up(self):
filename = tests.get_data_path(
(
"NetCDF",
@@ -359,6 +365,8 @@ def setUp(self):
)
)
self.cfr = cf.CFReader(filename)
+ with self.cfr:
+ yield
def test_bounds(self):
time = self.cfr.cf_group["temp_dmax_tmean_abs"].cf_group.coordinates[
@@ -375,7 +383,8 @@ def test_bounds(self):
@tests.skip_data
class TestLabels(tests.IrisTest):
- def setUp(self):
+ @pytest.fixture(autouse=True)
+ def set_up(self):
filename = tests.get_data_path(
(
"NetCDF",
@@ -390,6 +399,10 @@ def setUp(self):
)
self.cfr_end = cf.CFReader(filename)
+ with self.cfr_start:
+ with self.cfr_end:
+ yield
+
def test_label_dim_start(self):
cf_data_var = self.cfr_start.cf_group["temp_dmax_tmean_abs"]
diff --git a/lib/iris/tests/test_coding_standards.py b/lib/iris/tests/test_coding_standards.py
index 01f6f777fa..b52934c568 100644
--- a/lib/iris/tests/test_coding_standards.py
+++ b/lib/iris/tests/test_coding_standards.py
@@ -12,9 +12,12 @@
from fnmatch import fnmatch
from glob import glob
import os
+from pathlib import Path
import subprocess
import iris
+from iris.fileformats.netcdf import _thread_safe_nc
+from iris.tests import system_test
LICENSE_TEMPLATE = """# Copyright Iris contributors
#
@@ -40,6 +43,29 @@
IRIS_REPO_DIRPATH = os.environ.get("IRIS_REPO_DIR", IRIS_INSTALL_DIR)
+def test_netcdf4_import():
+ """Use of netCDF4 must be via iris.fileformats.netcdf._thread_safe_nc ."""
+ # Please avoid including these phrases in any comments/strings throughout
+ # Iris (e.g. use "from the netCDF4 library" instead) - this allows the
+ # below search to remain quick and simple.
+ import_strings = ("import netCDF4", "from netCDF4")
+
+ files_including_import = []
+ for file_path in Path(IRIS_DIR).rglob("*.py"):
+ with file_path.open("r") as open_file:
+ file_text = open_file.read()
+
+ if any([i in file_text for i in import_strings]):
+ files_including_import.append(file_path)
+
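+    # The only modules permitted to use the netCDF4 library directly.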
+ expected = [
+ Path(_thread_safe_nc.__file__),
+ Path(system_test.__file__),
+ Path(__file__),
+ ]
+ assert set(files_including_import) == set(expected)
+
+
class TestLicenseHeaders(tests.IrisTest):
@staticmethod
def whatchanged_parse(whatchanged_output):
diff --git a/lib/iris/tests/test_load.py b/lib/iris/tests/test_load.py
index 4749236abc..adb33924e5 100644
--- a/lib/iris/tests/test_load.py
+++ b/lib/iris/tests/test_load.py
@@ -14,9 +14,8 @@
import pathlib
from unittest import mock
-import netCDF4
-
import iris
+from iris.fileformats.netcdf import _thread_safe_nc
import iris.io
@@ -193,10 +192,11 @@ def test_netCDF_Dataset_call(self):
filename = tests.get_data_path(
("NetCDF", "global", "xyt", "SMALL_total_column_co2.nc")
)
- fake_dataset = netCDF4.Dataset(filename)
+ fake_dataset = _thread_safe_nc.DatasetWrapper(filename)
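+        # Patch the wrapper class so that "opening" the URL just returns
+        # this local dataset, avoiding a real HTTP fetch.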
with mock.patch(
- "netCDF4.Dataset", return_value=fake_dataset
+ "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper",
+ return_value=fake_dataset,
) as dataset_loader:
next(iris.io.load_http([self.url], callback=None))
dataset_loader.assert_called_with(self.url, mode="r")
diff --git a/lib/iris/tests/test_netcdf.py b/lib/iris/tests/test_netcdf.py
index 5017698a22..9af2cb800e 100644
--- a/lib/iris/tests/test_netcdf.py
+++ b/lib/iris/tests/test_netcdf.py
@@ -19,7 +19,6 @@
import tempfile
from unittest import mock
-import netCDF4 as nc
import numpy as np
import numpy.ma as ma
@@ -29,6 +28,7 @@
import iris.coord_systems as icoord_systems
from iris.fileformats._nc_load_rules import helpers as ncload_helpers
import iris.fileformats.netcdf
+from iris.fileformats.netcdf import _thread_safe_nc
from iris.fileformats.netcdf import load_cubes as nc_load_cubes
import iris.std_names
import iris.tests.stock as stock
@@ -81,7 +81,7 @@ def test_missing_time_bounds(self):
("NetCDF", "global", "xyt", "SMALL_hires_wind_u_for_ipcc4.nc")
)
shutil.copyfile(src, filename)
- dataset = nc.Dataset(filename, mode="a")
+ dataset = _thread_safe_nc.DatasetWrapper(filename, mode="a")
dataset.renameVariable("time_bnds", "foo")
dataset.close()
_ = iris.load_cube(filename, "eastward_wind")
@@ -204,7 +204,7 @@ def test_missing_climatology(self):
("NetCDF", "transverse_mercator", "tmean_1910_1910.nc")
)
shutil.copyfile(src, filename)
- dataset = nc.Dataset(filename, mode="a")
+ dataset = _thread_safe_nc.DatasetWrapper(filename, mode="a")
dataset.renameVariable("climatology_bounds", "foo")
dataset.close()
_ = iris.load_cube(filename, "Mean temperature")
@@ -634,7 +634,7 @@ def test_netcdf_save_format(self):
with self.temp_filename(suffix=".nc") as file_out:
# Test default NETCDF4 file format saving.
iris.save(cube, file_out)
- ds = nc.Dataset(file_out)
+ ds = _thread_safe_nc.DatasetWrapper(file_out)
self.assertEqual(
ds.file_format, "NETCDF4", "Failed to save as NETCDF4 format"
)
@@ -642,7 +642,7 @@ def test_netcdf_save_format(self):
# Test NETCDF4_CLASSIC file format saving.
iris.save(cube, file_out, netcdf_format="NETCDF4_CLASSIC")
- ds = nc.Dataset(file_out)
+ ds = _thread_safe_nc.DatasetWrapper(file_out)
self.assertEqual(
ds.file_format,
"NETCDF4_CLASSIC",
@@ -652,7 +652,7 @@ def test_netcdf_save_format(self):
# Test NETCDF3_CLASSIC file format saving.
iris.save(cube, file_out, netcdf_format="NETCDF3_CLASSIC")
- ds = nc.Dataset(file_out)
+ ds = _thread_safe_nc.DatasetWrapper(file_out)
self.assertEqual(
ds.file_format,
"NETCDF3_CLASSIC",
@@ -662,7 +662,7 @@ def test_netcdf_save_format(self):
# Test NETCDF4_64BIT file format saving.
iris.save(cube, file_out, netcdf_format="NETCDF3_64BIT")
- ds = nc.Dataset(file_out)
+ ds = _thread_safe_nc.DatasetWrapper(file_out)
self.assertTrue(
ds.file_format in ["NETCDF3_64BIT", "NETCDF3_64BIT_OFFSET"],
"Failed to save as NETCDF3_64BIT format",
@@ -1049,7 +1049,7 @@ def test_attributes(self):
with self.temp_filename(suffix=".nc") as filename:
iris.save(self.cube, filename)
# Load the dataset.
- ds = nc.Dataset(filename, "r")
+ ds = _thread_safe_nc.DatasetWrapper(filename, "r")
exceptions = []
# Should be global attributes.
for gkey in aglobals:
@@ -1213,7 +1213,7 @@ def test_shared(self):
self.assertCDL(filename)
# Also check that only one, shared ancillary variable was written.
- ds = nc.Dataset(filename)
+ ds = _thread_safe_nc.DatasetWrapper(filename)
self.assertIn("air_potential_temperature", ds.variables)
self.assertIn("alternate_data", ds.variables)
self.assertEqual(
diff --git a/lib/iris/tests/test_pp_cf.py b/lib/iris/tests/test_pp_cf.py
index 2b497cb53b..49bedaf1e2 100644
--- a/lib/iris/tests/test_pp_cf.py
+++ b/lib/iris/tests/test_pp_cf.py
@@ -10,10 +10,9 @@
import os
import tempfile
-import netCDF4
-
import iris
import iris.coords
+from iris.fileformats.netcdf import _thread_safe_nc
from iris.fileformats.pp import STASH
import iris.tests.pp as pp
import iris.util
@@ -95,7 +94,7 @@ def _test_file(self, name):
for index, cube in enumerate(cubes):
# Explicitly set a fill-value as a workaround for
# https://github.com/Unidata/netcdf4-python/issues/725
- fill_value = netCDF4.default_fillvals[cube.dtype.str[1:]]
+ fill_value = _thread_safe_nc.default_fillvals[cube.dtype.str[1:]]
file_nc = tempfile.NamedTemporaryFile(
suffix=".nc", delete=False
diff --git a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridReader.py b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridReader.py
index e44aee730a..d9de814b05 100644
--- a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridReader.py
+++ b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridReader.py
@@ -94,7 +94,10 @@ def setUp(self):
# Restrict the CFUGridReader functionality to only performing
# translations and building first level cf-groups for variables.
self.patch("iris.experimental.ugrid.cf.CFUGridReader._reset")
- self.patch("netCDF4.Dataset", return_value=self.dataset)
+ self.patch(
+ "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper",
+ return_value=self.dataset,
+ )
cf_reader = CFUGridReader("dummy")
self.cf_group = cf_reader.cf_group
diff --git a/lib/iris/tests/unit/fileformats/cf/test_CFReader.py b/lib/iris/tests/unit/fileformats/cf/test_CFReader.py
index dee28e98cc..9e5cf9b7a5 100644
--- a/lib/iris/tests/unit/fileformats/cf/test_CFReader.py
+++ b/lib/iris/tests/unit/fileformats/cf/test_CFReader.py
@@ -70,7 +70,10 @@ def setUp(self):
)
def test_create_global_attributes(self):
- with mock.patch("netCDF4.Dataset", return_value=self.dataset):
+ with mock.patch(
+ "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper",
+ return_value=self.dataset,
+ ):
global_attrs = CFReader("dummy").cf_group.global_attributes
self.assertEqual(
global_attrs["dimensions"], "something something_else"
@@ -145,7 +148,10 @@ def setUp(self):
self.addCleanup(reset_patch.stop)
def test_create_formula_terms(self):
- with mock.patch("netCDF4.Dataset", return_value=self.dataset):
+ with mock.patch(
+ "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper",
+ return_value=self.dataset,
+ ):
cf_group = CFReader("dummy").cf_group
self.assertEqual(len(cf_group), len(self.variables))
# Check there is a singular data variable.
@@ -247,7 +253,10 @@ def setUp(self):
self.addCleanup(patcher.stop)
def test_associate_formula_terms_with_data_variable(self):
- with mock.patch("netCDF4.Dataset", return_value=self.dataset):
+ with mock.patch(
+ "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper",
+ return_value=self.dataset,
+ ):
cf_group = CFReader("dummy").cf_group
self.assertEqual(len(cf_group), len(self.variables))
# Check the cf-group associated with the data variable.
@@ -296,7 +305,10 @@ def test_associate_formula_terms_with_data_variable(self):
)
def test_promote_reference(self):
- with mock.patch("netCDF4.Dataset", return_value=self.dataset):
+ with mock.patch(
+ "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper",
+ return_value=self.dataset,
+ ):
cf_group = CFReader("dummy").cf_group
self.assertEqual(len(cf_group), len(self.variables))
# Check the number of data variables.
@@ -316,7 +328,8 @@ def test_promote_reference(self):
def test_formula_terms_ignore(self):
self.orography.dimensions = ["lat", "wibble"]
with mock.patch(
- "netCDF4.Dataset", return_value=self.dataset
+ "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper",
+ return_value=self.dataset,
), mock.patch("warnings.warn") as warn:
cf_group = CFReader("dummy").cf_group
group = cf_group.promoted
@@ -327,7 +340,8 @@ def test_formula_terms_ignore(self):
def test_auxiliary_ignore(self):
self.x.dimensions = ["lat", "wibble"]
with mock.patch(
- "netCDF4.Dataset", return_value=self.dataset
+ "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper",
+ return_value=self.dataset,
), mock.patch("warnings.warn") as warn:
cf_group = CFReader("dummy").cf_group
promoted = ["x", "orography"]
@@ -342,7 +356,8 @@ def test_promoted_auxiliary_ignore(self):
self.variables["wibble"] = self.wibble
self.orography.coordinates = "wibble"
with mock.patch(
- "netCDF4.Dataset", return_value=self.dataset
+ "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper",
+ return_value=self.dataset,
), mock.patch("warnings.warn") as warn:
cf_group = CFReader("dummy").cf_group.promoted
promoted = ["wibble", "orography"]
diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py
index 0cc3d09426..399a987f11 100644
--- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py
+++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py
@@ -80,42 +80,44 @@ def load_cube_from_cdl(self, cdl_string, cdl_path, nc_path):
# Simulate the inner part of the file reading process.
cf = CFReader(nc_path)
- # Grab a data variable : FOR NOW always grab the 'phenom' variable.
- cf_var = cf.cf_group.data_variables["phenom"]
-
- engine = iris.fileformats.netcdf.loader._actions_engine()
-
- # If debug enabled, switch on the activation summary debug output.
- # Use 'patch' so it is restored after the test.
- self.patch("iris.fileformats.netcdf.loader.DEBUG", self.debug)
-
- with warnings.catch_warnings():
- warnings.filterwarnings(
- "ignore",
- message="Ignoring a datum in netCDF load for consistency with existing "
- "behaviour. In a future version of Iris, this datum will be "
- "applied. To apply the datum when loading, use the "
- "iris.FUTURE.datum_support flag.",
- category=FutureWarning,
- )
- # Call the main translation function to load a single cube.
- # _load_cube establishes per-cube facts, activates rules and
- # produces an actual cube.
- cube = _load_cube(engine, cf, cf_var, nc_path)
-
- # Also Record, on the cubes, which hybrid coord elements were identified
- # by the rules operation.
- # Unlike the other translations, _load_cube does *not* convert this
- # information into actual cube elements. That is instead done by
- # `iris.fileformats.netcdf._load_aux_factory`.
- # For rules testing, it is anyway more convenient to deal with the raw
- # data, as each factory type has different validity requirements to
- # build it, and none of that is relevant to the rules operation.
- cube._formula_type_name = engine.requires.get("formula_type")
- cube._formula_terms_byname = engine.requires.get("formula_terms")
-
- # Always returns a single cube.
- return cube
+
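+        # Opening the CFReader as a context manager ensures its dataset is
+        # closed again once the cube has been produced.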
+ with cf:
+ # Grab a data variable : FOR NOW always grab the 'phenom' variable.
+ cf_var = cf.cf_group.data_variables["phenom"]
+
+ engine = iris.fileformats.netcdf.loader._actions_engine()
+
+ # If debug enabled, switch on the activation summary debug output.
+ # Use 'patch' so it is restored after the test.
+ self.patch("iris.fileformats.netcdf.loader.DEBUG", self.debug)
+
+ with warnings.catch_warnings():
+ warnings.filterwarnings(
+ "ignore",
+ message="Ignoring a datum in netCDF load for consistency with existing "
+ "behaviour. In a future version of Iris, this datum will be "
+ "applied. To apply the datum when loading, use the "
+ "iris.FUTURE.datum_support flag.",
+ category=FutureWarning,
+ )
+ # Call the main translation function to load a single cube.
+ # _load_cube establishes per-cube facts, activates rules and
+ # produces an actual cube.
+ cube = _load_cube(engine, cf, cf_var, nc_path)
+
+            # Also record, on the cubes, which hybrid coord elements were identified
+ # by the rules operation.
+ # Unlike the other translations, _load_cube does *not* convert this
+ # information into actual cube elements. That is instead done by
+ # `iris.fileformats.netcdf._load_aux_factory`.
+ # For rules testing, it is anyway more convenient to deal with the raw
+ # data, as each factory type has different validity requirements to
+ # build it, and none of that is relevant to the rules operation.
+ cube._formula_type_name = engine.requires.get("formula_type")
+ cube._formula_terms_byname = engine.requires.get("formula_terms")
+
+ # Always returns a single cube.
+ return cube
def run_testcase(self, warning_regex=None, **testcase_kwargs):
"""
diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_Saver.py b/lib/iris/tests/unit/fileformats/netcdf/test_Saver.py
index 174a46fdb7..6fa9e9e096 100644
--- a/lib/iris/tests/unit/fileformats/netcdf/test_Saver.py
+++ b/lib/iris/tests/unit/fileformats/netcdf/test_Saver.py
@@ -13,7 +13,6 @@
from contextlib import contextmanager
from unittest import mock
-import netCDF4 as nc
import numpy as np
from numpy import ma
@@ -32,7 +31,7 @@
)
from iris.coords import AuxCoord, DimCoord
from iris.cube import Cube
-from iris.fileformats.netcdf import Saver
+from iris.fileformats.netcdf import Saver, _thread_safe_nc
import iris.tests.stock as stock
@@ -203,12 +202,12 @@ def test_big_endian(self):
def test_zlib(self):
cube = self._simple_cube(">f4")
- api = self.patch("iris.fileformats.netcdf.saver.netCDF4")
+ api = self.patch("iris.fileformats.netcdf.saver._thread_safe_nc")
# Define mocked default fill values to prevent deprecation warning (#4374).
api.default_fillvals = collections.defaultdict(lambda: -99.0)
with Saver("/dummy/path", "NETCDF4") as saver:
saver.write(cube, zlib=True)
- dataset = api.Dataset.return_value
+ dataset = api.DatasetWrapper.return_value
create_var_call = mock.call(
"air_pressure_anomaly",
np.dtype("float32"),
@@ -249,7 +248,7 @@ def test_default_unlimited_dimensions(self):
with self.temp_filename(".nc") as nc_path:
with Saver(nc_path, "NETCDF4") as saver:
saver.write(cube)
- ds = nc.Dataset(nc_path)
+ ds = _thread_safe_nc.DatasetWrapper(nc_path)
self.assertFalse(ds.dimensions["dim0"].isunlimited())
self.assertFalse(ds.dimensions["dim1"].isunlimited())
ds.close()
@@ -259,7 +258,7 @@ def test_no_unlimited_dimensions(self):
with self.temp_filename(".nc") as nc_path:
with Saver(nc_path, "NETCDF4") as saver:
saver.write(cube, unlimited_dimensions=None)
- ds = nc.Dataset(nc_path)
+ ds = _thread_safe_nc.DatasetWrapper(nc_path)
for dim in ds.dimensions.values():
self.assertFalse(dim.isunlimited())
ds.close()
@@ -281,7 +280,7 @@ def test_custom_unlimited_dimensions(self):
with self.temp_filename(".nc") as nc_path:
with Saver(nc_path, "NETCDF4") as saver:
saver.write(cube, unlimited_dimensions=unlimited_dimensions)
- ds = nc.Dataset(nc_path)
+ ds = _thread_safe_nc.DatasetWrapper(nc_path)
for dim in unlimited_dimensions:
self.assertTrue(ds.dimensions[dim].isunlimited())
ds.close()
@@ -290,7 +289,7 @@ def test_custom_unlimited_dimensions(self):
coords = [cube.coord(dim) for dim in unlimited_dimensions]
with Saver(nc_path, "NETCDF4") as saver:
saver.write(cube, unlimited_dimensions=coords)
- ds = nc.Dataset(nc_path)
+ ds = _thread_safe_nc.DatasetWrapper(nc_path)
for dim in unlimited_dimensions:
self.assertTrue(ds.dimensions[dim].isunlimited())
ds.close()
@@ -301,7 +300,7 @@ def test_reserved_attributes(self):
with self.temp_filename(".nc") as nc_path:
with Saver(nc_path, "NETCDF4") as saver:
saver.write(cube)
- ds = nc.Dataset(nc_path)
+ ds = _thread_safe_nc.DatasetWrapper(nc_path)
res = ds.getncattr("dimensions")
ds.close()
self.assertEqual(res, "something something_else")
@@ -323,7 +322,7 @@ def test_dimensional_to_scalar(self):
with self.temp_filename(".nc") as nc_path:
with Saver(nc_path, "NETCDF4") as saver:
saver.write(cube)
- ds = nc.Dataset(nc_path)
+ ds = _thread_safe_nc.DatasetWrapper(nc_path)
# Confirm that the only dimension is the one denoting the number
# of bounds - have successfully saved the 2D bounds array into 1D.
self.assertEqual(["bnds"], list(ds.dimensions.keys()))
@@ -363,7 +362,7 @@ def _check_bounds_setting(self, climatological=False):
saver._ensure_valid_dtype.return_value = mock.Mock(
shape=coord.bounds.shape, dtype=coord.bounds.dtype
)
- var = mock.MagicMock(spec=nc.Variable)
+ var = mock.MagicMock(spec=_thread_safe_nc.VariableWrapper)
# Make the main call.
Saver._create_cf_bounds(saver, coord, var, "time")
@@ -404,7 +403,7 @@ def test_valid_range_saved(self):
with self.temp_filename(".nc") as nc_path:
with Saver(nc_path, "NETCDF4") as saver:
saver.write(cube, unlimited_dimensions=[])
- ds = nc.Dataset(nc_path)
+ ds = _thread_safe_nc.DatasetWrapper(nc_path)
self.assertArrayEqual(ds.valid_range, vrange)
ds.close()
@@ -416,7 +415,7 @@ def test_valid_min_saved(self):
with self.temp_filename(".nc") as nc_path:
with Saver(nc_path, "NETCDF4") as saver:
saver.write(cube, unlimited_dimensions=[])
- ds = nc.Dataset(nc_path)
+ ds = _thread_safe_nc.DatasetWrapper(nc_path)
self.assertArrayEqual(ds.valid_min, 1)
ds.close()
@@ -428,7 +427,7 @@ def test_valid_max_saved(self):
with self.temp_filename(".nc") as nc_path:
with Saver(nc_path, "NETCDF4") as saver:
saver.write(cube, unlimited_dimensions=[])
- ds = nc.Dataset(nc_path)
+ ds = _thread_safe_nc.DatasetWrapper(nc_path)
self.assertArrayEqual(ds.valid_max, 2)
ds.close()
@@ -448,7 +447,7 @@ def test_valid_range_saved(self):
with self.temp_filename(".nc") as nc_path:
with Saver(nc_path, "NETCDF4") as saver:
saver.write(cube, unlimited_dimensions=[])
- ds = nc.Dataset(nc_path)
+ ds = _thread_safe_nc.DatasetWrapper(nc_path)
self.assertArrayEqual(
ds.variables["longitude"].valid_range, vrange
)
@@ -462,7 +461,7 @@ def test_valid_min_saved(self):
with self.temp_filename(".nc") as nc_path:
with Saver(nc_path, "NETCDF4") as saver:
saver.write(cube, unlimited_dimensions=[])
- ds = nc.Dataset(nc_path)
+ ds = _thread_safe_nc.DatasetWrapper(nc_path)
self.assertArrayEqual(ds.variables["longitude"].valid_min, 1)
ds.close()
@@ -474,7 +473,7 @@ def test_valid_max_saved(self):
with self.temp_filename(".nc") as nc_path:
with Saver(nc_path, "NETCDF4") as saver:
saver.write(cube, unlimited_dimensions=[])
- ds = nc.Dataset(nc_path)
+ ds = _thread_safe_nc.DatasetWrapper(nc_path)
self.assertArrayEqual(ds.variables["longitude"].valid_max, 2)
ds.close()
@@ -506,7 +505,7 @@ def _netCDF_var(self, cube, **kwargs):
with self.temp_filename(".nc") as nc_path:
with Saver(nc_path, "NETCDF4") as saver:
saver.write(cube, **kwargs)
- ds = nc.Dataset(nc_path)
+ ds = _thread_safe_nc.DatasetWrapper(nc_path)
(var,) = [
var
for var in ds.variables.values()
@@ -572,7 +571,7 @@ def test_contains_default_fill_value(self):
# Test that a warning is raised if the data contains the default fill
# value if no fill_value argument is supplied.
cube = self._make_cube(">f4")
- cube.data[0, 0] = nc.default_fillvals["f4"]
+ cube.data[0, 0] = _thread_safe_nc.default_fillvals["f4"]
with self.assertWarnsRegex(
UserWarning,
"contains unmasked data points equal to the fill-value",
@@ -647,7 +646,9 @@ def setUp(self):
self.container = mock.Mock(name="container", attributes={})
self.data_dtype = np.dtype("int32")
- patch = mock.patch("netCDF4.Dataset")
+ patch = mock.patch(
+ "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper"
+ )
_ = patch.start()
self.addCleanup(patch.stop)
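
Every direct use of netCDF4 in this test module is rerouted through `iris.fileformats.netcdf._thread_safe_nc`: `nc.Dataset` becomes `DatasetWrapper`, the mock spec uses `VariableWrapper`, and `default_fillvals` is reached via the same module. The wrappers exist because the underlying netCDF-C/HDF5 libraries are not thread-safe, so every call into netCDF4 has to be serialised behind a common lock. A rough sketch of the idea only, assuming a module-level lock and plain attribute delegation (the names `_GLOBAL_LOCK` and `_contained` are illustrative, not Iris's actual internals, and a real wrapper would also wrap the objects returned from `variables` and `dimensions`):

import threading

import netCDF4

_GLOBAL_LOCK = threading.Lock()  # one lock shared by all wrapper instances


class DatasetWrapper:
    """Proxy a netCDF4.Dataset, holding a global lock around each call."""

    def __init__(self, *args, **kwargs):
        with _GLOBAL_LOCK:
            self._contained = netCDF4.Dataset(*args, **kwargs)

    def __getattr__(self, name):
        with _GLOBAL_LOCK:
            attr = getattr(self._contained, name)
        if callable(attr):
            # Hold the lock while the underlying method itself executes.
            def locked(*args, **kwargs):
                with _GLOBAL_LOCK:
                    return attr(*args, **kwargs)

            return locked
        return attr

Note also the changed `mock.patch` target in the last hunk: `patch` must name an object where it is looked up, so once the saver obtains its dataset class from `_thread_safe_nc`, patching bare `netCDF4.Dataset` would no longer intercept anything.
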
diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_Saver__ugrid.py b/lib/iris/tests/unit/fileformats/netcdf/test_Saver__ugrid.py
index 575c852ece..666f7962a4 100644
--- a/lib/iris/tests/unit/fileformats/netcdf/test_Saver__ugrid.py
+++ b/lib/iris/tests/unit/fileformats/netcdf/test_Saver__ugrid.py
@@ -18,7 +18,6 @@
import shutil
import tempfile
-import netCDF4 as nc
import numpy as np
from iris import save
@@ -26,6 +25,7 @@
from iris.cube import Cube, CubeList
from iris.experimental.ugrid.mesh import Connectivity, Mesh
from iris.experimental.ugrid.save import save_mesh
+from iris.fileformats.netcdf import _thread_safe_nc
from iris.tests.stock import realistic_4d
XY_LOCS = ("x", "y")
@@ -259,7 +259,7 @@ def scan_dataset(filepath):
variable's dims.
"""
- ds = nc.Dataset(filepath)
+ ds = _thread_safe_nc.DatasetWrapper(filepath)
# dims dict is {name: len}
dimsdict = {name: dim.size for name, dim in ds.dimensions.items()}
# vars dict is {name: {attr:val}}
@@ -824,7 +824,7 @@ def test_nonuniform_connectivity(self):
self.assertNotIn("_FillValue", fn_props)
# For what it's worth, *also* check the actual data array in the file
- ds = nc.Dataset(tempfile_path)
+ ds = _thread_safe_nc.DatasetWrapper(tempfile_path)
conn_var = ds.variables[ff_conn_name]
data = conn_var[:]
ds.close()
diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_save.py b/lib/iris/tests/unit/fileformats/netcdf/test_save.py
index 030edbfce2..b274a8be0d 100644
--- a/lib/iris/tests/unit/fileformats/netcdf/test_save.py
+++ b/lib/iris/tests/unit/fileformats/netcdf/test_save.py
@@ -14,14 +14,17 @@
from tempfile import mkdtemp
from unittest import mock
-import netCDF4 as nc
import numpy as np
import iris
from iris.coords import AuxCoord, DimCoord
from iris.cube import Cube, CubeList
from iris.experimental.ugrid import PARSE_UGRID_ON_LOAD
-from iris.fileformats.netcdf import CF_CONVENTIONS_VERSION, save
+from iris.fileformats.netcdf import (
+ CF_CONVENTIONS_VERSION,
+ _thread_safe_nc,
+ save,
+)
from iris.tests.stock import lat_lon_cube
from iris.tests.stock.mesh import sample_mesh_cube
@@ -38,7 +41,7 @@ def test_custom_conventions__ignored(self):
# CF convention.
with self.temp_filename(".nc") as nc_path:
save(self.cube, nc_path, "NETCDF4")
- ds = nc.Dataset(nc_path)
+ ds = _thread_safe_nc.DatasetWrapper(nc_path)
res = ds.getncattr("Conventions")
ds.close()
self.assertEqual(res, CF_CONVENTIONS_VERSION)
@@ -49,7 +52,7 @@ def test_custom_conventions__allowed(self):
with mock.patch.object(self.options, "conventions_override", True):
with self.temp_filename(".nc") as nc_path:
save(self.cube, nc_path, "NETCDF4")
- ds = nc.Dataset(nc_path)
+ ds = _thread_safe_nc.DatasetWrapper(nc_path)
res = ds.getncattr("Conventions")
ds.close()
self.assertEqual(res, self.custom_conventions)
@@ -61,7 +64,7 @@ def test_custom_conventions__allowed__missing(self):
with mock.patch.object(self.options, "conventions_override", True):
with self.temp_filename(".nc") as nc_path:
save(self.cube, nc_path, "NETCDF4")
- ds = nc.Dataset(nc_path)
+ ds = _thread_safe_nc.DatasetWrapper(nc_path)
res = ds.getncattr("Conventions")
ds.close()
self.assertEqual(res, CF_CONVENTIONS_VERSION)
@@ -76,7 +79,7 @@ def test_attributes_arrays(self):
with self.temp_filename("foo.nc") as nc_out:
save([c1, c2], nc_out)
- ds = nc.Dataset(nc_out)
+ ds = _thread_safe_nc.DatasetWrapper(nc_out)
res = ds.getncattr("bar")
ds.close()
self.assertArrayEqual(res, np.arange(2))
@@ -92,7 +95,7 @@ def test_no_special_attribute_clash(self):
with self.temp_filename("foo.nc") as nc_out:
save([c1, c2], nc_out)
- ds = nc.Dataset(nc_out)
+ ds = _thread_safe_nc.DatasetWrapper(nc_out)
res = ds.variables["test"].getncattr("name")
res_1 = ds.variables["test_1"].getncattr("name")
ds.close()
@@ -105,7 +108,7 @@ def test_no_unlimited_dims(self):
cube = lat_lon_cube()
with self.temp_filename("foo.nc") as nc_out:
save(cube, nc_out)
- ds = nc.Dataset(nc_out)
+ ds = _thread_safe_nc.DatasetWrapper(nc_out)
self.assertFalse(ds.dimensions["latitude"].isunlimited())
def test_unlimited_dim_latitude(self):
@@ -113,7 +116,7 @@ def test_unlimited_dim_latitude(self):
unlim_dim_name = "latitude"
with self.temp_filename("foo.nc") as nc_out:
save(cube, nc_out, unlimited_dimensions=[unlim_dim_name])
- ds = nc.Dataset(nc_out)
+ ds = _thread_safe_nc.DatasetWrapper(nc_out)
self.assertTrue(ds.dimensions[unlim_dim_name].isunlimited())
diff --git a/requirements/ci/py310.yml b/requirements/ci/py310.yml
index 6815c7fe6d..4b1b59f7b2 100644
--- a/requirements/ci/py310.yml
+++ b/requirements/ci/py310.yml
@@ -16,7 +16,7 @@ dependencies:
- cftime >=1.5
- dask-core >=2.26
- matplotlib >=3.5
- - netcdf4 <1.6.1
+ - netcdf4
- numpy >=1.19
- python-xxhash
- pyproj
diff --git a/requirements/ci/py38.yml b/requirements/ci/py38.yml
index 316e0868ac..6e83a095f0 100644
--- a/requirements/ci/py38.yml
+++ b/requirements/ci/py38.yml
@@ -16,7 +16,7 @@ dependencies:
- cftime >=1.5
- dask-core >=2.26
- matplotlib >=3.5
- - netcdf4 <1.6.1
+ - netcdf4
- numpy >=1.19
- python-xxhash
- pyproj
diff --git a/requirements/ci/py39.yml b/requirements/ci/py39.yml
index 66e22c230f..bbc9722152 100644
--- a/requirements/ci/py39.yml
+++ b/requirements/ci/py39.yml
@@ -16,7 +16,7 @@ dependencies:
- cftime >=1.5
- dask-core >=2.26
- matplotlib >=3.5
- - netcdf4 <1.6.1
+ - netcdf4
- numpy >=1.19
- python-xxhash
- pyproj
diff --git a/setup.cfg b/setup.cfg
index f6276cb173..47904cfe5f 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -52,7 +52,7 @@ install_requires =
cftime>=1.5.0
dask[array]>=2.26
matplotlib>=3.5
- netcdf4<1.6.1
+ netcdf4
numpy>=1.19
scipy
shapely!=1.8.3
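
Finally, the requirements changes relax `netcdf4 <1.6.1` to an unpinned `netcdf4` in all three CI environments and in `setup.cfg`. That upper bound had been a workaround for crashes seen when newer netCDF4 builds were driven from multiple threads, as happens with dask-backed lazy loading; with every netCDF4 call now funnelled through the thread-safe wrappers, Iris provides that protection itself and the pin is no longer needed. A usage sketch, reusing the illustrative `DatasetWrapper` above (the filenames are placeholders):

from concurrent.futures import ThreadPoolExecutor

def conventions(path):
    # Each underlying netCDF4 call runs under the global lock.
    ds = DatasetWrapper(path)
    try:
        return ds.getncattr("Conventions")
    finally:
        ds.close()

paths = ["a.nc", "b.nc"]  # placeholder filenames
with ThreadPoolExecutor(max_workers=2) as pool:
    print(list(pool.map(conventions, paths)))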