Merge remote-tracking branch 'upstream/master' into compat-override
* upstream/master:
  ignore h5py 2.10.0 warnings and fix invalid_netcdf warning test. (pydata#3301)
  Update why-xarray.rst with clearer expression (pydata#3307)
  Updater to testing environment name (pydata#3253)
dcherian committed Sep 13, 2019
2 parents 71ec88d + e90e8bc commit 223c998
Showing 2 changed files with 17 additions and 7 deletions.
doc/why-xarray.rst (2 additions, 3 deletions)
@@ -62,9 +62,8 @@ The power of the dataset over a plain dictionary is that, in addition to
 pulling out arrays by name, it is possible to select or combine data along a
 dimension across all arrays simultaneously. Like a
 :py:class:`~pandas.DataFrame`, datasets facilitate array operations with
-heterogeneous data -- the difference is that the arrays in a dataset can not
-only have different data types, but can also have different numbers of
-dimensions.
+heterogeneous data -- the difference is that the arrays in a dataset can have
+not only different data types, but also different numbers of dimensions.
 
 This data model is borrowed from the netCDF_ file format, which also provides
 xarray with a natural and portable serialization format. NetCDF is very popular
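
For readers of the documentation change above, a minimal sketch of what the reworded sentence means in practice. The variable names and values here are illustrative only and are not taken from the commit or the docs:

import numpy as np
import xarray as xr

# Arrays in one Dataset may differ in both dtype and number of dimensions,
# as long as any shared dimensions have consistent lengths.
ds = xr.Dataset(
    {
        "temperature": (("x", "y"), np.random.rand(3, 4)),   # 2-D float array
        "station_id": ("x", np.array([101, 102, 103])),      # 1-D int array
    }
)

# Selecting along "x" operates on every array that uses that dimension.
print(ds.isel(x=0))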
xarray/tests/test_backends.py (15 additions, 4 deletions)
@@ -2163,6 +2163,7 @@ def test_encoding_unlimited_dims(self):
 
 @requires_h5netcdf
 @requires_netCDF4
+@pytest.mark.filterwarnings("ignore:use make_scale(name) instead")
 class TestH5NetCDFData(NetCDF4Base):
     engine = "h5netcdf"
 
@@ -2173,16 +2174,25 @@ def create_store(self):
 
     @pytest.mark.filterwarnings("ignore:complex dtypes are supported by h5py")
     @pytest.mark.parametrize(
-        "invalid_netcdf, warns, num_warns",
+        "invalid_netcdf, warntype, num_warns",
         [(None, FutureWarning, 1), (False, FutureWarning, 1), (True, None, 0)],
     )
-    def test_complex(self, invalid_netcdf, warns, num_warns):
+    def test_complex(self, invalid_netcdf, warntype, num_warns):
         expected = Dataset({"x": ("y", np.ones(5) + 1j * np.ones(5))})
         save_kwargs = {"invalid_netcdf": invalid_netcdf}
-        with pytest.warns(warns) as record:
+        with pytest.warns(warntype) as record:
             with self.roundtrip(expected, save_kwargs=save_kwargs) as actual:
                 assert_equal(expected, actual)
-        assert len(record) == num_warns
+
+        recorded_num_warns = 0
+        if warntype:
+            for warning in record:
+                if issubclass(warning.category, warntype) and (
+                    "complex dtypes" in str(warning.message)
+                ):
+                    recorded_num_warns += 1
+
+        assert recorded_num_warns == num_warns
 
     def test_cross_engine_read_write_netcdf4(self):
         # Drop dim3, because its labels include strings. These appear to be
@@ -2451,6 +2461,7 @@ def skip_if_not_engine(engine):
 
 
 @requires_dask
+@pytest.mark.filterwarnings("ignore:use make_scale(name) instead")
 def test_open_mfdataset_manyfiles(
     readengine, nfiles, parallel, chunks, file_cache_maxsize
 ):
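
The change to test_complex above replaces a plain len(record) assertion with a count of only those captured warnings whose category and message match the expected one. A standalone sketch of that pattern, with a hypothetical emit_warnings helper standing in for the roundtrip call (not part of the commit):

import warnings

import pytest


def emit_warnings():
    # Hypothetical code under test: one relevant warning plus an unrelated
    # library warning that should not affect the assertion.
    warnings.warn("complex dtypes are supported by h5py", FutureWarning)
    warnings.warn("use make_scale(name) instead", DeprecationWarning)


def test_counts_only_matching_warnings():
    with pytest.warns(FutureWarning) as record:
        emit_warnings()

    # Count only warnings of the expected category whose message matches,
    # instead of asserting on len(record), which would also count the
    # unrelated DeprecationWarning captured by pytest.warns.
    matching = [
        w
        for w in record
        if issubclass(w.category, FutureWarning)
        and "complex dtypes" in str(w.message)
    ]
    assert len(matching) == 1

The @pytest.mark.filterwarnings("ignore:use make_scale(name) instead") markers added at class and function level address the other half of the problem described in the commit message: they tell pytest to ignore that h5py 2.10.0 warning for the decorated tests.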
