Skip to content

Commit

Permalink
Change an == to an is. Fix tests so that this won't happen again.
Browse files Browse the repository at this point in the history
Closes pydata#2647 and re-affirms pydata#1988.
  • Loading branch information
WeatherGod committed Jan 4, 2019
1 parent 28123bb commit e1bb375
Show file tree
Hide file tree
Showing 2 changed files with 24 additions and 1 deletion.
2 changes: 1 addition & 1 deletion xarray/backends/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -606,7 +606,7 @@ def open_mfdataset(paths, chunks=None, concat_dim=_CONCAT_DIM_DEFAULT,
# Coerce 1D input into ND to maintain backwards-compatible API until API
# for N-D combine decided
# (see https://github.com/pydata/xarray/pull/2553/#issuecomment-445892746)
if concat_dim is None or concat_dim == _CONCAT_DIM_DEFAULT:
if concat_dim is None or concat_dim is _CONCAT_DIM_DEFAULT:
concat_dims = concat_dim
elif not isinstance(concat_dim, list):
concat_dims = [concat_dim]
Expand Down
23 changes: 23 additions & 0 deletions xarray/tests/test_backends.py
Original file line number Diff line number Diff line change
Expand Up @@ -2367,6 +2367,29 @@ def test_open_single_dataset(self):
with open_mfdataset([tmp], concat_dim=dim) as actual:
assert_identical(expected, actual)

def test_open_multi_dataset(self):
    # Regression test for GH #1988 and #2647: open_mfdataset() must
    # honor an explicitly supplied concat_dim.  Using two files rather
    # than one matters here because numpy implicitly coerces a
    # one-length array to a boolean in comparisons, which let #2647
    # slip past test_open_single_dataset().  The single-file test is
    # still kept as-is, since the original bug mishandled one-length
    # arrays during concatenation.
    values = np.random.randn(10)
    source = Dataset({'foo': ('x', values)})
    concat_coord = DataArray([100, 150], name='baz', dims='baz')
    # Two identical copies stacked along the new 'baz' dimension.
    stacked = np.stack([values, values])
    expected = Dataset({'foo': (('baz', 'x'), stacked)},
                       {'baz': [100, 150]})
    with create_tmp_file() as path_a, \
            create_tmp_file() as path_b:
        source.to_netcdf(path_a)
        source.to_netcdf(path_b)
        with open_mfdataset([path_a, path_b],
                            concat_dim=concat_coord) as actual:
            assert_identical(expected, actual)

def test_dask_roundtrip(self):
with create_tmp_file() as tmp:
data = create_test_data()
Expand Down

0 comments on commit e1bb375

Please sign in to comment.