From edcccdf22efed8324fc4c8cabefb4cccb24a2d77 Mon Sep 17 00:00:00 2001
From: dcherian
Date: Mon, 16 Sep 2019 08:15:32 -0600
Subject: [PATCH] undo auto_combine deprecation.

---
 xarray/backends/api.py        | 18 ++--------
 xarray/core/combine.py        | 29 ++-------------
 xarray/tests/test_backends.py | 67 -----------------------------------
 xarray/tests/test_combine.py  | 14 ++------
 4 files changed, 8 insertions(+), 120 deletions(-)

diff --git a/xarray/backends/api.py b/xarray/backends/api.py
index 1f0869cfc53..abc01c2a2d3 100644
--- a/xarray/backends/api.py
+++ b/xarray/backends/api.py
@@ -4,7 +4,6 @@
 from io import BytesIO
 from numbers import Number
 from pathlib import Path
-from textwrap import dedent
 from typing import (
     TYPE_CHECKING,
     Callable,
@@ -714,7 +713,7 @@ def open_mfdataset(
     lock=None,
     data_vars="all",
     coords="different",
-    combine="_old_auto",
+    combine="auto",
     autoclose=None,
     parallel=False,
     join="outer",
@@ -910,20 +909,7 @@ def open_mfdataset(
 
     # Combine all datasets, closing them in case of a ValueError
     try:
-        if combine == "_old_auto":
-            # Use the old auto_combine for now
-            # Remove this after deprecation cycle from #2616 is complete
-            basic_msg = dedent(
-                """\
-            In xarray version 0.13 the default behaviour of `open_mfdataset`
-            will change. To retain the existing behavior, pass
-            combine='nested'. To use future default behavior, pass
-            combine='by_coords'. See
-            http://xarray.pydata.org/en/stable/combining.html#combining-multi
-            """
-            )
-            warnings.warn(basic_msg, FutureWarning, stacklevel=2)
-
+        if combine == "auto":
             combined = auto_combine(
                 datasets,
                 concat_dim=concat_dim,
diff --git a/xarray/core/combine.py b/xarray/core/combine.py
index e35bb51e030..3f09a61c37a 100644
--- a/xarray/core/combine.py
+++ b/xarray/core/combine.py
@@ -713,21 +713,13 @@ def auto_combine(
     Dataset.merge
     """
 
-    if not from_openmfds:
-        basic_msg = dedent(
-            """\
-        In xarray version 0.13 `auto_combine` will be deprecated. See
-        http://xarray.pydata.org/en/stable/combining.html#combining-multi"""
-        )
-        warnings.warn(basic_msg, FutureWarning, stacklevel=2)
-
     if concat_dim == "_not_supplied":
         concat_dim = _CONCAT_DIM_DEFAULT
         message = ""
     else:
         message = dedent(
             """\
-        Also `open_mfdataset` will no longer accept a `concat_dim` argument.
+        `open_mfdataset` will no longer accept a `concat_dim` argument.
         To get equivalent behaviour from now on please use the new
         `combine_nested` function instead (or the `combine='nested'` option to
         `open_mfdataset`)."""
@@ -753,24 +745,9 @@ def auto_combine(
         `combine='nested'` option to open_mfdataset."""
         )
 
-    if _requires_concat_and_merge(datasets):
-        manual_dims = [concat_dim].append(None)
-        message += dedent(
-            """\
-        The datasets supplied require both concatenation and merging. From
-        xarray version 0.13 this will operation will require either using the
-        new `combine_nested` function (or the `combine='nested'` option to
-        open_mfdataset), with a nested list structure such that you can combine
-        along the dimensions {}. Alternatively if your datasets have global
-        dimension coordinates then you can use the new `combine_by_coords`
-        function.""".format(
-            manual_dims
-        )
-        )
-
     warnings.warn(message, FutureWarning, stacklevel=2)
 
-    return _old_auto_combine(
+    return _auto_combine(
         datasets,
         concat_dim=concat_dim,
         compat=compat,
@@ -816,7 +793,7 @@ def _requires_concat_and_merge(datasets):
     return len(list(grouped_by_vars)) > 1
 
 
-def _old_auto_combine(
+def _auto_combine(
     datasets,
     concat_dim=_CONCAT_DIM_DEFAULT,
     compat="no_conflicts",
diff --git a/xarray/tests/test_backends.py b/xarray/tests/test_backends.py
index f6254b32f4f..8a609675ef2 100644
--- a/xarray/tests/test_backends.py
+++ b/xarray/tests/test_backends.py
@@ -3061,73 +3061,6 @@ def test_load_dataarray(self):
             ds.to_netcdf(tmp)
 
 
-@requires_scipy_or_netCDF4
-@requires_dask
-class TestOpenMFDataSetDeprecation:
-    """
-    Set of tests to check that FutureWarnings are correctly raised until the
-    deprecation cycle is complete. #2616
-    """
-
-    def test_open_mfdataset_default(self):
-        ds1, ds2 = Dataset({"x": [0]}), Dataset({"x": [1]})
-        with create_tmp_file() as tmp1:
-            with create_tmp_file() as tmp2:
-                ds1.to_netcdf(tmp1)
-                ds2.to_netcdf(tmp2)
-
-                with pytest.warns(
-                    FutureWarning, match="default behaviour of" " `open_mfdataset`"
-                ):
-                    open_mfdataset([tmp1, tmp2])
-
-    def test_open_mfdataset_with_concat_dim(self):
-        ds1, ds2 = Dataset({"x": [0]}), Dataset({"x": [1]})
-        with create_tmp_file() as tmp1:
-            with create_tmp_file() as tmp2:
-                ds1.to_netcdf(tmp1)
-                ds2.to_netcdf(tmp2)
-
-                with pytest.warns(FutureWarning, match="`concat_dim`"):
-                    open_mfdataset([tmp1, tmp2], concat_dim="x")
-
-    def test_auto_combine_with_merge_and_concat(self):
-        ds1, ds2 = Dataset({"x": [0]}), Dataset({"x": [1]})
-        ds3 = Dataset({"z": ((), 99)})
-        with create_tmp_file() as tmp1:
-            with create_tmp_file() as tmp2:
-                with create_tmp_file() as tmp3:
-                    ds1.to_netcdf(tmp1)
-                    ds2.to_netcdf(tmp2)
-                    ds3.to_netcdf(tmp3)
-
-                    with pytest.warns(
-                        FutureWarning, match="require both concatenation"
-                    ):
-                        open_mfdataset([tmp1, tmp2, tmp3])
-
-    def test_auto_combine_with_coords(self):
-        ds1 = Dataset({"foo": ("x", [0])}, coords={"x": ("x", [0])})
-        ds2 = Dataset({"foo": ("x", [1])}, coords={"x": ("x", [1])})
-        with create_tmp_file() as tmp1:
-            with create_tmp_file() as tmp2:
-                ds1.to_netcdf(tmp1)
-                ds2.to_netcdf(tmp2)
-
-                with pytest.warns(FutureWarning, match="supplied have global"):
-                    open_mfdataset([tmp1, tmp2])
-
-    def test_auto_combine_without_coords(self):
-        ds1, ds2 = Dataset({"foo": ("x", [0])}), Dataset({"foo": ("x", [1])})
-        with create_tmp_file() as tmp1:
-            with create_tmp_file() as tmp2:
-                ds1.to_netcdf(tmp1)
-                ds2.to_netcdf(tmp2)
-
-                with pytest.warns(FutureWarning, match="supplied do not have global"):
-                    open_mfdataset([tmp1, tmp2])
-
-
 @requires_scipy_or_netCDF4
 @requires_pydap
 @pytest.mark.filterwarnings("ignore:The binary mode of fromstring is deprecated")
diff --git a/xarray/tests/test_combine.py b/xarray/tests/test_combine.py
index 1abca30d199..f585e36ef48 100644
--- a/xarray/tests/test_combine.py
+++ b/xarray/tests/test_combine.py
@@ -583,7 +583,7 @@ def test_combine_nested_fill_value(self, fill_value):
         assert_identical(expected, actual)
 
 
-class TestCombineAuto:
+class TestCombineCoords:
     def test_combine_by_coords(self):
         objs = [Dataset({"x": [0]}), Dataset({"x": [1]})]
         actual = combine_by_coords(objs)
@@ -713,12 +713,9 @@ def test_check_for_impossible_ordering(self):
             combine_by_coords([ds1, ds0])
 
 
-@pytest.mark.filterwarnings(
-    "ignore:In xarray version 0.13 `auto_combine` " "will be deprecated"
-)
-@pytest.mark.filterwarnings("ignore:Also `open_mfdataset` will no longer")
+@pytest.mark.filterwarnings("ignore:`open_mfdataset` will no longer")
 @pytest.mark.filterwarnings("ignore:The datasets supplied")
-class TestAutoCombineOldAPI:
+class TestAutoCombine:
     """
     Set of tests which check that old 1-dimensional auto_combine behaviour is
     still satisfied. #2616
     """
@@ -862,11 +859,6 @@ def test_auto_combine_with_concat_dim(self):
         with pytest.warns(FutureWarning, match="`concat_dim`"):
             auto_combine(objs, concat_dim="x")
 
-    def test_auto_combine_with_merge_and_concat(self):
-        objs = [Dataset({"x": [0]}), Dataset({"x": [1]}), Dataset({"z": ((), 99)})]
-        with pytest.warns(FutureWarning, match="require both concatenation"):
-            auto_combine(objs)
-
    def test_auto_combine_with_coords(self):
        objs = [
            Dataset({"foo": ("x", [0])}, coords={"x": ("x", [0])}),