DEPR: remove v018 resample compatibility
jreback committed Apr 21, 2018
1 parent 8def649 commit 0797db2
Showing 3 changed files with 12 additions and 276 deletions.
4 changes: 2 additions & 2 deletions doc/source/whatsnew/v0.23.0.txt
@@ -555,7 +555,6 @@ Convert to an xarray DataArray
p.to_xarray()



.. _whatsnew_0230.api_breaking.core_common:

pandas.core.common removals
@@ -923,7 +922,8 @@ Removal of prior version deprecations/changes
- The ``infer_dst`` keyword in :meth:`Series.tz_localize`, :meth:`DatetimeIndex.tz_localize`
and :class:`DatetimeIndex` has been removed. ``infer_dst=True`` is equivalent to
``ambiguous='infer'``, and ``infer_dst=False`` to ``ambiguous='raise'`` (:issue:`7963`).

- When ``.resample()`` was changed from an eager to a lazy operation, like ``.groupby()`` in v0.18.0, we put in place compatibility (with a ``FutureWarning``)
so that existing operations would continue to work. This compatibility layer has now been fully removed, as sketched below (:issue:`20554`)
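A minimal sketch of the migration, using hypothetical data (the before/after pattern mirrors the tests removed in this commit):

import numpy as np
import pandas as pd

# hypothetical hourly data, purely for illustration
s = pd.Series(np.arange(6.0),
              index=pd.date_range('2018-01-01', periods=6, freq='H'))

# previously deprecated (eager) spelling, now removed:
#     s.resample('2H', how='sum')
# current (deferred) spelling:
s.resample('2H').sum()

df = pd.DataFrame({'A': ['x', 'y'] * 3, 'B': np.arange(6.0)}, index=s.index)

# previously: df.groupby('A').resample('4H', how='mean', fill_method='ffill')
df.groupby('A').resample('4H').mean().ffill()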

.. _whatsnew_0230.performance:

125 changes: 10 additions & 115 deletions pandas/core/resample.py
@@ -62,20 +62,6 @@ class Resampler(_GroupBy):
_attributes = ['freq', 'axis', 'closed', 'label', 'convention',
'loffset', 'base', 'kind']

# API compat of allowed attributes
_deprecated_valids = _attributes + ['__doc__', '_cache', '_attributes',
'binner', 'grouper', 'groupby',
'sort', 'kind', 'squeeze', 'keys',
'group_keys', 'as_index', 'exclusions',
'_groupby']

# don't raise deprecation warning on attributes starting with these
# patterns - prevents warnings caused by IPython introspection
_deprecated_valid_patterns = ['_ipython', '_repr']

# API compat of disallowed attributes
_deprecated_invalids = ['iloc', 'loc', 'ix', 'iat', 'at']

def __init__(self, obj, groupby=None, axis=0, kind=None, **kwargs):
self.groupby = groupby
self.keys = None
@@ -100,6 +86,16 @@ def __unicode__(self):
return "{klass} [{attrs}]".format(klass=self.__class__.__name__,
attrs=', '.join(attrs))

def __getattr__(self, attr):
if attr in self._internal_names_set:
return object.__getattribute__(self, attr)
if attr in self._attributes:
return getattr(self.groupby, attr)
if attr in self.obj:
return self[attr]

return object.__getattribute__(self, attr)

@property
def obj(self):
return self.groupby.obj
@@ -124,100 +120,6 @@ def _from_selection(self):
(self.groupby.key is not None or
self.groupby.level is not None))

def _deprecated(self, op):
warnings.warn(("\n.resample() is now a deferred operation\n"
"You called {op}(...) on this deferred object "
"which materialized it into a {klass}\nby implicitly "
"taking the mean. Use .resample(...).mean() "
"instead").format(op=op, klass=self._typ),
FutureWarning, stacklevel=3)
return self.mean()

def _make_deprecated_binop(op):
# op is a string

def _evaluate_numeric_binop(self, other):
result = self._deprecated(op)
return getattr(result, op)(other)
return _evaluate_numeric_binop

def _make_deprecated_unary(op, name):
# op is a callable

def _evaluate_numeric_unary(self):
result = self._deprecated(name)
return op(result)
return _evaluate_numeric_unary

def __array__(self):
return self._deprecated('__array__').__array__()

__gt__ = _make_deprecated_binop('__gt__')
__ge__ = _make_deprecated_binop('__ge__')
__lt__ = _make_deprecated_binop('__lt__')
__le__ = _make_deprecated_binop('__le__')
__eq__ = _make_deprecated_binop('__eq__')
__ne__ = _make_deprecated_binop('__ne__')

__add__ = __radd__ = _make_deprecated_binop('__add__')
__sub__ = __rsub__ = _make_deprecated_binop('__sub__')
__mul__ = __rmul__ = _make_deprecated_binop('__mul__')
__floordiv__ = __rfloordiv__ = _make_deprecated_binop('__floordiv__')
__truediv__ = __rtruediv__ = _make_deprecated_binop('__truediv__')
if not compat.PY3:
__div__ = __rdiv__ = _make_deprecated_binop('__div__')
__neg__ = _make_deprecated_unary(lambda x: -x, '__neg__')
__pos__ = _make_deprecated_unary(lambda x: x, '__pos__')
__abs__ = _make_deprecated_unary(lambda x: np.abs(x), '__abs__')
__inv__ = _make_deprecated_unary(lambda x: -x, '__inv__')

def __getattr__(self, attr):
if attr in self._internal_names_set:
return object.__getattribute__(self, attr)
if attr in self._attributes:
return getattr(self.groupby, attr)
if attr in self.obj:
return self[attr]

if attr in self._deprecated_invalids:
raise ValueError(".resample() is now a deferred operation\n"
"\tuse .resample(...).mean() instead of "
".resample(...)")

matches_pattern = any(attr.startswith(x) for x
in self._deprecated_valid_patterns)
if not matches_pattern and attr not in self._deprecated_valids:
# avoid the warning, if it's just going to be an exception
# anyway.
if not hasattr(self.obj, attr):
raise AttributeError("'{}' has no attribute '{}'".format(
type(self.obj).__name__, attr
))
self = self._deprecated(attr)

return object.__getattribute__(self, attr)

def __setattr__(self, attr, value):
if attr not in self._deprecated_valids:
raise ValueError("cannot set values on {0}".format(
self.__class__.__name__))
object.__setattr__(self, attr, value)

def __getitem__(self, key):
try:
return super(Resampler, self).__getitem__(key)
except (KeyError, AbstractMethodError):

# compat for deprecated
if isinstance(self.obj, ABCSeries):
return self._deprecated('__getitem__')[key]

raise

def __setitem__(self, attr, value):
raise ValueError("cannot set items on {0}".format(
self.__class__.__name__))

def _convert_obj(self, obj):
"""
provide any conversions for the object in order to correctly handle
@@ -282,11 +184,6 @@ def _assure_grouper(self):
def pipe(self, func, *args, **kwargs):
return super(Resampler, self).pipe(func, *args, **kwargs)

def plot(self, *args, **kwargs):
# for compat with prior versions, we want to
# have the warnings shown here and just have this work
return self._deprecated('plot').plot(*args, **kwargs)

_agg_doc = dedent("""
Examples
@@ -853,8 +750,6 @@ def size(self):
return result


Resampler._deprecated_valids += dir(Resampler)


# downsample methods
for method in ['sum', 'prod']:
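The trimmed ``__getattr__`` shown above keeps only three resolution paths: internal names, attributes delegated to the underlying grouper, and column selection. Anything else falls through to ``object.__getattribute__`` and surfaces as a plain ``AttributeError`` instead of implicitly materializing ``.mean()``. A minimal sketch of the resulting behavior, using made-up data purely for illustration:

import numpy as np
import pandas as pd

# made-up minute-frequency data
s = pd.Series(np.arange(6.0),
              index=pd.date_range('2018-01-01', periods=6, freq='T'))
r = s.resample('2T')

r.freq                            # delegated to the grouper via _attributes
r.mean()                          # aggregations are always called explicitly

df = s.to_frame('value')
df.resample('2T')['value'].sum()  # column selection still goes through __getitem__

try:
    r.sort_values()               # previously warned and fell back to .mean()
except AttributeError as exc:     # now a plain AttributeError
    print(exc)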
159 changes: 0 additions & 159 deletions pandas/tests/test_resample.py
@@ -84,122 +84,6 @@ def test_api(self):
assert isinstance(result, DataFrame)
assert len(result) == 217

def test_api_changes_v018(self):

# change from .resample(....., how=...)
# to .resample(......).how()

r = self.series.resample('H')
assert isinstance(r, DatetimeIndexResampler)

for how in ['sum', 'mean', 'prod', 'min', 'max', 'var', 'std']:
with tm.assert_produces_warning(FutureWarning,
check_stacklevel=False):
result = self.series.resample('H', how=how)
expected = getattr(self.series.resample('H'), how)()
tm.assert_series_equal(result, expected)

with tm.assert_produces_warning(FutureWarning,
check_stacklevel=False):
result = self.series.resample('H', how='ohlc')
expected = self.series.resample('H').ohlc()
tm.assert_frame_equal(result, expected)

# compat for pandas-like methods
for how in ['sort_values', 'isna']:
with tm.assert_produces_warning(FutureWarning,
check_stacklevel=False):
getattr(r, how)()

# invalids as these can be setting operations
r = self.series.resample('H')
pytest.raises(ValueError, lambda: r.iloc[0])
pytest.raises(ValueError, lambda: r.iat[0])
pytest.raises(ValueError, lambda: r.loc[0])
pytest.raises(ValueError, lambda: r.loc[
Timestamp('2013-01-01 00:00:00', offset='H')])
pytest.raises(ValueError, lambda: r.at[
Timestamp('2013-01-01 00:00:00', offset='H')])

def f():
r[0] = 5

pytest.raises(ValueError, f)

# str/repr
r = self.series.resample('H')
with tm.assert_produces_warning(None):
str(r)
with tm.assert_produces_warning(None):
repr(r)

with tm.assert_produces_warning(FutureWarning,
check_stacklevel=False):
tm.assert_numpy_array_equal(np.array(r), np.array(r.mean()))

# masquerade as Series/DataFrame as needed for API compat
assert isinstance(self.series.resample('H'), ABCSeries)
assert not isinstance(self.frame.resample('H'), ABCSeries)
assert not isinstance(self.series.resample('H'), ABCDataFrame)
assert isinstance(self.frame.resample('H'), ABCDataFrame)

# bin numeric ops
for op in ['__add__', '__mul__', '__truediv__', '__div__', '__sub__']:

if getattr(self.series, op, None) is None:
continue
r = self.series.resample('H')

with tm.assert_produces_warning(FutureWarning,
check_stacklevel=False):
assert isinstance(getattr(r, op)(2), Series)

# unary numeric ops
for op in ['__pos__', '__neg__', '__abs__', '__inv__']:

if getattr(self.series, op, None) is None:
continue
r = self.series.resample('H')

with tm.assert_produces_warning(FutureWarning,
check_stacklevel=False):
assert isinstance(getattr(r, op)(), Series)

# comparison ops
for op in ['__lt__', '__le__', '__gt__', '__ge__', '__eq__', '__ne__']:
r = self.series.resample('H')

with tm.assert_produces_warning(FutureWarning,
check_stacklevel=False):
assert isinstance(getattr(r, op)(2), Series)

# IPython introspection shouldn't trigger warning GH 13618
for op in ['_repr_json', '_repr_latex',
'_ipython_canary_method_should_not_exist_']:
r = self.series.resample('H')
with tm.assert_produces_warning(None):
getattr(r, op, None)

# getitem compat
df = self.series.to_frame('foo')

# same as prior versions for DataFrame
pytest.raises(KeyError, lambda: df.resample('H')[0])

# compat for Series
# but we cannot be sure that we need a warning here
with tm.assert_produces_warning(FutureWarning,
check_stacklevel=False):
result = self.series.resample('H')[0]
expected = self.series.resample('H').mean()[0]
assert result == expected

with tm.assert_produces_warning(FutureWarning,
check_stacklevel=False):
result = self.series.resample('H')['2005-01-09 23:00:00']
expected = self.series.resample('H').mean()['2005-01-09 23:00:00']
assert result == expected

def test_groupby_resample_api(self):

# GH 12448
@@ -251,23 +135,6 @@ def test_pipe(self):
result = r.pipe(lambda x: x.max() - x.mean())
tm.assert_frame_equal(result, expected)

@td.skip_if_no_mpl
def test_plot_api(self):
# .resample(....).plot(...)
# hitting warnings
# GH 12448
s = Series(np.random.randn(60),
index=date_range('2016-01-01', periods=60, freq='1min'))
with tm.assert_produces_warning(FutureWarning,
check_stacklevel=False):
result = s.resample('15min').plot()
tm.assert_is_valid_plot_return_object(result)

with tm.assert_produces_warning(FutureWarning,
check_stacklevel=False):
result = s.resample('15min', how='sum').plot()
tm.assert_is_valid_plot_return_object(result)

def test_getitem(self):

r = self.frame.resample('H')
@@ -301,15 +168,6 @@ def test_attribute_access(self):
r = self.frame.resample('H')
tm.assert_series_equal(r.A.sum(), r['A'].sum())

# getting
pytest.raises(AttributeError, lambda: r.F)

# setting
def f():
r.F = 'bah'

pytest.raises(ValueError, f)

def test_api_compat_before_use(self):

# make sure that we are setting the binner
@@ -3012,23 +2870,6 @@ def setup_method(self, method):
freq='s',
periods=40))

def test_back_compat_v180(self):

df = self.frame
for how in ['sum', 'mean', 'prod', 'min', 'max', 'var', 'std']:
with tm.assert_produces_warning(FutureWarning,
check_stacklevel=False):
result = df.groupby('A').resample('4s', how=how)
expected = getattr(df.groupby('A').resample('4s'), how)()
assert_frame_equal(result, expected)

with tm.assert_produces_warning(FutureWarning,
check_stacklevel=False):
result = df.groupby('A').resample('4s', how='mean',
fill_method='ffill')
expected = df.groupby('A').resample('4s').mean().ffill()
assert_frame_equal(result, expected)

def test_tab_complete_ipython6_warning(self, ip):
from IPython.core.completer import provisionalcompleter
code = dedent("""\
