
Commit

STYLE: Add code check to avoid returning Exceptions instead of raising (
mroeschke committed Oct 7, 2022
1 parent c0e6baf commit c0c6537
Showing 10 changed files with 47 additions and 32 deletions.
7 changes: 7 additions & 0 deletions .pre-commit-config.yaml
@@ -226,6 +226,13 @@ repos:
entry: python scripts/no_bool_in_generic.py
language: python
files: ^pandas/core/generic\.py$
+ - id: no-return-exception
+ name: Use raise instead of return for exceptions
+ language: pygrep
+ entry: 'return [A-Za-z]+(Error|Exit|Interrupt|Exception|Iteration)'
+ files: ^pandas/
+ types: [python]
+ exclude: ^pandas/tests/
- id: pandas-errors-documented
name: Ensure pandas errors are documented in doc/source/reference/testing.rst
entry: python scripts/pandas_errors_documented.py
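For illustration, a minimal sketch (not part of this commit) of what the new hook flags. pre-commit's pygrep language greps file contents line by line under pandas/ (excluding pandas/tests/), so a plain re.search over each line approximates it:

import re

# Same pattern as the `no-return-exception` entry above.
PATTERN = re.compile(r"return [A-Za-z]+(Error|Exit|Interrupt|Exception|Iteration)")

flagged = "        return TypeError('must pass a collection')"  # hook reports this line
fixed = "        raise TypeError('must pass a collection')"     # hook stays silent

print(bool(PATTERN.search(flagged)))  # True
print(bool(PATTERN.search(fixed)))    # False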
27 changes: 18 additions & 9 deletions pandas/core/indexes/base.py
@@ -554,7 +554,7 @@ def __new__(
return klass._simple_new(arr, name)

elif is_scalar(data):
- raise cls._scalar_data_error(data)
+ raise cls._raise_scalar_data_error(data)
elif hasattr(data, "__array__"):
return Index(np.asarray(data), dtype=dtype, copy=copy, name=name, **kwargs)
else:
@@ -4386,14 +4386,23 @@ def is_int(v):
return indexer

@final
- def _invalid_indexer(self, form: str_t, key) -> TypeError:
+ def _raise_invalid_indexer(
+ self,
+ form: str_t,
+ key,
+ reraise: lib.NoDefault | None | Exception = lib.no_default,
+ ) -> None:
"""
- Consistent invalid indexer message.
+ Raise consistent invalid indexer message.
"""
- return TypeError(
+ msg = (
f"cannot do {form} indexing on {type(self).__name__} with these "
f"indexers [{key}] of type {type(key).__name__}"
)
+ if reraise is not lib.no_default:
+ raise TypeError(msg) from reraise
+ else:
+ raise TypeError(msg)

# --------------------------------------------------------------------
# Reindex Methods
@@ -5279,10 +5288,10 @@ def where(self, cond, other=None) -> Index:
# construction helpers
@final
@classmethod
- def _scalar_data_error(cls, data):
+ def _raise_scalar_data_error(cls, data):
# We return the TypeError so that we can raise it from the constructor
# in order to keep mypy happy
- return TypeError(
+ raise TypeError(
f"{cls.__name__}(...) must be called with a collection of some "
f"kind, {repr(data)} was passed"
)
@@ -6674,15 +6683,15 @@ def _maybe_cast_listlike_indexer(self, target) -> Index:
return ensure_index(target)

@final
- def _validate_indexer(self, form: str_t, key, kind: str_t):
+ def _validate_indexer(self, form: str_t, key, kind: str_t) -> None:
"""
If we are positional indexer, validate that we have appropriate
typed bounds must be an integer.
"""
assert kind in ["getitem", "iloc"]

if key is not None and not is_integer(key):
- raise self._invalid_indexer(form, key)
+ self._raise_invalid_indexer(form, key)

def _maybe_cast_slice_bound(self, label, side: str_t, kind=no_default):
"""
@@ -6714,7 +6723,7 @@ def _maybe_cast_slice_bound(self, label, side: str_t, kind=no_default):
# datetimelike Indexes
# reject them, if index does not contain label
if (is_float(label) or is_integer(label)) and label not in self:
- raise self._invalid_indexer("slice", label)
+ self._raise_invalid_indexer("slice", label)

return label

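In short, base.py's helpers now raise instead of handing an exception object back to the caller. A simplified standalone sketch of the new _raise_invalid_indexer behaviour (the sentinel and free-function form are stand-ins, not pandas internals):

_NO_DEFAULT = object()  # stand-in for pandas' lib.no_default sentinel

def raise_invalid_indexer(form: str, key, reraise=_NO_DEFAULT) -> None:
    """Build the consistent message and raise, optionally chaining to `reraise`."""
    msg = (
        f"cannot do {form} indexing on Index with these "
        f"indexers [{key}] of type {type(key).__name__}"
    )
    if reraise is not _NO_DEFAULT:
        raise TypeError(msg) from reraise
    raise TypeError(msg)

# Old call sites wrote `raise self._invalid_indexer("slice", 1.5)`;
# now they simply call the helper, which never returns normally:
# raise_invalid_indexer("slice", 1.5)  # TypeError: cannot do slice indexing ...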
2 changes: 1 addition & 1 deletion pandas/core/indexes/category.py
@@ -230,7 +230,7 @@ def __new__(
data = []

if is_scalar(data):
- raise cls._scalar_data_error(data)
+ cls._raise_scalar_data_error(data)

data = Categorical(
data, categories=categories, ordered=ordered, dtype=dtype, copy=copy
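The user-facing behaviour at these constructor call sites is unchanged; passing a scalar still produces the same TypeError, roughly:

import pandas as pd

try:
    pd.CategoricalIndex(1)  # a scalar rather than a collection
except TypeError as exc:
    print(exc)
# CategoricalIndex(...) must be called with a collection of some kind, 1 was passed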
4 changes: 2 additions & 2 deletions pandas/core/indexes/datetimelike.py
@@ -314,12 +314,12 @@ def _maybe_cast_slice_bound(self, label, side: str, kind=lib.no_default):
# DTI -> parsing.DateParseError
# TDI -> 'unit abbreviation w/o a number'
# PI -> string cannot be parsed as datetime-like
- raise self._invalid_indexer("slice", label) from err
+ self._raise_invalid_indexer("slice", label, err)

lower, upper = self._parsed_string_to_bounds(reso, parsed)
return lower if side == "left" else upper
elif not isinstance(label, self._data._recognized_scalars):
- raise self._invalid_indexer("slice", label)
+ self._raise_invalid_indexer("slice", label)

return label

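Passing the caught parsing error as the new reraise argument keeps the exception chain that `raise ... from err` used to provide, so the original parsing error still shows up as __cause__. A minimal illustration of that chaining (generic code, not the pandas parser):

def cast_slice_bound(label: str) -> int:
    try:
        return int(label)
    except ValueError as err:
        # what _raise_invalid_indexer does when `reraise` is supplied
        raise TypeError(f"cannot do slice indexing with [{label}]") from err

try:
    cast_slice_bound("2022-Q5x")
except TypeError as exc:
    print(type(exc.__cause__).__name__)  # prints "ValueError" - the original error is kept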
2 changes: 1 addition & 1 deletion pandas/core/indexes/datetimes.py
@@ -334,7 +334,7 @@ def __new__(
) -> DatetimeIndex:

if is_scalar(data):
- raise cls._scalar_data_error(data)
+ cls._raise_scalar_data_error(data)

# - Cases checked above all return/raise before reaching here - #

2 changes: 1 addition & 1 deletion pandas/core/indexes/numeric.py
@@ -140,7 +140,7 @@ def _ensure_array(cls, data, dtype, copy: bool):
if not isinstance(data, (np.ndarray, Index)):
# Coerce to ndarray if not already ndarray or Index
if is_scalar(data):
- raise cls._scalar_data_error(data)
+ cls._raise_scalar_data_error(data)

# other iterable of some kind
if not isinstance(data, (ABCSeries, list, tuple)):
2 changes: 1 addition & 1 deletion pandas/core/indexes/period.py
@@ -240,7 +240,7 @@ def __new__(
# range-based.
if not fields:
# test_pickle_compat_construction
- raise cls._scalar_data_error(None)
+ cls._raise_scalar_data_error(None)

data, freq2 = PeriodArray._generate_range(None, None, None, freq, fields)
# PeriodArray._generate range does validation that fields is
2 changes: 1 addition & 1 deletion pandas/core/indexes/timedeltas.py
@@ -128,7 +128,7 @@ def __new__(
name = maybe_extract_name(name, data, cls)

if is_scalar(data):
- raise cls._scalar_data_error(data)
+ cls._raise_scalar_data_error(data)

if unit in {"Y", "y", "M"}:
raise ValueError(
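Note that these subclass constructors drop the leading `raise` entirely, while base.py's own __new__ keeps `raise cls._raise_scalar_data_error(data)` so mypy still sees that branch as terminating. An alternative way to get the same effect (an assumption for illustration, not what this commit does) is to annotate the helper with typing.NoReturn:

from typing import NoReturn

class Base:
    @classmethod
    def _raise_scalar_data_error(cls, data) -> NoReturn:
        # NoReturn tells type checkers the call never falls through,
        # so a bare call is enough at the call site - no extra `raise` needed.
        raise TypeError(
            f"{cls.__name__}(...) must be called with a collection of some "
            f"kind, {data!r} was passed"
        )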
14 changes: 7 additions & 7 deletions pandas/core/internals/managers.py
@@ -1038,7 +1038,7 @@ def _verify_integrity(self) -> None:
tot_items = sum(len(x.mgr_locs) for x in self.blocks)
for block in self.blocks:
if block.shape[1:] != mgr_shape[1:]:
- raise construction_error(tot_items, block.shape[1:], self.axes)
+ raise_construction_error(tot_items, block.shape[1:], self.axes)
if len(self.items) != tot_items:
raise AssertionError(
"Number of manager items must equal union of "
@@ -2145,7 +2145,7 @@ def create_block_manager_from_blocks(
except ValueError as err:
arrays = [blk.values for blk in blocks]
tot_items = sum(arr.shape[0] for arr in arrays)
- raise construction_error(tot_items, arrays[0].shape[1:], axes, err)
+ raise_construction_error(tot_items, arrays[0].shape[1:], axes, err)

if consolidate:
mgr._consolidate_inplace()
@@ -2172,13 +2172,13 @@ def create_block_manager_from_column_arrays(
blocks = _form_blocks(arrays, consolidate)
mgr = BlockManager(blocks, axes, verify_integrity=False)
except ValueError as e:
- raise construction_error(len(arrays), arrays[0].shape, axes, e)
+ raise_construction_error(len(arrays), arrays[0].shape, axes, e)
if consolidate:
mgr._consolidate_inplace()
return mgr


- def construction_error(
+ def raise_construction_error(
tot_items: int,
block_shape: Shape,
axes: list[Index],
@@ -2198,10 +2198,10 @@ def construction_error(
# We return the exception object instead of raising it so that we
# can raise it in the caller; mypy plays better with that
if passed == implied and e is not None:
- return e
+ raise e
if block_shape[0] == 0:
- return ValueError("Empty data passed with indices specified.")
- return ValueError(f"Shape of passed values is {passed}, indices imply {implied}")
+ raise ValueError("Empty data passed with indices specified.")
+ raise ValueError(f"Shape of passed values is {passed}, indices imply {implied}")


# -----------------------------------------------------------------------
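raise_construction_error compares the shape implied by the blocks (passed) with the shape of the supplied axes (implied) and raises whichever message fits; when the shapes agree it simply re-raises the original, more specific error. A simplified sketch of that logic (not the pandas signature - axes are reduced to their lengths here):

def raise_construction_error_sketch(tot_items, block_shape, axis_lengths, e=None):
    passed = (tot_items, *block_shape)      # shape built from the blocks
    implied = tuple(axis_lengths)           # shape the axes say we should have
    if passed == implied and e is not None:
        raise e                             # shapes agree: surface the original error
    if block_shape[0] == 0:
        raise ValueError("Empty data passed with indices specified.")
    raise ValueError(f"Shape of passed values is {passed}, indices imply {implied}")

# raise_construction_error_sketch(3, (4,), [3, 5])
# ValueError: Shape of passed values is (3, 4), indices imply (3, 5)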
17 changes: 8 additions & 9 deletions pandas/io/pytables.py
@@ -1658,13 +1658,6 @@ def _create_storer(
if value is not None and not isinstance(value, (Series, DataFrame)):
raise TypeError("value must be None, Series, or DataFrame")

- def error(t):
- # return instead of raising so mypy can tell where we are raising
- return TypeError(
- f"cannot properly create the storer for: [{t}] [group->"
- f"{group},value->{type(value)},format->{format}"
- )
-
pt = _ensure_decoded(getattr(group._v_attrs, "pandas_type", None))
tt = _ensure_decoded(getattr(group._v_attrs, "table_type", None))

@@ -1699,7 +1692,10 @@ def error(t):
try:
cls = _STORER_MAP[pt]
except KeyError as err:
- raise error("_STORER_MAP") from err
+ raise TypeError(
+ f"cannot properly create the storer for: [_STORER_MAP] [group->"
+ f"{group},value->{type(value)},format->{format}"
+ ) from err
return cls(self, group, encoding=encoding, errors=errors)

# existing node (and must be a table)
@@ -1732,7 +1728,10 @@ def error(t):
try:
cls = _TABLE_MAP[tt]
except KeyError as err:
- raise error("_TABLE_MAP") from err
+ raise TypeError(
+ f"cannot properly create the storer for: [_TABLE_MAP] [group->"
+ f"{group},value->{type(value)},format->{format}"
+ ) from err

return cls(self, group, encoding=encoding, errors=errors)

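With the nested error() helper removed, each except block now raises the TypeError inline and chains it to the failed lookup via `from err`. A compact sketch of the lookup-and-raise pattern (stand-in map and names, not the pytables code):

_STORER_MAP_SKETCH = {"series": "SeriesFixed", "frame": "FrameFixed"}

def lookup_storer(pandas_type: str) -> str:
    try:
        return _STORER_MAP_SKETCH[pandas_type]
    except KeyError as err:
        raise TypeError(
            f"cannot properly create the storer for: [_STORER_MAP] "
            f"[pandas_type->{pandas_type}]"
        ) from err

# lookup_storer("panel")  # TypeError, with the KeyError attached as __cause__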
