Add pylint #4100

Closed · wants to merge 1 commit

1 change: 1 addition & 0 deletions py-polars/build.requirements.txt
@@ -20,6 +20,7 @@ isort~=5.10.1
mypy==0.961
ghp-import==2.1.0
flake8==4.0.1
pylint==2.14.5
sphinx==4.2.0
pydata-sphinx-theme==0.6.3
sphinx-panels==0.6.0
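
For context, a minimal sketch (not part of this diff) of how the newly pinned linter might be driven programmatically; in practice it would usually be run from the command line as `pylint polars` inside `py-polars/`. `pylint.lint.Run` is pylint's programmatic entry point.

```python
# Hypothetical programmatic invocation of the newly added dev dependency;
# equivalent to running `pylint polars` from the py-polars directory.
from pylint.lint import Run

# Run exits the interpreter with pylint's status code once linting finishes
# (non-zero when any messages are emitted).
Run(["polars"])
```
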
4 changes: 2 additions & 2 deletions py-polars/polars/__init__.py
@@ -1,3 +1,4 @@
import os
import warnings

try:
@@ -10,7 +11,7 @@ def version() -> str:
# this is only useful for documentation
warnings.warn("polars binary missing!")

import polars.testing as testing
from polars import testing
from polars.cfg import Config, toggle_string_cache # We do not export in __all__
from polars.convert import (
from_arrow,
@@ -238,6 +239,5 @@ def version() -> str:

__version__ = version()

import os

os.environ["POLARS_ALLOW_EXTENSION"] = "true"
16 changes: 8 additions & 8 deletions py-polars/polars/convert.py
@@ -165,8 +165,8 @@ def from_records(
DeprecationWarning,
)
return DataFrame._from_numpy(data, columns=columns, orient=orient)
else:
return DataFrame._from_records(data, columns=columns, orient=orient)

return DataFrame._from_records(data, columns=columns, orient=orient)


def from_numpy(
@@ -281,10 +281,10 @@ def from_arrow(
raise ImportError("'pyarrow' is required when using from_arrow().")
if isinstance(a, pa.Table):
return DataFrame._from_arrow(a, rechunk=rechunk)
elif isinstance(a, (pa.Array, pa.ChunkedArray)):
if isinstance(a, (pa.Array, pa.ChunkedArray)):
return Series._from_arrow("", a, rechunk)
else:
raise ValueError(f"Expected Arrow Table or Array, got {type(a)}.")

raise ValueError(f"Expected Arrow Table or Array, got {type(a)}.")


@overload
@@ -369,7 +369,7 @@ def from_pandas(

if isinstance(df, (pd.Series, pd.DatetimeIndex)):
return Series._from_pandas("", df, nan_to_none=nan_to_none)
elif isinstance(df, pd.DataFrame):
if isinstance(df, pd.DataFrame):
return DataFrame._from_pandas(df, rechunk=rechunk, nan_to_none=nan_to_none)
else:
raise ValueError(f"Expected pandas DataFrame or Series, got {type(df)}.")

raise ValueError(f"Expected pandas DataFrame or Series, got {type(df)}.")
24 changes: 12 additions & 12 deletions py-polars/polars/datatypes.py
@@ -126,13 +126,13 @@ def __eq__(self, other: type[DataType]) -> bool: # type: ignore[override]
# List[i64] == List[f32] == False

# allow comparing object instances to class
if type(other) is type and issubclass(other, List):
if isinstance(other, type) and issubclass(other, List): # type: ignore[redundant-expr]
return True
if isinstance(other, List):
if self.inner is None or other.inner is None:
return True
else:
return self.inner == other.inner

return self.inner == other.inner
else:
return False

@@ -421,26 +421,26 @@ def dtype_to_ctype(dtype: PolarsDataType) -> type[_SimpleCData]:
try:
return _DTYPE_TO_CTYPE[dtype]
except KeyError: # pragma: no cover
raise NotImplementedError
raise NotImplementedError from None


def dtype_to_ffiname(dtype: PolarsDataType) -> str:
try:
return _DTYPE_TO_FFINAME[dtype]
except KeyError: # pragma: no cover
raise NotImplementedError
raise NotImplementedError from None


def dtype_to_py_type(dtype: PolarsDataType) -> type:
try:
return _DTYPE_TO_PY_TYPE[dtype]
except KeyError: # pragma: no cover
raise NotImplementedError
raise NotImplementedError from None


def is_polars_dtype(data_type: Any) -> bool:
return (
type(data_type) is type
isinstance(data_type, type)
and issubclass(data_type, DataType)
or isinstance(data_type, DataType)
)
@@ -453,7 +453,7 @@ def py_type_to_dtype(data_type: Any) -> type[DataType]:
try:
return _PY_TYPE_TO_DTYPE[data_type]
except KeyError: # pragma: no cover
raise NotImplementedError
raise NotImplementedError from None


def py_type_to_arrow_type(dtype: type[Any]) -> pa.lib.DataType:
@@ -463,7 +463,7 @@ def py_type_to_arrow_type(dtype: type[Any]) -> pa.lib.DataType:
try:
return _PY_TYPE_TO_ARROW_TYPE[dtype]
except KeyError: # pragma: no cover
raise ValueError(f"Cannot parse dtype {dtype} into Arrow dtype.")
raise ValueError(f"Cannot parse dtype {dtype} into Arrow dtype.") from None


def dtype_to_arrow_type(dtype: PolarsDataType) -> pa.lib.DataType:
@@ -473,7 +473,7 @@ def dtype_to_arrow_type(dtype: PolarsDataType) -> pa.lib.DataType:
try:
return _DTYPE_TO_ARROW_TYPE[dtype]
except KeyError: # pragma: no cover
raise ValueError(f"Cannot parse dtype {dtype} into Arrow dtype.")
raise ValueError(f"Cannot parse dtype {dtype} into Arrow dtype.") from None


def supported_numpy_char_code(dtype: str) -> bool:
@@ -484,7 +484,7 @@ def numpy_char_code_to_dtype(dtype: str) -> type[DataType]:
try:
return _NUMPY_CHAR_CODE_TO_DTYPE[dtype]
except KeyError: # pragma: no cover
raise NotImplementedError
raise NotImplementedError from None


def maybe_cast(
@@ -495,7 +495,7 @@ def maybe_cast(

if isinstance(el, datetime):
return _datetime_to_pl_timestamp(el, time_unit)
elif isinstance(el, timedelta):
if isinstance(el, timedelta):
return _timedelta_to_pl_timedelta(el, time_unit)
py_type = dtype_to_py_type(dtype)
if not isinstance(el, py_type):
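
Several `datatypes.py` hunks replace `type(x) is type` with `isinstance(x, type)`, in line with pylint's preference for `isinstance()` over direct `type()` comparisons (the `unidiomatic-typecheck` check). The two spellings are not quite equivalent: `isinstance` also accepts classes created with a custom metaclass, as this sketch shows.

```python
# `type(obj) is type` only matches classes whose metaclass is exactly `type`;
# `isinstance(obj, type)` matches any class, including ABCs and other classes
# built with a custom metaclass.
from abc import ABC


class Plain:
    pass


class WithMeta(ABC):
    pass


print(type(Plain) is type, isinstance(Plain, type))        # True True
print(type(WithMeta) is type, isinstance(WithMeta, type))  # False True
```
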
6 changes: 3 additions & 3 deletions py-polars/polars/datatypes_constructor.py
@@ -73,7 +73,7 @@ def polars_type_to_constructor(
try:
return _POLARS_TYPE_TO_CONSTRUCTOR[dtype]
except KeyError: # pragma: no cover
raise ValueError(f"Cannot construct PySeries for type {dtype}.")
raise ValueError(f"Cannot construct PySeries for type {dtype}.") from None


if _NUMPY_AVAILABLE and not _DOCUMENTING:
@@ -101,8 +101,8 @@ def numpy_type_to_constructor(dtype: type[np.dtype]) -> Callable[..., PySeries]:
return _NUMPY_TYPE_TO_CONSTRUCTOR[dtype]
except KeyError:
return PySeries.new_object
except NameError: # pragma: no cover
raise ImportError("'numpy' is required for this functionality.")
except NameError as err: # pragma: no cover
raise ImportError("'numpy' is required for this functionality.") from err


if not _DOCUMENTING:
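
The recurring `raise ... from None` / `raise ... from err` edits address pylint's `raise-missing-from` (W0707) check: an exception raised inside an `except` block should state its cause explicitly. `from err` keeps the original exception chained in the traceback, while `from None` suppresses it, as in the lookup helpers above. A minimal sketch with a hypothetical mapping:

```python
_CTYPES = {"i64": "c_int64"}  # hypothetical lookup table


def dtype_ctype(name: str) -> str:
    try:
        return _CTYPES[name]
    except KeyError:
        # `from None` suppresses the KeyError so callers see only the
        # NotImplementedError; writing `from exc` instead would keep the
        # KeyError chained as the explicit cause in the traceback.
        raise NotImplementedError(f"no ctype known for {name!r}") from None


print(dtype_ctype("i64"))   # c_int64
# dtype_ctype("f32")        # raises NotImplementedError, KeyError hidden
```
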
13 changes: 6 additions & 7 deletions py-polars/polars/io.py
@@ -4,8 +4,6 @@
from pathlib import Path
from typing import Any, BinaryIO, Callable, Mapping, TextIO, cast

from polars.utils import format_path, handle_projection_columns

try:
import pyarrow as pa

@@ -22,6 +20,7 @@
from polars.datatypes import DataType, Utf8
from polars.internals import DataFrame, LazyFrame, _scan_ds
from polars.internals.io import _prepare_file_arg
from polars.utils import format_path, handle_projection_columns

try:
import connectorx as cx
@@ -967,10 +966,10 @@ def read_sql(
protocol=protocol,
)
return cast(DataFrame, from_arrow(tbl))
else:
raise ImportError(
"connectorx is not installed. Please run `pip install connectorx>=0.2.2`."
)

raise ImportError(
"connectorx is not installed. Please run `pip install connectorx>=0.2.2`."
)


def read_excel(
@@ -1060,7 +1059,7 @@ def read_excel(
except ImportError:
raise ImportError(
"xlsx2csv is not installed. Please run `pip install xlsx2csv`."
)
) from None

if isinstance(file, (str, Path)):
file = format_path(file)
42 changes: 21 additions & 21 deletions py-polars/polars/testing.py
Original file line number Diff line number Diff line change
@@ -275,11 +275,11 @@ def _getattr_multi(obj: object, op: str) -> Any:
get the attribute "str", and then the attribute "lengths"
"""
op_list = op.split(".")
return reduce(lambda o, m: getattr(o, m), op_list, obj)
return reduce(getattr, op_list, obj)


def verify_series_and_expr_api(
input: Series, expected: Series | None, op: str, *args: Any, **kwargs: Any
result: Series, expected: Series | None, op: str, *args: Any, **kwargs: Any
) -> None:
"""
Small helper function to test element-wise functions for both the series and expressions api.
@@ -291,8 +291,8 @@ def verify_series_and_expr_api(
>>> verify_series_and_expr_api(s, expected, "sort")
"""
expr = _getattr_multi(col("*"), op)(*args, **kwargs)
result_expr: Series = input.to_frame().select(expr)[:, 0] # type: ignore[assignment]
result_series = _getattr_multi(input, op)(*args, **kwargs)
result_expr: Series = result.to_frame().select(expr)[:, 0] # type: ignore[assignment]
result_series = _getattr_multi(result, op)(*args, **kwargs)
if expected is None:
assert_series_equal(result_series, result_expr)
else:
@@ -305,7 +305,7 @@ def is_categorical_dtype(data_type: Any) -> bool:
Check if the input is a polars Categorical dtype.
"""
return (
type(data_type) is type
isinstance(data_type, type)
and issubclass(data_type, Categorical)
or isinstance(data_type, Categorical)
)
@@ -409,23 +409,23 @@ def __post_init__(self) -> None:
raise InvalidArgument(
f"No strategy (currently) available for {self.dtype} type"
)

# given a custom strategy, but no explicit dtype. infer one
# from the first non-None value that the strategy produces.
with warnings.catch_warnings():
# note: usually you should not call "example()" outside of an interactive shell, hence
# the warning. however, here it is reasonable to do so, so we catch and ignore it
warnings.simplefilter("ignore", NonInteractiveExampleWarning)
sample_value_iter = (self.strategy.example() for _ in range(100)) # type: ignore[union-attr]
sample_value_type = type(
next(e for e in sample_value_iter if e is not None)
)
if sample_value_type is not None:
self.dtype = py_type_to_dtype(sample_value_type)
else:
# given a custom strategy, but no explicit dtype. infer one
# from the first non-None value that the strategy produces.
with warnings.catch_warnings():
# note: usually you should not call "example()" outside of an interactive shell, hence
# the warning. however, here it is reasonable to do so, so we catch and ignore it
warnings.simplefilter("ignore", NonInteractiveExampleWarning)
sample_value_iter = (self.strategy.example() for _ in range(100)) # type: ignore[union-attr]
sample_value_type = type(
next(e for e in sample_value_iter if e is not None)
)
if sample_value_type is not None:
self.dtype = py_type_to_dtype(sample_value_type)
else:
raise InvalidArgument(
f"Unable to determine dtype for strategy {self.dtype} type"
)
raise InvalidArgument(
f"Unable to determine dtype for strategy {self.dtype} type"
)

def columns(
cols: int | Sequence[str] | None = None,
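
In `testing.py`, the `input` parameter is renamed, presumably to satisfy pylint's `redefined-builtin` (W0622) warning about shadowing the `input` builtin, and `_getattr_multi` drops its lambda: `getattr(obj, name)` already has the `(accumulator, element)` signature that `functools.reduce` expects, so a dotted attribute path can be folded directly. A minimal sketch:

```python
from functools import reduce
from types import SimpleNamespace


def getattr_multi(obj: object, path: str) -> object:
    # getattr(current, name) matches reduce's (accumulator, element) signature,
    # so no `lambda o, m: getattr(o, m)` wrapper is needed.
    return reduce(getattr, path.split("."), obj)


cfg = SimpleNamespace(io=SimpleNamespace(csv=SimpleNamespace(sep=",")))
print(getattr_multi(cfg, "io.csv.sep"))  # ","
```
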