Skip to content

Commit

Permalink
docs(python): Address ignored Ruff doc rules (#9919)
Browse files Browse the repository at this point in the history
  • Loading branch information
zundertj authored Jul 17, 2023
1 parent 1a4eaa5 commit 6b0bacc
Show file tree
Hide file tree
Showing 14 changed files with 140 additions and 51 deletions.
4 changes: 2 additions & 2 deletions py-polars/docs/source/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -235,14 +235,14 @@ def _minify_classpaths(s: str) -> str:
)


def process_signature(app, what, name, obj, opts, sig, ret):
def process_signature(app, what, name, obj, opts, sig, ret):  # noqa: D103
    # Sphinx "autodoc-process-signature" handler: shorten fully-qualified
    # class paths in both the signature and the return annotation.
    # Falsy values (None / empty string) are passed through unchanged.
    new_sig = _minify_classpaths(sig) if sig else sig
    new_ret = _minify_classpaths(ret) if ret else ret
    return new_sig, new_ret


def setup(app):
def setup(app):  # noqa: D103
    # Sphinx extension entry point: hook our signature post-processor into
    # the autodoc event stream.
    # TODO: a handful of methods do not seem to trigger the event for
    # some reason (possibly @overloads?) - investigate further...
    app.connect("autodoc-process-signature", process_signature)
6 changes: 3 additions & 3 deletions py-polars/polars/dataframe/frame.py
Original file line number Diff line number Diff line change
Expand Up @@ -3306,9 +3306,9 @@ def write_delta(
... ) # doctest: +SKIP
"""
from polars.io.delta import check_if_delta_available, resolve_delta_lake_uri
from polars.io.delta import _check_if_delta_available, _resolve_delta_lake_uri

check_if_delta_available()
_check_if_delta_available()

from deltalake.writer import (
try_get_deltatable,
Expand All @@ -3319,7 +3319,7 @@ def write_delta(
delta_write_options = {}

if isinstance(target, (str, Path)):
target = resolve_delta_lake_uri(str(target), strict=False)
target = _resolve_delta_lake_uri(str(target), strict=False)

unsupported_cols = {}
unsupported_types = [Time, Categorical, Null]
Expand Down
26 changes: 26 additions & 0 deletions py-polars/polars/dataframe/groupby.py
Original file line number Diff line number Diff line change
Expand Up @@ -845,6 +845,19 @@ def agg(
*aggs: IntoExpr | Iterable[IntoExpr],
**named_aggs: IntoExpr,
) -> DataFrame:
"""
Compute aggregations for each group of a groupby operation.
Parameters
----------
*aggs
Aggregations to compute for each group of the groupby operation,
specified as positional arguments.
Accepts expression input. Strings are parsed as column names.
**named_aggs
Additional aggregations, specified as keyword arguments.
The resulting columns will be renamed to the keyword used.
"""
return (
self.df.lazy()
.groupby_rolling(
Expand Down Expand Up @@ -1046,6 +1059,19 @@ def agg(
*aggs: IntoExpr | Iterable[IntoExpr],
**named_aggs: IntoExpr,
) -> DataFrame:
"""
Compute aggregations for each group of a groupby operation.
Parameters
----------
*aggs
Aggregations to compute for each group of the groupby operation,
specified as positional arguments.
Accepts expression input. Strings are parsed as column names.
**named_aggs
Additional aggregations, specified as keyword arguments.
The resulting columns will be renamed to the keyword used.
"""
return (
self.df.lazy()
.groupby_dynamic(
Expand Down
39 changes: 34 additions & 5 deletions py-polars/polars/datatypes/classes.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
import contextlib
from datetime import timezone
from inspect import isclass
from typing import TYPE_CHECKING, Any, Callable, Iterator, Mapping, Sequence
from typing import TYPE_CHECKING, Any, Callable, Iterable, Iterator, Mapping, Sequence

import polars.datatypes

Expand Down Expand Up @@ -31,7 +31,7 @@ def __init__(self, method: Callable[..., Any] | None = None) -> None:
def __get__(self, instance: Any, cls: type | None = None) -> Any:
return self.fget(cls) # type: ignore[misc]

def getter(self, method: Callable[..., Any]) -> Any:
def getter(self, method: Callable[..., Any]) -> Any:  # noqa: D102
    # Replace the wrapped getter and return the descriptor itself, so the
    # API mirrors the chaining style of builtin property(...).getter.
    self.fget = method
    return self

Expand All @@ -46,25 +46,29 @@ def _string_repr(cls) -> str:
return _dtype_str_repr(cls)

def base_type(cls) -> PolarsDataType:
    """Return the base type."""
    # At the metaclass level the class itself is already the base type.
    return cls

@classproperty
def is_nested(self) -> bool:
    """Check if this data type is nested."""
    # Default for the metaclass: scalar. Nested types override this.
    return False

@classmethod
def is_(cls, other: PolarsDataType) -> bool:
    """Check if this DataType is the same as another DataType."""
    # Both equality and hash must agree before the two types are
    # considered identical; hashing is skipped when equality fails.
    equal = cls == other
    return equal and hash(cls) == hash(other)

@classmethod
def is_not(cls, other: PolarsDataType) -> bool:
    """Check if this DataType is NOT the same as another DataType."""
    # Exact negation of `is_`, so the two predicates always agree.
    return not cls.is_(other)


class DataType(metaclass=DataTypeClass):
"""Base class for all Polars data types."""

def __new__(cls, *args: Any, **kwargs: Any) -> PolarsDataType: # type: ignore[misc]
def __new__(cls, *args: Any, **kwargs: Any) -> PolarsDataType: # type: ignore[misc] # noqa: D102
# this formulation allows for equivalent use of "pl.Type" and "pl.Type()", while
# still respecting types that take initialisation params (eg: Duration/Datetime)
if args or kwargs:
Expand Down Expand Up @@ -95,6 +99,7 @@ def base_type(cls) -> DataTypeClass:

@classproperty
def is_nested(self) -> bool:
    """Check if this data type is nested."""
    # Base DataType is scalar; NestedType overrides this to return True.
    return False

@classinstmethod # type: ignore[arg-type]
Expand Down Expand Up @@ -158,15 +163,30 @@ def _custom_reconstruct(


class DataTypeGroup(frozenset): # type: ignore[type-arg]
"""Group of data types."""

_match_base_type: bool

def __new__(cls, items: Any, *, match_base_type: bool = True) -> DataTypeGroup:
def __new__(
    cls, items: Iterable[DataType | DataTypeClass], *, match_base_type: bool = True
) -> DataTypeGroup:
    """
    Construct a DataTypeGroup.

    Parameters
    ----------
    items :
        iterable of data types
    match_base_type:
        match the base type
    """
    # Validate up front so a clear error names the offending element,
    # rather than letting frozenset accept arbitrary objects.
    for member in items:
        if not isinstance(member, (DataType, DataTypeClass)):
            raise TypeError(
                f"DataTypeGroup items must be dtypes; found {type(member).__name__!r}"
            )
    group = super().__new__(cls, items)  # type: ignore[arg-type]
    group._match_base_type = match_base_type
    return group

Expand Down Expand Up @@ -201,6 +221,7 @@ class NestedType(DataType):

@classproperty
def is_nested(self) -> bool:
    """Check if this data type is nested."""
    # All NestedType subclasses (List, Array, Struct) are composite types.
    return True


Expand Down Expand Up @@ -406,6 +427,8 @@ class Unknown(DataType):


class List(NestedType):
"""Nested list/array type with variable length of inner lists."""

inner: PolarsDataType | None = None

def __init__(self, inner: PolarsDataType | PythonDataType):
Expand Down Expand Up @@ -466,6 +489,8 @@ def __repr__(self) -> str:


class Array(NestedType):
"""Nested list/array type with fixed length of inner arrays."""

inner: PolarsDataType | None = None
width: int

Expand Down Expand Up @@ -524,6 +549,8 @@ def __repr__(self) -> str:


class Field:
"""Definition of a single field within a `Struct` DataType."""

def __init__(self, name: str, dtype: PolarsDataType):
"""
Definition of a single field within a `Struct` DataType.
Expand Down Expand Up @@ -551,6 +578,8 @@ def __repr__(self) -> str:


class Struct(NestedType):
"""Struct composite type."""

def __init__(self, fields: Sequence[Field] | SchemaDict):
"""
Struct composite type.
Expand Down
13 changes: 8 additions & 5 deletions py-polars/polars/datatypes/convert.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@
T = TypeVar("T")


def cache(function: Callable[..., T]) -> T:
def cache(function: Callable[..., T]) -> T:  # noqa: D103
    # Thin typed wrapper over functools.lru_cache: declaring the return
    # as T keeps mypy happy when combined with @property.
    # See: https://github.com/python/mypy/issues/5858
    memoized = functools.lru_cache()(function)
    return memoized  # type: ignore[return-value]
Expand All @@ -98,7 +98,10 @@ def cache(function: Callable[..., T]) -> T:


@functools.lru_cache(16)
def map_py_type_to_dtype(python_dtype: PythonDataType | type[object]) -> PolarsDataType:
def _map_py_type_to_dtype(
python_dtype: PythonDataType | type[object],
) -> PolarsDataType:
"""Convert Python data type to Polars data type."""
if python_dtype is float:
return Float64
if python_dtype is int:
Expand Down Expand Up @@ -134,14 +137,14 @@ def map_py_type_to_dtype(python_dtype: PythonDataType | type[object]) -> PolarsD
if hasattr(python_dtype, "__origin__") and hasattr(python_dtype, "__args__"):
base_type = python_dtype.__origin__
if base_type is not None:
dtype = map_py_type_to_dtype(base_type)
dtype = _map_py_type_to_dtype(base_type)
nested = python_dtype.__args__
if len(nested) == 1:
nested = nested[0]
return (
dtype
if nested is None
else dtype(map_py_type_to_dtype(nested)) # type: ignore[operator]
else dtype(_map_py_type_to_dtype(nested)) # type: ignore[operator]
)

raise TypeError("Invalid type")
Expand Down Expand Up @@ -424,7 +427,7 @@ def py_type_to_dtype(
if is_polars_dtype(data_type):
return data_type
try:
return map_py_type_to_dtype(data_type)
return _map_py_type_to_dtype(data_type)
except (KeyError, TypeError): # pragma: no cover
if not raise_unmatched:
return None
Expand Down
Loading

0 comments on commit 6b0bacc

Please sign in to comment.