testing ruff's new docstring code snippet formatter
BurntSushi committed Nov 27, 2023
1 parent 38d016b commit 559b9d6
Showing 31 changed files with 162 additions and 499 deletions.
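For context: the diff below is the output of running ruff's docstring code snippet formatter over the polars doctest examples — multi-line calls in the examples are collapsed onto single lines and trailing bare `...` prompts are dropped. A minimal sketch of how such a run might be configured is shown here; the exact ruff build and settings behind this commit are not recorded in the diff, and the `docstring-code-format` option named below is the setting ruff later shipped for this feature, so treat it as an assumption rather than the commit's actual setup.

    # pyproject.toml -- sketch only; settings assumed, not taken from this commit
    [tool.ruff.format]
    docstring-code-format = true  # opt in to formatting code examples inside docstrings

    # then reformat the package in place (command assumed):
    #   ruff format py-polars/polars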
13 changes: 2 additions & 11 deletions py-polars/polars/api.py
@@ -99,7 +99,6 @@ def register_expr_namespace(name: str) -> Callable[[type[NS]], type[NS]]:
...
... def nearest(self, p: int) -> pl.Expr:
... return (p ** (self._expr.log(p)).round(0).cast(pl.Int64)).cast(pl.Int64)
...
>>>
>>> df = pl.DataFrame([1.4, 24.3, 55.0, 64.001], schema=["n"])
>>> df.select(
@@ -155,11 +154,8 @@ def register_dataframe_namespace(name: str) -> Callable[[type[NS]], type[NS]]:
... def by_first_letter_of_column_values(self, col: str) -> list[pl.DataFrame]:
... return [
... self._df.filter(pl.col(col).str.starts_with(c))
... for c in sorted(
... set(df.select(pl.col(col).str.slice(0, 1)).to_series())
... )
... for c in sorted(set(df.select(pl.col(col).str.slice(0, 1)).to_series()))
... ]
...
>>>
>>> df = pl.DataFrame(
... data=[["xx", 2, 3, 4], ["xy", 4, 5, 6], ["yy", 5, 6, 7], ["yz", 6, 7, 8]],
@@ -247,16 +243,12 @@ def register_lazyframe_namespace(name: str) -> Callable[[type[NS]], type[NS]]:
... self._ldf = ldf
...
... def split_by_column_dtypes(self) -> list[pl.LazyFrame]:
... return [
... self._ldf.select(pl.col(tp))
... for tp in dict.fromkeys(self._ldf.dtypes)
... ]
... return [self._ldf.select(pl.col(tp)) for tp in dict.fromkeys(self._ldf.dtypes)]
...
... def upcast_integer_types(self) -> pl.LazyFrame:
... return self._ldf.with_columns(
... pl.col(tp).cast(pl.Int64) for tp in (pl.Int8, pl.Int16, pl.Int32)
... )
...
>>>
>>> ldf = pl.DataFrame(
... data={"a": [1, 2], "b": [3, 4], "c": [5.6, 6.7]},
@@ -356,7 +348,6 @@ def register_series_namespace(name: str) -> Callable[[type[NS]], type[NS]]:
...
... def cube(self) -> pl.Series:
... return self._s * self._s * self._s
...
>>>
>>> s = pl.Series("n", [1.5, 31.0, 42.0, 64.5])
>>> s.math.square().alias("s^2")
27 changes: 3 additions & 24 deletions py-polars/polars/config.py
@@ -94,14 +94,12 @@ class Config(contextlib.ContextDecorator):
>>> with pl.Config() as cfg:
... # set verbose for more detailed output within the scope
... cfg.set_verbose(True) # doctest: +IGNORE_RESULT
...
>>> # scope exit - no longer in verbose mode
This can also be written more compactly as:
>>> with pl.Config(verbose=True):
... pass
...
(The compact format is available for all `Config` methods that take a single value).
@@ -111,7 +109,6 @@ class Config(contextlib.ContextDecorator):
>>> @pl.Config(verbose=True)
... def test():
... pass
...
"""

@@ -414,7 +411,6 @@ def set_auto_structify(cls, active: bool | None = False) -> type[Config]:
>>> df = pl.DataFrame({"v": [1, 2, 3], "v2": [4, 5, 6]})
>>> with pl.Config(set_auto_structify=True):
... out = df.select(pl.all())
...
>>> out
shape: (3, 1)
┌───────────┐
@@ -577,7 +573,6 @@ def set_float_precision(cls, precision: int | None = None) -> type[Config]:
>>> df = pl.DataFrame({"const": ["pi", "e"], "value": [pi, e]})
>>> with pl.Config(float_precision=15):
... print(df)
...
shape: (2, 2)
┌───────┬───────────────────┐
│ const ┆ value │
@@ -639,7 +634,6 @@ def set_fmt_float(cls, fmt: FloatFmt | None = "mixed") -> type[Config]:
>>> s = pl.Series([1.2304980958725870923, 1e6, 1e-8])
>>> with pl.Config(set_fmt_float="mixed"):
... print(s)
...
shape: (3,)
Series: '' [f64]
[
@@ -652,7 +646,6 @@ def set_fmt_float(cls, fmt: FloatFmt | None = "mixed") -> type[Config]:
>>> with pl.Config(set_fmt_float="full"):
... print(s)
...
shape: (3,)
Series: '' [f64]
[
@@ -697,7 +690,6 @@ def set_fmt_str_lengths(cls, n: int | None) -> type[Config]:
└───────────────────────────────────┴─────┘
>>> with pl.Config(fmt_str_lengths=50):
... print(df)
...
shape: (2, 1)
┌──────────────────────────────────────────────────┐
│ txt │
@@ -752,7 +744,6 @@ def set_fmt_table_cell_list_len(cls, n: int | None) -> type[Config]:
└─────────────┘
>>> with pl.Config(fmt_table_cell_list_len=10):
... print(df)
...
shape: (1, 1)
┌────────────────────┐
│ nums │
@@ -810,9 +801,7 @@ def set_tbl_cell_alignment(
Examples
--------
>>> df = pl.DataFrame(
... {"column_abc": [1.0, 2.5, 5.0], "column_xyz": [True, False, True]}
... )
>>> df = pl.DataFrame({"column_abc": [1.0, 2.5, 5.0], "column_xyz": [True, False, True]})
>>> pl.Config.set_tbl_cell_alignment("RIGHT") # doctest: +IGNORE_RESULT
>>> print(df)
shape: (3, 2)
@@ -907,7 +896,6 @@ def set_tbl_cols(cls, n: int | None) -> type[Config]:
... cfg.set_tbl_cols(5)
... df = pl.DataFrame({str(i): [i] for i in range(100)})
... print(df)
...
<class 'polars.config.Config'>
shape: (1, 100)
┌─────┬─────┬─────┬───┬─────┬─────┐
@@ -920,7 +908,6 @@ def set_tbl_cols(cls, n: int | None) -> type[Config]:
>>> with pl.Config(tbl_cols=10):
... print(df)
...
shape: (1, 100)
┌─────┬─────┬─────┬─────┬─────┬───┬─────┬─────┬─────┬─────┬─────┐
│ 0 ┆ 1 ┆ 2 ┆ 3 ┆ 4 ┆ … ┆ 95 ┆ 96 ┆ 97 ┆ 98 ┆ 99 │
@@ -1032,9 +1019,7 @@ def set_tbl_formatting(
Examples
--------
>>> df = pl.DataFrame(
... {"abc": [-2.5, 5.0], "mno": ["hello", "world"], "xyz": [True, False]}
... )
>>> df = pl.DataFrame({"abc": [-2.5, 5.0], "mno": ["hello", "world"], "xyz": [True, False]})
>>> with pl.Config(
... tbl_formatting="ASCII_MARKDOWN",
... tbl_hide_column_data_types=True,
@@ -1201,12 +1186,9 @@ def set_tbl_rows(cls, n: int | None) -> type[Config]:
Examples
--------
>>> df = pl.DataFrame(
... {"abc": [1.0, 2.5, 3.5, 5.0], "xyz": [True, False, True, False]}
... )
>>> df = pl.DataFrame({"abc": [1.0, 2.5, 3.5, 5.0], "xyz": [True, False, True, False]})
>>> with pl.Config(tbl_rows=2):
... print(df)
...
shape: (4, 2)
┌─────┬───────┐
│ abc ┆ xyz │
@@ -1261,7 +1243,6 @@ def set_trim_decimal_zeros(cls, active: bool | None = True) -> type[Config]:
... )
>>> with pl.Config(trim_decimal_zeros=False):
... print(df)
...
shape: (2, 1)
┌──────────────┐
│ d │
@@ -1273,7 +1254,6 @@ def set_trim_decimal_zeros(cls, active: bool | None = True) -> type[Config]:
└──────────────┘
>>> with pl.Config(trim_decimal_zeros=True):
... print(df)
...
shape: (2, 1)
┌──────────────┐
│ d │
@@ -1298,7 +1278,6 @@ def set_verbose(cls, active: bool | None = True) -> type[Config]:
>>> pl.Config.set_verbose(True) # doctest: +SKIP
>>> with pl.Config(verbose=True): # doctest: +SKIP
... do_polars_operations()
...
"""
if active is None:
os.environ.pop("POLARS_VERBOSE", None)
49 changes: 13 additions & 36 deletions py-polars/polars/dataframe/frame.py
@@ -333,7 +333,6 @@ class DataFrame:
>>> class MyDataFrame(pl.DataFrame):
... pass
...
>>> isinstance(MyDataFrame().lazy().collect(), MyDataFrame)
False
@@ -1889,9 +1888,7 @@ def to_arrow(self) -> pa.Table:
Examples
--------
>>> df = pl.DataFrame(
... {"foo": [1, 2, 3, 4, 5, 6], "bar": ["a", "b", "c", "d", "e", "f"]}
... )
>>> df = pl.DataFrame({"foo": [1, 2, 3, 4, 5, 6], "bar": ["a", "b", "c", "d", "e", "f"]})
>>> df.to_arrow()
pyarrow.Table
foo: int64
@@ -2188,9 +2185,7 @@ def to_pandas( # noqa: D417
bar float64
ham object
dtype: object
>>> pandas_df2_pa = df2.to_pandas(
... use_pyarrow_extension_array=True
... ) # doctest: +SKIP
>>> pandas_df2_pa = df2.to_pandas(use_pyarrow_extension_array=True) # doctest: +SKIP
>>> pandas_df2_pa # doctest: +SKIP
foo bar ham
0 1 6 <NA>
@@ -2940,7 +2935,6 @@ def write_excel(
... )
... ws.write(2, 1, "Basic/default conditional formatting", fmt_title)
... ws.write(len(df) + 6, 1, "Customised conditional formatting", fmt_title)
...
Export a table containing two different types of sparklines. Use default
options for the "trend" sparkline and customised options (and positioning)
@@ -3762,9 +3756,7 @@ def transpose(
Include the header as a separate column
>>> df.transpose(
... include_header=True, header_name="foo", column_names=["a", "b", "c"]
... )
>>> df.transpose(include_header=True, header_name="foo", column_names=["a", "b", "c"])
shape: (2, 4)
┌─────┬─────┬─────┬─────┐
│ foo ┆ a ┆ b ┆ c │
@@ -3783,7 +3775,6 @@ def transpose(
... while True:
... yield f"{base_name}{count}"
... count += 1
...
>>> df.transpose(include_header=False, column_names=name_generator())
shape: (2, 3)
┌─────────────┬─────────────┬─────────────┐
@@ -3862,9 +3853,7 @@ def rename(self, mapping: dict[str, str]) -> DataFrame:
Examples
--------
>>> df = pl.DataFrame(
... {"foo": [1, 2, 3], "bar": [6, 7, 8], "ham": ["a", "b", "c"]}
... )
>>> df = pl.DataFrame({"foo": [1, 2, 3], "bar": [6, 7, 8], "ham": ["a", "b", "c"]})
>>> df.rename({"foo": "apple"})
shape: (3, 3)
┌───────┬─────┬─────┐
@@ -4250,9 +4239,7 @@ def get_column_index(self, name: str) -> int:
Examples
--------
>>> df = pl.DataFrame(
... {"foo": [1, 2, 3], "bar": [6, 7, 8], "ham": ["a", "b", "c"]}
... )
>>> df = pl.DataFrame({"foo": [1, 2, 3], "bar": [6, 7, 8], "ham": ["a", "b", "c"]})
>>> df.get_column_index("ham")
2
@@ -4954,7 +4941,6 @@ def pipe(
--------
>>> def cast_str_to_int(data, col_name):
... return data.with_columns(pl.col(col_name).cast(pl.Int64))
...
>>> df = pl.DataFrame({"a": [1, 2, 3, 4], "b": ["10", "20", "30", "40"]})
>>> df.pipe(cast_str_to_int, col_name="b")
shape: (4, 2)
@@ -5134,7 +5120,6 @@ def group_by(
>>> for name, data in df.group_by("a"): # doctest: +SKIP
... print(name)
... print(data)
...
a
shape: (2, 3)
┌─────┬─────┬─────┐
@@ -5515,9 +5500,9 @@ def group_by_dynamic(
The window boundaries can also be added to the aggregation result
>>> df.group_by_dynamic(
... "time", every="1h", include_boundaries=True, closed="right"
... ).agg(pl.col("n").mean())
>>> df.group_by_dynamic("time", every="1h", include_boundaries=True, closed="right").agg(
... pl.col("n").mean()
... )
shape: (4, 4)
┌─────────────────────┬─────────────────────┬─────────────────────┬─────┐
│ _lower_boundary ┆ _upper_boundary ┆ time ┆ n │
@@ -5724,9 +5709,9 @@ def upsample(
... "values": [0, 1, 2, 3],
... }
... ).set_sorted("time")
>>> df.upsample(
... time_column="time", every="1mo", by="groups", maintain_order=True
... ).select(pl.all().forward_fill())
>>> df.upsample(time_column="time", every="1mo", by="groups", maintain_order=True).select(
... pl.all().forward_fill()
... )
shape: (7, 3)
┌─────────────────────┬────────┬────────┐
│ time ┆ groups ┆ values │
@@ -7779,7 +7764,6 @@ def select(
... df.select(
... is_odd=(pl.col(pl.INTEGER_DTYPES) % 2).name.suffix("_is_odd"),
... )
...
shape: (3, 1)
┌───────────┐
│ is_odd │
@@ -7954,7 +7938,6 @@ def with_columns(
... df.drop("c").with_columns(
... diffs=pl.col(["a", "b"]).diff().name.suffix("_diff"),
... )
...
shape: (4, 3)
┌─────┬──────┬─────────────┐
│ a ┆ b ┆ diffs │
@@ -9661,7 +9644,6 @@ def iter_slices(self, n_rows: int = 10_000) -> Iterator[DataFrame]:
... )
>>> for idx, frame in enumerate(df.iter_slices()):
... print(f"{type(frame).__name__}:[{idx}]:{len(frame)}")
...
DataFrame:[0]:10000
DataFrame:[1]:7500
@@ -9671,7 +9653,6 @@ def iter_slices(self, n_rows: int = 10_000) -> Iterator[DataFrame]:
>>> for frame in df.iter_slices(n_rows=15_000):
... record_batch = frame.to_arrow().to_batches()[0]
... print(f"{record_batch.schema}\n<< {len(record_batch)}")
...
a: int32
b: date32[day]
c: large_string
@@ -9968,9 +9949,7 @@ def merge_sorted(self, other: DataFrame, key: str) -> DataFrame:
Examples
--------
>>> df0 = pl.DataFrame(
... {"name": ["steve", "elise", "bob"], "age": [42, 44, 18]}
... ).sort("age")
>>> df0 = pl.DataFrame({"name": ["steve", "elise", "bob"], "age": [42, 44, 18]}).sort("age")
>>> df0
shape: (3, 2)
┌───────┬─────┐
@@ -10163,9 +10142,7 @@ def update(
Update `df` values including null values in `new_df`, using an outer join
strategy that defines explicit join columns in each frame:
>>> df.update(
... new_df, left_on="A", right_on="C", how="outer", include_nulls=True
... )
>>> df.update(new_df, left_on="A", right_on="C", how="outer", include_nulls=True)
shape: (5, 2)
┌─────┬──────┐
│ A ┆ B │
1 change: 0 additions & 1 deletion py-polars/polars/dataframe/group_by.py
@@ -73,7 +73,6 @@ def __iter__(self) -> Self:
>>> for name, data in df.group_by("foo"): # doctest: +SKIP
... print(name)
... print(data)
...
a
shape: (2, 2)
┌─────┬─────┐
4 changes: 1 addition & 3 deletions py-polars/polars/datatypes/classes.py
@@ -632,9 +632,7 @@ def __init__( # noqa: D417
Examples
--------
>>> s = pl.Series(
... "a", [[1, 2], [4, 3]], dtype=pl.Array(inner=pl.Int64, width=2)
... )
>>> s = pl.Series("a", [[1, 2], [4, 3]], dtype=pl.Array(inner=pl.Int64, width=2))
>>> s
shape: (2,)
Series: 'a' [array[i64, 2]]
4 changes: 1 addition & 3 deletions py-polars/polars/expr/binary.py
@@ -143,9 +143,7 @@ def starts_with(self, prefix: IntoExpr) -> Expr:
>>> colors.select(
... "name",
... pl.col("code").bin.starts_with(b"\xff").alias("starts_with_lit"),
... pl.col("code")
... .bin.starts_with(pl.col("prefix"))
... .alias("starts_with_expr"),
... pl.col("code").bin.starts_with(pl.col("prefix")).alias("starts_with_expr"),
... )
shape: (3, 3)
┌────────┬─────────────────┬──────────────────┐