fix: Return appropriate data type for duration mean and median #14376

Merged · 1 commit · Feb 13, 2024
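
The user-facing effect of this change, sketched from the values used in the new tests (a minimal example, not part of the PR itself):

```python
from datetime import timedelta

import polars as pl

# Mean and median of a Duration series now come back as timedelta values,
# matching the column's Duration dtype (values taken from the new tests).
s = pl.Series([timedelta(days=1), timedelta(days=2), timedelta(days=15)])

assert s.mean() == timedelta(days=6)
assert s.median() == timedelta(days=2)
```
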
@@ -114,14 +114,14 @@ impl Series {
Float64 => SeriesWrap(self.f64().unwrap().clone()).agg_median(groups),
dt if dt.is_numeric() => apply_method_physical_integer!(self, agg_median, groups),
#[cfg(feature = "dtype-datetime")]
dt @ Datetime(_, _) => self
dt @ (Datetime(_, _) | Duration(_)) => self
.to_physical_repr()
.agg_median(groups)
.cast(&Int64)
.unwrap()
.cast(dt)
.unwrap(),
dt @ (Date | Duration(_) | Time) => {
dt @ (Date | Time) => {
let ca = self.to_physical_repr();
let physical_type = ca.dtype();
let s = apply_method_physical_integer!(ca, agg_median, groups);
@@ -172,14 +172,14 @@ impl Series {
Float64 => SeriesWrap(self.f64().unwrap().clone()).agg_mean(groups),
dt if dt.is_numeric() => apply_method_physical_integer!(self, agg_mean, groups),
#[cfg(feature = "dtype-datetime")]
dt @ Datetime(_, _) => self
dt @ (Datetime(_, _) | Duration(_)) => self
.to_physical_repr()
.agg_mean(groups)
.cast(&Int64)
.unwrap()
.cast(dt)
.unwrap(),
dt @ (Date | Duration(_) | Time) => {
dt @ (Date | Time) => {
let ca = self.to_physical_repr();
let physical_type = ca.dtype();
let s = apply_method_physical_integer!(ca, agg_mean, groups);
8 changes: 1 addition & 7 deletions crates/polars-core/src/series/implementations/duration.rs
@@ -422,13 +422,7 @@ impl SeriesTrait for SeriesWrap<DurationChunked> {
.into_duration(TimeUnit::Milliseconds))
}
fn median_as_series(&self) -> PolarsResult<Series> {
Ok(self
.0
.median_as_series()
.cast(&self.dtype().to_physical())
.unwrap()
.cast(self.dtype())
.unwrap())
Series::new(self.name(), &[self.median().map(|v| v as i64)]).cast(self.dtype())
}
fn quantile_as_series(
&self,
8 changes: 7 additions & 1 deletion crates/polars-lazy/src/frame/mod.rs
@@ -1419,7 +1419,13 @@ impl LazyFrame {
/// - String columns will sum to None.
pub fn median(self) -> PolarsResult<LazyFrame> {
self.stats_helper(
|dt| dt.is_numeric() || matches!(dt, DataType::Boolean | DataType::Datetime(_, _)),
|dt| {
dt.is_numeric()
|| matches!(
dt,
DataType::Boolean | DataType::Duration(_) | DataType::Datetime(_, _)
)
},
|name| col(name).median(),
)
}
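
With Duration added to the predicate above, duration columns are now picked up by `LazyFrame::median`. A small sketch of the corresponding frame-level behavior, assuming the Python `LazyFrame.median` wrapper forwards to this code path:

```python
from datetime import timedelta

import polars as pl

lf = pl.LazyFrame({"d": [timedelta(days=1), timedelta(days=2), timedelta(days=15)]})

# The duration column is now aggregated, and the result keeps the Duration
# dtype (the median of 1d / 2d / 15d is 2d).
print(lf.median().collect())
```
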
6 changes: 3 additions & 3 deletions py-polars/polars/series/datetime.py
@@ -2,7 +2,7 @@

from typing import TYPE_CHECKING

from polars.datatypes import Date, Datetime
from polars.datatypes import Date, Datetime, Duration
from polars.series.utils import expr_dispatch
from polars.utils._wrap import wrap_s
from polars.utils.convert import _to_python_date, _to_python_datetime
@@ -82,7 +82,7 @@ def median(self) -> TemporalLiteral | float | None:
if out is not None:
if s.dtype == Date:
return _to_python_date(int(out)) # type: ignore[arg-type]
elif s.dtype == Datetime:
elif s.dtype in (Datetime, Duration):
return out # type: ignore[return-value]
else:
return _to_python_datetime(int(out), s.dtype.time_unit) # type: ignore[arg-type, attr-defined]
@@ -106,7 +106,7 @@ def mean(self) -> TemporalLiteral | float | None:
if out is not None:
if s.dtype == Date:
return _to_python_date(int(out)) # type: ignore[arg-type]
elif s.dtype == Datetime:
elif s.dtype in (Datetime, Duration):
return out # type: ignore[return-value]
else:
return _to_python_datetime(int(out), s.dtype.time_unit) # type: ignore[arg-type, attr-defined]
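
For reference, how the three branches above map dtypes to Python return values; a sketch using values taken from the test parametrizations below:

```python
from datetime import date, datetime, timedelta

import polars as pl

# Date results are converted back to a Python date; Datetime and Duration
# results are returned directly as datetime / timedelta.
assert pl.Series(
    [date(2022, 1, 1), date(2022, 1, 2), date(2024, 5, 15)]
).dt.mean() == date(2022, 10, 16)

assert pl.Series(
    [datetime(2022, 1, 1), datetime(2022, 1, 2), datetime(2024, 5, 15)]
).dt.mean() == datetime(2022, 10, 16, 16, 0, 0)

assert pl.Series(
    [timedelta(days=1), timedelta(days=2), timedelta(days=15)]
).dt.mean() == timedelta(days=6)
```
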
4 changes: 2 additions & 2 deletions py-polars/src/series/aggregation.rs
@@ -54,7 +54,7 @@ impl PySeries {
.map_err(PyPolarsErr::from)?,
)
.into_py(py)),
DataType::Datetime(_, _) => Ok(Wrap(
DataType::Datetime(_, _) | DataType::Duration(_) => Ok(Wrap(
self.series
.mean_as_series()
.get(0)
@@ -77,7 +77,7 @@
.map_err(PyPolarsErr::from)?,
)
.into_py(py)),
DataType::Datetime(_, _) => Ok(Wrap(
DataType::Datetime(_, _) | DataType::Duration(_) => Ok(Wrap(
self.series
.median_as_series()
.map_err(PyPolarsErr::from)?
94 changes: 77 additions & 17 deletions py-polars/tests/unit/namespaces/test_datetime.py
@@ -20,7 +20,7 @@
from backports.zoneinfo._zoneinfo import ZoneInfo

if TYPE_CHECKING:
from polars.type_aliases import TimeUnit
from polars.type_aliases import TemporalLiteral, TimeUnit


@pytest.fixture()
@@ -931,21 +931,36 @@ def test_weekday(time_unit: TimeUnit) -> None:
([date(2022, 1, 1)], date(2022, 1, 1)),
([date(2022, 1, 1), date(2022, 1, 2), date(2022, 1, 3)], date(2022, 1, 2)),
([date(2022, 1, 1), date(2022, 1, 2), date(2024, 5, 15)], date(2022, 1, 2)),
([datetime(2022, 1, 1)], datetime(2022, 1, 1)),
(
[datetime(2022, 1, 1), datetime(2022, 1, 2), datetime(2022, 1, 3)],
datetime(2022, 1, 2),
),
(
[datetime(2022, 1, 1), datetime(2022, 1, 2), datetime(2024, 5, 15)],
datetime(2022, 1, 2),
),
([timedelta(days=1)], timedelta(days=1)),
([timedelta(days=1), timedelta(days=2), timedelta(days=3)], timedelta(days=2)),
([timedelta(days=1), timedelta(days=2), timedelta(days=15)], timedelta(days=2)),
],
ids=[
"empty",
"Nones",
"single",
"spread_even",
"spread_skewed",
"spread_skewed_dt",
"single_date",
"spread_even_date",
"spread_skewed_date",
"single_datetime",
"spread_even_datetime",
"spread_skewed_datetime",
"single_dur",
"spread_even_dur",
"spread_skewed_dur",
],
)
def test_median(values: list[date | None], expected_median: date | None) -> None:
def test_median(
values: list[TemporalLiteral | None], expected_median: TemporalLiteral | None
) -> None:
s = pl.Series(values)
assert s.dt.median() == expected_median

@@ -961,22 +976,35 @@ def test_median(values: list[date | None], expected_median: date | None) -> None
([date(2022, 1, 1)], date(2022, 1, 1)),
([date(2022, 1, 1), date(2022, 1, 2), date(2022, 1, 3)], date(2022, 1, 2)),
([date(2022, 1, 1), date(2022, 1, 2), date(2024, 5, 15)], date(2022, 10, 16)),
([datetime(2022, 1, 1)], datetime(2022, 1, 1)),
(
[datetime(2022, 1, 1), datetime(2022, 1, 2), datetime(2022, 1, 3)],
datetime(2022, 1, 2),
),
(
[datetime(2022, 1, 1), datetime(2022, 1, 2), datetime(2024, 5, 15)],
datetime(2022, 10, 16, 16, 0, 0),
),
([timedelta(days=1)], timedelta(days=1)),
([timedelta(days=1), timedelta(days=2), timedelta(days=3)], timedelta(days=2)),
([timedelta(days=1), timedelta(days=2), timedelta(days=15)], timedelta(days=6)),
],
ids=[
"empty",
"Nones",
"single",
"spread_even",
"spread_skewed",
"spread_skewed_dt",
"single_date",
"spread_even_date",
"spread_skewed_date",
"single_datetime",
"spread_even_datetime",
"spread_skewed_datetime",
"single_duration",
"spread_even_duration",
"spread_skewed_duration",
],
)
def test_mean(
values: list[date | datetime | None], expected_mean: date | datetime | None
values: list[TemporalLiteral | None], expected_mean: TemporalLiteral | None
) -> None:
s = pl.Series(values)
assert s.dt.mean() == expected_mean
@@ -996,12 +1024,44 @@ def test_mean(
ids=["spread_skewed_dt"],
)
def test_datetime_mean_with_tu(values: list[datetime], expected_mean: datetime) -> None:
assert pl.Series(values, dtype=pl.Datetime("ms")).mean() == expected_mean
assert pl.Series(values, dtype=pl.Datetime("ms")).dt.mean() == expected_mean
assert pl.Series(values, dtype=pl.Datetime("us")).mean() == expected_mean
assert pl.Series(values, dtype=pl.Datetime("us")).dt.mean() == expected_mean
assert pl.Series(values, dtype=pl.Datetime("ns")).mean() == expected_mean
assert pl.Series(values, dtype=pl.Datetime("ns")).dt.mean() == expected_mean
assert pl.Series(values, dtype=pl.Duration("ms")).mean() == expected_mean
assert pl.Series(values, dtype=pl.Duration("ms")).dt.mean() == expected_mean
assert pl.Series(values, dtype=pl.Duration("us")).mean() == expected_mean
assert pl.Series(values, dtype=pl.Duration("us")).dt.mean() == expected_mean
assert pl.Series(values, dtype=pl.Duration("ns")).mean() == expected_mean
assert pl.Series(values, dtype=pl.Duration("ns")).dt.mean() == expected_mean


@pytest.mark.parametrize(
("values", "expected_mean"),
[([timedelta(days=1), timedelta(days=2), timedelta(days=15)], timedelta(days=6))],
ids=["spread_skewed_dur"],
)
def test_duration_mean_with_tu(
values: list[timedelta], expected_mean: timedelta
) -> None:
assert pl.Series(values, dtype=pl.Duration("ms")).mean() == expected_mean
assert pl.Series(values, dtype=pl.Duration("ms")).dt.mean() == expected_mean
assert pl.Series(values, dtype=pl.Duration("us")).mean() == expected_mean
assert pl.Series(values, dtype=pl.Duration("us")).dt.mean() == expected_mean
assert pl.Series(values, dtype=pl.Duration("ns")).mean() == expected_mean
assert pl.Series(values, dtype=pl.Duration("ns")).dt.mean() == expected_mean


@pytest.mark.parametrize(
("values", "expected_median"),
[([timedelta(days=1), timedelta(days=2), timedelta(days=15)], timedelta(days=2))],
ids=["spread_skewed_dur"],
)
def test_duration_median_with_tu(
values: list[timedelta], expected_median: timedelta
) -> None:
assert pl.Series(values, dtype=pl.Duration("ms")).median() == expected_median
assert pl.Series(values, dtype=pl.Duration("ms")).dt.median() == expected_median
assert pl.Series(values, dtype=pl.Duration("us")).median() == expected_median
assert pl.Series(values, dtype=pl.Duration("us")).dt.median() == expected_median
assert pl.Series(values, dtype=pl.Duration("ns")).median() == expected_median
assert pl.Series(values, dtype=pl.Duration("ns")).dt.median() == expected_median


def test_agg_expr() -> None: