From 3d447c21b4263f9be79f60a9220a59363a3c0650 Mon Sep 17 00:00:00 2001
From: Ruifeng Zheng
Date: Fri, 15 Dec 2023 11:49:27 -0800
Subject: [PATCH] [SPARK-46419][PS][TESTS] Reorganize `DatetimeIndexTests`: Factor out 3 slow tests

### What changes were proposed in this pull request?
Reorganize `DatetimeIndexTests`: Factor out 3 slow tests.

### Why are the changes needed?
Its parity test is slow, sometimes taking more than 10 minutes, so this PR moves 3 slow tests out of it. (The remaining slow tests will be moved in a follow-up to keep this change small.)

### Does this PR introduce _any_ user-facing change?
No.

### How was this patch tested?
Test-only change.

### Was this patch authored or co-authored using generative AI tooling?
No.

Closes #44369 from zhengruifeng/ps_test_idx_dt_I.

Authored-by: Ruifeng Zheng
Signed-off-by: Hyukjin Kwon
---
 dev/sparktestsupport/modules.py | 8 +-
 .../connect/indexes/test_parity_datetime.py | 6 +-
 .../indexes/test_parity_datetime_at.py | 41 ++++++++++
 .../indexes/test_parity_datetime_between.py | 41 ++++++++++
 .../indexes/test_parity_datetime_ceil.py | 41 ++++++++++
 .../indexes/test_parity_datetime_property.py | 6 +-
 .../pandas/tests/indexes/test_datetime.py | 67 +---------------
 .../pandas/tests/indexes/test_datetime_at.py | 68 ++++++++++++++++
 .../tests/indexes/test_datetime_between.py | 79 +++++++++++++++++++
 .../tests/indexes/test_datetime_ceil.py | 48 +++++++++++
 .../tests/indexes/test_datetime_property.py | 45 ++---------
 11 files changed, 343 insertions(+), 107 deletions(-)
 create mode 100644 python/pyspark/pandas/tests/connect/indexes/test_parity_datetime_at.py
 create mode 100644 python/pyspark/pandas/tests/connect/indexes/test_parity_datetime_between.py
 create mode 100644 python/pyspark/pandas/tests/connect/indexes/test_parity_datetime_ceil.py
 create mode 100644 python/pyspark/pandas/tests/indexes/test_datetime_at.py
 create mode 100644 python/pyspark/pandas/tests/indexes/test_datetime_between.py
 create mode 100644 python/pyspark/pandas/tests/indexes/test_datetime_ceil.py

diff --git a/dev/sparktestsupport/modules.py b/dev/sparktestsupport/modules.py
index 8de69e65d7417..a9feb43e9ef17 100644
--- a/dev/sparktestsupport/modules.py
+++ b/dev/sparktestsupport/modules.py
@@ -797,6 +797,9 @@ def __hash__(self):
         "pyspark.pandas.tests.indexes.test_base",
         "pyspark.pandas.tests.indexes.test_base_slow",
         "pyspark.pandas.tests.indexes.test_datetime",
+        "pyspark.pandas.tests.indexes.test_datetime_at",
+        "pyspark.pandas.tests.indexes.test_datetime_between",
+        "pyspark.pandas.tests.indexes.test_datetime_ceil",
         "pyspark.pandas.tests.indexes.test_datetime_property",
         "pyspark.pandas.tests.indexes.test_align",
         "pyspark.pandas.tests.indexes.test_indexing",
@@ -1135,7 +1138,6 @@ def __hash__(self):
         "pyspark.pandas.tests.connect.computation.test_parity_pivot",
         "pyspark.pandas.tests.connect.computation.test_parity_stats",
         "pyspark.pandas.tests.connect.indexes.test_parity_base_slow",
-        "pyspark.pandas.tests.connect.indexes.test_parity_datetime_property",
         "pyspark.pandas.tests.connect.frame.test_parity_interpolate",
         "pyspark.pandas.tests.connect.frame.test_parity_interpolate_error",
         "pyspark.pandas.tests.connect.series.test_parity_interpolate",
@@ -1186,6 +1188,10 @@ def __hash__(self):
     python_test_goals=[
         # pandas-on-Spark unittests
         "pyspark.pandas.tests.connect.indexes.test_parity_datetime",
+        "pyspark.pandas.tests.connect.indexes.test_parity_datetime_at",
+        "pyspark.pandas.tests.connect.indexes.test_parity_datetime_between",
+        "pyspark.pandas.tests.connect.indexes.test_parity_datetime_ceil",
"pyspark.pandas.tests.connect.indexes.test_parity_datetime_property", "pyspark.pandas.tests.connect.test_parity_ops_on_diff_frames", "pyspark.pandas.tests.connect.test_parity_ops_on_diff_frames_groupby", ], diff --git a/python/pyspark/pandas/tests/connect/indexes/test_parity_datetime.py b/python/pyspark/pandas/tests/connect/indexes/test_parity_datetime.py index 48c3e490f4afa..58be389287e93 100644 --- a/python/pyspark/pandas/tests/connect/indexes/test_parity_datetime.py +++ b/python/pyspark/pandas/tests/connect/indexes/test_parity_datetime.py @@ -18,11 +18,13 @@ from pyspark.pandas.tests.indexes.test_datetime import DatetimeIndexTestsMixin from pyspark.testing.connectutils import ReusedConnectTestCase -from pyspark.testing.pandasutils import PandasOnSparkTestUtils, TestUtils +from pyspark.testing.pandasutils import PandasOnSparkTestUtils class DatetimeIndexParityTests( - DatetimeIndexTestsMixin, PandasOnSparkTestUtils, TestUtils, ReusedConnectTestCase + DatetimeIndexTestsMixin, + PandasOnSparkTestUtils, + ReusedConnectTestCase, ): pass diff --git a/python/pyspark/pandas/tests/connect/indexes/test_parity_datetime_at.py b/python/pyspark/pandas/tests/connect/indexes/test_parity_datetime_at.py new file mode 100644 index 0000000000000..8df1839b375bb --- /dev/null +++ b/python/pyspark/pandas/tests/connect/indexes/test_parity_datetime_at.py @@ -0,0 +1,41 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import unittest + +from pyspark.pandas.tests.indexes.test_datetime_at import DatetimeIndexAtMixin +from pyspark.testing.connectutils import ReusedConnectTestCase +from pyspark.testing.pandasutils import PandasOnSparkTestUtils + + +class DatetimeIndexAtParityTests( + DatetimeIndexAtMixin, + PandasOnSparkTestUtils, + ReusedConnectTestCase, +): + pass + + +if __name__ == "__main__": + from pyspark.pandas.tests.connect.indexes.test_parity_datetime_at import * # noqa: F401 + + try: + import xmlrunner # type: ignore[import] + + testRunner = xmlrunner.XMLTestRunner(output="target/test-reports", verbosity=2) + except ImportError: + testRunner = None + unittest.main(testRunner=testRunner, verbosity=2) diff --git a/python/pyspark/pandas/tests/connect/indexes/test_parity_datetime_between.py b/python/pyspark/pandas/tests/connect/indexes/test_parity_datetime_between.py new file mode 100644 index 0000000000000..72020d2b81e66 --- /dev/null +++ b/python/pyspark/pandas/tests/connect/indexes/test_parity_datetime_between.py @@ -0,0 +1,41 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. 
+# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import unittest + +from pyspark.pandas.tests.indexes.test_datetime_between import DatetimeIndexBetweenMixin +from pyspark.testing.connectutils import ReusedConnectTestCase +from pyspark.testing.pandasutils import PandasOnSparkTestUtils + + +class DatetimeIndexBetweenParityTests( + DatetimeIndexBetweenMixin, + PandasOnSparkTestUtils, + ReusedConnectTestCase, +): + pass + + +if __name__ == "__main__": + from pyspark.pandas.tests.connect.indexes.test_parity_datetime_between import * # noqa: F401 + + try: + import xmlrunner # type: ignore[import] + + testRunner = xmlrunner.XMLTestRunner(output="target/test-reports", verbosity=2) + except ImportError: + testRunner = None + unittest.main(testRunner=testRunner, verbosity=2) diff --git a/python/pyspark/pandas/tests/connect/indexes/test_parity_datetime_ceil.py b/python/pyspark/pandas/tests/connect/indexes/test_parity_datetime_ceil.py new file mode 100644 index 0000000000000..d275496b0dcbe --- /dev/null +++ b/python/pyspark/pandas/tests/connect/indexes/test_parity_datetime_ceil.py @@ -0,0 +1,41 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import unittest + +from pyspark.pandas.tests.indexes.test_datetime_ceil import DatetimeIndexCeilMixin +from pyspark.testing.connectutils import ReusedConnectTestCase +from pyspark.testing.pandasutils import PandasOnSparkTestUtils + + +class DatetimeIndexCeilParityTests( + DatetimeIndexCeilMixin, + PandasOnSparkTestUtils, + ReusedConnectTestCase, +): + pass + + +if __name__ == "__main__": + from pyspark.pandas.tests.connect.indexes.test_parity_datetime_ceil import * # noqa: F401 + + try: + import xmlrunner # type: ignore[import] + + testRunner = xmlrunner.XMLTestRunner(output="target/test-reports", verbosity=2) + except ImportError: + testRunner = None + unittest.main(testRunner=testRunner, verbosity=2) diff --git a/python/pyspark/pandas/tests/connect/indexes/test_parity_datetime_property.py b/python/pyspark/pandas/tests/connect/indexes/test_parity_datetime_property.py index c5202e99305ba..c8806df9b6ceb 100644 --- a/python/pyspark/pandas/tests/connect/indexes/test_parity_datetime_property.py +++ b/python/pyspark/pandas/tests/connect/indexes/test_parity_datetime_property.py @@ -18,11 +18,13 @@ from pyspark.pandas.tests.indexes.test_datetime_property import DatetimeIndexPropertyTestsMixin from pyspark.testing.connectutils import ReusedConnectTestCase -from pyspark.testing.pandasutils import PandasOnSparkTestUtils, TestUtils +from pyspark.testing.pandasutils import PandasOnSparkTestUtils class DatetimeIndexParityTests( - DatetimeIndexPropertyTestsMixin, PandasOnSparkTestUtils, TestUtils, ReusedConnectTestCase + DatetimeIndexPropertyTestsMixin, + PandasOnSparkTestUtils, + ReusedConnectTestCase, ): pass diff --git a/python/pyspark/pandas/tests/indexes/test_datetime.py b/python/pyspark/pandas/tests/indexes/test_datetime.py index 51c5103fd496e..db5c3d623a42d 100644 --- a/python/pyspark/pandas/tests/indexes/test_datetime.py +++ b/python/pyspark/pandas/tests/indexes/test_datetime.py @@ -24,7 +24,7 @@ from pyspark.testing.pandasutils import PandasOnSparkTestCase, TestUtils -class DatetimeIndexTestsMixin: +class DatetimeIndexTestingFuncMixin: @property def fixed_freqs(self): return [ @@ -63,6 +63,8 @@ def _disallow_nanoseconds(self, f): self.assertRaises(ValueError, lambda: f(freq="ns")) self.assertRaises(ValueError, lambda: f(freq="N")) + +class DatetimeIndexTestsMixin(DatetimeIndexTestingFuncMixin): def test_datetime_index(self): with self.assertRaisesRegex(TypeError, "Index.name must be a hashable type"): ps.DatetimeIndex(["2004-01-01", "2002-12-31", "2000-04-01"], name=[(1, 2)]) @@ -71,13 +73,6 @@ def test_datetime_index(self): ): ps.DatetimeIndex(["2004-01-01", "2002-12-31", "2000-04-01"]).all() - def test_ceil(self): - for psidx, pidx in self.idx_pairs: - for freq in self.fixed_freqs: - self.assert_eq(psidx.ceil(freq), pidx.ceil(freq)) - - self._disallow_nanoseconds(self.psidxs[0].ceil) - def test_floor(self): for psidx, pidx in self.idx_pairs: for freq in self.fixed_freqs: @@ -110,62 +105,6 @@ def test_strftime(self): psidx.strftime(date_format="%B %d, %Y"), pidx.strftime(date_format="%B %d, %Y") ) - def test_indexer_between_time(self): - for psidx, pidx in self.idx_pairs: - self.assert_eq( - psidx.indexer_between_time("00:00:00", "00:01:00").sort_values(), - pd.Index(pidx.indexer_between_time("00:00:00", "00:01:00")), - ) - - self.assert_eq( - psidx.indexer_between_time( - datetime.time(0, 0, 0), datetime.time(0, 1, 0) - ).sort_values(), - pd.Index(pidx.indexer_between_time(datetime.time(0, 0, 0), datetime.time(0, 1, 0))), - ) - - self.assert_eq( - psidx.indexer_between_time("00:00:00", 
"00:01:00", True, False).sort_values(), - pd.Index(pidx.indexer_between_time("00:00:00", "00:01:00", True, False)), - ) - - self.assert_eq( - psidx.indexer_between_time("00:00:00", "00:01:00", False, True).sort_values(), - pd.Index(pidx.indexer_between_time("00:00:00", "00:01:00", False, True)), - ) - - self.assert_eq( - psidx.indexer_between_time("00:00:00", "00:01:00", False, False).sort_values(), - pd.Index(pidx.indexer_between_time("00:00:00", "00:01:00", False, False)), - ) - - self.assert_eq( - psidx.indexer_between_time("00:00:00", "00:01:00", True, True).sort_values(), - pd.Index(pidx.indexer_between_time("00:00:00", "00:01:00", True, True)), - ) - - def test_indexer_at_time(self): - for psidx, pidx in self.idx_pairs: - self.assert_eq( - psidx.indexer_at_time("00:00:00").sort_values(), - pd.Index(pidx.indexer_at_time("00:00:00")), - ) - - self.assert_eq( - psidx.indexer_at_time(datetime.time(0, 1, 0)).sort_values(), - pd.Index(pidx.indexer_at_time(datetime.time(0, 1, 0))), - ) - - self.assert_eq( - psidx.indexer_at_time("00:00:01").sort_values(), - pd.Index(pidx.indexer_at_time("00:00:01")), - ) - - self.assertRaises( - NotImplementedError, - lambda: ps.DatetimeIndex([0]).indexer_at_time("00:00:00", asof=True), - ) - def test_arithmetic_op_exceptions(self): for psidx, pidx in self.idx_pairs: py_datetime = pidx.to_pydatetime() diff --git a/python/pyspark/pandas/tests/indexes/test_datetime_at.py b/python/pyspark/pandas/tests/indexes/test_datetime_at.py new file mode 100644 index 0000000000000..5b213096ccb80 --- /dev/null +++ b/python/pyspark/pandas/tests/indexes/test_datetime_at.py @@ -0,0 +1,68 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import datetime + +import pandas as pd + +import pyspark.pandas as ps +from pyspark.testing.pandasutils import PandasOnSparkTestCase +from pyspark.pandas.tests.indexes.test_datetime import DatetimeIndexTestingFuncMixin + + +class DatetimeIndexAtMixin(DatetimeIndexTestingFuncMixin): + def test_indexer_at_time(self): + for psidx, pidx in self.idx_pairs: + self.assert_eq( + psidx.indexer_at_time("00:00:00").sort_values(), + pd.Index(pidx.indexer_at_time("00:00:00")), + ) + + self.assert_eq( + psidx.indexer_at_time(datetime.time(0, 1, 0)).sort_values(), + pd.Index(pidx.indexer_at_time(datetime.time(0, 1, 0))), + ) + + self.assert_eq( + psidx.indexer_at_time("00:00:01").sort_values(), + pd.Index(pidx.indexer_at_time("00:00:01")), + ) + + self.assertRaises( + NotImplementedError, + lambda: ps.DatetimeIndex([0]).indexer_at_time("00:00:00", asof=True), + ) + + +class DatetimeIndexAtTests( + DatetimeIndexAtMixin, + PandasOnSparkTestCase, +): + pass + + +if __name__ == "__main__": + import unittest + from pyspark.pandas.tests.indexes.test_datetime_at import * # noqa: F401 + + try: + import xmlrunner + + testRunner = xmlrunner.XMLTestRunner(output="target/test-reports", verbosity=2) + except ImportError: + testRunner = None + unittest.main(testRunner=testRunner, verbosity=2) diff --git a/python/pyspark/pandas/tests/indexes/test_datetime_between.py b/python/pyspark/pandas/tests/indexes/test_datetime_between.py new file mode 100644 index 0000000000000..bcaf2f79ac755 --- /dev/null +++ b/python/pyspark/pandas/tests/indexes/test_datetime_between.py @@ -0,0 +1,79 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import datetime + +import pandas as pd + +from pyspark.testing.pandasutils import PandasOnSparkTestCase +from pyspark.pandas.tests.indexes.test_datetime import DatetimeIndexTestingFuncMixin + + +class DatetimeIndexBetweenMixin(DatetimeIndexTestingFuncMixin): + def test_indexer_between_time(self): + for psidx, pidx in self.idx_pairs: + self.assert_eq( + psidx.indexer_between_time("00:00:00", "00:01:00").sort_values(), + pd.Index(pidx.indexer_between_time("00:00:00", "00:01:00")), + ) + + self.assert_eq( + psidx.indexer_between_time( + datetime.time(0, 0, 0), datetime.time(0, 1, 0) + ).sort_values(), + pd.Index(pidx.indexer_between_time(datetime.time(0, 0, 0), datetime.time(0, 1, 0))), + ) + + self.assert_eq( + psidx.indexer_between_time("00:00:00", "00:01:00", True, False).sort_values(), + pd.Index(pidx.indexer_between_time("00:00:00", "00:01:00", True, False)), + ) + + self.assert_eq( + psidx.indexer_between_time("00:00:00", "00:01:00", False, True).sort_values(), + pd.Index(pidx.indexer_between_time("00:00:00", "00:01:00", False, True)), + ) + + self.assert_eq( + psidx.indexer_between_time("00:00:00", "00:01:00", False, False).sort_values(), + pd.Index(pidx.indexer_between_time("00:00:00", "00:01:00", False, False)), + ) + + self.assert_eq( + psidx.indexer_between_time("00:00:00", "00:01:00", True, True).sort_values(), + pd.Index(pidx.indexer_between_time("00:00:00", "00:01:00", True, True)), + ) + + +class DatetimeIndexBetweenTests( + DatetimeIndexBetweenMixin, + PandasOnSparkTestCase, +): + pass + + +if __name__ == "__main__": + import unittest + from pyspark.pandas.tests.indexes.test_datetime_between import * # noqa: F401 + + try: + import xmlrunner + + testRunner = xmlrunner.XMLTestRunner(output="target/test-reports", verbosity=2) + except ImportError: + testRunner = None + unittest.main(testRunner=testRunner, verbosity=2) diff --git a/python/pyspark/pandas/tests/indexes/test_datetime_ceil.py b/python/pyspark/pandas/tests/indexes/test_datetime_ceil.py new file mode 100644 index 0000000000000..af03148954bc8 --- /dev/null +++ b/python/pyspark/pandas/tests/indexes/test_datetime_ceil.py @@ -0,0 +1,48 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from pyspark.testing.pandasutils import PandasOnSparkTestCase +from pyspark.pandas.tests.indexes.test_datetime import DatetimeIndexTestingFuncMixin + + +class DatetimeIndexCeilMixin(DatetimeIndexTestingFuncMixin): + def test_ceil(self): + for psidx, pidx in self.idx_pairs: + for freq in self.fixed_freqs: + self.assert_eq(psidx.ceil(freq), pidx.ceil(freq)) + + self._disallow_nanoseconds(self.psidxs[0].ceil) + + +class DatetimeIndexCeilTests( + DatetimeIndexCeilMixin, + PandasOnSparkTestCase, +): + pass + + +if __name__ == "__main__": + import unittest + from pyspark.pandas.tests.indexes.test_datetime_ceil import * # noqa: F401 + + try: + import xmlrunner + + testRunner = xmlrunner.XMLTestRunner(output="target/test-reports", verbosity=2) + except ImportError: + testRunner = None + unittest.main(testRunner=testRunner, verbosity=2) diff --git a/python/pyspark/pandas/tests/indexes/test_datetime_property.py b/python/pyspark/pandas/tests/indexes/test_datetime_property.py index 048f5d1462bf8..13442885ee7d3 100644 --- a/python/pyspark/pandas/tests/indexes/test_datetime_property.py +++ b/python/pyspark/pandas/tests/indexes/test_datetime_property.py @@ -17,45 +17,11 @@ import numpy as np import pandas as pd -import pyspark.pandas as ps -from pyspark.testing.pandasutils import PandasOnSparkTestCase, TestUtils +from pyspark.testing.pandasutils import PandasOnSparkTestCase +from pyspark.pandas.tests.indexes.test_datetime import DatetimeIndexTestingFuncMixin -class DatetimeIndexPropertyTestsMixin: - @property - def fixed_freqs(self): - return [ - "D", - "H", - "T", # min - "S", - "L", # ms - "U", # us - # 'N' not supported - ] - - @property - def non_fixed_freqs(self): - return ["W", "Q"] - - @property - def pidxs(self): - return [ - pd.DatetimeIndex([0]), - pd.DatetimeIndex(["2004-01-01", "2002-12-31", "2000-04-01"]), - ] + [ - pd.date_range("2000-01-01", periods=3, freq=freq) - for freq in (self.fixed_freqs + self.non_fixed_freqs) - ] - - @property - def psidxs(self): - return [ps.from_pandas(pidx) for pidx in self.pidxs] - - @property - def idx_pairs(self): - return list(zip(self.psidxs, self.pidxs)) - +class DatetimeIndexPropertyTestsMixin(DatetimeIndexTestingFuncMixin): def test_properties(self): for psidx, pidx in self.idx_pairs: self.assert_eq(psidx.year, pidx.year) @@ -83,7 +49,10 @@ def test_properties(self): self.assert_eq(psidx.isocalendar().week, pidx.isocalendar().week.astype(np.int64)) -class DatetimeIndexPropertyTests(DatetimeIndexPropertyTestsMixin, PandasOnSparkTestCase, TestUtils): +class DatetimeIndexPropertyTests( + DatetimeIndexPropertyTestsMixin, + PandasOnSparkTestCase, +): pass
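The follow-up mentioned in the PR description would apply the same recipe to the slow cases still left in `test_datetime.py`. As a rough sketch only (the module name `test_datetime_floor` and the choice of `test_floor` as the next candidate are assumptions, not part of this patch), a further split could reuse the shared `DatetimeIndexTestingFuncMixin` fixtures exactly as `test_datetime_ceil.py` does:

```python
# Hypothetical follow-up sketch (not part of this patch): move the remaining
# slow `test_floor` case into its own module, mirroring test_datetime_ceil.py
# and reusing the shared fixtures from DatetimeIndexTestingFuncMixin.
from pyspark.testing.pandasutils import PandasOnSparkTestCase
from pyspark.pandas.tests.indexes.test_datetime import DatetimeIndexTestingFuncMixin


class DatetimeIndexFloorMixin(DatetimeIndexTestingFuncMixin):
    def test_floor(self):
        # Compare pandas-on-Spark floor() against pandas for every fixed frequency.
        for psidx, pidx in self.idx_pairs:
            for freq in self.fixed_freqs:
                self.assert_eq(psidx.floor(freq), pidx.floor(freq))

        # Nanosecond frequencies are rejected, as in the ceil test.
        self._disallow_nanoseconds(self.psidxs[0].floor)


class DatetimeIndexFloorTests(
    DatetimeIndexFloorMixin,
    PandasOnSparkTestCase,
):
    pass


if __name__ == "__main__":
    import unittest

    # The module path below is hypothetical; it would only exist once a follow-up
    # actually adds python/pyspark/pandas/tests/indexes/test_datetime_floor.py.
    from pyspark.pandas.tests.indexes.test_datetime_floor import *  # noqa: F401

    try:
        import xmlrunner

        testRunner = xmlrunner.XMLTestRunner(output="target/test-reports", verbosity=2)
    except ImportError:
        testRunner = None
    unittest.main(testRunner=testRunner, verbosity=2)
```

A real follow-up would also need the matching `connect` parity module and new entries in `dev/sparktestsupport/modules.py`, just as this patch adds them for the `at`/`between`/`ceil` splits.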