diff --git a/python/pyspark/pandas/resample.py b/python/pyspark/pandas/resample.py
index 30f8c9d31695e..0d2c3cc753cd1 100644
--- a/python/pyspark/pandas/resample.py
+++ b/python/pyspark/pandas/resample.py
@@ -67,7 +67,6 @@
     scol_for,
     verify_temp_column_name,
 )
-from pyspark.sql.utils import is_remote
 from pyspark.pandas.spark.functions import timestampdiff
 
 
@@ -145,22 +144,15 @@ def _agg_columns_scols(self) -> List[Column]:
     def get_make_interval(  # type: ignore[return]
         self, unit: str, col: Union[Column, int, float]
     ) -> Column:
-        if is_remote():
-            from pyspark.sql.connect.functions import lit, make_interval
-
-            col = col if not isinstance(col, (int, float)) else lit(col)  # type: ignore[assignment]
-            if unit == "MONTH":
-                return make_interval(months=col)  # type: ignore
-            if unit == "HOUR":
-                return make_interval(hours=col)  # type: ignore
-            if unit == "MINUTE":
-                return make_interval(mins=col)  # type: ignore
-            if unit == "SECOND":
-                return make_interval(secs=col)  # type: ignore
-        else:
-            sql_utils = SparkContext._active_spark_context._jvm.PythonSQLUtils
-            col = col._jc if isinstance(col, Column) else F.lit(col)._jc
-            return sql_utils.makeInterval(unit, col)
+        col = col if not isinstance(col, (int, float)) else F.lit(col)  # type: ignore[assignment]
+        if unit == "MONTH":
+            return F.make_interval(months=col)  # type: ignore
+        if unit == "HOUR":
+            return F.make_interval(hours=col)  # type: ignore
+        if unit == "MINUTE":
+            return F.make_interval(mins=col)  # type: ignore
+        if unit == "SECOND":
+            return F.make_interval(secs=col)  # type: ignore
 
     def _bin_timestamp(self, origin: pd.Timestamp, ts_scol: Column) -> Column:
         key_type = self._resamplekey_type