diff --git a/python/pyspark/sql/connect/functions/builtin.py b/python/pyspark/sql/connect/functions/builtin.py
index 37398ca9ccf39..2ccf360ed972d 100644
--- a/python/pyspark/sql/connect/functions/builtin.py
+++ b/python/pyspark/sql/connect/functions/builtin.py
@@ -3349,19 +3349,15 @@ def unix_timestamp(
 unix_timestamp.__doc__ = pysparkfuncs.unix_timestamp.__doc__
 
 
-def from_utc_timestamp(timestamp: "ColumnOrName", tz: "ColumnOrName") -> Column:
-    if isinstance(tz, str):
-        tz = lit(tz)
-    return _invoke_function_over_columns("from_utc_timestamp", timestamp, tz)
+def from_utc_timestamp(timestamp: "ColumnOrName", tz: Union[Column, str]) -> Column:
+    return _invoke_function_over_columns("from_utc_timestamp", timestamp, lit(tz))
 
 
 from_utc_timestamp.__doc__ = pysparkfuncs.from_utc_timestamp.__doc__
 
 
-def to_utc_timestamp(timestamp: "ColumnOrName", tz: "ColumnOrName") -> Column:
-    if isinstance(tz, str):
-        tz = lit(tz)
-    return _invoke_function_over_columns("to_utc_timestamp", timestamp, tz)
+def to_utc_timestamp(timestamp: "ColumnOrName", tz: Union[Column, str]) -> Column:
+    return _invoke_function_over_columns("to_utc_timestamp", timestamp, lit(tz))
 
 
 to_utc_timestamp.__doc__ = pysparkfuncs.to_utc_timestamp.__doc__
diff --git a/python/pyspark/sql/functions/builtin.py b/python/pyspark/sql/functions/builtin.py
index 6ee3117805208..5b9d0dd870027 100644
--- a/python/pyspark/sql/functions/builtin.py
+++ b/python/pyspark/sql/functions/builtin.py
@@ -9144,7 +9144,7 @@ def unix_timestamp(
 
 
 @_try_remote_functions
-def from_utc_timestamp(timestamp: "ColumnOrName", tz: "ColumnOrName") -> Column:
+def from_utc_timestamp(timestamp: "ColumnOrName", tz: Union[Column, str]) -> Column:
     """
     This is a common function for databases supporting TIMESTAMP WITHOUT TIMEZONE.
     This function takes a timestamp which is timezone-agnostic, and interprets it as a timestamp in UTC, and
@@ -9192,11 +9192,7 @@ def from_utc_timestamp(timestamp: "ColumnOrName", tz: "ColumnOrName") -> Column:
     >>> df.select(from_utc_timestamp(df.ts, df.tz).alias('local_time')).collect()
     [Row(local_time=datetime.datetime(1997, 2, 28, 19, 30))]
     """
-    from pyspark.sql.classic.column import _to_java_column
-
-    if isinstance(tz, Column):
-        tz = _to_java_column(tz)
-    return _invoke_function("from_utc_timestamp", _to_java_column(timestamp), tz)
+    return _invoke_function_over_columns("from_utc_timestamp", timestamp, lit(tz))
 
 
 @_try_remote_functions
@@ -9248,11 +9244,7 @@ def to_utc_timestamp(timestamp: "ColumnOrName", tz: "ColumnOrName") -> Column:
     >>> df.select(to_utc_timestamp(df.ts, df.tz).alias('utc_time')).collect()
     [Row(utc_time=datetime.datetime(1997, 2, 28, 1, 30))]
     """
-    from pyspark.sql.classic.column import _to_java_column
-
-    if isinstance(tz, Column):
-        tz = _to_java_column(tz)
-    return _invoke_function("to_utc_timestamp", _to_java_column(timestamp), tz)
+    return _invoke_function_over_columns("to_utc_timestamp", timestamp, lit(tz))
 
 
 @_try_remote_functions