diff --git a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala
index 342782e8e1292..8d137ba88cb1e 100644
--- a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala
+++ b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala
@@ -20,12 +20,12 @@ package org.apache.spark.sql.jdbc
 import java.math.{BigDecimal => JBigDecimal}
 import java.sql.{Connection, Date, Timestamp}
 import java.text.SimpleDateFormat
-import java.time.{LocalDateTime, ZoneOffset}
+import java.time.LocalDateTime
 import java.util.Properties
 
 import org.apache.spark.sql.{Column, Row}
 import org.apache.spark.sql.catalyst.expressions.Literal
-import org.apache.spark.sql.types.{ArrayType, DecimalType, FloatType, NullType, ShortType}
+import org.apache.spark.sql.types._
 import org.apache.spark.tags.DockerTest
 
 /**
@@ -454,8 +454,8 @@ class PostgresIntegrationSuite extends DockerJDBCIntegrationSuite {
     val negativeInfinity = row(1).getAs[Timestamp]("timestamp_column")
     val infinitySeq = row(0).getAs[scala.collection.Seq[Timestamp]]("timestamp_array")
     val negativeInfinitySeq = row(1).getAs[scala.collection.Seq[Timestamp]]("timestamp_array")
-    val minTimeStamp = LocalDateTime.of(1, 1, 1, 0, 0, 0).toEpochSecond(ZoneOffset.UTC)
-    val maxTimestamp = LocalDateTime.of(9999, 12, 31, 23, 59, 59).toEpochSecond(ZoneOffset.UTC)
+    val minTimeStamp = -62135596800000L
+    val maxTimestamp = 253402300799999L
     assert(infinity.getTime == maxTimestamp)
     assert(negativeInfinity.getTime == minTimeStamp)
     assert(infinitySeq.head.getTime == maxTimestamp)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/PostgresDialect.scala b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/PostgresDialect.scala
index a8e95533e0be0..c9737867d3e07 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/PostgresDialect.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/PostgresDialect.scala
@@ -312,8 +312,9 @@ private object PostgresDialect extends JdbcDialect with SQLConfHelper {
     val POSTGRESQL_DATE_POSITIVE_INFINITY = 9223372036825200000L
     val POSTGRESQL_DATE_DATE_POSITIVE_SMALLER_INFINITY = 185543533774800000L
-    val minTimeStamp = LocalDateTime.of(1, 1, 1, 0, 0, 0).toEpochSecond(ZoneOffset.UTC)
-    val maxTimestamp = LocalDateTime.of(9999, 12, 31, 23, 59, 59).toEpochSecond(ZoneOffset.UTC)
+    val minTimeStamp = LocalDateTime.of(1, 1, 1, 0, 0, 0).toInstant(ZoneOffset.UTC).toEpochMilli
+    val maxTimestamp =
+      LocalDateTime.of(9999, 12, 31, 23, 59, 59, 999999999).toInstant(ZoneOffset.UTC).toEpochMilli
     val time = t.getTime
     if (time == POSTGRESQL_DATE_POSITIVE_INFINITY ||
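
Reviewer note (not part of the patch): a minimal, hypothetical sketch of the unit mismatch this change fixes. LocalDateTime.toEpochSecond returns seconds since the epoch, while java.sql.Timestamp and Timestamp.getTime work in milliseconds, which is why the dialect now converts through Instant.toEpochMilli and the test asserts against the corresponding millisecond literals. The object name below is illustrative only.

    import java.time.{LocalDateTime, ZoneOffset}

    object EpochUnitsSketch extends App {
      // Old approach: seconds since the epoch (wrong unit for java.sql.Timestamp).
      val maxSeconds = LocalDateTime.of(9999, 12, 31, 23, 59, 59).toEpochSecond(ZoneOffset.UTC)
      // New approach: milliseconds since the epoch, matching Timestamp.getTime.
      val maxMillis = LocalDateTime.of(9999, 12, 31, 23, 59, 59, 999999999)
        .toInstant(ZoneOffset.UTC).toEpochMilli
      val minMillis = LocalDateTime.of(1, 1, 1, 0, 0, 0).toInstant(ZoneOffset.UTC).toEpochMilli

      println(maxSeconds) // 253402300799
      println(maxMillis)  // 253402300799999, the literal now asserted in the test
      println(minMillis)  // -62135596800000, the literal now asserted in the test
    }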