From 5881a66480566aa4885f311f49183bd7c5d93830 Mon Sep 17 00:00:00 2001
From: Yuya Ebihara
Date: Fri, 8 Apr 2022 11:56:59 +0900
Subject: [PATCH] Support partition pseudo-columns in BigQuery

---
 docs/src/main/sphinx/connector/bigquery.rst   | 29 +++++++
 .../bigquery/BigQueryFilterQueryBuilder.java  |  3 +-
 .../plugin/bigquery/BigQueryMetadata.java     | 17 +++-
 .../plugin/bigquery/BigQueryPseudoColumn.java | 74 +++++++++++++++++
 .../bigquery/BigQueryResultPageSource.java    |  3 +-
 .../plugin/bigquery/BigQueryTableHandle.java  | 50 ++++++++++-
 .../trino/plugin/bigquery/BigQueryUtil.java   | 14 +++-
 .../plugin/bigquery/ReadSessionCreator.java   |  2 +-
 .../bigquery/TestBigQueryConnectorTest.java   | 82 +++++++++++++++++++
 9 files changed, 263 insertions(+), 11 deletions(-)
 create mode 100644 plugin/trino-bigquery/src/main/java/io/trino/plugin/bigquery/BigQueryPseudoColumn.java

diff --git a/docs/src/main/sphinx/connector/bigquery.rst b/docs/src/main/sphinx/connector/bigquery.rst
index 4574fd670076..65c00af962ab 100644
--- a/docs/src/main/sphinx/connector/bigquery.rst
+++ b/docs/src/main/sphinx/connector/bigquery.rst
@@ -169,6 +169,35 @@ For each Trino table which maps to BigQuery view there exists a system table whi
 Given a BigQuery view ``customer_view`` you can send query
 ``SELECT * customer_view$view_definition`` to see the SQL which defines view in BigQuery.
 
+.. _bigquery_special_columns:
+
+Special columns
+---------------
+
+In addition to the defined columns, the BigQuery connector exposes
+partition information in a number of hidden columns:
+
+* ``$partition_date``: Equivalent to the ``_PARTITIONDATE`` pseudo-column in BigQuery
+
+* ``$partition_time``: Equivalent to the ``_PARTITIONTIME`` pseudo-column in BigQuery
+
+You can use these columns in your SQL statements like any other column. They
+can be selected directly, or used in conditional statements. For example, you
+can inspect the partition date and time for each record::
+
+    SELECT *, "$partition_date", "$partition_time"
+    FROM bigquery.web.page_views;
+
+Retrieve all records stored in the partition ``_PARTITIONDATE = '2022-04-07'``::
+
+    SELECT *
+    FROM bigquery.web.page_views
+    WHERE "$partition_date" = date '2022-04-07';
+
+.. note::
+
+    The two special partitions ``__NULL__`` and ``__UNPARTITIONED__`` are not supported.
+
 .. _bigquery-sql-support:
 
 SQL support
diff --git a/plugin/trino-bigquery/src/main/java/io/trino/plugin/bigquery/BigQueryFilterQueryBuilder.java b/plugin/trino-bigquery/src/main/java/io/trino/plugin/bigquery/BigQueryFilterQueryBuilder.java
index 208103e2d5d4..2511fdda0424 100644
--- a/plugin/trino-bigquery/src/main/java/io/trino/plugin/bigquery/BigQueryFilterQueryBuilder.java
+++ b/plugin/trino-bigquery/src/main/java/io/trino/plugin/bigquery/BigQueryFilterQueryBuilder.java
@@ -26,6 +26,7 @@
 
 import static com.google.common.base.Preconditions.checkState;
 import static com.google.common.collect.Iterables.getOnlyElement;
+import static io.trino.plugin.bigquery.BigQueryUtil.toBigQueryColumnName;
 import static java.util.stream.Collectors.joining;
 import static java.util.stream.Collectors.toList;
 
@@ -72,7 +73,7 @@ private List<String> toConjuncts(List<BigQueryColumnHandle> columns)
         for (BigQueryColumnHandle column : columns) {
             Domain domain = tupleDomain.getDomains().get().get(column);
             if (domain != null) {
-                toPredicate(column.getName(), domain, column).ifPresent(clauses::add);
+                toPredicate(toBigQueryColumnName(column.getName()), domain, column).ifPresent(clauses::add);
             }
         }
         return clauses.build();
diff --git a/plugin/trino-bigquery/src/main/java/io/trino/plugin/bigquery/BigQueryMetadata.java b/plugin/trino-bigquery/src/main/java/io/trino/plugin/bigquery/BigQueryMetadata.java
index 55199a5d332d..57d7a5502687 100644
--- a/plugin/trino-bigquery/src/main/java/io/trino/plugin/bigquery/BigQueryMetadata.java
+++ b/plugin/trino-bigquery/src/main/java/io/trino/plugin/bigquery/BigQueryMetadata.java
@@ -72,6 +72,9 @@
 import static com.google.common.collect.ImmutableList.toImmutableList;
 import static com.google.common.collect.ImmutableMap.toImmutableMap;
 import static io.trino.plugin.bigquery.BigQueryErrorCode.BIGQUERY_LISTING_DATASET_ERROR;
+import static io.trino.plugin.bigquery.BigQueryPseudoColumn.PARTITION_DATE;
+import static io.trino.plugin.bigquery.BigQueryPseudoColumn.PARTITION_TIME;
+import static io.trino.plugin.bigquery.BigQueryTableHandle.BigQueryPartitionType.INGESTION;
 import static io.trino.plugin.bigquery.BigQueryType.toField;
 import static java.util.Locale.ENGLISH;
 import static java.util.Objects.requireNonNull;
@@ -247,6 +250,10 @@ public ConnectorTableMetadata getTableMetadata(ConnectorSession session, Connect
         for (BigQueryColumnHandle column : client.getColumns(handle)) {
             columnMetadata.add(column.getColumnMetadata());
         }
+        if (handle.getPartitionType().isPresent() && handle.getPartitionType().get() == INGESTION) {
+            columnMetadata.add(PARTITION_DATE.getColumnMetadata());
+            columnMetadata.add(PARTITION_TIME.getColumnMetadata());
+        }
         return new ConnectorTableMetadata(handle.getSchemaTableName(), columnMetadata.build());
     }
 
@@ -290,7 +297,15 @@ public Map<String, ColumnHandle> getColumnHandles(ConnectorSession session, Conn
     {
         BigQueryClient client = bigQueryClientFactory.create(session);
         log.debug("getColumnHandles(session=%s, tableHandle=%s)", session, tableHandle);
-        return client.getColumns((BigQueryTableHandle) tableHandle).stream()
+
+        BigQueryTableHandle table = (BigQueryTableHandle) tableHandle;
+        ImmutableList.Builder<BigQueryColumnHandle> columns = ImmutableList.builder();
+        columns.addAll(client.getColumns(table));
+        if (table.getPartitionType().isPresent() && table.getPartitionType().get() == INGESTION) {
+            columns.add(PARTITION_DATE.getColumnHandle());
+            columns.add(PARTITION_TIME.getColumnHandle());
+        }
+        return columns.build().stream()
                 .collect(toImmutableMap(columnHandle -> columnHandle.getColumnMetadata().getName(), identity()));
     }
 
diff --git a/plugin/trino-bigquery/src/main/java/io/trino/plugin/bigquery/BigQueryPseudoColumn.java b/plugin/trino-bigquery/src/main/java/io/trino/plugin/bigquery/BigQueryPseudoColumn.java
new file mode 100644
index 000000000000..cb297b1689ad
--- /dev/null
+++ b/plugin/trino-bigquery/src/main/java/io/trino/plugin/bigquery/BigQueryPseudoColumn.java
@@ -0,0 +1,74 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.bigquery;
+
+import com.google.cloud.bigquery.Field;
+import com.google.common.collect.ImmutableList;
+import io.trino.spi.connector.ColumnMetadata;
+import io.trino.spi.type.Type;
+
+import static io.trino.spi.type.DateType.DATE;
+import static io.trino.spi.type.TimestampWithTimeZoneType.TIMESTAMP_TZ_MICROS;
+
+public enum BigQueryPseudoColumn
+{
+    PARTITION_DATE("$partition_date", "_PARTITIONDATE", DATE, BigQueryType.DATE),
+    PARTITION_TIME("$partition_time", "_PARTITIONTIME", TIMESTAMP_TZ_MICROS, BigQueryType.TIMESTAMP),
+    /**/;
+
+    private final String trinoColumnName;
+    private final String bigqueryColumnName;
+    private final Type trinoType;
+    private final BigQueryType bigqueryType;
+
+    BigQueryPseudoColumn(String trinoColumnName, String bigqueryColumnName, Type type, BigQueryType bigqueryType)
+    {
+        this.trinoColumnName = trinoColumnName;
+        this.bigqueryColumnName = bigqueryColumnName;
+        this.trinoType = type;
+        this.bigqueryType = bigqueryType;
+    }
+
+    public String getTrinoColumnName()
+    {
+        return trinoColumnName;
+    }
+
+    public String getBigqueryColumnName()
+    {
+        return bigqueryColumnName;
+    }
+
+    public BigQueryColumnHandle getColumnHandle()
+    {
+        return new BigQueryColumnHandle(
+                trinoColumnName,
+                bigqueryType,
+                Field.Mode.REQUIRED,
+                null,
+                null,
+                ImmutableList.of(),
+                null,
+                true);
+    }
+
+    public ColumnMetadata getColumnMetadata()
+    {
+        return ColumnMetadata.builder()
+                .setName(trinoColumnName)
+                .setType(trinoType)
+                .setHidden(true)
+                .build();
+    }
+}
diff --git a/plugin/trino-bigquery/src/main/java/io/trino/plugin/bigquery/BigQueryResultPageSource.java b/plugin/trino-bigquery/src/main/java/io/trino/plugin/bigquery/BigQueryResultPageSource.java
index 5c7b8ae7d052..9af7ac985b61 100644
--- a/plugin/trino-bigquery/src/main/java/io/trino/plugin/bigquery/BigQueryResultPageSource.java
+++ b/plugin/trino-bigquery/src/main/java/io/trino/plugin/bigquery/BigQueryResultPageSource.java
@@ -56,6 +56,7 @@
 import static com.google.common.collect.ImmutableList.toImmutableList;
 import static io.airlift.slice.Slices.utf8Slice;
 import static io.trino.plugin.bigquery.BigQueryType.toTrinoTimestamp;
+import static io.trino.plugin.bigquery.BigQueryUtil.toBigQueryColumnName;
 import static io.trino.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR;
 import static io.trino.spi.type.BigintType.BIGINT;
 import static io.trino.spi.type.DateType.DATE;
@@ -141,7 +142,7 @@ public Page getNextPage()
             pageBuilder.declarePosition();
             for (int column = 0; column < columnTypes.size(); column++) {
                 BlockBuilder output = pageBuilder.getBlockBuilder(column);
-                appendTo(columnTypes.get(column), record.get(columnNames.get(column)), output);
+                appendTo(columnTypes.get(column), record.get(toBigQueryColumnName(columnNames.get(column))), output);
             }
         }
 
diff --git a/plugin/trino-bigquery/src/main/java/io/trino/plugin/bigquery/BigQueryTableHandle.java b/plugin/trino-bigquery/src/main/java/io/trino/plugin/bigquery/BigQueryTableHandle.java
index 80c12399d801..8c5a6f7242be 100644
--- a/plugin/trino-bigquery/src/main/java/io/trino/plugin/bigquery/BigQueryTableHandle.java
+++ b/plugin/trino-bigquery/src/main/java/io/trino/plugin/bigquery/BigQueryTableHandle.java
@@ -15,7 +15,11 @@
 
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.cloud.bigquery.RangePartitioning;
+import com.google.cloud.bigquery.StandardTableDefinition;
+import com.google.cloud.bigquery.TableDefinition;
 import com.google.cloud.bigquery.TableInfo;
+import com.google.cloud.bigquery.TimePartitioning;
 import io.trino.spi.connector.ColumnHandle;
 import io.trino.spi.connector.ConnectorTableHandle;
 import io.trino.spi.connector.SchemaTableName;
@@ -34,6 +38,7 @@ public class BigQueryTableHandle
     private final SchemaTableName schemaTableName;
     private final RemoteTableName remoteTableName;
     private final String type;
+    private final Optional<BigQueryPartitionType> partitionType;
     private final TupleDomain<ColumnHandle> constraint;
     private final Optional<List<ColumnHandle>> projectedColumns;
 
@@ -42,12 +47,14 @@ public BigQueryTableHandle(
             @JsonProperty("schemaTableName") SchemaTableName schemaTableName,
             @JsonProperty("remoteTableName") RemoteTableName remoteTableName,
             @JsonProperty("type") String type,
+            @JsonProperty("partitionType") Optional<BigQueryPartitionType> partitionType,
             @JsonProperty("constraint") TupleDomain<ColumnHandle> constraint,
             @JsonProperty("projectedColumns") Optional<List<ColumnHandle>> projectedColumns)
     {
         this.schemaTableName = requireNonNull(schemaTableName, "schemaTableName is null");
         this.remoteTableName = requireNonNull(remoteTableName, "remoteTableName is null");
         this.type = requireNonNull(type, "type is null");
+        this.partitionType = requireNonNull(partitionType, "partitionType is null");
         this.constraint = requireNonNull(constraint, "constraint is null");
         this.projectedColumns = requireNonNull(projectedColumns, "projectedColumns is null");
     }
@@ -58,6 +65,7 @@ public BigQueryTableHandle(SchemaTableName schemaTableName, RemoteTableName remo
                 schemaTableName,
                 remoteTableName,
                 tableInfo.getDefinition().getType().toString(),
+                getPartitionType(tableInfo.getDefinition()),
                 TupleDomain.all(),
                 Optional.empty());
     }
@@ -80,6 +88,12 @@ public String getType()
         return type;
     }
 
+    @JsonProperty
+    public Optional<BigQueryPartitionType> getPartitionType()
+    {
+        return partitionType;
+    }
+
     @JsonProperty
     public TupleDomain<ColumnHandle> getConstraint()
     {
@@ -106,6 +120,7 @@ public boolean equals(Object o)
         // TODO: Add tests for this (see TestJdbcTableHandle#testEquivalence for reference)
         return Objects.equals(schemaTableName, that.schemaTableName) &&
                 Objects.equals(type, that.type) &&
+                Objects.equals(partitionType, that.partitionType) &&
                 Objects.equals(constraint, that.constraint) &&
                 Objects.equals(projectedColumns, that.projectedColumns);
     }
@@ -113,7 +128,7 @@ public boolean equals(Object o)
     @Override
     public int hashCode()
     {
-        return Objects.hash(schemaTableName, type, constraint, projectedColumns);
+        return Objects.hash(schemaTableName, type, partitionType, constraint, projectedColumns);
     }
 
     @Override
@@ -123,6 +138,7 @@ public String toString()
                 .add("remoteTableName", remoteTableName)
                 .add("schemaTableName", schemaTableName)
                 .add("type", type)
+                .add("partitionType", partitionType)
                 .add("constraint", constraint)
                 .add("projectedColumns", projectedColumns)
                 .toString();
@@ -130,11 +146,39 @@ public String toString()
 
     BigQueryTableHandle withConstraint(TupleDomain<ColumnHandle> newConstraint)
     {
-        return new BigQueryTableHandle(schemaTableName, remoteTableName, type, newConstraint, projectedColumns);
+        return new BigQueryTableHandle(schemaTableName, remoteTableName, type, partitionType, newConstraint, projectedColumns);
     }
 
     BigQueryTableHandle withProjectedColumns(List<ColumnHandle> newProjectedColumns)
     {
-        return new BigQueryTableHandle(schemaTableName, remoteTableName, type, constraint, Optional.of(newProjectedColumns));
+        return new BigQueryTableHandle(schemaTableName, remoteTableName, type, partitionType, constraint, Optional.of(newProjectedColumns));
+    }
+
+    public enum BigQueryPartitionType
+    {
+        TIME,
+        INGESTION,
+        RANGE,
+        /**/
+    }
+
+    private static Optional<BigQueryPartitionType> getPartitionType(TableDefinition definition)
+    {
+        if (definition instanceof StandardTableDefinition) {
+            StandardTableDefinition standardTableDefinition = (StandardTableDefinition) definition;
+            RangePartitioning rangePartition = standardTableDefinition.getRangePartitioning();
+            if (rangePartition != null) {
+                return Optional.of(BigQueryPartitionType.RANGE);
+            }
+
+            TimePartitioning timePartition = standardTableDefinition.getTimePartitioning();
+            if (timePartition != null) {
+                if (timePartition.getField() != null) {
+                    return Optional.of(BigQueryPartitionType.TIME);
+                }
+                return Optional.of(BigQueryPartitionType.INGESTION);
+            }
+        }
+        return Optional.empty();
     }
 }
diff --git a/plugin/trino-bigquery/src/main/java/io/trino/plugin/bigquery/BigQueryUtil.java b/plugin/trino-bigquery/src/main/java/io/trino/plugin/bigquery/BigQueryUtil.java
index e274e0fa592b..69faf8ebdb25 100644
--- a/plugin/trino-bigquery/src/main/java/io/trino/plugin/bigquery/BigQueryUtil.java
+++ b/plugin/trino-bigquery/src/main/java/io/trino/plugin/bigquery/BigQueryUtil.java
@@ -19,6 +19,8 @@
 import io.grpc.Status;
 import io.grpc.StatusRuntimeException;
 
+import java.util.Arrays;
+import java.util.Optional;
 import java.util.Set;
 
 import static com.google.cloud.http.BaseHttpServiceException.UNKNOWN_CODE;
@@ -31,8 +33,6 @@ public final class BigQueryUtil
             "Connection closed with unknown cause",
             "Received unexpected EOS on DATA frame from server");
 
-    private static final Set<String> INVALID_COLUMN_NAMES = ImmutableSet.of("_partitiondate", "_PARTITIONDATE", "_partitiontime", "_PARTITIONTIME");
-
     private BigQueryUtil() {}
 
     public static boolean isRetryable(Throwable cause)
@@ -56,8 +56,14 @@ public static BigQueryException convertToBigQueryException(BigQueryError error)
         return new BigQueryException(UNKNOWN_CODE, error.getMessage(), error);
     }
 
-    public static boolean validColumnName(String columnName)
+    public static String toBigQueryColumnName(String columnName)
     {
-        return !INVALID_COLUMN_NAMES.contains(columnName);
+        Optional<BigQueryPseudoColumn> pseudoColumn = Arrays.stream(BigQueryPseudoColumn.values())
+                .filter(column -> column.getTrinoColumnName().equals(columnName))
+                .findFirst();
+        if (pseudoColumn.isPresent()) {
+            return pseudoColumn.get().getBigqueryColumnName();
+        }
+        return columnName;
     }
 }
diff --git a/plugin/trino-bigquery/src/main/java/io/trino/plugin/bigquery/ReadSessionCreator.java b/plugin/trino-bigquery/src/main/java/io/trino/plugin/bigquery/ReadSessionCreator.java
index 08e68efa4f58..468cc492b3ca 100644
--- a/plugin/trino-bigquery/src/main/java/io/trino/plugin/bigquery/ReadSessionCreator.java
+++ b/plugin/trino-bigquery/src/main/java/io/trino/plugin/bigquery/ReadSessionCreator.java
@@ -62,7 +62,7 @@ public ReadSession create(ConnectorSession session, TableId remoteTable, List
 
         List<String> filteredSelectedFields = selectedFields.stream()
-                .filter(BigQueryUtil::validColumnName)
+                .map(BigQueryUtil::toBigQueryColumnName)
                 .collect(toList());
 
         try (BigQueryReadClient bigQueryReadClient = bigQueryReadClientFactory.create(session)) {
diff --git a/plugin/trino-bigquery/src/test/java/io/trino/plugin/bigquery/TestBigQueryConnectorTest.java b/plugin/trino-bigquery/src/test/java/io/trino/plugin/bigquery/TestBigQueryConnectorTest.java
index 2e41ba364027..9e798609692e 100644
--- a/plugin/trino-bigquery/src/test/java/io/trino/plugin/bigquery/TestBigQueryConnectorTest.java
+++ b/plugin/trino-bigquery/src/test/java/io/trino/plugin/bigquery/TestBigQueryConnectorTest.java
@@ -331,6 +331,88 @@ protected boolean isColumnNameRejected(Exception exception, String columnName, b
         return nullToEmpty(exception.getMessage()).matches(".*(Fields must contain only letters, numbers, and underscores, start with a letter or underscore, and be at most 300 characters long).*");
     }
 
+    @Test
+    public void testPartitionDateColumn()
+    {
+        try (TestTable table = new TestTable(bigQuerySqlExecutor, "test.partition_date_column", "(value INT64) PARTITION BY _PARTITIONDATE")) {
+            // BigQuery doesn't allow omitting the column list for ingestion-time partitioned tables
+            // Using the _PARTITIONTIME special column because _PARTITIONDATE is unsupported in INSERT statements
+            onBigQuery(format("INSERT INTO %s (_PARTITIONTIME, value) VALUES ('1960-01-01', 1)", table.getName()));
+            onBigQuery(format("INSERT INTO %s (_PARTITIONTIME, value) VALUES ('2159-12-31', 2)", table.getName()));
+
+            assertThat(query("SELECT value, \"$partition_date\" FROM " + table.getName()))
+                    .matches("VALUES (BIGINT '1', DATE '1960-01-01'), (BIGINT '2', DATE '2159-12-31')");
+
+            assertQuery(format("SELECT value FROM %s WHERE \"$partition_date\" = DATE '1960-01-01'", table.getName()), "VALUES 1");
+            assertQuery(format("SELECT value FROM %s WHERE \"$partition_date\" = DATE '2159-12-31'", table.getName()), "VALUES 2");
+
+            // Verify DESCRIBE result doesn't have hidden columns
+            assertThat(query("DESCRIBE " + table.getName())).projected(0).skippingTypesCheck().matches("VALUES 'value'");
+        }
+    }
+
+    @Test
+    public void testPartitionTimeColumn()
+    {
+        try (TestTable table = new TestTable(bigQuerySqlExecutor, "test.partition_time_column", "(value INT64) PARTITION BY DATE_TRUNC(_PARTITIONTIME, HOUR)")) {
+            // BigQuery doesn't allow omitting the column list for ingestion-time partitioned tables
+            onBigQuery(format("INSERT INTO %s (_PARTITIONTIME, value) VALUES ('1960-01-01 00:00:00', 1)", table.getName()));
+            onBigQuery(format("INSERT INTO %s (_PARTITIONTIME, value) VALUES ('2159-12-31 23:00:00', 2)", table.getName())); // Minute and second must be zero
+
+            assertThat(query("SELECT value, \"$partition_time\" FROM " + table.getName()))
+                    .matches("VALUES (BIGINT '1', CAST('1960-01-01 00:00:00 UTC' AS TIMESTAMP(6) WITH TIME ZONE)), (BIGINT '2', CAST('2159-12-31 23:00:00 UTC' AS TIMESTAMP(6) WITH TIME ZONE))");
+
+            assertQuery(format("SELECT value FROM %s WHERE \"$partition_time\" = CAST('1960-01-01 00:00:00 UTC' AS TIMESTAMP(6) WITH TIME ZONE)", table.getName()), "VALUES 1");
+            assertQuery(format("SELECT value FROM %s WHERE \"$partition_time\" = CAST('2159-12-31 23:00:00 UTC' AS TIMESTAMP(6) WITH TIME ZONE)", table.getName()), "VALUES 2");
+
+            // Verify DESCRIBE result doesn't have hidden columns
assertThat(query("DESCRIBE " + table.getName())).projected(0).skippingTypesCheck().matches("VALUES 'value'"); + } + } + + @Test + public void testIngestionTimePartitionedTableInvalidValue() + { + try (TestTable table = new TestTable(bigQuerySqlExecutor, "test.invalid_ingestion_time", "(value INT64) PARTITION BY _PARTITIONDATE")) { + assertThatThrownBy(() -> onBigQuery(format("INSERT INTO %s (_PARTITIONTIME, value) VALUES ('0001-01-01', 1)", table.getName()))) + .hasMessageMatching("Cannot set pseudo column for automatic partitioned table.* Supported values are in the range \\[1960-01-01, 2159-12-31]"); + + assertThatThrownBy(() -> onBigQuery(format("INSERT INTO %s (_PARTITIONTIME, value) VALUES ('1959-12-31', 1)", table.getName()))) + .hasMessageMatching("Cannot set pseudo column for automatic partitioned table.* Supported values are in the range \\[1960-01-01, 2159-12-31]"); + + assertThatThrownBy(() -> onBigQuery(format("INSERT INTO %s (_PARTITIONTIME, value) VALUES ('2160-01-01', 1)", table.getName()))) + .hasMessageMatching("Cannot set pseudo column for automatic partitioned table.* Supported values are in the range \\[1960-01-01, 2159-12-31]"); + + assertThatThrownBy(() -> onBigQuery(format("INSERT INTO %s (_PARTITIONTIME, value) VALUES ('9999-12-31', 1)", table.getName()))) + .hasMessageMatching("Cannot set pseudo column for automatic partitioned table.* Supported values are in the range \\[1960-01-01, 2159-12-31]"); + + assertThatThrownBy(() -> onBigQuery(format("INSERT INTO %s (_PARTITIONTIME, value) VALUES (NULL, 1)", table.getName()))) + .hasMessageContaining("Cannot set timestamp pseudo column for automatic partitioned table to NULL"); + } + } + + @Test + public void testPseudoColumnNotExist() + { + // Normal table without partitions + try (TestTable table = new TestTable(bigQuerySqlExecutor, "test.non_partitioned_table", "(value INT64, ts TIMESTAMP)")) { + assertQueryFails("SELECT \"$partition_date\" FROM " + table.getName(), ".* Column '\\$partition_date' cannot be resolved"); + assertQueryFails("SELECT \"$partition_time\" FROM " + table.getName(), ".* Column '\\$partition_time' cannot be resolved"); + } + + // Time-unit partitioned table + try (TestTable table = new TestTable(bigQuerySqlExecutor, "test.time_unit_partition", "(value INT64, dt DATE) PARTITION BY dt")) { + assertQueryFails("SELECT \"$partition_date\" FROM " + table.getName(), ".* Column '\\$partition_date' cannot be resolved"); + assertQueryFails("SELECT \"$partition_time\" FROM " + table.getName(), ".* Column '\\$partition_time' cannot be resolved"); + } + + // Integer-range partitioned table + try (TestTable table = new TestTable(bigQuerySqlExecutor, "test.integer_range_partition", "(value INT64, dt DATE) PARTITION BY RANGE_BUCKET(value, GENERATE_ARRAY(0, 100, 10))")) { + assertQueryFails("SELECT \"$partition_date\" FROM " + table.getName(), ".* Column '\\$partition_date' cannot be resolved"); + assertQueryFails("SELECT \"$partition_time\" FROM " + table.getName(), ".* Column '\\$partition_time' cannot be resolved"); + } + } + @Test public void testSelectFromHourlyPartitionedTable() {