From 792abcecd0e15abb14e4c6fe85d1652bdeec7e84 Mon Sep 17 00:00:00 2001
From: wgzhao
Date: Wed, 20 Jan 2021 14:10:19 +0800
Subject: [PATCH] Add ClickHouse Connector
---
.../trino-server/src/main/provisio/presto.xml | 6 +
docs/src/main/sphinx/connector.rst | 1 +
docs/src/main/sphinx/connector/clickhouse.rst | 73 ++++
plugin/trino-clickhouse/pom.xml | 171 +++++++++
.../plugin/clickhouse/ClickHouseClient.java | 353 ++++++++++++++++++
.../clickhouse/ClickHouseClientModule.java | 53 +++
.../plugin/clickhouse/ClickHousePlugin.java | 25 ++
.../clickhouse/ClickHouseTableProperties.java | 66 ++++
.../clickhouse/ClickHouseQueryRunner.java | 100 +++++
.../clickhouse/ClickHouseSqlExecutor.java | 59 +++
.../TestClickHouseDistributedQueries.java | 125 +++++++
.../clickhouse/TestClickHousePlugin.java | 33 ++
.../clickhouse/TestClickHouseTypeMapping.java | 254 +++++++++++++
.../clickhouse/TestingClickHouseServer.java | 69 ++++
pom.xml | 1 +
15 files changed, 1389 insertions(+)
create mode 100644 docs/src/main/sphinx/connector/clickhouse.rst
create mode 100644 plugin/trino-clickhouse/pom.xml
create mode 100644 plugin/trino-clickhouse/src/main/java/io/trino/plugin/clickhouse/ClickHouseClient.java
create mode 100644 plugin/trino-clickhouse/src/main/java/io/trino/plugin/clickhouse/ClickHouseClientModule.java
create mode 100644 plugin/trino-clickhouse/src/main/java/io/trino/plugin/clickhouse/ClickHousePlugin.java
create mode 100644 plugin/trino-clickhouse/src/main/java/io/trino/plugin/clickhouse/ClickHouseTableProperties.java
create mode 100644 plugin/trino-clickhouse/src/test/java/io/trino/plugin/clickhouse/ClickHouseQueryRunner.java
create mode 100644 plugin/trino-clickhouse/src/test/java/io/trino/plugin/clickhouse/ClickHouseSqlExecutor.java
create mode 100644 plugin/trino-clickhouse/src/test/java/io/trino/plugin/clickhouse/TestClickHouseDistributedQueries.java
create mode 100644 plugin/trino-clickhouse/src/test/java/io/trino/plugin/clickhouse/TestClickHousePlugin.java
create mode 100644 plugin/trino-clickhouse/src/test/java/io/trino/plugin/clickhouse/TestClickHouseTypeMapping.java
create mode 100644 plugin/trino-clickhouse/src/test/java/io/trino/plugin/clickhouse/TestingClickHouseServer.java
diff --git a/core/trino-server/src/main/provisio/presto.xml b/core/trino-server/src/main/provisio/presto.xml
index 465f6c616c68..9a20605da764 100644
--- a/core/trino-server/src/main/provisio/presto.xml
+++ b/core/trino-server/src/main/provisio/presto.xml
@@ -56,6 +56,12 @@
+    <artifactSet to="plugin/clickhouse">
+        <artifact id="${project.groupId}:trino-clickhouse:zip:${project.version}">
+            <unpack />
+        </artifact>
+    </artifactSet>
+
diff --git a/docs/src/main/sphinx/connector.rst b/docs/src/main/sphinx/connector.rst
index 9e58f87791d4..3c833565c0c0 100644
--- a/docs/src/main/sphinx/connector.rst
+++ b/docs/src/main/sphinx/connector.rst
@@ -12,6 +12,7 @@ from different data sources.
BigQuery
Black Hole
Cassandra
+ ClickHouse
Druid
Elasticsearch
Google Sheets
diff --git a/docs/src/main/sphinx/connector/clickhouse.rst b/docs/src/main/sphinx/connector/clickhouse.rst
new file mode 100644
index 000000000000..4821d808177c
--- /dev/null
+++ b/docs/src/main/sphinx/connector/clickhouse.rst
@@ -0,0 +1,73 @@
+====================
+ClickHouse Connector
+====================
+
+The ClickHouse connector allows querying tables in an external
+`Yandex ClickHouse <https://clickhouse.tech/>`_ instance. This can be used to join data
+between different systems like ClickHouse and Hive, or between two different ClickHouse instances.
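+
+For example, assuming catalogs named ``clickhouse`` and ``hive``, a cross-system join
+might look like the following (the schema, table, and column names are placeholders)::
+
+    SELECT o.order_id, c.name
+    FROM clickhouse.web.orders o
+    JOIN hive.default.customers c ON o.customer_id = c.id;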
+
+Configuration
+-------------
+
+To configure the ClickHouse connector, create a catalog properties file
+``etc/catalog/clickhouse.properties``, replacing the connection properties as
+needed for your setup:
+
+.. code-block:: none
+
+ connector.name=clickhouse
+ connection-url=jdbc:clickhouse://host1:8123/default
+ connection-user=default
+ connection-password=
+
+
+Multiple ClickHouse servers
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+If you have multiple ClickHouse servers, you need to configure one catalog for each instance.
+To add another catalog:
+
+* Add another properties file to ``etc/catalog``
+* Save it with a different name that ends in ``.properties``
+
+For example, if you name the property file ``sales.properties``, Trino uses the configured
+connector to create a catalog named ``sales``.
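+
+In this case, ``sales.properties`` might contain the same properties as the first
+catalog, pointing at the second server (the host name below is a placeholder):
+
+.. code-block:: none
+
+    connector.name=clickhouse
+    connection-url=jdbc:clickhouse://host2:8123/default
+    connection-user=default
+    connection-password=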
+
+Querying ClickHouse
+-------------------
+
+The ClickHouse connector provides a schema for every ClickHouse *database*.
+Run ``SHOW SCHEMAS`` to see the available ClickHouse databases::
+
+ SHOW SCHEMAS FROM clickhouse;
+
+If you have a ClickHouse database named ``web``, run ``SHOW TABLES`` to view the tables
+in this database::
+
+ SHOW TABLES FROM clickhouse.web;
+
+Run ``DESCRIBE`` or ``SHOW COLUMNS`` to list the columns in the ``clicks`` table in the
+``web`` database::
+
+ DESCRIBE clickhouse.web.clicks;
+ SHOW COLUMNS FROM clickhouse.web.clicks;
+
+Run ``SELECT`` to access the ``clicks`` table in the ``web`` database::
+
+ SELECT * FROM clickhouse.web.clicks;
+
+.. note::
+
+ If you used a different name for your catalog properties file, use
+ that catalog name instead of ``clickhouse`` in the above examples.
+
+
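+Table properties
+----------------
+
+The connector exposes an ``engine`` table property for choosing the ClickHouse table
+engine when creating a table through Trino; it defaults to ``Log``. For example, a
+table using the default engine might be created as follows (the table and column
+names are only illustrative)::
+
+    CREATE TABLE clickhouse.web.page_views (
+        user_id bigint,
+        page_url varchar
+    )
+    WITH (engine = 'Log');
+
+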
+ClickHouse Connector Limitations
+--------------------------------
+
+The following SQL statements are not supported:
+
+* :doc:`/sql/grant`
+* :doc:`/sql/revoke`
+* :doc:`/sql/show-grants`
+* :doc:`/sql/show-roles`
+* :doc:`/sql/show-role-grants`
diff --git a/plugin/trino-clickhouse/pom.xml b/plugin/trino-clickhouse/pom.xml
new file mode 100644
index 000000000000..a6ab9b8381b6
--- /dev/null
+++ b/plugin/trino-clickhouse/pom.xml
@@ -0,0 +1,171 @@
+<?xml version="1.0"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <parent>
+        <groupId>io.trino</groupId>
+        <artifactId>trino-root</artifactId>
+        <version>353-SNAPSHOT</version>
+        <relativePath>../../pom.xml</relativePath>
+    </parent>
+
+    <artifactId>trino-clickhouse</artifactId>
+    <description>Trino - ClickHouse Connector</description>
+    <packaging>trino-plugin</packaging>
+
+    <properties>
+        <air.main.basedir>${project.parent.basedir}</air.main.basedir>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>io.trino</groupId>
+            <artifactId>trino-base-jdbc</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>io.airlift</groupId>
+            <artifactId>log</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>io.airlift</groupId>
+            <artifactId>log-manager</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>com.google.code.findbugs</groupId>
+            <artifactId>jsr305</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>com.google.guava</groupId>
+            <artifactId>guava</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>com.google.inject</groupId>
+            <artifactId>guice</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>javax.inject</groupId>
+            <artifactId>javax.inject</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>ru.yandex.clickhouse</groupId>
+            <artifactId>clickhouse-jdbc</artifactId>
+            <version>0.2.4</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>jcl-over-slf4j</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>commons-logging</groupId>
+                    <artifactId>commons-logging</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+
+        <!-- Trino SPI -->
+        <dependency>
+            <groupId>io.trino</groupId>
+            <artifactId>trino-spi</artifactId>
+            <scope>provided</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>io.airlift</groupId>
+            <artifactId>slice</artifactId>
+            <scope>provided</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-annotations</artifactId>
+            <scope>provided</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.openjdk.jol</groupId>
+            <artifactId>jol-core</artifactId>
+            <scope>provided</scope>
+        </dependency>
+
+        <!-- for testing -->
+        <dependency>
+            <groupId>io.trino</groupId>
+            <artifactId>trino-main</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>io.trino</groupId>
+            <artifactId>trino-main</artifactId>
+            <type>test-jar</type>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>io.trino</groupId>
+            <artifactId>trino-testing</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>io.trino</groupId>
+            <artifactId>trino-tpch</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>io.trino.tpch</groupId>
+            <artifactId>tpch</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>io.airlift</groupId>
+            <artifactId>testing</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.assertj</groupId>
+            <artifactId>assertj-core</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.jetbrains</groupId>
+            <artifactId>annotations</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.testcontainers</groupId>
+            <artifactId>clickhouse</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.testcontainers</groupId>
+            <artifactId>jdbc</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.testcontainers</groupId>
+            <artifactId>testcontainers</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.testng</groupId>
+            <artifactId>testng</artifactId>
+            <scope>test</scope>
+        </dependency>
+    </dependencies>
+</project>
diff --git a/plugin/trino-clickhouse/src/main/java/io/trino/plugin/clickhouse/ClickHouseClient.java b/plugin/trino-clickhouse/src/main/java/io/trino/plugin/clickhouse/ClickHouseClient.java
new file mode 100644
index 000000000000..2eeccb4acb02
--- /dev/null
+++ b/plugin/trino-clickhouse/src/main/java/io/trino/plugin/clickhouse/ClickHouseClient.java
@@ -0,0 +1,353 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.clickhouse;
+
+import com.google.common.collect.ImmutableList;
+import io.trino.plugin.jdbc.BaseJdbcClient;
+import io.trino.plugin.jdbc.BaseJdbcConfig;
+import io.trino.plugin.jdbc.ColumnMapping;
+import io.trino.plugin.jdbc.ConnectionFactory;
+import io.trino.plugin.jdbc.JdbcColumnHandle;
+import io.trino.plugin.jdbc.JdbcTableHandle;
+import io.trino.plugin.jdbc.JdbcTypeHandle;
+import io.trino.plugin.jdbc.RemoteTableName;
+import io.trino.plugin.jdbc.WriteMapping;
+import io.trino.spi.TrinoException;
+import io.trino.spi.connector.ColumnMetadata;
+import io.trino.spi.connector.ConnectorSession;
+import io.trino.spi.connector.ConnectorTableMetadata;
+import io.trino.spi.connector.SchemaTableName;
+import io.trino.spi.type.CharType;
+import io.trino.spi.type.DecimalType;
+import io.trino.spi.type.Decimals;
+import io.trino.spi.type.Type;
+import io.trino.spi.type.TypeManager;
+import io.trino.spi.type.VarbinaryType;
+import io.trino.spi.type.VarcharType;
+
+import javax.annotation.Nullable;
+import javax.inject.Inject;
+
+import java.sql.Connection;
+import java.sql.DatabaseMetaData;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Types;
+import java.util.List;
+import java.util.Optional;
+import java.util.function.BiFunction;
+
+import static com.google.common.base.Strings.isNullOrEmpty;
+import static io.trino.plugin.jdbc.DecimalConfig.DecimalMapping.ALLOW_OVERFLOW;
+import static io.trino.plugin.jdbc.DecimalSessionSessionProperties.getDecimalDefaultScale;
+import static io.trino.plugin.jdbc.DecimalSessionSessionProperties.getDecimalRounding;
+import static io.trino.plugin.jdbc.DecimalSessionSessionProperties.getDecimalRoundingMode;
+import static io.trino.plugin.jdbc.JdbcErrorCode.JDBC_ERROR;
+import static io.trino.plugin.jdbc.PredicatePushdownController.FULL_PUSHDOWN;
+import static io.trino.plugin.jdbc.StandardColumnMappings.bigintColumnMapping;
+import static io.trino.plugin.jdbc.StandardColumnMappings.bigintWriteFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.booleanWriteFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.charColumnMapping;
+import static io.trino.plugin.jdbc.StandardColumnMappings.dateColumnMapping;
+import static io.trino.plugin.jdbc.StandardColumnMappings.dateWriteFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.decimalColumnMapping;
+import static io.trino.plugin.jdbc.StandardColumnMappings.doubleColumnMapping;
+import static io.trino.plugin.jdbc.StandardColumnMappings.doubleWriteFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.integerColumnMapping;
+import static io.trino.plugin.jdbc.StandardColumnMappings.integerWriteFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.longDecimalWriteFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.realColumnMapping;
+import static io.trino.plugin.jdbc.StandardColumnMappings.realWriteFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.shortDecimalWriteFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.smallintColumnMapping;
+import static io.trino.plugin.jdbc.StandardColumnMappings.smallintWriteFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.timestampColumnMappingUsingSqlTimestampWithRounding;
+import static io.trino.plugin.jdbc.StandardColumnMappings.tinyintColumnMapping;
+import static io.trino.plugin.jdbc.StandardColumnMappings.tinyintWriteFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.varbinaryReadFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.varbinaryWriteFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.varcharColumnMapping;
+import static io.trino.plugin.jdbc.StandardColumnMappings.varcharWriteFunction;
+import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED;
+import static io.trino.spi.type.BigintType.BIGINT;
+import static io.trino.spi.type.BooleanType.BOOLEAN;
+import static io.trino.spi.type.DateType.DATE;
+import static io.trino.spi.type.DecimalType.createDecimalType;
+import static io.trino.spi.type.DoubleType.DOUBLE;
+import static io.trino.spi.type.IntegerType.INTEGER;
+import static io.trino.spi.type.RealType.REAL;
+import static io.trino.spi.type.SmallintType.SMALLINT;
+import static io.trino.spi.type.TimestampType.TIMESTAMP_MILLIS;
+import static io.trino.spi.type.TinyintType.TINYINT;
+import static io.trino.spi.type.VarbinaryType.VARBINARY;
+import static io.trino.spi.type.VarcharType.createUnboundedVarcharType;
+import static java.lang.Math.max;
+import static java.lang.String.format;
+import static java.lang.String.join;
+import static java.util.Locale.ENGLISH;
+
+public class ClickHouseClient
+ extends BaseJdbcClient
+{
+ @Inject
+ public ClickHouseClient(BaseJdbcConfig config, ConnectionFactory connectionFactory, TypeManager typeManager)
+ {
+ super(config, "\"", connectionFactory);
+ }
+
+ @Override
+ protected String quoted(@Nullable String catalog, @Nullable String schema, String table)
+ {
+ StringBuilder sb = new StringBuilder();
+ if (!isNullOrEmpty(schema)) {
+ sb.append(quoted(schema)).append(".");
+ }
+ sb.append(quoted(table));
+ return sb.toString();
+ }
+
+ @Override
+ protected void copyTableSchema(Connection connection, String catalogName, String schemaName, String tableName, String newTableName, List<String> columnNames)
+ {
+ // ClickHouse does not support `create table tbl as select * from tbl2 where 0=1`
+ // ClickHouse supports the following two methods to copy schema
+ // 1. create table tbl as tbl2
+ // 2. create table tbl1 ENGINE=<engine> as select * from tbl2
+ String sql = format(
+ "CREATE TABLE %s AS %s ",
+ quoted(null, schemaName, newTableName),
+ quoted(null, schemaName, tableName));
+ execute(connection, sql);
+ }
+
+ @Override
+ protected String createTableSql(RemoteTableName remoteTableName, List<String> columns, ConnectorTableMetadata tableMetadata)
+ {
+ ImmutableList.Builder<String> tableOptions = ImmutableList.builder();
+ ClickHouseTableProperties.getEngine(tableMetadata.getProperties())
+ .ifPresent(value -> tableOptions.add("ENGINE =" + quoted(value)));
+ return format("CREATE TABLE %s (%s) %s", quoted(remoteTableName), join(", ", columns), join(", ", tableOptions.build()));
+ }
+
+ @Override
+ protected String getColumnDefinitionSql(ConnectorSession session, ColumnMetadata column, String columnName)
+ {
+ StringBuilder sb = new StringBuilder()
+ .append(quoted(columnName))
+ .append(" ");
+ if (column.isNullable()) {
+ // set column nullable property explicitly
+ sb.append("Nullable(").append(toWriteMapping(session, column.getType()).getDataType()).append(")");
+ }
+ else {
+ // By default, the clickhouse column is not allowed to be null
+ sb.append(toWriteMapping(session, column.getType()).getDataType());
+ }
+ return sb.toString();
+ }
+
+ @Override
+ public void createSchema(ConnectorSession session, String schemaName)
+ {
+ execute(session, "CREATE DATABASE " + quoted(schemaName));
+ }
+
+ @Override
+ public void dropSchema(ConnectorSession session, String schemaName)
+ {
+ execute(session, "DROP DATABASE " + quoted(schemaName));
+ }
+
+ @Override
+ public void renameColumn(ConnectorSession session, JdbcTableHandle handle, JdbcColumnHandle jdbcColumn, String newColumnName)
+ {
+ try (Connection connection = connectionFactory.openConnection(session)) {
+ DatabaseMetaData metadata = connection.getMetaData();
+ if (metadata.storesUpperCaseIdentifiers()) {
+ newColumnName = newColumnName.toUpperCase(ENGLISH);
+ }
+ String sql = format("ALTER TABLE %s RENAME COLUMN %s TO %s ",
+ quoted(handle.getRemoteTableName()),
+ jdbcColumn.getColumnName(),
+ newColumnName);
+ execute(connection, sql);
+ }
+ catch (SQLException e) {
+ throw new TrinoException(JDBC_ERROR, e);
+ }
+ }
+
+ @Override
+ protected ResultSet getTables(Connection connection, Optional<String> schemaName, Optional<String> tableName)
+ throws SQLException
+ {
+ // ClickHouse maps their "database" to SQL catalogs and does not have schemas
+ DatabaseMetaData metadata = connection.getMetaData();
+ return metadata.getTables(
+ null,
+ schemaName.orElse(null),
+ escapeNamePattern(tableName, metadata.getSearchStringEscape()).orElse(null),
+ new String[] {"TABLE", "VIEW"});
+ }
+
+ @Override
+ public void dropTable(ConnectorSession session, JdbcTableHandle handle)
+ {
+ String sql = "DROP TABLE " + quoted(handle.getRemoteTableName());
+ execute(session, sql);
+ }
+
+ @Override
+ protected void renameTable(ConnectorSession session, String catalogName, String schemaName, String tableName, SchemaTableName newTable)
+ {
+ String sql = format("RENAME TABLE %s.%s TO %s.%s",
+ quoted(schemaName),
+ quoted(tableName),
+ newTable.getSchemaName(),
+ newTable.getTableName());
+ execute(session, sql);
+ }
+
+ @Override
+ protected Optional<BiFunction<String, Long, String>> limitFunction()
+ {
+ return Optional.of((sql, limit) -> sql + " LIMIT " + limit);
+ }
+
+ @Override
+ public boolean isLimitGuaranteed(ConnectorSession session)
+ {
+ return true;
+ }
+
+ @Override
+ public Optional<ColumnMapping> toColumnMapping(ConnectorSession session, Connection connection, JdbcTypeHandle typeHandle)
+ {
+ String jdbcTypeName = typeHandle.getJdbcTypeName()
+ .orElseThrow(() -> new TrinoException(JDBC_ERROR, "Type name is missing: " + typeHandle));
+
+ Optional<ColumnMapping> mapping = getForcedMappingToVarchar(typeHandle);
+ if (mapping.isPresent()) {
+ return mapping;
+ }
+
+ switch (typeHandle.getJdbcType()) {
+ case Types.TINYINT:
+ return Optional.of(tinyintColumnMapping());
+
+ case Types.SMALLINT:
+ return Optional.of(smallintColumnMapping());
+
+ case Types.INTEGER:
+ return Optional.of(integerColumnMapping());
+
+ case Types.BIGINT:
+ return Optional.of(bigintColumnMapping());
+
+ case Types.REAL:
+ case Types.FLOAT:
+ return Optional.of(realColumnMapping());
+
+ case Types.DOUBLE:
+ return Optional.of(doubleColumnMapping());
+
+ case Types.DECIMAL:
+ int decimalDigits = typeHandle.getRequiredDecimalDigits();
+ int precision = typeHandle.getRequiredColumnSize();
+ if (getDecimalRounding(session) == ALLOW_OVERFLOW && precision > Decimals.MAX_PRECISION) {
+ int scale = Math.min(decimalDigits, getDecimalDefaultScale(session));
+ return Optional.of(decimalColumnMapping(createDecimalType(Decimals.MAX_PRECISION, scale), getDecimalRoundingMode(session)));
+ }
+ return Optional.of(decimalColumnMapping(createDecimalType(precision, max(decimalDigits, 0))));
+
+ case Types.CHAR:
+ case Types.VARCHAR:
+ case Types.NVARCHAR:
+ case Types.LONGVARCHAR:
+ case Types.LONGNVARCHAR:
+ return Optional.of(varcharColumnMapping(createUnboundedVarcharType(), false));
+
+ case Types.BINARY:
+ case Types.VARBINARY:
+ case Types.LONGVARBINARY:
+ return Optional.of(ColumnMapping.sliceMapping(VARBINARY, varbinaryReadFunction(), varbinaryWriteFunction(), FULL_PUSHDOWN));
+
+ case Types.DATE:
+ return Optional.of(dateColumnMapping());
+
+ case Types.TIMESTAMP:
+ // The ClickHouse JDBC driver does not support java.time.LocalDateTime
+ // TODO use the standard timestamp column mapping once the driver adds LocalDateTime support
+ return Optional.of(timestampColumnMappingUsingSqlTimestampWithRounding(TIMESTAMP_MILLIS));
+
+ case Types.OTHER:
+ // ClickHouse-specific data types
+ if (jdbcTypeName.startsWith("FixedString")) {
+ return Optional.of(charColumnMapping(CharType.createCharType(typeHandle.getRequiredColumnSize()), false));
+ }
+ else {
+ return Optional.of(varcharColumnMapping(createUnboundedVarcharType(), false));
+ }
+ default:
+ return Optional.empty();
+ }
+ }
+
+ @Override
+ public WriteMapping toWriteMapping(ConnectorSession session, Type type)
+ {
+ if (type == BOOLEAN) {
+ // ClickHouse has no separate type for boolean values. Use UInt8, restricted to the values 0 or 1.
+ return WriteMapping.booleanMapping("UInt8", booleanWriteFunction());
+ }
+ if (type == TINYINT) {
+ return WriteMapping.longMapping("Int8", tinyintWriteFunction());
+ }
+ if (type == SMALLINT) {
+ return WriteMapping.longMapping("Int16", smallintWriteFunction());
+ }
+ if (type == INTEGER) {
+ return WriteMapping.longMapping("Int32", integerWriteFunction());
+ }
+ if (type == BIGINT) {
+ return WriteMapping.longMapping("Int64", bigintWriteFunction());
+ }
+ if (type == REAL) {
+ return WriteMapping.longMapping("Float32", realWriteFunction());
+ }
+ if (type == DOUBLE) {
+ return WriteMapping.doubleMapping("Float64", doubleWriteFunction());
+ }
+ if (type instanceof DecimalType) {
+ DecimalType decimalType = (DecimalType) type;
+ String dataType = format("Decimal(%s, %s)", decimalType.getPrecision(), decimalType.getScale());
+ if (decimalType.isShort()) {
+ return WriteMapping.longMapping(dataType, shortDecimalWriteFunction(decimalType));
+ }
+ return WriteMapping.sliceMapping(dataType, longDecimalWriteFunction(decimalType));
+ }
+ if (type instanceof CharType || type instanceof VarcharType) {
+ // The String type replaces the types VARCHAR, BLOB, CLOB, and others from other DBMSs.
+ return WriteMapping.sliceMapping("String", varcharWriteFunction());
+ }
+ if (type instanceof VarbinaryType) {
+ // Strings of an arbitrary length. The length is not limited
+ return WriteMapping.sliceMapping("String", varbinaryWriteFunction());
+ }
+ if (type == DATE) {
+ return WriteMapping.longMapping("Date", dateWriteFunction());
+ }
+ throw new TrinoException(NOT_SUPPORTED, "Unsupported column type: " + type);
+ }
+}
diff --git a/plugin/trino-clickhouse/src/main/java/io/trino/plugin/clickhouse/ClickHouseClientModule.java b/plugin/trino-clickhouse/src/main/java/io/trino/plugin/clickhouse/ClickHouseClientModule.java
new file mode 100644
index 000000000000..19c728f614b1
--- /dev/null
+++ b/plugin/trino-clickhouse/src/main/java/io/trino/plugin/clickhouse/ClickHouseClientModule.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.clickhouse;
+
+import com.google.inject.Binder;
+import com.google.inject.Module;
+import com.google.inject.Provides;
+import com.google.inject.Scopes;
+import com.google.inject.Singleton;
+import io.trino.plugin.jdbc.BaseJdbcConfig;
+import io.trino.plugin.jdbc.ConnectionFactory;
+import io.trino.plugin.jdbc.DecimalModule;
+import io.trino.plugin.jdbc.DriverConnectionFactory;
+import io.trino.plugin.jdbc.ForBaseJdbc;
+import io.trino.plugin.jdbc.JdbcClient;
+import io.trino.plugin.jdbc.credential.CredentialProvider;
+import ru.yandex.clickhouse.ClickHouseDriver;
+
+import java.sql.SQLException;
+
+import static io.trino.plugin.jdbc.JdbcModule.bindTablePropertiesProvider;
+
+public class ClickHouseClientModule
+ implements Module
+{
+ @Provides
+ @Singleton
+ @ForBaseJdbc
+ public static ConnectionFactory createConnectionFactory(BaseJdbcConfig config, CredentialProvider credentialProvider)
+ throws SQLException
+ {
+ return new DriverConnectionFactory(new ClickHouseDriver(), config, credentialProvider);
+ }
+
+ @Override
+ public void configure(Binder binder)
+ {
+ binder.bind(JdbcClient.class).annotatedWith(ForBaseJdbc.class).to(ClickHouseClient.class).in(Scopes.SINGLETON);
+ bindTablePropertiesProvider(binder, ClickHouseTableProperties.class);
+ binder.install(new DecimalModule());
+ }
+}
diff --git a/plugin/trino-clickhouse/src/main/java/io/trino/plugin/clickhouse/ClickHousePlugin.java b/plugin/trino-clickhouse/src/main/java/io/trino/plugin/clickhouse/ClickHousePlugin.java
new file mode 100644
index 000000000000..f40ed2443dff
--- /dev/null
+++ b/plugin/trino-clickhouse/src/main/java/io/trino/plugin/clickhouse/ClickHousePlugin.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.clickhouse;
+
+import io.trino.plugin.jdbc.JdbcPlugin;
+
+public class ClickHousePlugin
+ extends JdbcPlugin
+{
+ public ClickHousePlugin()
+ {
+ super("clickhouse", new ClickHouseClientModule());
+ }
+}
diff --git a/plugin/trino-clickhouse/src/main/java/io/trino/plugin/clickhouse/ClickHouseTableProperties.java b/plugin/trino-clickhouse/src/main/java/io/trino/plugin/clickhouse/ClickHouseTableProperties.java
new file mode 100644
index 000000000000..30a8a087fb9c
--- /dev/null
+++ b/plugin/trino-clickhouse/src/main/java/io/trino/plugin/clickhouse/ClickHouseTableProperties.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.clickhouse;
+
+import com.google.common.collect.ImmutableList;
+import io.trino.plugin.jdbc.TablePropertiesProvider;
+import io.trino.spi.session.PropertyMetadata;
+
+import javax.inject.Inject;
+
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+
+import static io.trino.spi.session.PropertyMetadata.stringProperty;
+import static java.util.Objects.requireNonNull;
+
+/**
+ * Class contains all table properties for the ClickHouse connector. Used when creating a table:
+ *
+ * <pre>
+ * CREATE TABLE foo (a VARCHAR, b INT) WITH (engine = 'Log');
+ * </pre>
+ */
+public final class ClickHouseTableProperties
+ implements TablePropertiesProvider
+{
+ public static final String ENGINE_PROPERTY = "engine";
+ public static final String DEFAULT_TABLE_ENGINE = "Log";
+
+ private final List<PropertyMetadata<?>> tableProperties;
+
+ @Inject
+ public ClickHouseTableProperties()
+ {
+ tableProperties = ImmutableList.of(
+ stringProperty(
+ ENGINE_PROPERTY,
+ "ClickHouse Table Engine, defaults to Log",
+ DEFAULT_TABLE_ENGINE,
+ false));
+ }
+
+ public static Optional<String> getEngine(Map<String, Object> tableProperties)
+ {
+ requireNonNull(tableProperties);
+
+ return Optional.ofNullable(tableProperties.get(ENGINE_PROPERTY)).map(String.class::cast);
+ }
+
+ @Override
+ public List<PropertyMetadata<?>> getTableProperties()
+ {
+ return tableProperties;
+ }
+}
diff --git a/plugin/trino-clickhouse/src/test/java/io/trino/plugin/clickhouse/ClickHouseQueryRunner.java b/plugin/trino-clickhouse/src/test/java/io/trino/plugin/clickhouse/ClickHouseQueryRunner.java
new file mode 100644
index 000000000000..1233ae8b8f2f
--- /dev/null
+++ b/plugin/trino-clickhouse/src/test/java/io/trino/plugin/clickhouse/ClickHouseQueryRunner.java
@@ -0,0 +1,100 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.clickhouse;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import io.airlift.log.Logger;
+import io.airlift.log.Logging;
+import io.trino.Session;
+import io.trino.plugin.tpch.TpchPlugin;
+import io.trino.testing.DistributedQueryRunner;
+import io.trino.testing.QueryRunner;
+import io.trino.tpch.TpchTable;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import static io.airlift.testing.Closeables.closeAllSuppress;
+import static io.trino.plugin.tpch.TpchMetadata.TINY_SCHEMA_NAME;
+import static io.trino.testing.QueryAssertions.copyTpchTables;
+import static io.trino.testing.TestingSession.testSessionBuilder;
+
+public final class ClickHouseQueryRunner
+{
+ private static final String TPCH_SCHEMA = "tpch";
+
+ private ClickHouseQueryRunner() {}
+
+ public static QueryRunner createClickHouseQueryRunner(TestingClickHouseServer server, TpchTable<?>... tables)
+ throws Exception
+ {
+ return createClickHouseQueryRunner(server, ImmutableMap.of(), ImmutableMap.of(), ImmutableList.copyOf(tables));
+ }
+
+ public static DistributedQueryRunner createClickHouseQueryRunner(
+ TestingClickHouseServer server,
+ Map<String, String> extraProperties,
+ Map<String, String> connectorProperties,
+ Iterable<TpchTable<?>> tables)
+ throws Exception
+ {
+ DistributedQueryRunner queryRunner = null;
+ try {
+ queryRunner = DistributedQueryRunner.builder(createSession())
+ .setExtraProperties(extraProperties)
+ .build();
+
+ queryRunner.installPlugin(new TpchPlugin());
+ queryRunner.createCatalog("tpch", "tpch");
+
+ connectorProperties = new HashMap<>(ImmutableMap.copyOf(connectorProperties));
+ connectorProperties.putIfAbsent("connection-url", server.getJdbcUrl());
+
+ queryRunner.installPlugin(new ClickHousePlugin());
+ queryRunner.createCatalog("clickhouse", "clickhouse", connectorProperties);
+ server.execute("CREATE DATABASE " + TPCH_SCHEMA);
+ copyTpchTables(queryRunner, "tpch", TINY_SCHEMA_NAME, createSession(), tables);
+ return queryRunner;
+ }
+ catch (Throwable e) {
+ closeAllSuppress(e, queryRunner);
+ throw e;
+ }
+ }
+
+ public static Session createSession()
+ {
+ return testSessionBuilder()
+ .setCatalog("clickhouse")
+ .setSchema(TPCH_SCHEMA)
+ .build();
+ }
+
+ public static void main(String[] args)
+ throws Exception
+ {
+ Logging.initialize();
+
+ DistributedQueryRunner queryRunner = createClickHouseQueryRunner(
+ new TestingClickHouseServer(),
+ ImmutableMap.of("http-server.http.port", "8080"),
+ ImmutableMap.of(),
+ TpchTable.getTables());
+
+ Logger log = Logger.get(ClickHouseQueryRunner.class);
+ log.info("======== SERVER STARTED ========");
+ log.info("\n====\n%s\n====", queryRunner.getCoordinator().getBaseUrl());
+ }
+}
diff --git a/plugin/trino-clickhouse/src/test/java/io/trino/plugin/clickhouse/ClickHouseSqlExecutor.java b/plugin/trino-clickhouse/src/test/java/io/trino/plugin/clickhouse/ClickHouseSqlExecutor.java
new file mode 100644
index 000000000000..515e3164e6ac
--- /dev/null
+++ b/plugin/trino-clickhouse/src/test/java/io/trino/plugin/clickhouse/ClickHouseSqlExecutor.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.clickhouse;
+
+import io.trino.testing.sql.SqlExecutor;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.Properties;
+
+import static java.util.Objects.requireNonNull;
+
+public class ClickHouseSqlExecutor
+ implements SqlExecutor
+{
+ private final String jdbcUrl;
+ private final Properties jdbcProperties;
+
+ public ClickHouseSqlExecutor(String jdbcUrl)
+ {
+ this(jdbcUrl, new Properties());
+ }
+
+ public ClickHouseSqlExecutor(String jdbcUrl, Properties jdbcProperties)
+ {
+ this.jdbcUrl = requireNonNull(jdbcUrl, "jdbcUrl is null");
+ this.jdbcProperties = new Properties();
+ this.jdbcProperties.putAll(requireNonNull(jdbcProperties, "jdbcProperties is null"));
+ }
+
+ @Override
+ public void execute(String sql)
+ {
+ if (sql.startsWith("CREATE TABLE")) {
+ sql = sql + " ENGINE=Log";
+ }
+ try (Connection connection = DriverManager.getConnection(jdbcUrl, jdbcProperties);
+ Statement statement = connection.createStatement()) {
+ statement.execute(sql);
+ connection.commit();
+ }
+ catch (SQLException e) {
+ throw new RuntimeException("Error executing sql:\n" + sql, e);
+ }
+ }
+}
diff --git a/plugin/trino-clickhouse/src/test/java/io/trino/plugin/clickhouse/TestClickHouseDistributedQueries.java b/plugin/trino-clickhouse/src/test/java/io/trino/plugin/clickhouse/TestClickHouseDistributedQueries.java
new file mode 100644
index 000000000000..7d36b5c843c2
--- /dev/null
+++ b/plugin/trino-clickhouse/src/test/java/io/trino/plugin/clickhouse/TestClickHouseDistributedQueries.java
@@ -0,0 +1,125 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.clickhouse;
+
+import com.google.common.collect.ImmutableMap;
+import io.trino.testing.AbstractTestDistributedQueries;
+import io.trino.testing.QueryRunner;
+import io.trino.testing.sql.JdbcSqlExecutor;
+import io.trino.testing.sql.TestTable;
+import io.trino.tpch.TpchTable;
+import org.testng.SkipException;
+import org.testng.annotations.Test;
+
+import static io.trino.plugin.clickhouse.ClickHouseQueryRunner.createClickHouseQueryRunner;
+
+@Test
+public class TestClickHouseDistributedQueries
+ extends AbstractTestDistributedQueries
+{
+ private TestingClickHouseServer clickhouseServer;
+
+ @Override
+ protected QueryRunner createQueryRunner()
+ throws Exception
+ {
+ this.clickhouseServer = closeAfterClass(new TestingClickHouseServer());
+ // caching here speeds up tests considerably; caching is not used in smoke tests
+ return createClickHouseQueryRunner(clickhouseServer, ImmutableMap.of(), ImmutableMap.<String, String>builder()
+ .put("metadata.cache-ttl", "10m")
+ .put("metadata.cache-missing", "true")
+ .put("allow-drop-table", "true")
+ .build(), TpchTable.getTables());
+ }
+
+ @Override
+ protected boolean supportsDelete()
+ {
+ return false;
+ }
+
+ @Override
+ protected boolean supportsArrays()
+ {
+ return false;
+ }
+
+ @Override
+ protected boolean supportsViews()
+ {
+ return false;
+ }
+
+ @Override
+ protected boolean supportsCommentOnTable()
+ {
+ return false;
+ }
+
+ @Override
+ public void testCommentColumn()
+ {
+ // currently not supported
+ throw new SkipException("TODO: test not implemented yet");
+ }
+
+ @Override
+ @Test(dataProvider = "testColumnNameDataProvider")
+ public void testColumnName(String columnName)
+ {
+ throw new SkipException("TODO: test not implemented yet");
+ }
+
+ @Override
+ public void testRenameColumn()
+ {
+ // TODO
+ throw new SkipException("TODO: test not implemented yet");
+ }
+
+ @Override
+ public void testDropColumn()
+ {
+ // TODO
+ throw new SkipException("TODO: test not implemented yet");
+ }
+
+ @Override
+ public void testAddColumn()
+ {
+ // TODO
+ throw new SkipException("TODO: test not implemented yet");
+ }
+
+ @Override
+ protected TestTable createTableWithDefaultColumns()
+ {
+ return new TestTable(
+ new JdbcSqlExecutor(clickhouseServer.getJdbcUrl(), clickhouseServer.getProperties()),
+ "tpch.tbl",
+ "(col_required Int64," +
+ "col_nullable Nullable(Int64)," +
+ "col_default Nullable(Int64) DEFAULT 43," +
+ "col_nonnull_default Int64 DEFAULT 42," +
+ "col_required2 Int64) ENGINE=Log");
+ }
+
+ @Override
+ @Test(dataProvider = "testDataMappingSmokeTestDataProvider")
+ public void testDataMappingSmokeTest(DataMappingTestSetup dataMappingTestSetup)
+ {
+ // currently not supported
+ throw new SkipException("TODO: test not implemented yet");
+ }
+}
diff --git a/plugin/trino-clickhouse/src/test/java/io/trino/plugin/clickhouse/TestClickHousePlugin.java b/plugin/trino-clickhouse/src/test/java/io/trino/plugin/clickhouse/TestClickHousePlugin.java
new file mode 100644
index 000000000000..deed8e669dcd
--- /dev/null
+++ b/plugin/trino-clickhouse/src/test/java/io/trino/plugin/clickhouse/TestClickHousePlugin.java
@@ -0,0 +1,33 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.clickhouse;
+
+import com.google.common.collect.ImmutableMap;
+import io.trino.spi.Plugin;
+import io.trino.spi.connector.ConnectorFactory;
+import io.trino.testing.TestingConnectorContext;
+import org.testng.annotations.Test;
+
+import static com.google.common.collect.Iterables.getOnlyElement;
+
+public class TestClickHousePlugin
+{
+ @Test
+ public void testCreateConnector()
+ {
+ Plugin plugin = new ClickHousePlugin();
+ ConnectorFactory factory = getOnlyElement(plugin.getConnectorFactories());
+ factory.create("test", ImmutableMap.of("connection-url", "jdbc:clickhouse://test"), new TestingConnectorContext());
+ }
+}
diff --git a/plugin/trino-clickhouse/src/test/java/io/trino/plugin/clickhouse/TestClickHouseTypeMapping.java b/plugin/trino-clickhouse/src/test/java/io/trino/plugin/clickhouse/TestClickHouseTypeMapping.java
new file mode 100644
index 000000000000..68a68d8b49b8
--- /dev/null
+++ b/plugin/trino-clickhouse/src/test/java/io/trino/plugin/clickhouse/TestClickHouseTypeMapping.java
@@ -0,0 +1,254 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.clickhouse;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import io.trino.Session;
+import io.trino.spi.type.TimeZoneKey;
+import io.trino.testing.AbstractTestQueryFramework;
+import io.trino.testing.QueryRunner;
+import io.trino.testing.datatype.CreateAndInsertDataSetup;
+import io.trino.testing.datatype.CreateAsSelectDataSetup;
+import io.trino.testing.datatype.DataSetup;
+import io.trino.testing.datatype.DataTypeTest;
+import io.trino.testing.datatype.SqlDataTypeTest;
+import io.trino.testing.sql.JdbcSqlExecutor;
+import io.trino.testing.sql.TrinoSqlExecutor;
+import org.testng.annotations.AfterClass;
+import org.testng.annotations.Test;
+
+import java.math.BigDecimal;
+import java.time.LocalDate;
+import java.time.LocalDateTime;
+import java.time.ZoneId;
+
+import static com.google.common.base.Preconditions.checkState;
+import static com.google.common.base.Verify.verify;
+import static io.trino.plugin.clickhouse.ClickHouseQueryRunner.createClickHouseQueryRunner;
+import static io.trino.spi.type.TimeZoneKey.UTC_KEY;
+import static io.trino.spi.type.VarcharType.createUnboundedVarcharType;
+import static io.trino.testing.datatype.DataType.bigintDataType;
+import static io.trino.testing.datatype.DataType.dateDataType;
+import static io.trino.testing.datatype.DataType.decimalDataType;
+import static io.trino.testing.datatype.DataType.doubleDataType;
+import static io.trino.testing.datatype.DataType.integerDataType;
+import static io.trino.testing.datatype.DataType.realDataType;
+import static io.trino.testing.datatype.DataType.smallintDataType;
+import static io.trino.testing.datatype.DataType.tinyintDataType;
+
+public class TestClickHouseTypeMapping
+ extends AbstractTestQueryFramework
+{
+ private TestingClickHouseServer clickhouseServer;
+
+ private static void checkIsGap(ZoneId zone, LocalDateTime dateTime)
+ {
+ verify(isGap(zone, dateTime), "Expected %s to be a gap in %s", dateTime, zone);
+ }
+
+ private static boolean isGap(ZoneId zone, LocalDateTime dateTime)
+ {
+ return zone.getRules().getValidOffsets(dateTime).isEmpty();
+ }
+
+ private static void checkIsDoubled(ZoneId zone, LocalDateTime dateTime)
+ {
+ verify(zone.getRules().getValidOffsets(dateTime).size() == 2, "Expected %s to be doubled in %s", dateTime, zone);
+ }
+
+ @Override
+ protected QueryRunner createQueryRunner()
+ throws Exception
+ {
+ clickhouseServer = new TestingClickHouseServer();
+ return createClickHouseQueryRunner(clickhouseServer, ImmutableMap.of(),
+ ImmutableMap.<String, String>builder()
+ .put("metadata.cache-ttl", "10m")
+ .put("metadata.cache-missing", "true")
+ .put("allow-drop-table", "true")
+ .build(),
+ ImmutableList.of());
+ }
+
+ @AfterClass(alwaysRun = true)
+ public final void destroy()
+ {
+ clickhouseServer.close();
+ }
+
+ @Test
+ public void testBasicTypes()
+ {
+ DataTypeTest.create()
+ .addRoundTrip(bigintDataType(), 123_456_789_012L)
+ .addRoundTrip(integerDataType(), 1_234_567_890)
+ .addRoundTrip(smallintDataType(), (short) 32_456)
+ .addRoundTrip(tinyintDataType(), (byte) 5)
+ .addRoundTrip(doubleDataType(), 123.45d)
+ .addRoundTrip(realDataType(), 123.45f)
+ .execute(getQueryRunner(), trinoCreateAsSelect("test_basic_types"));
+ }
+
+ @Test
+ public void testFloat()
+ {
+ DataTypeTest.create()
+ .addRoundTrip(realDataType(), Float.NaN)
+ .addRoundTrip(realDataType(), Float.NEGATIVE_INFINITY)
+ .addRoundTrip(realDataType(), Float.POSITIVE_INFINITY)
+ .execute(getQueryRunner(), trinoCreateAsSelect("trino__test_real"));
+ }
+
+ @Test
+ public void testDouble()
+ {
+ DataTypeTest.create()
+ .addRoundTrip(doubleDataType(), 3.1415926835)
+ .addRoundTrip(doubleDataType(), 1.79769E+308)
+ .addRoundTrip(doubleDataType(), 2.225E-307)
+ .execute(getQueryRunner(), trinoCreateAsSelect("trino_test_double"));
+ }
+
+ @Test
+ public void testClickHouseCreatedDecimal()
+ {
+ decimalTests()
+ .execute(getQueryRunner(), clickhouseCreateAndInsert("tpch.test_decimal"));
+ }
+
+ @Test
+ public void testTrinoCreatedDecimal()
+ {
+ decimalTests()
+ .execute(getQueryRunner(), trinoCreateAsSelect("test_decimal"));
+ }
+
+ private DataTypeTest decimalTests()
+ {
+ return DataTypeTest.create()
+ .addRoundTrip(decimalDataType(3, 0), new BigDecimal("193"))
+ .addRoundTrip(decimalDataType(3, 0), new BigDecimal("19"))
+ .addRoundTrip(decimalDataType(3, 0), new BigDecimal("-193"))
+ .addRoundTrip(decimalDataType(3, 1), new BigDecimal("10.0"))
+ .addRoundTrip(decimalDataType(3, 1), new BigDecimal("10.1"))
+ .addRoundTrip(decimalDataType(3, 1), new BigDecimal("-10.1"))
+ .addRoundTrip(decimalDataType(4, 2), new BigDecimal("2"))
+ .addRoundTrip(decimalDataType(4, 2), new BigDecimal("2.3"))
+ .addRoundTrip(decimalDataType(24, 2), new BigDecimal("2"))
+ .addRoundTrip(decimalDataType(24, 2), new BigDecimal("2.3"))
+ .addRoundTrip(decimalDataType(24, 2), new BigDecimal("123456789.3"))
+ .addRoundTrip(decimalDataType(24, 4), new BigDecimal("12345678901234567890.31"))
+ .addRoundTrip(decimalDataType(30, 5), new BigDecimal("3141592653589793238462643.38327"))
+ .addRoundTrip(decimalDataType(30, 5), new BigDecimal("-3141592653589793238462643.38327"))
+ .addRoundTrip(decimalDataType(38, 0), new BigDecimal("27182818284590452353602874713526624977"))
+ .addRoundTrip(decimalDataType(38, 0), new BigDecimal("-27182818284590452353602874713526624977"));
+ }
+
+ @Test
+ public void testCharAndVarchar()
+ {
+ SqlDataTypeTest.create()
+ .addRoundTrip("char(10)", "'text_a'", createUnboundedVarcharType(), "CAST('text_a' AS varchar)")
+ .addRoundTrip("char(255)", "'text_b'", createUnboundedVarcharType(), "CAST('text_b' AS varchar)")
+ .addRoundTrip("char(5)", "'攻殻機動隊'", createUnboundedVarcharType(), "CAST('攻殻機動隊' AS varchar)")
+ .addRoundTrip("char(32)", "'攻殻機動隊'", createUnboundedVarcharType(), "CAST('攻殻機動隊' AS varchar)")
+ .addRoundTrip("varchar(30)", "'Piękna łąka w 東京都'", createUnboundedVarcharType(), "cast('Piękna łąka w 東京都' as varchar)")
+ .addRoundTrip("char(1)", "'😂'", createUnboundedVarcharType(), "CAST('😂' AS varchar)")
+ .addRoundTrip("char(77)", "'Ну, погоди!'", createUnboundedVarcharType(), "CAST('Ну, погоди!' AS varchar)")
+ .execute(getQueryRunner(), clickhouseCreateAndInsert("tpch.test_char"))
+ .execute(getQueryRunner(), trinoCreateAsSelect("test_char"));
+ }
+
+ @Test
+ public void testDate()
+ {
+ ZoneId jvmZone = ZoneId.systemDefault();
+ checkState(jvmZone.getId().equals("America/Bahia_Banderas"), "This test assumes certain JVM time zone");
+ LocalDate dateOfLocalTimeChangeForwardAtMidnightInJvmZone = LocalDate.of(1970, 1, 1);
+ checkIsGap(jvmZone, dateOfLocalTimeChangeForwardAtMidnightInJvmZone.atStartOfDay());
+
+ ZoneId someZone = ZoneId.of("Europe/Vilnius");
+ LocalDate dateOfLocalTimeChangeForwardAtMidnightInSomeZone = LocalDate.of(1983, 4, 1);
+ checkIsGap(someZone, dateOfLocalTimeChangeForwardAtMidnightInSomeZone.atStartOfDay());
+ LocalDate dateOfLocalTimeChangeBackwardAtMidnightInSomeZone = LocalDate.of(1983, 10, 1);
+ checkIsDoubled(someZone, dateOfLocalTimeChangeBackwardAtMidnightInSomeZone.atStartOfDay().minusMinutes(1));
+
+ DataTypeTest testCases = DataTypeTest.create(true)
+ .addRoundTrip(dateDataType(), LocalDate.of(1970, 1, 1))
+ .addRoundTrip(dateDataType(), LocalDate.of(1970, 2, 3))
+ .addRoundTrip(dateDataType(), LocalDate.of(2017, 7, 1)) // summer on northern hemisphere (possible DST)
+ .addRoundTrip(dateDataType(), LocalDate.of(2017, 1, 1)) // winter on northern hemisphere (possible DST on southern hemisphere)
+ .addRoundTrip(dateDataType(), dateOfLocalTimeChangeForwardAtMidnightInJvmZone)
+ .addRoundTrip(dateDataType(), dateOfLocalTimeChangeForwardAtMidnightInSomeZone)
+ .addRoundTrip(dateDataType(), dateOfLocalTimeChangeBackwardAtMidnightInSomeZone);
+
+ for (String timeZoneId : ImmutableList.of(UTC_KEY.getId(), jvmZone.getId(), someZone.getId())) {
+ Session session = Session.builder(getSession())
+ .setTimeZoneKey(TimeZoneKey.getTimeZoneKey(timeZoneId))
+ .build();
+ testCases.execute(getQueryRunner(), session, clickhouseCreateAndInsert("tpch.test_date"));
+ testCases.execute(getQueryRunner(), session, trinoCreateAsSelect(session, "test_date"));
+ }
+ }
+
+ @Test
+ public void testEnum()
+ {
+ JdbcSqlExecutor jdbcSqlExecutor = new JdbcSqlExecutor(clickhouseServer.getJdbcUrl(), clickhouseServer.getProperties());
+ jdbcSqlExecutor.execute("CREATE TABLE tpch.t_enum (id Int32, x Enum('hello' = 1, 'world' = 2))ENGINE = Log");
+ jdbcSqlExecutor.execute("INSERT INTO tpch.t_enum VALUES (1, 'hello'), (2, 'world'), (3, 'hello')");
+ try {
+ assertQuery(
+ "SELECT column_name, data_type FROM information_schema.columns WHERE table_schema = 'tpch' AND table_name = 't_enum'",
+ "VALUES ('id','integer'),('x','varchar')");
+ assertQuery("SELECT * FROM tpch.t_enum", "VALUES (1,'hello'),(2,'world'),(3,'hello')");
+ assertQuery("SELECT * FROM tpch.t_enum WHERE x='hello'", "VALUES (1,'hello'),(3,'hello')");
+ }
+ finally {
+ jdbcSqlExecutor.execute("DROP TABLE tpch.t_enum");
+ }
+ }
+
+ @Test
+ public void testClickHouseSpecialDataType()
+ {
+ JdbcSqlExecutor jdbcSqlExecutor = new JdbcSqlExecutor(clickhouseServer.getJdbcUrl(), clickhouseServer.getProperties());
+ jdbcSqlExecutor.execute("CREATE TABLE tpch.tbl_ck_type (c_fs FixedString(10), c_uuid UUID, c_ipv4 IPv4, c_ipv6 IPv6) ENGINE = Log");
+ jdbcSqlExecutor.execute("INSERT INTO tpch.tbl_ck_type VALUES ('c12345678b', '417ddc5d-e556-4d27-95dd-a34d84e46a50', '116.253.40.133', '2001:44c8:129:2632:33:0:252:2')");
+ try {
+ assertQuery("SELECT column_name, data_type FROM information_schema.columns WHERE table_schema = 'tpch' AND table_name = 'tbl_ck_type'",
+ "VALUES ('c_fs', 'varchar'), ('c_uuid', 'varchar'), ('c_ipv4', 'varchar'), ('c_ipv6', 'varchar')");
+ assertQuery("SELECT * FROM tpch.tbl_ck_type", "VALUES ('c12345678b', '417ddc5d-e556-4d27-95dd-a34d84e46a50', '116.253.40.133', '2001:44c8:129:2632:33:0:252:2')");
+ }
+ finally {
+ jdbcSqlExecutor.execute("DROP TABLE tpch.tbl_ck_type");
+ }
+ }
+
+ private DataSetup trinoCreateAsSelect(String tableNamePrefix)
+ {
+ return trinoCreateAsSelect(getSession(), tableNamePrefix);
+ }
+
+ private DataSetup trinoCreateAsSelect(Session session, String tableNamePrefix)
+ {
+ return new CreateAsSelectDataSetup(new TrinoSqlExecutor(getQueryRunner(), session), tableNamePrefix);
+ }
+
+ private DataSetup clickhouseCreateAndInsert(String tableNamePrefix)
+ {
+ return new CreateAndInsertDataSetup(new ClickHouseSqlExecutor(clickhouseServer.getJdbcUrl()), tableNamePrefix);
+ }
+}
diff --git a/plugin/trino-clickhouse/src/test/java/io/trino/plugin/clickhouse/TestingClickHouseServer.java b/plugin/trino-clickhouse/src/test/java/io/trino/plugin/clickhouse/TestingClickHouseServer.java
new file mode 100644
index 000000000000..ec2f39fc158a
--- /dev/null
+++ b/plugin/trino-clickhouse/src/test/java/io/trino/plugin/clickhouse/TestingClickHouseServer.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.clickhouse;
+
+import org.testcontainers.containers.ClickHouseContainer;
+
+import java.io.Closeable;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.Statement;
+import java.util.Properties;
+
+import static java.lang.String.format;
+import static org.testcontainers.containers.ClickHouseContainer.HTTP_PORT;
+
+public class TestingClickHouseServer
+ implements Closeable
+{
+ private static final String CLICKHOUSE_IMAGE = "yandex/clickhouse-server:20.8";
+ private final ClickHouseContainer dockerContainer;
+
+ public TestingClickHouseServer()
+ {
+ // Use the second-latest stable version
+ dockerContainer = (ClickHouseContainer) new ClickHouseContainer(CLICKHOUSE_IMAGE)
+ .withStartupAttempts(10);
+
+ dockerContainer.start();
+ }
+
+ public void execute(String sql)
+ {
+ try (Connection connection = DriverManager.getConnection(getJdbcUrl());
+ Statement statement = connection.createStatement()) {
+ statement.execute(sql);
+ }
+ catch (Exception e) {
+ throw new RuntimeException("Failed to execute statement: " + sql, e);
+ }
+ }
+
+ public String getJdbcUrl()
+ {
+ return format("jdbc:clickhouse://%s:%s/", dockerContainer.getContainerIpAddress(),
+ dockerContainer.getMappedPort(HTTP_PORT));
+ }
+
+ @Override
+ public void close()
+ {
+ dockerContainer.stop();
+ }
+
+ public Properties getProperties()
+ {
+ return new Properties();
+ }
+}
diff --git a/pom.xml b/pom.xml
index 04907a9a4774..a0ce239aa876 100644
--- a/pom.xml
+++ b/pom.xml
@@ -107,6 +107,7 @@
plugin/trino-bigquery
plugin/trino-blackhole
plugin/trino-cassandra
+ plugin/trino-clickhouse
plugin/trino-druid
plugin/trino-elasticsearch
plugin/trino-example-http