Various style fixes and cleanup (trinodb#15) (trinodb#17)
Co-authored-by: Martin Traverso <mtraverso@gmail.com>
yuuteng and martint authored Jan 10, 2024
1 parent 36920e0 commit fe244d3
Showing 3 changed files with 129 additions and 26 deletions.
20 changes: 20 additions & 0 deletions .github/workflows/ci.yml
@@ -614,6 +614,7 @@ jobs:
- { modules: plugin/trino-resource-group-managers }
- { modules: plugin/trino-singlestore }
- { modules: plugin/trino-snowflake }
- { modules: plugin/trino-snowflake, profile: cloud-tests }
- { modules: plugin/trino-sqlserver }
- { modules: testing/trino-faulttolerant-tests, profile: default }
- { modules: testing/trino-faulttolerant-tests, profile: test-fault-tolerant-delta }
@@ -781,6 +782,24 @@ jobs:
if: matrix.modules == 'plugin/trino-bigquery' && !contains(matrix.profile, 'cloud-tests-arrow-and-fte') && (env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || env.BIGQUERY_CASE_INSENSITIVE_CREDENTIALS_KEY != '')
run: |
$MAVEN test ${MAVEN_TEST} -pl :trino-bigquery -Pcloud-tests-case-insensitive-mapping -Dbigquery.credentials-key="${BIGQUERY_CASE_INSENSITIVE_CREDENTIALS_KEY}"
- name: Cloud Snowflake Tests
env:
SNOWFLAKE_TEST_SERVER_URL: ${{ secrets.SNOWFLAKE_TEST_SERVER_URL }}
SNOWFLAKE_TEST_SERVER_USER: ${{ secrets.SNOWFLAKE_TEST_SERVER_USER }}
SNOWFLAKE_TEST_SERVER_PASSWORD: ${{ secrets.SNOWFLAKE_TEST_SERVER_PASSWORD }}
SNOWFLAKE_TEST_SERVER_DATABASE: ${{ secrets.SNOWFLAKE_TEST_SERVER_DATABASE }}
SNOWFLAKE_TEST_SERVER_ROLE: ${{ secrets.SNOWFLAKE_TEST_SERVER_ROLE }}
SNOWFLAKE_TEST_SERVER_WAREHOUSE: ${{ secrets.SNOWFLAKE_TEST_SERVER_WAREHOUSE }}
        if: matrix.modules == 'plugin/trino-snowflake' && contains(matrix.profile, 'cloud-tests') && (env.SNOWFLAKE_TEST_SERVER_URL != '' && env.SNOWFLAKE_TEST_SERVER_USER != '' && env.SNOWFLAKE_TEST_SERVER_PASSWORD != '')
run: |
$MAVEN test ${MAVEN_TEST} -pl :trino-snowflake -Pcloud-tests \
-Dconnector.name="snowflake" \
-Dsnowflake.test.server.url="${SNOWFLAKE_TEST_SERVER_URL}" \
-Dsnowflake.test.server.user="${SNOWFLAKE_TEST_SERVER_USER}" \
-Dsnowflake.test.server.password="${SNOWFLAKE_TEST_SERVER_PASSWORD}" \
-Dsnowflake.test.server.database="${SNOWFLAKE_TEST_SERVER_DATABASE}" \
-Dsnowflake.test.server.role="${SNOWFLAKE_TEST_SERVER_ROLE}" \
-Dsnowflake.test.server.warehouse="${SNOWFLAKE_TEST_SERVER_WAREHOUSE}"
- name: Iceberg Cloud Tests
env:
AWS_ACCESS_KEY_ID: ${{ secrets.TRINO_AWS_ACCESS_KEY_ID }}
@@ -976,6 +995,7 @@ jobs:
- suite-clickhouse
- suite-mysql
- suite-iceberg
- suite-snowflake
- suite-hudi
- suite-ignite
exclude:
28 changes: 9 additions & 19 deletions plugin/trino-snowflake/pom.xml
@@ -15,6 +15,7 @@

<properties>
<air.main.basedir>${project.parent.basedir}</air.main.basedir>
<air.test.jvm.additional-arguments>--add-opens=java.base/java.nio=ALL-UNNAMED</air.test.jvm.additional-arguments>
</properties>

<dependencies>
@@ -185,18 +186,6 @@
</dependency>
</dependencies>

<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<argLine>--add-opens=java.base/java.nio=ALL-UNNAMED</argLine>
</configuration>
</plugin>
</plugins>
</build>

<profiles>
<profile>
<id>default</id>
@@ -210,8 +199,6 @@
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<excludes>
<exclude>**/TestSnowflakeClient.java</exclude>
<exclude>**/TestSnowflakeConfig.java</exclude>
<exclude>**/TestSnowflakeConnectorTest.java</exclude>
<exclude>**/TestSnowflakePlugin.java</exclude>
<exclude>**/TestSnowflakeTypeMapping.java</exclude>
@@ -225,18 +212,21 @@
<profile>
<!-- Tests which require third party cloud services are separated from the main test profile -->
<id>cloud-tests</id>
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<includes>
<exclude>**/TestSnowflakeClient.java</exclude>
<exclude>**/TestSnowflakeConfig.java</exclude>
<exclude>**/TestSnowflakeConnectorTest.java</exclude>
<exclude>**/TestSnowflakePlugin.java</exclude>
<exclude>**/TestSnowflakeTypeMapping.java</exclude>
<include>**/TestSnowflakeClient.java</include>
<include>**/TestSnowflakeConfig.java</include>
<include>**/TestSnowflakeConnectorTest.java</include>
<include>**/TestSnowflakePlugin.java</include>
<include>**/TestSnowflakeTypeMapping.java</include>
</includes>
</configuration>
</plugin>
107 changes: 100 additions & 7 deletions plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeClient.java
@@ -244,17 +244,62 @@ public void abortReadConnection(Connection connection, ResultSet resultSet)
@Override
public Optional<ColumnMapping> toColumnMapping(ConnectorSession session, Connection connection, JdbcTypeHandle typeHandle)
{
Optional<ColumnMapping> mapping = getForcedMappingToVarchar(typeHandle);
if (mapping.isPresent()) {
return mapping;
}

String jdbcTypeName = typeHandle.getJdbcTypeName()
.orElseThrow(() -> new TrinoException(JDBC_ERROR, "Type name is missing: " + typeHandle));
jdbcTypeName = jdbcTypeName.toLowerCase(Locale.ENGLISH);
int type = typeHandle.getJdbcType();

ColumnMapping columnMap = STANDARD_COLUMN_MAPPINGS.get(type);
// Mappings for JDBC column types to internal Trino types
final Map<Integer, ColumnMapping> standardColumnMappings = ImmutableMap.<Integer, ColumnMapping>builder()
.put(Types.BOOLEAN, StandardColumnMappings.booleanColumnMapping())
.put(Types.TINYINT, StandardColumnMappings.tinyintColumnMapping())
.put(Types.SMALLINT, StandardColumnMappings.smallintColumnMapping())
.put(Types.INTEGER, StandardColumnMappings.integerColumnMapping())
.put(Types.BIGINT, StandardColumnMappings.bigintColumnMapping())
.put(Types.REAL, StandardColumnMappings.realColumnMapping())
.put(Types.DOUBLE, StandardColumnMappings.doubleColumnMapping())
.put(Types.FLOAT, StandardColumnMappings.doubleColumnMapping())
.put(Types.BINARY, StandardColumnMappings.varbinaryColumnMapping())
.put(Types.VARBINARY, StandardColumnMappings.varbinaryColumnMapping())
.put(Types.LONGVARBINARY, StandardColumnMappings.varbinaryColumnMapping())
.buildOrThrow();

ColumnMapping columnMap = standardColumnMappings.get(type);
if (columnMap != null) {
return Optional.of(columnMap);
}

ColumnMappingFunction columnMappingFunction = SHOWFLAKE_COLUMN_MAPPINGS.get(jdbcTypeName);
final Map<String, ColumnMappingFunction> snowflakeColumnMappings = ImmutableMap.<String, ColumnMappingFunction>builder()
.put("time", handle -> {
return Optional.of(timeColumnMapping(handle));
})
.put("date", handle -> {
return Optional.of(ColumnMapping.longMapping(
DateType.DATE, (resultSet, columnIndex) ->
LocalDate.ofEpochDay(resultSet.getLong(columnIndex)).toEpochDay(),
snowFlakeDateWriter()));
})
.put("varchar", handle -> {
return Optional.of(varcharColumnMapping(handle.getRequiredColumnSize()));
})
.put("number", handle -> {
int decimalDigits = handle.getRequiredDecimalDigits();
int precision = handle.getRequiredColumnSize() + Math.max(-decimalDigits, 0);
if (precision > 38) {
return Optional.empty();
}
return Optional.of(columnMappingPushdown(
StandardColumnMappings.decimalColumnMapping(DecimalType.createDecimalType(
precision, Math.max(decimalDigits, 0)), RoundingMode.UNNECESSARY)));
})
.buildOrThrow();

ColumnMappingFunction columnMappingFunction = snowflakeColumnMappings.get(jdbcTypeName);
if (columnMappingFunction != null) {
return columnMappingFunction.convert(typeHandle);
}
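
The "number" branch above folds a negative Snowflake scale into the Trino precision, since Snowflake permits number(p, s) with s < 0 while Trino decimals require a non-negative scale and a precision of at most 38. A minimal sketch of that arithmetic, using a hypothetical helper name and assuming only the Trino SPI's DecimalType:

    import io.trino.spi.type.DecimalType;
    import java.util.Optional;

    // Hypothetical sketch, not part of this commit; mirrors the "number" branch above.
    final class SnowflakeNumberMappingSketch
    {
        private SnowflakeNumberMappingSketch() {}

        static Optional<DecimalType> toDecimalType(int columnSize, int decimalDigits)
        {
            // A negative scale widens the integer part, so it is folded into the precision.
            int precision = columnSize + Math.max(-decimalDigits, 0);
            if (precision > 38) {
                // e.g. number(38, -2) would need precision 40, beyond Trino's decimal limit
                return Optional.empty();
            }
            // The scale is clamped at zero: number(20, -5) maps to decimal(25, 0).
            return Optional.of(DecimalType.createDecimalType(precision, Math.max(decimalDigits, 0)));
        }
    }

Under these rules, number(10, 2) maps to decimal(10, 2), number(20, -5) to decimal(25, 0), and number(38, -2) is rejected and falls through to the unsupported-type path.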
@@ -269,18 +314,67 @@ public WriteMapping toWriteMapping(ConnectorSession session, Type type)
Class<?> myClass = type.getClass();
String simple = myClass.getSimpleName();

WriteMapping writeMapping = STANDARD_WRITE_MAPPINGS.get(simple);
// Mappings for internal Trino types to JDBC column types
final Map<String, WriteMapping> standardWriteMappings = ImmutableMap.<String, WriteMapping>builder()
.put("BooleanType", WriteMapping.booleanMapping("boolean", StandardColumnMappings.booleanWriteFunction()))
.put("BigintType", WriteMapping.longMapping("number(19)", StandardColumnMappings.bigintWriteFunction()))
.put("IntegerType", WriteMapping.longMapping("number(10)", StandardColumnMappings.integerWriteFunction()))
.put("SmallintType", WriteMapping.longMapping("number(5)", StandardColumnMappings.smallintWriteFunction()))
.put("TinyintType", WriteMapping.longMapping("number(3)", StandardColumnMappings.tinyintWriteFunction()))
.put("DoubleType", WriteMapping.doubleMapping("double precision", StandardColumnMappings.doubleWriteFunction()))
.put("RealType", WriteMapping.longMapping("real", StandardColumnMappings.realWriteFunction()))
.put("VarbinaryType", WriteMapping.sliceMapping("varbinary", StandardColumnMappings.varbinaryWriteFunction()))
.put("DateType", WriteMapping.longMapping("date", snowFlakeDateWriter()))
.buildOrThrow();

WriteMapping writeMapping = standardWriteMappings.get(simple);
if (writeMapping != null) {
return writeMapping;
}

WriteMappingFunction writeMappingFunction = SNOWFLAKE_WRITE_MAPPINGS.get(simple);
final Map<String, WriteMappingFunction> snowflakeWriteMappings = ImmutableMap.<String, WriteMappingFunction>builder()
.put("TimeType", writeType -> {
return WriteMapping.longMapping("time", SnowflakeClient.snowFlaketimeWriter(writeType));
})
.put("ShortTimestampType", writeType -> {
WriteMapping myMap = SnowflakeClient.snowFlakeTimestampWriter(writeType);
return myMap;
})
.put("ShortTimestampWithTimeZoneType", writeType -> {
WriteMapping myMap = SnowflakeClient.snowFlakeTimestampWithTZWriter(writeType);
return myMap;
})
.put("LongTimestampType", writeType -> {
WriteMapping myMap = SnowflakeClient.snowFlakeTimestampWithTZWriter(writeType);
return myMap;
})
.put("LongTimestampWithTimeZoneType", writeType -> {
WriteMapping myMap = SnowflakeClient.snowFlakeTimestampWithTZWriter(writeType);
return myMap;
})
.put("VarcharType", writeType -> {
WriteMapping myMap = SnowflakeClient.snowFlakeVarCharWriter(writeType);
return myMap;
})
.put("CharType", writeType -> {
WriteMapping myMap = SnowflakeClient.snowFlakeCharWriter(writeType);
return myMap;
})
.put("LongDecimalType", writeType -> {
WriteMapping myMap = SnowflakeClient.snowFlakeDecimalWriter(writeType);
return myMap;
})
.put("ShortDecimalType", writeType -> {
WriteMapping myMap = SnowflakeClient.snowFlakeDecimalWriter(writeType);
return myMap;
})
.buildOrThrow();

WriteMappingFunction writeMappingFunction = snowflakeWriteMappings.get(simple);
if (writeMappingFunction != null) {
return writeMappingFunction.convert(type);
}

log.debug("SnowflakeClient.toWriteMapping: SNOWFLAKE_CONNECTOR_COLUMN_TYPE_NOT_SUPPORTED: Unsupported column type: " + type.getDisplayName() + ", simple:" + simple);

throw new TrinoException(NOT_SUPPORTED, "SNOWFLAKE_CONNECTOR_COLUMN_TYPE_NOT_SUPPORTED: Unsupported column type: " + type.getDisplayName() + ", simple:" + simple);
}
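
Both lookup tables in toWriteMapping are keyed on the runtime class's simple name, so dispatch happens on strings such as "BigintType" or "VarcharType". A small illustration of where those keys come from, assuming only the Trino SPI on the classpath (the class name here is hypothetical):

    import io.trino.spi.type.BigintType;
    import io.trino.spi.type.VarcharType;

    // Hypothetical illustration, not part of this commit.
    public class SimpleNameKeyExample
    {
        public static void main(String[] args)
        {
            // toWriteMapping above looks these strings up in its mapping tables.
            System.out.println(BigintType.BIGINT.getClass().getSimpleName()); // BigintType
            System.out.println(VarcharType.createVarcharType(10).getClass().getSimpleName()); // VarcharType
        }
    }

Note that bounded and unbounded varchars share the VarcharType class, so both resolve to the same "VarcharType" entry.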

@@ -322,7 +416,6 @@ private static SliceReadFunction variantReadFunction()
private static ColumnMapping columnMappingPushdown(ColumnMapping mapping)
{
if (mapping.getPredicatePushdownController() == PredicatePushdownController.DISABLE_PUSHDOWN) {
log.debug("SnowflakeClient.columnMappingPushdown: NOT_SUPPORTED mapping.getPredicatePushdownController() is DISABLE_PUSHDOWN. Type was " + mapping.getType());
throw new TrinoException(NOT_SUPPORTED, "mapping.getPredicatePushdownController() is DISABLE_PUSHDOWN. Type was " + mapping.getType());
}

