Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Introduce typeMetadata to hive-metastore's Column #17055

Merged
merged 1 commit into from
Dec 13, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -102,7 +102,7 @@ protected QueryRunner createQueryRunner()
HdfsEnvironment hdfsEnvironment = new HdfsEnvironment(hdfsConfiguration, metastoreClientConfig, new NoHdfsAuthentication());

FileHiveMetastore metastore = new FileHiveMetastore(hdfsEnvironment, baseDir.toURI().toString(), "test");
metastore.createDatabase(new MetastoreContext("test_user", "test_queryId", Optional.empty(), Optional.empty(), Optional.empty()), Database.builder()
metastore.createDatabase(new MetastoreContext("test_user", "test_queryId", Optional.empty(), Optional.empty(), Optional.empty(), false), Database.builder()
.setDatabaseName("default")
.setOwnerName("public")
.setOwnerType(PrincipalType.ROLE)
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.hive;

import com.facebook.presto.common.type.TypeSignature;
import com.facebook.presto.hive.metastore.Column;
import org.apache.hadoop.hive.metastore.api.FieldSchema;

import java.util.Optional;

public interface ColumnConverter
{
    /**
     * Converts the provided {@link FieldSchema} to a {@link Column}.
     *
     * @param fieldSchema the Hive metastore field schema to convert
     * @return the equivalent {@code Column}
     */
    Column toColumn(FieldSchema fieldSchema);

    /**
     * Converts the provided {@link Column} to a {@link FieldSchema}.
     *
     * @param column the column to convert
     * @return the equivalent {@code FieldSchema}
     */
    FieldSchema fromColumn(Column column);

    /**
     * Generates a new {@link TypeSignature} using {@code hiveType} and the extra
     * {@code typeMetadata}, when present. Implementations may ignore the metadata.
     *
     * @param hiveType the Hive type to derive the signature from
     * @param typeMetadata optional extra metadata describing the type; may be empty
     * @return the resulting type signature
     */
    TypeSignature getTypeSignature(HiveType hiveType, Optional<String> typeMetadata);
}
Original file line number Diff line number Diff line change
Expand Up @@ -1008,7 +1008,7 @@ public String toString()

private <T> KeyAndContext<T> getCachingKey(MetastoreContext context, T key)
{
MetastoreContext metastoreContext = metastoreImpersonationEnabled ? new MetastoreContext(context.getUsername(), context.getQueryId(), context.getClientInfo(), context.getSource(), true, context.getMetastoreHeaders()) : context;
MetastoreContext metastoreContext = metastoreImpersonationEnabled ? new MetastoreContext(context.getUsername(), context.getQueryId(), context.getClientInfo(), context.getSource(), true, context.getMetastoreHeaders(), context.isUserDefinedTypeEncodingEnabled()) : context;
return new KeyAndContext<>(metastoreContext, key);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,16 +34,19 @@ public class Column
private final String name;
private final HiveType type;
private final Optional<String> comment;
private final Optional<String> typeMetadata;

@JsonCreator
public Column(
@JsonProperty("name") String name,
@JsonProperty("type") HiveType type,
@JsonProperty("comment") Optional<String> comment)
@JsonProperty("comment") Optional<String> comment,
@JsonProperty("typeMetadata") Optional<String> typeMetadata)
{
this.name = requireNonNull(name, "name is null");
this.type = requireNonNull(type, "type is null");
this.comment = requireNonNull(comment, "comment is null");
this.typeMetadata = requireNonNull(typeMetadata, "typeMetadata is null");
}

@JsonProperty
Expand All @@ -64,6 +67,12 @@ public Optional<String> getComment()
return comment;
}

@JsonProperty
public Optional<String> getTypeMetadata()
{
    // Extra metadata attached to the column's type; empty when none was supplied.
    // Serialized/deserialized as the JSON "typeMetadata" property.
    return typeMetadata;
}

@Override
public String toString()
{
Expand Down Expand Up @@ -103,4 +112,9 @@ public int getEstimatedSizeInBytes()
result += comment.map(String::length).orElse(0);
return result;
}

/**
 * Creates a {@link Column} with no type metadata attached — a convenience factory
 * for partition columns, which carry only a name, type, and optional comment.
 */
public static Column partitionColumn(String name, HiveType type, Optional<String> comment)
{
    Optional<String> noTypeMetadata = Optional.empty();
    return new Column(name, type, comment, noTypeMetadata);
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.hive.metastore;

import com.facebook.presto.common.type.TypeSignature;
import com.facebook.presto.hive.ColumnConverter;
import com.facebook.presto.hive.HiveType;
import org.apache.hadoop.hive.metastore.api.FieldSchema;

import javax.inject.Inject;

import java.util.Optional;

import static com.google.common.base.Strings.emptyToNull;

/**
 * Default {@link ColumnConverter} for plain Hive columns: converts between the
 * metastore's {@link Column} representation and Hive's {@link FieldSchema},
 * never attaching any extra type metadata.
 */
public class HiveColumnConverter
        implements ColumnConverter
{
    @Inject
    public HiveColumnConverter() {}

    /**
     * Builds a {@link Column} from the given field schema. An empty-string comment
     * is normalized to an absent comment, and no type metadata is attached.
     */
    @Override
    public Column toColumn(FieldSchema fieldSchema)
    {
        HiveType hiveType = HiveType.valueOf(fieldSchema.getType());
        Optional<String> comment = Optional.ofNullable(emptyToNull(fieldSchema.getComment()));
        return new Column(fieldSchema.getName(), hiveType, comment, Optional.empty());
    }

    /**
     * Builds a {@link FieldSchema} from the given column; an absent comment maps to {@code null}.
     */
    @Override
    public FieldSchema fromColumn(Column column)
    {
        String hiveTypeName = column.getType().getHiveTypeName().toString();
        String comment = column.getComment().orElse(null);
        return new FieldSchema(column.getName(), hiveTypeName, comment);
    }

    /**
     * Returns the signature derived solely from {@code hiveType}; this converter
     * ignores the extra {@code typeMetadata}.
     */
    @Override
    public TypeSignature getTypeSignature(HiveType hiveType, Optional<String> typeMetadata)
    {
        return hiveType.getTypeSignature();
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -29,25 +29,27 @@ public class MetastoreContext
private final Optional<String> source;
private final boolean impersonationEnabled;
private final Optional<String> metastoreHeaders;
private final boolean userDefinedTypeEncodingEnabled;

public MetastoreContext(ConnectorIdentity identity, String queryId, Optional<String> clientInfo, Optional<String> source, Optional<String> metastoreHeaders)
public MetastoreContext(ConnectorIdentity identity, String queryId, Optional<String> clientInfo, Optional<String> source, Optional<String> metastoreHeaders, boolean userDefinedTypeEncodingEnabled)
{
this(requireNonNull(identity, "identity is null").getUser(), queryId, clientInfo, source, metastoreHeaders);
this(requireNonNull(identity, "identity is null").getUser(), queryId, clientInfo, source, metastoreHeaders, userDefinedTypeEncodingEnabled);
}

public MetastoreContext(String username, String queryId, Optional<String> clientInfo, Optional<String> source, Optional<String> metastoreHeaders)
public MetastoreContext(String username, String queryId, Optional<String> clientInfo, Optional<String> source, Optional<String> metastoreHeaders, boolean userDefinedTypeEncodingEnabled)
{
this(username, queryId, clientInfo, source, false, metastoreHeaders);
this(username, queryId, clientInfo, source, false, metastoreHeaders, userDefinedTypeEncodingEnabled);
}

public MetastoreContext(String username, String queryId, Optional<String> clientInfo, Optional<String> source, boolean impersonationEnabled, Optional<String> metastoreHeaders)
public MetastoreContext(String username, String queryId, Optional<String> clientInfo, Optional<String> source, boolean impersonationEnabled, Optional<String> metastoreHeaders, boolean userDefinedTypeEncodingEnabled)
{
this.username = requireNonNull(username, "username is null");
this.queryId = requireNonNull(queryId, "queryId is null");
this.clientInfo = requireNonNull(clientInfo, "clientInfo is null");
this.source = requireNonNull(source, "source is null");
this.impersonationEnabled = impersonationEnabled;
this.metastoreHeaders = requireNonNull(metastoreHeaders, "metastoreHeaders is null");
this.userDefinedTypeEncodingEnabled = userDefinedTypeEncodingEnabled;
}

public String getUsername()
Expand Down Expand Up @@ -75,6 +77,11 @@ public boolean isImpersonationEnabled()
return impersonationEnabled;
}

public boolean isUserDefinedTypeEncodingEnabled()
{
    // Whether user-defined type encoding was requested for this metastore context.
    // NOTE(review): callers appear to populate this from a session property — confirm.
    return userDefinedTypeEncodingEnabled;
}

public Optional<String> getMetastoreHeaders()
{
return metastoreHeaders;
Expand All @@ -89,6 +96,7 @@ public String toString()
.add("clientInfo", clientInfo.orElse(""))
.add("source", source.orElse(""))
.add("impersonationEnabled", Boolean.toString(impersonationEnabled))
.add("userDefinedTypeEncodingEnabled", Boolean.toString(userDefinedTypeEncodingEnabled))
.toString();
}

Expand All @@ -107,12 +115,13 @@ public boolean equals(Object o)
Objects.equals(queryId, other.queryId) &&
Objects.equals(clientInfo, other.clientInfo) &&
Objects.equals(source, other.source) &&
impersonationEnabled == other.impersonationEnabled;
impersonationEnabled == other.impersonationEnabled &&
userDefinedTypeEncodingEnabled == other.userDefinedTypeEncodingEnabled;
}

@Override
public int hashCode()
{
return Objects.hash(username, queryId, clientInfo, source, impersonationEnabled);
return Objects.hash(username, queryId, clientInfo, source, impersonationEnabled, userDefinedTypeEncodingEnabled);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -139,6 +139,8 @@ public class MetastoreUtil
public static final String PRESTO_MATERIALIZED_VIEW_FLAG = "presto_materialized_view";
public static final String PRESTO_QUERY_ID_NAME = "presto_query_id";
public static final String HIVE_DEFAULT_DYNAMIC_PARTITION = "__HIVE_DEFAULT_PARTITION__";
public static final String USER_DEFINED_TYPE_ENCODING_ENABLED = "user_defined_type_encoding";

@SuppressWarnings("OctalInteger")
public static final FsPermission ALL_PERMISSIONS = new FsPermission((short) 0777);

Expand Down Expand Up @@ -914,4 +916,14 @@ public static Optional<String> getMetastoreHeaders(ConnectorSession session)
return Optional.empty();
}
}

/**
 * Reads the {@code user_defined_type_encoding} session property.
 *
 * @param session the connector session to read the property from
 * @return the property value, or {@code false} if the property is undefined,
 *         unreadable, or not set for this session (best-effort lookup)
 */
public static boolean isUserDefinedTypeEncodingEnabled(ConnectorSession session)
{
    try {
        return session.getProperty(USER_DEFINED_TYPE_ENCODING_ENABLED, Boolean.class);
    }
    catch (RuntimeException ignored) {
        // getProperty declares no checked exceptions, so only unchecked failures are
        // possible (e.g. the property is not registered). Deliberately swallow and
        // fall back to the feature being disabled rather than failing the query.
        return false;
    }
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,7 @@
import static com.facebook.presto.hive.metastore.MetastoreUtil.getFileSystem;
import static com.facebook.presto.hive.metastore.MetastoreUtil.getMetastoreHeaders;
import static com.facebook.presto.hive.metastore.MetastoreUtil.isPrestoView;
import static com.facebook.presto.hive.metastore.MetastoreUtil.isUserDefinedTypeEncodingEnabled;
import static com.facebook.presto.hive.metastore.MetastoreUtil.pathExists;
import static com.facebook.presto.hive.metastore.MetastoreUtil.renameFile;
import static com.facebook.presto.hive.metastore.MetastoreUtil.toPartitionValues;
Expand Down Expand Up @@ -459,7 +460,7 @@ public synchronized void finishInsertIntoExistingTable(
setShared();
SchemaTableName schemaTableName = new SchemaTableName(databaseName, tableName);
Action<TableAndMore> oldTableAction = tableActions.get(schemaTableName);
MetastoreContext metastoreContext = new MetastoreContext(session.getIdentity(), session.getQueryId(), session.getClientInfo(), session.getSource(), getMetastoreHeaders(session));
MetastoreContext metastoreContext = new MetastoreContext(session.getIdentity(), session.getQueryId(), session.getClientInfo(), session.getSource(), getMetastoreHeaders(session), isUserDefinedTypeEncodingEnabled(session));
if (oldTableAction == null || oldTableAction.getData().getTable().getTableType().equals(TEMPORARY_TABLE)) {
Table table = getTable(metastoreContext, databaseName, tableName)
.orElseThrow(() -> new TableNotFoundException(schemaTableName));
Expand Down Expand Up @@ -497,7 +498,16 @@ public synchronized void finishInsertIntoExistingTable(
public synchronized void truncateUnpartitionedTable(ConnectorSession session, String databaseName, String tableName)
{
checkReadable();
Optional<Table> table = getTable(new MetastoreContext(session.getIdentity(), session.getQueryId(), session.getClientInfo(), session.getSource(), getMetastoreHeaders(session)), databaseName, tableName);
Optional<Table> table = getTable(
new MetastoreContext(
session.getIdentity(),
session.getQueryId(),
session.getClientInfo(),
session.getSource(),
getMetastoreHeaders(session),
isUserDefinedTypeEncodingEnabled(session)),
databaseName,
tableName);
SchemaTableName schemaTableName = new SchemaTableName(databaseName, tableName);
if (!table.isPresent()) {
throw new TableNotFoundException(schemaTableName);
Expand Down Expand Up @@ -784,7 +794,7 @@ public synchronized void finishInsertIntoExistingPartition(
SchemaTableName schemaTableName = new SchemaTableName(databaseName, tableName);
Map<List<String>, Action<PartitionAndMore>> partitionActionsOfTable = partitionActions.computeIfAbsent(schemaTableName, k -> new HashMap<>());
Action<PartitionAndMore> oldPartitionAction = partitionActionsOfTable.get(partitionValues);
MetastoreContext metastoreContext = new MetastoreContext(session.getIdentity(), session.getQueryId(), session.getClientInfo(), session.getSource(), getMetastoreHeaders(session));
MetastoreContext metastoreContext = new MetastoreContext(session.getIdentity(), session.getQueryId(), session.getClientInfo(), session.getSource(), getMetastoreHeaders(session), isUserDefinedTypeEncodingEnabled(session));

if (oldPartitionAction == null) {
Partition partition = delegate.getPartition(metastoreContext, databaseName, tableName, partitionValues)
Expand Down Expand Up @@ -985,7 +995,13 @@ private void commitShared()
SchemaTableName schemaTableName = entry.getKey();
Action<TableAndMore> action = entry.getValue();
HdfsContext hdfsContext = action.getContext();
MetastoreContext metastoreContext = new MetastoreContext(hdfsContext.getIdentity(), hdfsContext.getQueryId().orElse(""), hdfsContext.getClientInfo(), hdfsContext.getSource(), hdfsContext.getSession().flatMap(MetastoreUtil::getMetastoreHeaders));
MetastoreContext metastoreContext = new MetastoreContext(
hdfsContext.getIdentity(),
hdfsContext.getQueryId().orElse(""),
hdfsContext.getClientInfo(),
hdfsContext.getSource(),
hdfsContext.getSession().flatMap(MetastoreUtil::getMetastoreHeaders),
hdfsContext.getSession().map(MetastoreUtil::isUserDefinedTypeEncodingEnabled).orElse(false));
switch (action.getType()) {
case DROP:
committer.prepareDropTable(metastoreContext, schemaTableName);
Expand All @@ -1009,7 +1025,13 @@ private void commitShared()
List<String> partitionValues = partitionEntry.getKey();
Action<PartitionAndMore> action = partitionEntry.getValue();
HdfsContext hdfsContext = action.getContext();
MetastoreContext metastoreContext = new MetastoreContext(hdfsContext.getIdentity(), hdfsContext.getQueryId().orElse(""), hdfsContext.getClientInfo(), hdfsContext.getSource(), hdfsContext.getSession().flatMap(MetastoreUtil::getMetastoreHeaders));
MetastoreContext metastoreContext = new MetastoreContext(
hdfsContext.getIdentity(),
hdfsContext.getQueryId().orElse(""),
hdfsContext.getClientInfo(),
hdfsContext.getSource(),
hdfsContext.getSession().flatMap(MetastoreUtil::getMetastoreHeaders),
hdfsContext.getSession().map(MetastoreUtil::isUserDefinedTypeEncodingEnabled).orElse(false));
switch (action.getType()) {
case DROP:
committer.prepareDropPartition(metastoreContext, schemaTableName, partitionValues);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -202,7 +202,7 @@ public static HiveColumnStatistics fromProto(ColumnStatisticsData columnStatisti
private static Column fromProto(alluxio.grpc.table.FieldSchema column)
{
Optional<String> comment = column.hasComment() ? Optional.of(column.getComment()) : Optional.empty();
return new Column(column.getName(), HiveType.valueOf(column.getType()), comment);
return new Column(column.getName(), HiveType.valueOf(column.getType()), comment, Optional.empty());
}

public static Partition fromProto(alluxio.grpc.table.layout.hive.PartitionInfo info)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -503,7 +503,7 @@ public synchronized void addColumn(MetastoreContext metastoreContext, String dat

return oldTable.withDataColumns(ImmutableList.<Column>builder()
.addAll(oldTable.getDataColumns())
.add(new Column(columnName, columnType, Optional.ofNullable(columnComment)))
.add(new Column(columnName, columnType, Optional.ofNullable(columnComment), Optional.empty()))
.build());
});
}
Expand All @@ -528,7 +528,7 @@ public synchronized void renameColumn(MetastoreContext metastoreContext, String
ImmutableList.Builder<Column> newDataColumns = ImmutableList.builder();
for (Column fieldSchema : oldTable.getDataColumns()) {
if (fieldSchema.getName().equals(oldColumnName)) {
newDataColumns.add(new Column(newColumnName, fieldSchema.getType(), fieldSchema.getComment()));
newDataColumns.add(new Column(newColumnName, fieldSchema.getType(), fieldSchema.getComment(), fieldSchema.getTypeMetadata()));
}
else {
newDataColumns.add(fieldSchema);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -584,7 +584,7 @@ public void addColumn(MetastoreContext metastoreContext, String databaseName, St
com.amazonaws.services.glue.model.Table table = getGlueTableOrElseThrow(databaseName, tableName);
ImmutableList.Builder<com.amazonaws.services.glue.model.Column> newDataColumns = ImmutableList.builder();
newDataColumns.addAll(table.getStorageDescriptor().getColumns());
newDataColumns.add(convertColumn(new Column(columnName, columnType, Optional.ofNullable(columnComment))));
newDataColumns.add(convertColumn(new Column(columnName, columnType, Optional.ofNullable(columnComment), Optional.empty())));
table.getStorageDescriptor().setColumns(newDataColumns.build());
replaceGlueTable(databaseName, tableName, table);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -93,7 +93,7 @@ public static Table convertTable(com.amazonaws.services.glue.model.Table glueTab

private static Column convertColumn(com.amazonaws.services.glue.model.Column glueColumn)
{
return new Column(glueColumn.getName(), HiveType.valueOf(glueColumn.getType().toLowerCase(Locale.ENGLISH)), Optional.ofNullable(glueColumn.getComment()));
return new Column(glueColumn.getName(), HiveType.valueOf(glueColumn.getType().toLowerCase(Locale.ENGLISH)), Optional.ofNullable(glueColumn.getComment()), Optional.empty());
}

private static List<Column> convertColumns(List<com.amazonaws.services.glue.model.Column> glueColumns)
Expand Down
Loading