Commit
Flush transaction log cache in Delta flush_metadata_cache procedure
Co-Authored-By: Marius Grama <findinpath@gmail.com>
1 parent 7158624 · commit 3f88633
Showing 9 changed files with 379 additions and 2 deletions.
...a-lake/src/main/java/io/trino/plugin/deltalake/procedure/FlushMetadataCacheProcedure.java
114 additions, 0 deletions
@@ -0,0 +1,114 @@
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.trino.plugin.deltalake.procedure;

import com.google.common.collect.ImmutableList;
import io.trino.plugin.deltalake.transactionlog.TransactionLogAccess;
import io.trino.plugin.hive.metastore.HiveMetastore;
import io.trino.plugin.hive.metastore.HiveMetastoreFactory;
import io.trino.plugin.hive.metastore.Table;
import io.trino.plugin.hive.metastore.cache.CachingHiveMetastore;
import io.trino.spi.TrinoException;
import io.trino.spi.classloader.ThreadContextClassLoader;
import io.trino.spi.connector.ConnectorSession;
import io.trino.spi.connector.SchemaTableName;
import io.trino.spi.connector.TableNotFoundException;
import io.trino.spi.procedure.Procedure;

import javax.inject.Inject;
import javax.inject.Provider;

import java.lang.invoke.MethodHandle;
import java.util.Optional;

import static io.trino.plugin.deltalake.metastore.HiveMetastoreBackedDeltaLakeMetastore.getTableLocation;
import static io.trino.plugin.deltalake.metastore.HiveMetastoreBackedDeltaLakeMetastore.verifyDeltaLakeTable;
import static io.trino.spi.StandardErrorCode.INVALID_PROCEDURE_ARGUMENT;
import static io.trino.spi.type.VarcharType.VARCHAR;
import static java.lang.invoke.MethodHandles.lookup;
import static java.util.Objects.requireNonNull;

public class FlushMetadataCacheProcedure
        implements Provider<Procedure>
{
    private static final String PROCEDURE_NAME = "flush_metadata_cache";

    private static final String PARAM_SCHEMA_NAME = "SCHEMA_NAME";
    private static final String PARAM_TABLE_NAME = "TABLE_NAME";

    private final HiveMetastoreFactory metastoreFactory;
    private final Optional<CachingHiveMetastore> cachingHiveMetastore;
    private final TransactionLogAccess transactionLogAccess;

    private static final MethodHandle FLUSH_METADATA_CACHE;

    static {
        try {
            FLUSH_METADATA_CACHE = lookup().unreflect(FlushMetadataCacheProcedure.class.getMethod("flushMetadataCache", ConnectorSession.class, String.class, String.class));
        }
        catch (ReflectiveOperationException e) {
            throw new AssertionError(e);
        }
    }

    @Inject
    public FlushMetadataCacheProcedure(
            HiveMetastoreFactory metastoreFactory,
            Optional<CachingHiveMetastore> cachingHiveMetastore,
            TransactionLogAccess transactionLogAccess)
    {
        this.metastoreFactory = requireNonNull(metastoreFactory, "metastoreFactory is null");
        this.cachingHiveMetastore = requireNonNull(cachingHiveMetastore, "cachingHiveMetastore is null");
        this.transactionLogAccess = requireNonNull(transactionLogAccess, "transactionLogAccess is null");
    }

    @Override
    public Procedure get()
    {
        return new Procedure(
                "system",
                PROCEDURE_NAME,
                ImmutableList.of(
                        new Procedure.Argument(PARAM_SCHEMA_NAME, VARCHAR, false, null),
                        new Procedure.Argument(PARAM_TABLE_NAME, VARCHAR, false, null)),
                FLUSH_METADATA_CACHE.bindTo(this),
                true);
    }

    public void flushMetadataCache(ConnectorSession session, String schemaName, String tableName)
    {
        try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(getClass().getClassLoader())) {
            doFlushMetadataCache(session, Optional.ofNullable(schemaName), Optional.ofNullable(tableName));
        }
    }

    private void doFlushMetadataCache(ConnectorSession session, Optional<String> schemaName, Optional<String> tableName)
    {
        if (schemaName.isEmpty() && tableName.isEmpty()) {
            cachingHiveMetastore.ifPresent(CachingHiveMetastore::flushCache);
            transactionLogAccess.flushCache();
        }
        else if (schemaName.isPresent() && tableName.isPresent()) {
            HiveMetastore metastore = metastoreFactory.createMetastore(Optional.of(session.getIdentity()));
            Table table = metastore.getTable(schemaName.get(), tableName.get())
                    .orElseThrow(() -> new TableNotFoundException(new SchemaTableName(schemaName.get(), tableName.get())));
            verifyDeltaLakeTable(table);
            cachingHiveMetastore.ifPresent(caching -> caching.invalidateTable(table.getDatabaseName(), table.getTableName()));
            transactionLogAccess.invalidateCaches(getTableLocation(table));
        }
        else {
            throw new TrinoException(INVALID_PROCEDURE_ARGUMENT, "Illegal parameter set passed");
        }
    }
}
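
Since the class is a Provider<Procedure>, it still has to be registered in the connector's Guice module before it appears under the catalog's system schema; that wiring lives in one of the changed files not expanded in this view. Below is a minimal sketch of the usual multibinder registration, assuming a hypothetical module class name:

import com.google.inject.Binder;
import com.google.inject.Module;
import com.google.inject.Scopes;
import com.google.inject.multibindings.Multibinder;
import io.trino.plugin.deltalake.procedure.FlushMetadataCacheProcedure;
import io.trino.spi.procedure.Procedure;

// Hypothetical module class; the real registration happens in the connector's existing module.
public class ExampleDeltaLakeProceduresModule
        implements Module
{
    @Override
    public void configure(Binder binder)
    {
        // Bind the Provider<Procedure> into the set of procedures exposed by the connector
        Multibinder<Procedure> procedures = Multibinder.newSetBinder(binder, Procedure.class);
        procedures.addBinding().toProvider(FlushMetadataCacheProcedure.class).in(Scopes.SINGLETON);
    }
}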
...ake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeFlushMetadataCacheProcedure.java
114 additions, 0 deletions
@@ -0,0 +1,114 @@
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.trino.plugin.deltalake;

import com.google.common.collect.ImmutableMap;
import io.trino.plugin.hive.containers.HiveMinioDataLake;
import io.trino.plugin.hive.metastore.HiveMetastore;
import io.trino.plugin.hive.metastore.thrift.BridgingHiveMetastore;
import io.trino.testing.AbstractTestQueryFramework;
import io.trino.testing.QueryRunner;
import org.testng.annotations.AfterClass;
import org.testng.annotations.Test;

import java.io.IOException;

import static io.trino.plugin.deltalake.DeltaLakeQueryRunner.DELTA_CATALOG;
import static io.trino.plugin.deltalake.DeltaLakeQueryRunner.createS3DeltaLakeQueryRunner;
import static io.trino.plugin.hive.TestingThriftHiveMetastoreBuilder.testingThriftHiveMetastoreBuilder;
import static io.trino.plugin.hive.containers.HiveHadoop.HIVE3_IMAGE;

public class TestDeltaLakeFlushMetadataCacheProcedure
        extends AbstractTestQueryFramework
{
    private static final String BUCKET_NAME = "delta-lake-test-flush-metadata-cache";

    private HiveMetastore metastore;

    @Override
    protected QueryRunner createQueryRunner()
            throws Exception
    {
        HiveMinioDataLake hiveMinioDataLake = new HiveMinioDataLake(BUCKET_NAME, HIVE3_IMAGE);
        hiveMinioDataLake.start();
        metastore = new BridgingHiveMetastore(
                testingThriftHiveMetastoreBuilder()
                        .metastoreClient(hiveMinioDataLake.getHiveHadoop().getHiveMetastoreEndpoint())
                        .build());

        return createS3DeltaLakeQueryRunner(
                DELTA_CATALOG,
                "default",
                ImmutableMap.of("hive.metastore-cache-ttl", "10m"),
                hiveMinioDataLake.getMinio().getMinioAddress(),
                hiveMinioDataLake.getHiveHadoop());
    }

    @AfterClass(alwaysRun = true)
    public void tearDown()
            throws IOException
    {
        metastore = null;
    }

    @Test
    public void testFlushMetadataCache()
    {
        assertUpdate("CREATE SCHEMA cached WITH (location = 's3://" + BUCKET_NAME + "/cached')");
        assertUpdate("CREATE TABLE cached.cached AS SELECT * FROM tpch.tiny.nation", 25);

        // Verify that column cache is flushed
        // Fill caches
        assertQuerySucceeds("SELECT name, regionkey FROM cached.cached");

        // Verify that table cache is flushed
        String showTablesSql = "SHOW TABLES FROM cached";
        // Fill caches
        assertQuery(showTablesSql, "VALUES 'cached'");

        // Rename table outside Trino
        metastore.renameTable("cached", "cached", "cached", "renamed");

        // Should still return old table name from cache
        assertQuery(showTablesSql, "VALUES 'cached'");

        // Should return new table name after cache flush
        assertUpdate("CALL system.flush_metadata_cache(schema_name => 'cached', table_name => 'cached')");
        assertQuery(showTablesSql, "VALUES 'renamed'");

        // Verify that schema cache is flushed
        String showSchemasSql = "SHOW SCHEMAS FROM delta_lake";
        // Fill caches
        assertQuery(showSchemasSql, "VALUES ('cached'), ('information_schema'), ('default')");

        // Drop a table and a schema outside Trino
        metastore.dropTable("cached", "renamed", false);
        metastore.dropDatabase("cached", false);

        // Should still return old schemas from cache
        assertQuery(showSchemasSql, "VALUES ('cached'), ('information_schema'), ('default')");

        // Should not return the old schema name after cache flush
        assertUpdate("CALL system.flush_metadata_cache()");
        assertQuery(showSchemasSql, "VALUES ('information_schema'), ('default')");
    }

    @Test
    public void testFlushMetadataCacheTableNotFound()
    {
        assertQueryFails(
                "CALL system.flush_metadata_cache(schema_name => 'test_not_existing_schema', table_name => 'test_not_existing_table')",
                "Table 'test_not_existing_schema.test_not_existing_table' not found");
    }
}
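
For reference, a hedged sketch of invoking the procedure from a client through the Trino JDBC driver, outside the test framework. The coordinator address, user, catalog name ("delta"), and the schema/table names are placeholder assumptions; both supported argument combinations are shown (a single table, or no arguments to flush all cached metadata including the transaction log cache):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;

public class FlushMetadataCacheClientExample
{
    public static void main(String[] args)
            throws SQLException
    {
        // Hypothetical coordinator address, user, and catalog name ("delta")
        try (Connection connection = DriverManager.getConnection("jdbc:trino://localhost:8080", "admin", null);
                Statement statement = connection.createStatement()) {
            // Invalidate metastore and transaction log caches for one Delta Lake table
            statement.execute("CALL delta.system.flush_metadata_cache(schema_name => 'cached', table_name => 'cached')");

            // Invalidate all cached metadata for the catalog
            statement.execute("CALL delta.system.flush_metadata_cache()");
        }
    }
}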