Skip to content

Commit

Permalink
[HUDI-5200] Clean up resources in hudi common UT (#7190)
Browse files Browse the repository at this point in the history
  • Loading branch information
Zouxxyy authored Nov 13, 2022
1 parent 49a2110 commit b10a7c3
Show file tree
Hide file tree
Showing 15 changed files with 110 additions and 0 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsAction;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

Expand Down Expand Up @@ -72,6 +73,11 @@ public void init() throws IOException {
initMetaClient();
}

// Runs after each test: drops the reference to the meta client created in
// init() so no table state leaks between test methods.
@AfterEach
public void tearDown() throws Exception {
cleanMetaClient();
}

@Test
public void testBootstrapIndex() throws IOException {
testBootstrapIndexOneRound(10);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@
import org.apache.hadoop.fs.Path;
import org.junit.Rule;
import org.junit.contrib.java.lang.system.EnvironmentVariables;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

Expand Down Expand Up @@ -81,6 +82,11 @@ public void setUp() throws IOException {
basePath = "file:" + basePath;
}

// Per-test cleanup: releases the meta client built in setUp() so each test
// starts from a fresh table handle.
@AfterEach
public void tearDown() throws Exception {
cleanMetaClient();
}

@Test
public void testMakeDataFileName() {
String instantTime = HoodieActiveTimeline.formatDate(new Date());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -148,6 +148,7 @@ public void testFailedToGetAppendStreamFromHDFSNameNode()
writer.appendBlock(new HoodieCommandBlock(header));
// The log version should be different for this new writer
assertNotEquals(writer.getLogFile().getLogVersion(), logFileVersion);
writer.close();
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
Expand All @@ -48,6 +49,12 @@ public void setupTest() throws IOException {
fs = metaClient.getFs();
}

/**
 * Per-test cleanup: closes the file system handle obtained in setupTest()
 * and releases the meta client reference.
 */
@AfterEach
public void tearDown() throws Exception {
  // Use try/finally so the meta client is released even when fs.close()
  // throws; otherwise a failing close would leak state into the next test.
  try {
    fs.close();
  } finally {
    cleanMetaClient();
  }
}

static Stream<Arguments> formatProviderFn() {
return Stream.of(
Arguments.arguments(Option.empty()),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
Expand Down Expand Up @@ -60,6 +61,11 @@ public void setUp() throws Exception {
backupCfgPath = new Path(metaPath, HoodieTableConfig.HOODIE_PROPERTIES_FILE_BACKUP);
}

// Per-test cleanup: closes the FileSystem opened in setUp() so file handles
// are not leaked across tests.
@AfterEach
public void tearDown() throws Exception {
fs.close();
}

@Test
public void testCreate() throws IOException {
assertTrue(fs.exists(new Path(metaPath, HoodieTableConfig.HOODIE_PROPERTIES_FILE)));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
import org.apache.hudi.common.testutils.HoodieTestUtils;
import org.apache.hudi.common.util.Option;

import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

Expand All @@ -47,6 +48,11 @@ public void init() throws IOException {
initMetaClient();
}

// Runs after each test: releases the meta client initialized in init().
@AfterEach
public void tearDown() throws Exception {
cleanMetaClient();
}

@Test
public void checkMetadata() {
assertEquals(HoodieTestUtils.RAW_TRIPS_TEST_NAME, metaClient.getTableConfig().getTableName(),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@
import org.apache.hudi.common.testutils.HoodieCommonTestHarness;
import org.apache.hudi.common.util.CollectionUtils;
import org.apache.hudi.common.util.Option;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

Expand All @@ -61,6 +62,11 @@ public void setUp() throws Exception {
initMetaClient();
}

// Per-test cleanup: drops the meta client created in setUp() so timeline
// state does not carry over between tests.
@AfterEach
public void tearDown() throws Exception {
cleanMetaClient();
}

@Test
public void testGetPartitionsWithReplaceCommits() throws IOException {
HoodieActiveTimeline activeTimeline = metaClient.getActiveTimeline();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@
import org.apache.hudi.exception.HoodieException;

import org.apache.hadoop.fs.Path;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

Expand Down Expand Up @@ -71,6 +72,11 @@ public void setUp() throws Exception {
initMetaClient();
}

// Per-test cleanup: releases the meta client built in setUp().
@AfterEach
public void tearDown() throws Exception {
cleanMetaClient();
}

@Test
public void testLoadingInstantsFromFiles() throws IOException {
HoodieInstant instant1 = new HoodieInstant(State.REQUESTED, HoodieTimeline.COMMIT_ACTION, "1");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,12 @@ public void setup() throws IOException {
refreshFsView();
}

/**
 * Per-test cleanup: closes the file-system view opened in setup() and then
 * releases the meta client reference.
 */
@AfterEach
public void tearDown() throws Exception {
  // try/finally guarantees the meta client is released even if closing the
  // view throws, so a single failing close cannot poison later tests.
  try {
    closeFsView();
  } finally {
    cleanMetaClient();
  }
}

protected void refreshFsView() throws IOException {
super.refreshFsView();
closeFsView();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,7 @@
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
Expand Down Expand Up @@ -119,6 +120,12 @@ public void setup() throws IOException {
refreshFsView();
}

/**
 * Per-test cleanup: closes the file-system view opened in setup() and then
 * releases the meta client reference.
 */
@AfterEach
public void tearDown() throws Exception {
  // Release the meta client even when closing the view fails, so a failing
  // close in one test does not leak state into the next.
  try {
    closeFsView();
  } finally {
    cleanMetaClient();
  }
}

protected void refreshFsView() throws IOException {
super.refreshFsView();
closeFsView();
Expand Down Expand Up @@ -1400,6 +1407,7 @@ public void testReplaceWithTimeTravel() throws IOException {
.filter(dfile -> dfile.getFileId().equals(fileId3)).count());
assertEquals(1, filteredView.getLatestBaseFiles(partitionPath1)
.filter(dfile -> dfile.getFileId().equals(fileId4)).count());
filteredView.close();

// ensure replacedFileGroupsBefore works with all instants
List<HoodieFileGroup> replacedOnInstant1 = fsView.getReplacedFileGroupsBeforeOrOn("1", partitionPath1).collect(Collectors.toList());
Expand Down Expand Up @@ -1858,6 +1866,8 @@ public void testPendingMajorAndMinorCompactionOperations() throws Exception {

// Verify file system view after 4th commit which is logcompaction.requested.
verifyFileSystemView(partitionPath, expectedState, fileSystemView);

fileSystemView.close();
}

private HoodieCompactionPlan getHoodieCompactionPlan(List<CompactionOperation> operations) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,7 @@
import org.apache.hudi.exception.HoodieIOException;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

Expand Down Expand Up @@ -104,11 +105,17 @@ public void init() throws IOException {
refreshFsView();
}

// Per-test cleanup: releases the meta client initialized in init().
@AfterEach
public void tearDown() throws Exception {
cleanMetaClient();
}

/**
 * Verifies that a file-system view over a freshly initialized table reports
 * no completed instant and no file slices for any partition.
 */
@Test
public void testEmptyPartitionsAndTimeline() throws IOException {
  SyncableFileSystemView view = getFileSystemView(metaClient);
  try {
    assertFalse(view.getLastInstant().isPresent());
    partitions.forEach(p -> assertEquals(0, view.getLatestFileSlices(p).count()));
  } finally {
    // Close in finally so view resources are released even when an
    // assertion above fails; the original leaked the view on failure.
    view.close();
  }
}

@Test
Expand Down Expand Up @@ -164,6 +171,7 @@ public void testAsyncCompaction() throws IOException {
// Finish Compaction
instantsToFiles.putAll(testMultipleWriteSteps(view, Collections.singletonList("19"), false, "19", 2,
Collections.singletonList(new HoodieInstant(State.COMPLETED, HoodieTimeline.DELTA_COMMIT_ACTION, "24"))));
view.close();
}

@Test
Expand Down Expand Up @@ -227,6 +235,8 @@ public void testAsyncMajorAndMinorCompaction() throws IOException {
// Finish Compaction
instantsToFiles.putAll(testMultipleWriteSteps(view, Collections.singletonList("25"), false, "25", 2,
Collections.singletonList(new HoodieInstant(State.COMPLETED, HoodieTimeline.COMMIT_ACTION, "25"))));

view.close();
}

@Test
Expand Down Expand Up @@ -267,6 +277,9 @@ public void testIngestion() throws IOException {

// Clean instants.
testCleans(view, Arrays.asList("21", "22"), instantsToFiles, Arrays.asList("18", "19"), 0, 0);

newView.close();
view.close();
}

@Test
Expand Down Expand Up @@ -314,6 +327,9 @@ public void testReplaceCommits() throws IOException {

// Clean instants.
testCleans(view, Arrays.asList("21", "22"), instantsToFiles, Arrays.asList("18", "19"), NUM_FILE_IDS_PER_PARTITION, 1);

newView.close();
view.close();
}

private void testMultipleReplaceSteps(Map<String, List<String>> instantsToFiles, SyncableFileSystemView view, List<String> instants,
Expand All @@ -329,6 +345,7 @@ private void testMultipleReplaceSteps(Map<String, List<String>> instantsToFiles,
// 1 fileId is replaced for every partition, so subtract partitions.size()
expectedSlicesPerPartition = expectedSlicesPerPartition + fileIdsPerPartition.size() - 1;
areViewsConsistent(view, newView, expectedSlicesPerPartition * partitions.size());
newView.close();
} catch (IOException e) {
throw new HoodieIOException("unable to test replace", e);
}
Expand Down Expand Up @@ -466,6 +483,13 @@ public void testMultipleTransitions() throws IOException {
v.sync();
areViewsConsistent(v, view1, partitions.size() * fileIdsPerPartition.size() * 3);
});

view1.close();
view2.close();
view3.close();
view4.close();
view5.close();
view6.close();
}

/*
Expand Down Expand Up @@ -527,6 +551,7 @@ private void testCleans(SyncableFileSystemView view, List<String> newCleanerInst
metaClient.reloadActiveTimeline();
SyncableFileSystemView newView = getFileSystemView(metaClient);
areViewsConsistent(view, newView, expTotalFileSlicesPerPartition * partitions.size());
newView.close();
} catch (IOException e) {
throw new HoodieException(e);
}
Expand Down Expand Up @@ -582,6 +607,7 @@ private void testRestore(SyncableFileSystemView view, List<String> newRestoreIns
metaClient.reloadActiveTimeline();
SyncableFileSystemView newView = getFileSystemView(metaClient);
areViewsConsistent(view, newView, expTotalFileSlicesPerPartition * partitions.size());
newView.close();
} catch (IOException e) {
throw new HoodieException(e);
}
Expand Down Expand Up @@ -703,6 +729,7 @@ private void scheduleCompaction(SyncableFileSystemView view, String instantTime
metaClient.reloadActiveTimeline();
SyncableFileSystemView newView = getFileSystemView(metaClient);
areViewsConsistent(view, newView, initialExpTotalFileSlices + partitions.size() * fileIdsPerPartition.size());
newView.close();
}

/**
Expand Down Expand Up @@ -732,6 +759,7 @@ private void scheduleLogCompaction(SyncableFileSystemView view, String instantTi
metaClient.reloadActiveTimeline();
SyncableFileSystemView newView = getFileSystemView(metaClient);
areViewsConsistent(view, newView, initialExpTotalFileSlices);
newView.close();
}

/**
Expand Down Expand Up @@ -862,6 +890,7 @@ private Map<String, List<String>> testMultipleWriteSteps(SyncableFileSystemView
metaClient.reloadActiveTimeline();
SyncableFileSystemView newView = getFileSystemView(metaClient);
areViewsConsistent(view, newView, fileIdsPerPartition.size() * partitions.size() * multiple);
newView.close();
instantToFiles.put(instant, filePaths);
if (!deltaCommit) {
multiple++;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.Logger;
import org.apache.logging.log4j.core.appender.AbstractAppender;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
Expand Down Expand Up @@ -80,6 +81,13 @@ public void setUp() {
"file0001" + HoodieTableConfig.BASE_FILE_FORMAT.defaultValue().getFileExtension()));
}

/**
 * Per-test cleanup: closes the two test streams and the file-system view
 * created in setUp().
 */
@AfterEach
public void tearDown() throws Exception {
  // Nested try/finally so every close is attempted even if an earlier one
  // throws; the original would skip the remaining closes on failure.
  try {
    testFileSliceStream.close();
  } finally {
    try {
      testBaseFileStream.close();
    } finally {
      fsView.close();
    }
  }
}

// Clears recorded interactions and stubbings on both mocked views so each
// scenario can re-stub them from a clean state.
private void resetMocks() {
reset(primary, secondary);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -91,6 +91,12 @@ protected void initMetaClient() throws IOException {
basePath = metaClient.getBasePath();
}

/**
 * Releases the reference to the table meta client so the next test builds a
 * fresh one via {@code initMetaClient()}.
 */
protected void cleanMetaClient() {
  // The original guarded this with `if (metaClient != null)`, which is
  // redundant: assigning null is idempotent and safe either way.
  metaClient = null;
}

// Rebuilds the meta client for the same base path, asking the builder to
// reload the active timeline from storage so the view reflects new commits.
protected void refreshFsView() throws IOException {
metaClient = HoodieTableMetaClient.builder().setConf(metaClient.getHadoopConf()).setBasePath(basePath).setLoadActiveTimelineOnLoad(true).build();
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
import org.apache.hudi.internal.schema.InternalSchema;
import org.apache.hudi.internal.schema.Types;
import org.apache.hudi.internal.schema.utils.SerDeHelper;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

Expand All @@ -47,6 +48,11 @@ public void setUp() throws Exception {
initMetaClient();
}

@AfterEach
public void tearDown() throws Exception {
cleanMetaClient();
}

@Test
public void testPersistAndReadHistorySchemaStr() throws IOException {
timeline = new HoodieActiveTimeline(metaClient);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,7 @@ public void setUp() throws IOException {
/**
 * Per-test cleanup: recursively deletes the on-disk table directory and then
 * releases the meta client reference.
 */
@AfterEach
public void tearDown() throws IOException {
  // try/finally so the meta client is released even when the recursive
  // delete throws (e.g. transient FS error), keeping tests isolated.
  try {
    metaClient.getFs().delete(new Path(metaClient.getBasePath()), true);
  } finally {
    cleanMetaClient();
  }
}

/**
Expand Down

0 comments on commit b10a7c3

Please sign in to comment.