Skip to content

Commit

Permalink
[MINOR] Fix a potential NPE and some finer points of hudi cli (#5656)
Browse files Browse the repository at this point in the history
  • Loading branch information
luoyajun authored May 24, 2022
1 parent 18635b5 commit f30b3ae
Show file tree
Hide file tree
Showing 5 changed files with 26 additions and 12 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -364,7 +364,7 @@ private HoodieWriteConfig getWriteConfig() {

/**
 * Lazily initializes the CLI's shared {@code JavaSparkContext} if it has not been
 * created yet, using the default Spark configuration for the "HoodieCLI" app name.
 *
 * @param userDefinedMaster optional Spark master URL override; when absent, the
 *                          master from the default configuration is used
 */
private void initJavaSparkContext(Option<String> userDefinedMaster) {
  // Idempotent: only build the context once for the lifetime of the CLI session.
  if (jsc == null) {
    jsc = SparkUtil.initJavaSparkContext(SparkUtil.getDefaultConf("HoodieCLI", userDefinedMaster));
  }
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -95,7 +95,7 @@ public static void main(String[] args) throws Exception {
LOG.info("Invoking SparkMain: " + commandString);
final SparkCommand cmd = SparkCommand.valueOf(commandString);

JavaSparkContext jsc = SparkUtil.initJavaSparkConf("hoodie-cli-" + commandString,
JavaSparkContext jsc = SparkUtil.initJavaSparkContext("hoodie-cli-" + commandString,
Option.of(args[1]), Option.of(args[2]));

int returnCode = 0;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ public String upgradeHoodieTable(
if (exitCode != 0) {
return String.format("Failed: Could not Upgrade/Downgrade Hoodie table to \"%s\".", toVersion);
}
return String.format("Hoodie table upgraded/downgraded to ", toVersion);
return String.format("Hoodie table upgraded/downgraded to %s", toVersion);
}

@CliCommand(value = "downgrade table", help = "Downgrades a table")
Expand All @@ -78,6 +78,6 @@ public String downgradeHoodieTable(
if (exitCode != 0) {
return String.format("Failed: Could not Upgrade/Downgrade Hoodie table to \"%s\".", toVersion);
}
return String.format("Hoodie table upgraded/downgraded to ", toVersion);
return String.format("Hoodie table upgraded/downgraded to %s", toVersion);
}
}
19 changes: 11 additions & 8 deletions hudi-cli/src/main/java/org/apache/hudi/cli/utils/SparkUtil.java
Original file line number Diff line number Diff line change
Expand Up @@ -32,8 +32,8 @@

import java.io.File;
import java.net.URISyntaxException;
import java.util.Arrays;
import java.util.Map;
import java.util.Objects;
import java.util.Properties;

/**
Expand All @@ -56,9 +56,12 @@ public static SparkLauncher initLauncher(String propertiesFile) throws URISyntax
if (!StringUtils.isNullOrEmpty(propertiesFile)) {
sparkLauncher.setPropertiesFile(propertiesFile);
}

File libDirectory = new File(new File(currentJar).getParent(), "lib");
for (String library : Objects.requireNonNull(libDirectory.list())) {
sparkLauncher.addJar(new File(libDirectory, library).getAbsolutePath());
// This lib directory may not be required, such as when providing libraries through a bundle jar
if (libDirectory.exists()) {
Arrays.stream(libDirectory.list()).forEach(library ->
sparkLauncher.addJar(new File(libDirectory, library).getAbsolutePath()));
}
return sparkLauncher;
}
Expand Down Expand Up @@ -99,20 +102,20 @@ public static SparkConf getDefaultConf(final String appName, final Option<String
return sparkConf;
}

public static JavaSparkContext initJavaSparkConf(String name) {
return initJavaSparkConf(name, Option.empty(), Option.empty());
public static JavaSparkContext initJavaSparkContext(String name) {
return initJavaSparkContext(name, Option.empty(), Option.empty());
}

public static JavaSparkContext initJavaSparkConf(String name, Option<String> master, Option<String> executorMemory) {
public static JavaSparkContext initJavaSparkContext(String name, Option<String> master, Option<String> executorMemory) {
SparkConf sparkConf = getDefaultConf(name, master);
if (executorMemory.isPresent()) {
sparkConf.set(HoodieCliSparkConfig.CLI_EXECUTOR_MEMORY, executorMemory.get());
}

return initJavaSparkConf(sparkConf);
return initJavaSparkContext(sparkConf);
}

public static JavaSparkContext initJavaSparkConf(SparkConf sparkConf) {
public static JavaSparkContext initJavaSparkContext(SparkConf sparkConf) {
SparkRDDWriteClient.registerClasses(sparkConf);
JavaSparkContext jsc = new JavaSparkContext(sparkConf);
jsc.hadoopConfiguration().setBoolean(HoodieCliSparkConfig.CLI_PARQUET_ENABLE_SUMMARY_METADATA, false);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,11 +22,22 @@
import org.apache.hudi.cli.utils.SparkUtil;
import org.apache.spark.SparkConf;

import org.apache.spark.launcher.SparkLauncher;
import org.junit.jupiter.api.Test;

import java.net.URISyntaxException;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;

public class SparkUtilTest {

// Verifies a SparkLauncher can be built even when no properties file is supplied.
@Test
public void testInitSparkLauncher() throws URISyntaxException {
  final SparkLauncher launcher = SparkUtil.initLauncher(null);
  assertNotNull(launcher);
}

@Test
public void testGetDefaultSparkConf() {
SparkConf sparkConf = SparkUtil.getDefaultConf("test-spark-app", Option.of(""));
Expand Down

0 comments on commit f30b3ae

Please sign in to comment.