Skip to content

Commit

Permalink
better integration test management
Browse files Browse the repository at this point in the history
  • Loading branch information
jetoile committed Oct 10, 2018
1 parent 7372f5b commit 9817fc1
Show file tree
Hide file tree
Showing 14 changed files with 132 additions and 256 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,9 @@ public class HadoopBootstrapRemoteStarter extends AbstractMojo {
@Parameter(property = "exec")
protected String exec;

@Parameter(property = "skip", required = false, defaultValue = "${skipTests}")
private boolean skipTests;

@Component
private MavenProject project;

Expand All @@ -61,26 +64,30 @@ public class HadoopBootstrapRemoteStarter extends AbstractMojo {

@Override
public void execute() throws MojoExecutionException, MojoFailureException {
    // Honor the standard -DskipTests flag: when tests are skipped there is
    // no point in starting the remote Hadoop Unit standalone instance.
    if (skipTests) {
        getLog().info("Hadoop Unit's start goal is skipped");
    } else {
        HadoopBootstrapRemoteUtils utils = new HadoopBootstrapRemoteUtils(project, session, pluginManager);

        // Resolve the installation directory of the standalone distribution.
        hadoopUnitPath = utils.getHadoopUnitPath(hadoopUnitPath, getLog());

        //change hadoop.properties
        getLog().info("is going to modifying hadoop.properties");
        editHadoopUnitConfFile();
        getLog().info("modifying hadoop.properties done");

        //clean log file so that the startup banner below is detected fresh
        Path hadoopLogFilePath = Paths.get(hadoopUnitPath, "wrapper.log");
        deleteLogFile(hadoopLogFilePath);

        getLog().info("is going to start hadoop unit with executable " + ((exec == null) ? "./hadoop-unit-standalone" : exec));
        utils.operateRemoteHadoopUnit(hadoopUnitPath, outputFile, "start", exec);

        //listen to log file and block until the ASCII-art startup banner appears,
        //which signals that Hadoop Unit finished booting
        getLog().info("is going tail log file");
        utils.tailLogFileUntilFind(hadoopLogFilePath, "/_/ /_/ \\__,_/ \\__,_/ \\____/\\____/_ .___/ \\____/ /_/ /_//_/ \\__/", getLog());
        getLog().info("hadoop unit started");
    }

}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,9 @@ public class HadoopBootstrapRemoteStopper extends AbstractMojo {
@Parameter(property = "exec")
protected String exec;

@Parameter(property = "skip", required = false, defaultValue = "${skipTests}")
private boolean skipTests;

@Component
private MavenProject project;

Expand All @@ -53,19 +56,22 @@ public class HadoopBootstrapRemoteStopper extends AbstractMojo {

@Override
public void execute() throws MojoExecutionException, MojoFailureException {
    // Honor the standard -DskipTests flag: when tests are skipped the start
    // goal did not run, so there is nothing to stop.
    if (skipTests) {
        getLog().info("Hadoop Unit's stop goal is skipped");
    } else {
        HadoopBootstrapRemoteUtils utils = new HadoopBootstrapRemoteUtils(project, session, pluginManager);

        // Resolve the installation directory of the standalone distribution.
        hadoopUnitPath = utils.getHadoopUnitPath(hadoopUnitPath, getLog());

        getLog().info("is going to stop hadoop unit with executable " + ((exec == null) ? "./hadoop-unit-standalone" : exec));
        utils.operateRemoteHadoopUnit(hadoopUnitPath, outputFile, "stop", exec);
        Path hadoopLogFilePath = Paths.get(hadoopUnitPath, "wrapper.log");

        // Block until the wrapper confirms shutdown in its log file.
        getLog().info("is going tail log file");
        utils.tailLogFileUntilFind(hadoopLogFilePath, "<-- Wrapper Stopped", getLog());
        getLog().info("hadoop unit stopped");
    }

}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,9 @@ public class HadoopBootstrapStarter extends AbstractMojo {
@Parameter(property = "components", required = true)
protected List<ComponentArtifact> components;

@Parameter(property = "skip", required = false, defaultValue = "${skipTests}")
private boolean skipTests;

/**
* The current repository/network configuration of Maven.
*
Expand All @@ -64,17 +67,21 @@ public class HadoopBootstrapStarter extends AbstractMojo {

@Override
public void execute() throws MojoExecutionException, MojoFailureException {
HadoopUnitRunnable hadoopUnitRunnable = new HadoopUnitRunnable(components, queue, getLog(), port, repoSession, remoteRepos);
if (skipTests) {
getLog().info("Hadoop Unit's embedded-start goal is skipped");
} else {
HadoopUnitRunnable hadoopUnitRunnable = new HadoopUnitRunnable(components, queue, getLog(), port, repoSession, remoteRepos);

Thread thread = new Thread(hadoopUnitRunnable, "hadoop-unit-runner");
thread.start();
Thread thread = new Thread(hadoopUnitRunnable, "hadoop-unit-runner");
thread.start();

try {
queue.poll(10, TimeUnit.MINUTES);
getLog().info("free starter");
try {
queue.poll(10, TimeUnit.MINUTES);
getLog().info("free starter");

} catch (InterruptedException e) {
getLog().error("unable to synchronize startup: " + e.getMessage());
} catch (InterruptedException e) {
getLog().error("unable to synchronize startup: " + e.getMessage());
}
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,25 +36,33 @@ public class HadoopBootstrapStopper extends AbstractMojo {
@Parameter(property = "timeout", defaultValue = "120000", required = false) //set timeout to 2 min
protected int timeout;

@Parameter(property = "skip", required = false, defaultValue = "${skipTests}")
private boolean skipTests;

@Override
public void execute() throws MojoExecutionException, MojoFailureException {
getLog().info("is going to send a hadoop unit stop message");

try (Socket client = new Socket("localhost", port);
PrintWriter out = new PrintWriter(client.getOutputStream(), true);
BufferedReader in = new BufferedReader(new InputStreamReader(client.getInputStream()))) {
client.setSoTimeout(timeout);

out.println("stop");
String responseLine;
if ((responseLine = in.readLine()) != null) {
if (StringUtils.containsIgnoreCase(responseLine, "success")) {
getLog().info("hadoop unit is stopped");
if (skipTests) {
getLog().info("Hadoop Unit's embedded-stop goal is skipped");
} else {

getLog().info("is going to send a hadoop unit stop message");

try (Socket client = new Socket("localhost", port);
PrintWriter out = new PrintWriter(client.getOutputStream(), true);
BufferedReader in = new BufferedReader(new InputStreamReader(client.getInputStream()))) {
client.setSoTimeout(timeout);

out.println("stop");
String responseLine;
if ((responseLine = in.readLine()) != null) {
if (StringUtils.containsIgnoreCase(responseLine, "success")) {
getLog().info("hadoop unit is stopped");
}
}
}

} catch (IOException e) {
getLog().error("unable to contact pre-integration phase: " + e.getMessage());
} catch (IOException e) {
getLog().error("unable to contact pre-integration phase: " + e.getMessage());
}
}
}
}
Expand Down
4 changes: 4 additions & 0 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -886,6 +886,10 @@
<artifactId>maven-surefire-plugin</artifactId>
<version>2.19.1</version>
</plugin>
<plugin>
<artifactId>maven-failsafe-plugin</artifactId>
<version>2.19.1</version>
</plugin>
<plugin>
<artifactId>maven-jar-plugin</artifactId>
<version>3.0.2</version>
Expand Down
28 changes: 1 addition & 27 deletions sample/confluent-integrationtest/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -42,33 +42,7 @@
</activation>

<build>
<plugins>
<plugin>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<excludes>
<exclude>**/*IntegrationTest.java</exclude>
</excludes>
</configuration>
<executions>
<execution>
<id>integration-test</id>
<goals>
<goal>test</goal>
</goals>
<phase>integration-test</phase>
<configuration>
<excludes>
<exclude>none</exclude>
</excludes>
<includes>
<include>**/*IntegrationTest.java</include>
</includes>
</configuration>
</execution>
</executions>
</plugin>

<plugins>
<plugin>
<artifactId>hadoop-unit-maven-plugin</artifactId>
<groupId>fr.jetoile.hadoop</groupId>
Expand Down
26 changes: 0 additions & 26 deletions sample/kafka-spark-streaming/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -86,32 +86,6 @@

<build>
<plugins>
<plugin>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<excludes>
<exclude>**/*IntegrationTest.java</exclude>
</excludes>
</configuration>
<executions>
<execution>
<id>integration-test</id>
<goals>
<goal>test</goal>
</goals>
<phase>integration-test</phase>
<configuration>
<excludes>
<exclude>none</exclude>
</excludes>
<includes>
<include>**/*IntegrationTest.java</include>
</includes>
</configuration>
</execution>
</executions>
</plugin>

<plugin>
<artifactId>hadoop-unit-maven-plugin</artifactId>
<groupId>fr.jetoile.hadoop</groupId>
Expand Down
26 changes: 0 additions & 26 deletions sample/kafka-stream/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -61,32 +61,6 @@
</activation>
<build>
<plugins>
<plugin>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<excludes>
<exclude>**/*IntegrationTest.java</exclude>
</excludes>
</configuration>
<executions>
<execution>
<id>integration-test</id>
<goals>
<goal>test</goal>
</goals>
<phase>integration-test</phase>
<configuration>
<excludes>
<exclude>none</exclude>
</excludes>
<includes>
<include>**/*IntegrationTest.java</include>
</includes>
</configuration>
</execution>
</executions>
</plugin>

<plugin>
<artifactId>hadoop-unit-maven-plugin</artifactId>
<groupId>fr.jetoile.hadoop</groupId>
Expand Down
26 changes: 0 additions & 26 deletions sample/knox-hbase-webhdfs/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -40,32 +40,6 @@
</activation>
<build>
<plugins>
<plugin>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<excludes>
<exclude>**/*IntegrationTest.java</exclude>
</excludes>
</configuration>
<executions>
<execution>
<id>integration-test</id>
<goals>
<goal>test</goal>
</goals>
<phase>integration-test</phase>
<configuration>
<excludes>
<exclude>none</exclude>
</excludes>
<includes>
<include>**/*IntegrationTest.java</include>
</includes>
</configuration>
</execution>
</executions>
</plugin>

<plugin>
<artifactId>hadoop-unit-maven-plugin</artifactId>
<groupId>fr.jetoile.hadoop</groupId>
Expand Down
26 changes: 0 additions & 26 deletions sample/parquet-spark/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -75,32 +75,6 @@
</activation>
<build>
<plugins>
<plugin>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<excludes>
<exclude>**/*IntegrationTest.java</exclude>
</excludes>
</configuration>
<executions>
<execution>
<id>integration-test</id>
<goals>
<goal>test</goal>
</goals>
<phase>integration-test</phase>
<configuration>
<excludes>
<exclude>none</exclude>
</excludes>
<includes>
<include>**/*IntegrationTest.java</include>
</includes>
</configuration>
</execution>
</executions>
</plugin>

<plugin>
<artifactId>hadoop-unit-maven-plugin</artifactId>
<groupId>fr.jetoile.hadoop</groupId>
Expand Down
Loading

0 comments on commit 9817fc1

Please sign in to comment.