Skip to content

Commit

Permalink
add maven plugin for integration test
Browse files Browse the repository at this point in the history
  • Loading branch information
jetoile committed Mar 28, 2016
1 parent 6a781ec commit e93342d
Show file tree
Hide file tree
Showing 5 changed files with 409 additions and 0 deletions.
164 changes: 164 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -142,6 +142,170 @@ hive
#Sample
See hadoop-unit-standalone/src/test/java/fr/jetoile/hadoopunit/integrationtest

#Maven Plugin usage
A Maven plugin is provided for integration tests only.

To use it, add the following to the project's pom:
```xml
<dependencies>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.11</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.easytesting</groupId>
<artifactId>fest-assert</artifactId>
<version>1.4</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
<version>1.8.5</version>
<scope>test</scope>
</dependency>

<dependency>
<groupId>fr.jetoile.hadoop</groupId>
<artifactId>hadoop-unit-hdfs</artifactId>
<version>1.1-SNAPSHOT</version>
<scope>test</scope>
</dependency>

<dependency>
<groupId>fr.jetoile.hadoop</groupId>
<artifactId>hadoop-unit-hive</artifactId>
<version>1.1-SNAPSHOT</version>
<scope>test</scope>
</dependency>

<dependency>
<groupId>fr.jetoile.hadoop</groupId>
<artifactId>hadoop-unit-client-hdfs</artifactId>
<version>1.1-SNAPSHOT</version>
<scope>test</scope>
</dependency>

<dependency>
<groupId>fr.jetoile.hadoop</groupId>
<artifactId>hadoop-unit-client-hive</artifactId>
<version>1.1-SNAPSHOT</version>
<scope>test</scope>
</dependency>

<dependency>
<groupId>fr.jetoile.hadoop</groupId>
<artifactId>hadoop-unit-client-spark</artifactId>
<version>1.1-SNAPSHOT</version>
<scope>test</scope>
</dependency>
</dependencies>

<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<excludes>
<exclude>**/*IntegrationTest.java</exclude>
</excludes>
</configuration>
<executions>
<execution>
<id>integration-test</id>
<goals>
<goal>test</goal>
</goals>
<phase>integration-test</phase>
<configuration>
<excludes>
<exclude>none</exclude>
</excludes>
<includes>
<include>**/*IntegrationTest.java</include>
</includes>
</configuration>
</execution>
</executions>
</plugin>

<plugin>
<artifactId>hadoop-unit-maven-plugin</artifactId>
<groupId>fr.jetoile.hadoop</groupId>
<version>1.1-SNAPSHOT</version>
<executions>
<execution>
<id>start</id>
<goals>
<goal>start</goal>
</goals>
<phase>pre-integration-test</phase>
</execution>
</executions>
<configuration>
<values>
<value>HDFS</value>
<value>ZOOKEEPER</value>
<value>HIVEMETA</value>
<value>HIVESERVER2</value>
</values>
</configuration>

</plugin>

</plugins>
</build>
```

Here is a sample integration test:
```java
public class HdfsBootstrapIntegrationTest {

    // Loaded once per class from default.properties; holds HDFS test keys and ports.
    static private Configuration configuration;


    @BeforeClass
    public static void setup() throws BootstrapException {
        try {
            configuration = new PropertiesConfiguration("default.properties");
        } catch (ConfigurationException e) {
            throw new BootstrapException("bad config", e);
        }
    }


    @Test
    public void hdfsShouldStart() throws Exception {

        FileSystem hdfsFsHandle = HdfsUtils.INSTANCE.getFileSystem();
        try {
            Path testFile = new Path(configuration.getString(Config.HDFS_TEST_FILE_KEY));
            String testString = configuration.getString(Config.HDFS_TEST_STRING_KEY);

            // Write the test string; try-with-resources closes the stream even on failure.
            try (FSDataOutputStream writer = hdfsFsHandle.create(testFile)) {
                writer.writeUTF(testString);
            }

            // Read the file back and compare to the test string (expected value first).
            try (FSDataInputStream reader = hdfsFsHandle.open(testFile)) {
                assertEquals(testString, reader.readUTF());
            }
        } finally {
            hdfsFsHandle.close();
        }

        // The namenode HTTP endpoint should answer WebHDFS requests.
        URL url = new URL(
                String.format("http://localhost:%s/webhdfs/v1?op=GETHOMEDIRECTORY&user.name=guest",
                        configuration.getInt(Config.HDFS_NAMENODE_HTTP_PORT_KEY)));
        URLConnection connection = url.openConnection();
        connection.setRequestProperty("Accept-Charset", "UTF-8");
        // Decode explicitly as UTF-8 instead of the platform default charset.
        try (BufferedReader response = new BufferedReader(
                new InputStreamReader(connection.getInputStream(), "UTF-8"))) {
            String line = response.readLine();
            assertThat(line).isEqualTo("{\"Path\":\"/user/guest\"}");
        }
    }
}
```

#Components available

* SolrCloud 5.4.1
Expand Down
102 changes: 102 additions & 0 deletions hadoop-unit-maven-plugin/pom.xml
Original file line number Diff line number Diff line change
@@ -0,0 +1,102 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>hadoop-unit</artifactId>
<groupId>fr.jetoile.hadoop</groupId>
<version>1.1-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>

<artifactId>hadoop-unit-maven-plugin</artifactId>
<packaging>maven-plugin</packaging>


<dependencies>
<dependency>
<groupId>fr.jetoile.hadoop</groupId>
<artifactId>hadoop-unit-commons</artifactId>
</dependency>

<dependency>
<groupId>fr.jetoile.hadoop</groupId>
<artifactId>hadoop-unit-hbase</artifactId>
</dependency>

<dependency>
<groupId>fr.jetoile.hadoop</groupId>
<artifactId>hadoop-unit-hdfs</artifactId>
</dependency>

<dependency>
<groupId>fr.jetoile.hadoop</groupId>
<artifactId>hadoop-unit-hive</artifactId>
</dependency>

<dependency>
<groupId>fr.jetoile.hadoop</groupId>
<artifactId>hadoop-unit-kafka</artifactId>
</dependency>

<dependency>
<groupId>fr.jetoile.hadoop</groupId>
<artifactId>hadoop-unit-oozie</artifactId>
</dependency>

<dependency>
<groupId>fr.jetoile.hadoop</groupId>
<artifactId>hadoop-unit-solr</artifactId>
</dependency>

<dependency>
<groupId>fr.jetoile.hadoop</groupId>
<artifactId>hadoop-unit-solrcloud</artifactId>
</dependency>

<dependency>
<groupId>fr.jetoile.hadoop</groupId>
<artifactId>hadoop-unit-zookeeper</artifactId>
</dependency>

<dependency>
<groupId>org.apache.maven</groupId>
<artifactId>maven-plugin-api</artifactId>
<version>3.2.1</version>
</dependency>

<!-- dependencies to annotations -->
<dependency>
<groupId>org.apache.maven.plugin-tools</groupId>
<artifactId>maven-plugin-annotations</artifactId>
<version>3.4</version>
<scope>provided</scope>
</dependency>

</dependencies>

<build>
<pluginManagement>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-plugin-plugin</artifactId>
<version>3.3</version>
<executions>
<execution>
<id>mojo-descriptor</id>
<goals>
<goal>descriptor</goal>
</goals>
</execution>
</executions>
<configuration>
<skipErrorNoDescriptorsFound>true</skipErrorNoDescriptorsFound>
</configuration>
</plugin>

</plugins>
</pluginManagement>
</build>

</project>
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
package fr.jetoile.hadoopunit;

import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;

import java.util.List;
import java.util.stream.Collectors;


@Mojo(name = "start", defaultPhase = LifecyclePhase.PRE_INTEGRATION_TEST, threadSafe = false)
public class HadoopBootstrapStarter extends AbstractMojo {

    /**
     * Names of the components to start (e.g. {@code HDFS}, {@code ZOOKEEPER});
     * compared against each component's name upper-cased.
     */
    @Parameter(property = "values", required = true)
    protected List<String> values;

    /**
     * Restricts the bootstrap's start/stop component lists to the configured
     * {@code values}, then starts all remaining components.
     *
     * @throws MojoExecutionException if the components cannot be started
     */
    @Override
    public void execute() throws MojoExecutionException, MojoFailureException {

        HadoopBootstrap bootstrap = HadoopBootstrap.INSTANCE;

        // Keep only the components the user listed; names are matched upper-cased.
        bootstrap.componentsToStart = bootstrap.componentsToStart.stream()
                .filter(c -> values.contains(c.getName().toUpperCase()))
                .collect(Collectors.toList());

        bootstrap.componentsToStop = bootstrap.componentsToStop.stream()
                .filter(c -> values.contains(c.getName().toUpperCase()))
                .collect(Collectors.toList());

        try {
            bootstrap.startAll();
        } catch (Exception e) {
            // Fail the build instead of swallowing the error: if the components
            // did not start, the integration tests must not be allowed to run.
            throw new MojoExecutionException("unable to start hadoop unit components", e);
        }
    }
}
Loading

0 comments on commit e93342d

Please sign in to comment.