From c4dcb2aca8c71a73295f8527c5d14d61cbbfb898 Mon Sep 17 00:00:00 2001
From: jetoile
Date: Tue, 24 Jan 2017 17:54:28 +0100
Subject: [PATCH] add knox integration test with gateway-shell

---
 sample/knox-hbase-webhdfs/pom.xml                  | 129 ++++++++++++++
 .../fr/jetoile/hadoopunit/sample/KnoxJob.java      |  73 ++++++++
 .../sample/KnoxJobIntegrationTest.java             |  81 +++++++++
 .../resources/hadoop-unit-default.properties       | 157 ++++++++++++++++++
 sample/pom.xml                                     |   1 +
 5 files changed, 441 insertions(+)
 create mode 100644 sample/knox-hbase-webhdfs/pom.xml
 create mode 100644 sample/knox-hbase-webhdfs/src/main/java/fr/jetoile/hadoopunit/sample/KnoxJob.java
 create mode 100644 sample/knox-hbase-webhdfs/src/test/java/fr/jetoile/hadoopunit/sample/KnoxJobIntegrationTest.java
 create mode 100644 sample/knox-hbase-webhdfs/src/test/resources/hadoop-unit-default.properties

diff --git a/sample/knox-hbase-webhdfs/pom.xml b/sample/knox-hbase-webhdfs/pom.xml
new file mode 100644
index 00000000..1e9a3b92
--- /dev/null
+++ b/sample/knox-hbase-webhdfs/pom.xml
@@ -0,0 +1,129 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>sample</artifactId>
+        <groupId>fr.jetoile.hadoop</groupId>
+        <version>2.2-SNAPSHOT</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>knox-hbase-webhdfs</artifactId>
+
+    <properties>
+        <gateway-shell.version>0.11.0</gateway-shell.version>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.knox</groupId>
+            <artifactId>gateway-shell</artifactId>
+            <version>${gateway-shell.version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>commons-configuration</groupId>
+                    <artifactId>commons-configuration</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+    </dependencies>
+
+    <profiles>
+        <profile>
+            <id>default</id>
+            <activation>
+                <activeByDefault>true</activeByDefault>
+                <property>
+                    <name>!travis</name>
+                </property>
+            </activation>
+            <build>
+                <plugins>
+                    <plugin>
+                        <artifactId>maven-surefire-plugin</artifactId>
+                        <configuration>
+                            <excludes>
+                                <exclude>**/*IntegrationTest.java</exclude>
+                            </excludes>
+                        </configuration>
+                        <executions>
+                            <execution>
+                                <id>integration-test</id>
+                                <goals>
+                                    <goal>test</goal>
+                                </goals>
+                                <phase>integration-test</phase>
+                                <configuration>
+                                    <excludes>
+                                        <exclude>none</exclude>
+                                    </excludes>
+                                    <includes>
+                                        <include>**/*IntegrationTest.java</include>
+                                    </includes>
+                                </configuration>
+                            </execution>
+                        </executions>
+                    </plugin>
+
+                    <plugin>
+                        <artifactId>hadoop-unit-maven-plugin</artifactId>
+                        <groupId>fr.jetoile.hadoop</groupId>
+                        <version>${hadoop-unit.version}</version>
+                        <executions>
+                            <execution>
+                                <id>start</id>
+                                <goals>
+                                    <goal>embedded-start</goal>
+                                </goals>
+                                <phase>pre-integration-test</phase>
+                            </execution>
+                        </executions>
+                        <configuration>
+                            <components>
+                                <componentArtifact implementation="fr.jetoile.hadoopunit.ComponentArtifact">
+                                    <componentName>ZOOKEEPER</componentName>
+                                    <artifact>fr.jetoile.hadoop:hadoop-unit-zookeeper:${hadoop-unit.version}</artifact>
+                                </componentArtifact>
+                                <componentArtifact implementation="fr.jetoile.hadoopunit.ComponentArtifact">
+                                    <componentName>HDFS</componentName>
+                                    <artifact>fr.jetoile.hadoop:hadoop-unit-hdfs:${hadoop-unit.version}</artifact>
+                                </componentArtifact>
+                                <componentArtifact implementation="fr.jetoile.hadoopunit.ComponentArtifact">
+                                    <componentName>HBASE</componentName>
+                                    <artifact>fr.jetoile.hadoop:hadoop-unit-hbase:${hadoop-unit.version}</artifact>
+                                </componentArtifact>
+                                <componentArtifact implementation="fr.jetoile.hadoopunit.ComponentArtifact">
+                                    <componentName>KNOX</componentName>
+                                    <artifact>fr.jetoile.hadoop:hadoop-unit-knox:${hadoop-unit.version}</artifact>
+                                </componentArtifact>
+                            </components>
+                        </configuration>
+                    </plugin>
+                </plugins>
+            </build>
+        </profile>
+
+        <profile>
+            <id>travis</id>
+            <activation>
+                <activeByDefault>false</activeByDefault>
+                <property>
+                    <name>travis</name>
+                </property>
+            </activation>
+            <build>
+                <plugins>
+                    <plugin>
+                        <artifactId>maven-surefire-plugin</artifactId>
+                        <configuration>
+                            <skipTests>true</skipTests>
+                        </configuration>
+                    </plugin>
+                </plugins>
+            </build>
+        </profile>
+    </profiles>
+</project>
\ No newline at end of file
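
A note on the wiring above: in the default profile (active unless the travis property is set), maven-surefire-plugin excludes **/*IntegrationTest.java from the regular test phase and re-includes it in a dedicated execution bound to integration-test, while the embedded-start goal of hadoop-unit-maven-plugin brings up ZOOKEEPER, HDFS, HBASE and KNOX during pre-integration-test. Running mvn verify (or mvn integration-test) in sample/knox-hbase-webhdfs therefore starts the embedded cluster before the test below runs, and -Ptravis skips the suite entirely. The ${hadoop-unit.version} property is not defined in this module and is assumed to be inherited from the parent pom.
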
diff --git a/sample/knox-hbase-webhdfs/src/main/java/fr/jetoile/hadoopunit/sample/KnoxJob.java b/sample/knox-hbase-webhdfs/src/main/java/fr/jetoile/hadoopunit/sample/KnoxJob.java
new file mode 100644
index 00000000..63ccff76
--- /dev/null
+++ b/sample/knox-hbase-webhdfs/src/main/java/fr/jetoile/hadoopunit/sample/KnoxJob.java
@@ -0,0 +1,73 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package fr.jetoile.hadoopunit.sample;
+
+import org.apache.commons.configuration.ConfigurationException;
+import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.gateway.shell.Hadoop;
+import org.apache.hadoop.gateway.shell.hbase.HBase;
+import org.apache.hadoop.gateway.shell.hdfs.Hdfs;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.URISyntaxException;
+import java.nio.charset.StandardCharsets;
+import java.security.KeyManagementException;
+import java.security.NoSuchAlgorithmException;
+
+public class KnoxJob {
+
+    public void createHdfsDirectory(Hadoop hadoop) throws ConfigurationException, URISyntaxException, KeyManagementException, NoSuchAlgorithmException, IOException {
+        Hdfs.mkdir(hadoop).dir("/hdfs/test").now();
+    }
+
+    public void createFileOnHdfs(Hadoop hadoop) throws ConfigurationException, URISyntaxException, KeyManagementException, NoSuchAlgorithmException, IOException {
+        Hdfs.put(hadoop).text("TEST").to("/hdfs/test/sample.txt").now();
+    }
+
+    public String getFileOnHdfs(Hadoop hadoop) throws ConfigurationException, URISyntaxException, KeyManagementException, NoSuchAlgorithmException, IOException {
+        InputStream inputStream = Hdfs.get(hadoop).from("/hdfs/test/sample.txt").now().getStream();
+        return IOUtils.toString(inputStream, StandardCharsets.UTF_8);
+    }
+
+    public String getHBaseStatus(Hadoop hadoop) throws ConfigurationException, URISyntaxException, KeyManagementException, NoSuchAlgorithmException, IOException {
+        InputStream inputStream = HBase.session(hadoop).status().now().getStream();
+        return IOUtils.toString(inputStream, StandardCharsets.UTF_8);
+    }
+
+    public void createHBaseTable(Hadoop hadoop) throws ConfigurationException, URISyntaxException, KeyManagementException, NoSuchAlgorithmException, IOException {
+        HBase.session(hadoop).table("test").create().family("family1").endFamilyDef().now();
+    }
+
+
+    public String getHBaseTableSchema(Hadoop hadoop) throws ConfigurationException, URISyntaxException, KeyManagementException, NoSuchAlgorithmException, IOException {
+        InputStream inputStream = HBase.session(hadoop).table("test").schema().now().getStream();
+        return IOUtils.toString(inputStream, StandardCharsets.UTF_8);
+    }
+
+
+    public void putHBaseData(Hadoop hadoop) throws ConfigurationException, URISyntaxException, KeyManagementException, NoSuchAlgorithmException, IOException {
+        HBase.session(hadoop).table("test").row("row_id_1").store()
+                .column("family1", "col1", "col_value1")
+                .now();
+    }
+
+    public String readHBaseData(Hadoop hadoop) throws ConfigurationException, URISyntaxException, KeyManagementException, NoSuchAlgorithmException, IOException {
+        InputStream inputStream = HBase.session(hadoop).table("test").row("row_id_1")
+                .query()
+                .now().getStream();
+        return IOUtils.toString(inputStream, StandardCharsets.UTF_8);
+    }
+
+}
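
KnoxJob is a thin wrapper around gateway-shell's fluent DSL: each method assembles one REST call against the gateway (WebHDFS for the Hdfs.* chains, HBase's REST interface for the HBase.* chains) and now() sends it synchronously. To drive the class outside of JUnit, a minimal sketch could look like the following; it is not part of the patch, the class name KnoxJobDriver is invented for the example, and the URL assumes the hadoop-unit defaults from the properties file further down (localhost:8888, path gateway, cluster mycluster).

    package fr.jetoile.hadoopunit.sample;

    import org.apache.hadoop.gateway.shell.Hadoop;

    public class KnoxJobDriver {

        public static void main(String[] args) throws Exception {
            // Same insecure login the integration test uses; "none"/"none" are
            // dummy credentials for the unsecured embedded gateway.
            Hadoop hadoop = Hadoop.loginInsecure("https://localhost:8888/gateway/mycluster", "none", "none");
            try {
                KnoxJob job = new KnoxJob();
                job.createHdfsDirectory(hadoop);               // WebHDFS MKDIRS through Knox
                job.createFileOnHdfs(hadoop);                  // WebHDFS CREATE with the body "TEST"
                System.out.println(job.getFileOnHdfs(hadoop)); // should print TEST
            } finally {
                hadoop.shutdown(); // releases the session's HTTP resources
            }
        }
    }
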
diff --git a/sample/knox-hbase-webhdfs/src/test/java/fr/jetoile/hadoopunit/sample/KnoxJobIntegrationTest.java b/sample/knox-hbase-webhdfs/src/test/java/fr/jetoile/hadoopunit/sample/KnoxJobIntegrationTest.java
new file mode 100644
index 00000000..36143c57
--- /dev/null
+++ b/sample/knox-hbase-webhdfs/src/test/java/fr/jetoile/hadoopunit/sample/KnoxJobIntegrationTest.java
@@ -0,0 +1,81 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package fr.jetoile.hadoopunit.sample;
+
+import org.apache.commons.configuration.Configuration;
+import org.apache.commons.configuration.ConfigurationException;
+import org.apache.commons.configuration.PropertiesConfiguration;
+import org.apache.hadoop.gateway.shell.Hadoop;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.net.URISyntaxException;
+import java.security.KeyManagementException;
+import java.security.NoSuchAlgorithmException;
+
+import static org.junit.Assert.assertTrue;
+
+public class KnoxJobIntegrationTest {
+
+    private static Hadoop hadoop;
+
+    @BeforeClass
+    public static void setUp() throws ConfigurationException, URISyntaxException {
+        Configuration configuration = new PropertiesConfiguration("src/test/resources/hadoop-unit-default.properties");
+
+        String host = configuration.getString("knox.host");
+        String port = configuration.getString("knox.port");
+        String gateway = configuration.getString("knox.path");
+        String cluster = configuration.getString("knox.cluster");
+
+        hadoop = Hadoop.loginInsecure("https://" + host + ":" + port + "/" + gateway + "/" + cluster, "none", "none");
+    }
+
+    @AfterClass
+    public static void tearDown() throws InterruptedException {
+        hadoop.shutdown();
+    }
+
+    @Test
+    public void hdfs_through_knox_should_be_ok() throws ConfigurationException, KeyManagementException, URISyntaxException, IOException, NoSuchAlgorithmException {
+        KnoxJob knoxJob = new KnoxJob();
+        knoxJob.createHdfsDirectory(hadoop);
+        knoxJob.createFileOnHdfs(hadoop);
+        knoxJob.getFileOnHdfs(hadoop);
+    }
+
+
+    @Test
+    public void hbase_through_knox_should_be_ok() throws ConfigurationException, KeyManagementException, URISyntaxException, IOException, NoSuchAlgorithmException {
+        KnoxJob knoxJob = new KnoxJob();
+
+        assertTrue(knoxJob.getHBaseStatus(hadoop).contains("\"regions\":3"));
+        knoxJob.createHBaseTable(hadoop);
+
+        System.out.println("==============================");
+        System.out.println(knoxJob.getHBaseTableSchema(hadoop));
+        System.out.println("==============================");
+
+        assertTrue(knoxJob.getHBaseTableSchema(hadoop).contains("{\"name\":\"test\",\"ColumnSchema\":[{\"name\":\"family1\""));
+        knoxJob.putHBaseData(hadoop);
+
+        System.out.println("==============================");
+        System.out.println(knoxJob.readHBaseData(hadoop));
+        System.out.println("==============================");
+
+        assertTrue(knoxJob.readHBaseData(hadoop).contains("{\"Row\":[{\"key\":\""));
+    }
+}
\ No newline at end of file
diff --git a/sample/knox-hbase-webhdfs/src/test/resources/hadoop-unit-default.properties b/sample/knox-hbase-webhdfs/src/test/resources/hadoop-unit-default.properties
new file mode 100644
index 00000000..9220f629
--- /dev/null
+++ b/sample/knox-hbase-webhdfs/src/test/resources/hadoop-unit-default.properties
@@ -0,0 +1,157 @@
+
+# Zookeeper
+zookeeper.temp.dir=/tmp/embedded_zk
+zookeeper.host=127.0.0.1
+zookeeper.port=22010
+
+# Hive
+hive.scratch.dir=/tmp/hive_scratch_dir
+hive.warehouse.dir=/tmp/warehouse_dir
+
+# Hive Metastore
+hive.metastore.hostname=localhost
+hive.metastore.port=20102
+hive.metastore.derby.db.dir=metastore_db
+
+# Hive Server2
+hive.server2.hostname=localhost
+hive.server2.port=20103
+
+# Hive Test
+hive.test.database.name=default
+hive.test.table.name=test_table
+
+
+# HDFS
+hdfs.namenode.host=localhost
+hdfs.namenode.port=20112
+hdfs.namenode.http.port=50070
+hdfs.temp.dir=/tmp/embedded_hdfs
+hdfs.num.datanodes=1
+hdfs.enable.permissions=false
+hdfs.format=true
+hdfs.enable.running.user.as.proxy.user=true
+
+# HDFS Test
+hdfs.test.file=/tmp/testing
+hdfs.test.string=TESTING
+
+
+# HBase
+hbase.master.port=25111
+hbase.master.info.port=-1
+hbase.num.region.servers=1
+hbase.root.dir=/tmp/embedded_hbase
+hbase.znode.parent=/hbase-unsecure
+hbase.wal.replication.enabled=false
+
+# HBase REST
+hbase.rest.port=28000
+hbase.rest.readonly=false
+hbase.rest.info.port=28080
+hbase.rest.host=0.0.0.0
+hbase.rest.threads.max=100
+hbase.rest.threads.min=2
+
+# HBase Test
+hbase.test.table.name=hbase_test_table
+hbase.test.col.family.name=cf1
+hbase.test.col.qualifier.name=cq1
+hbase.test.num.rows.to.put=50
+
+# Kafka
+kafka.hostname=127.0.0.1
+kafka.port=20111
+
+# Kafka Test
+kafka.test.topic=testtopic
+kafka.test.message.count=10
+kafka.test.broker.id=1
+kafka.test.temp.dir=embedded_kafka
+
+#SolR + SolRCloud
+solr.dir=solr
+
+#SolR
+solr.collection.internal.name=collection1_shard1_replica1
+
+#SolRCloud
+solr.collection.name=collection1
+solr.cloud.port=8983
+
+
+
+
+
+# YARN
+yarn.num.node.managers=1
+yarn.num.local.dirs=1
+yarn.num.log.dirs=1
+yarn.resource.manager.address=localhost:37001
+yarn.resource.manager.hostname=localhost
+yarn.resource.manager.scheduler.address=localhost:37002
+yarn.resource.manager.resource.tracker.address=localhost:37003
+yarn.resource.manager.webapp.address=localhost:37004
+yarn.use.in.jvm.container.executor=false
+
+# MR
+mr.job.history.address=localhost:37005
+
+# Oozie
+oozie.tmp.dir=/tmp/oozie_tmp
+oozie.test.dir=/tmp/embedded_oozie
+oozie.home.dir=/tmp/oozie_home
+oozie.username=blah
+oozie.groupname=testgroup
+oozie.hdfs.share.lib.dir=/tmp/share_lib
+oozie.share.lib.create=true
+oozie.local.share.lib.cache.dir=/tmp/share_lib_cache
+oozie.purge.local.share.lib.cache=false
+oozie.sharelib.path=/home/khanh/github
+oozie.sharelib.name=oozie-4.2.2-SNAPSHOT.2.3.2.2-SNAPSHOT-2950-distro.tar.gz
+oozie.port=20113
+oozie.host=localhost
+
+# ElasticSearch
+elasticsearch.ip=127.0.0.1
+elasticsearch.http.port=14433
+elasticsearch.tcp.port=14533
+elasticsearch.temp.dir=/tmp/elasticsearch
+elasticsearch.index.name=test_index
+elasticsearch.cluster.name=elasticsearch
+
+# MongoDB
+mongo.ip=127.0.0.1
+mongo.port=13333
+mongo.database.name=test_database
+mongo.collection.name=test_collection
+
+# Cassandra
+cassandra.ip=127.0.0.1
+cassandra.port=13433
+cassandra.temp.dir=/tmp/embedded_cassandra
+
+# Neo4j
+neo4j.ip=127.0.0.1
+neo4j.port=13533
+neo4j.temp.dir=/tmp/embedded_neo4j
+
+# KNOX
+knox.host=localhost
+knox.port=8888
+knox.path=gateway
+knox.cluster=mycluster
+knox.home.dir=/tmp/embedded_knox
+knox.service=namenode,webhdfs,webhbase
+
+# Alluxio
+#alluxio.work.dir=/tmp/alluxio
+alluxio.work.dir=hdfs://localhost:20112/alluxio
+alluxio.hostname=localhost
+alluxio.master.port=14001
+alluxio.master.web.port=14002
+alluxio.proxy.web.port=14100
+alluxio.worker.web.port=14003
+alluxio.worker.data.port=14004
+alluxio.worker.port=14005
+alluxio.webapp.directory=conf/alluxio/webapp
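
The KNOX block above is what the integration test reads: knox.host, knox.port, knox.path and knox.cluster combine into the login URL https://localhost:8888/gateway/mycluster, and knox.service=namenode,webhdfs,webhbase exposes the WebHDFS and HBase endpoints that KnoxJob drives. The Zookeeper, HDFS and HBase blocks configure the components the plugin starts; the remaining sections are stock hadoop-unit defaults that this sample does not use.
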
diff --git a/sample/pom.xml b/sample/pom.xml
index b8c639a5..b35c9d3b 100644
--- a/sample/pom.xml
+++ b/sample/pom.xml
@@ -17,6 +17,7 @@
         <module>kafka-stream</module>
         <module>spark-streaming-cassandra</module>
         <module>all</module>
+        <module>knox-hbase-webhdfs</module>
     </modules>
 
     <name>sample</name>