diff --git a/hadoop-ozone/dev-support/checks/_lib.sh b/hadoop-ozone/dev-support/checks/_lib.sh
index b81acf98993..134c8f53c6e 100644
--- a/hadoop-ozone/dev-support/checks/_lib.sh
+++ b/hadoop-ozone/dev-support/checks/_lib.sh
@@ -149,3 +149,18 @@ install_spotbugs() {
 _install_spotbugs() {
   curl -LSs https://repo.maven.apache.org/maven2/com/github/spotbugs/spotbugs/3.1.12/spotbugs-3.1.12.tgz | tar -xz -f -
 }
+
+download_hadoop_aws() {
+  local dir="$1"
+
+  if [[ -z ${dir} ]]; then
+    echo "Required argument: target directory for Hadoop AWS sources" >&2
+    return 1
+  fi
+
+  if [[ ! -e "${dir}" ]] || [[ ! -d "${dir}"/src/test/resources ]]; then
+    mkdir -p "${dir}"
+    [[ -f "${dir}.tar.gz" ]] || curl -LSs -o "${dir}.tar.gz" https://archive.apache.org/dist/hadoop/common/hadoop-${HADOOP_VERSION}/hadoop-${HADOOP_VERSION}-src.tar.gz
+    tar -x -z -C "${dir}" --strip-components=3 -f "${dir}.tar.gz" --wildcards 'hadoop-*-src/hadoop-tools/hadoop-aws' || return 1
+  fi
+}
diff --git a/hadoop-ozone/dev-support/checks/_mvn_unit_report.sh b/hadoop-ozone/dev-support/checks/_mvn_unit_report.sh
index 4fca7bb6aae..36205c69bb6 100755
--- a/hadoop-ozone/dev-support/checks/_mvn_unit_report.sh
+++ b/hadoop-ozone/dev-support/checks/_mvn_unit_report.sh
@@ -81,8 +81,8 @@ for failed_test in $(< ${REPORT_DIR}/summary.txt); do
     \( -name "${failed_test}.txt" -or -name "${failed_test}-output.txt" -or -name "TEST-${failed_test}.xml" \)); do
     dir=$(dirname "${file}")
     dest_dir=$(_realpath --relative-to="${PWD}" "${dir}/../..") || continue
-    mkdir -p "${REPORT_DIR}/${dest_dir}"
-    mv "${file}" "${REPORT_DIR}/${dest_dir}"/
+    mkdir -pv "${REPORT_DIR}/${dest_dir}"
+    mv -v "${file}" "${REPORT_DIR}/${dest_dir}"/
   done
 done
diff --git a/hadoop-ozone/dev-support/checks/acceptance.sh b/hadoop-ozone/dev-support/checks/acceptance.sh
index 0489fa24384..5be3f7b5879 100755
--- a/hadoop-ozone/dev-support/checks/acceptance.sh
+++ b/hadoop-ozone/dev-support/checks/acceptance.sh
@@ -19,15 +19,20 @@ set -u -o pipefail
 DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
 cd "$DIR/../../.." || exit 1
 
-source "${DIR}/_lib.sh"
+OZONE_ROOT=$(pwd -P)
+
+: ${HADOOP_AWS_DIR:=""}
+: ${OZONE_ACCEPTANCE_SUITE:=""}
+: ${OZONE_TEST_SELECTOR:=""}
+: ${OZONE_ACCEPTANCE_TEST_TYPE:="robot"}
+: ${OZONE_WITH_COVERAGE:="false"}
 
-install_virtualenv
-install_robot
+source "${DIR}/_lib.sh"
 
-REPORT_DIR=${OUTPUT_DIR:-"$DIR/../../../target/acceptance"}
+REPORT_DIR=${OUTPUT_DIR:-"${OZONE_ROOT}/target/acceptance"}
 
 OZONE_VERSION=$(mvn help:evaluate -Dexpression=ozone.version -q -DforceStdout)
-DIST_DIR="$DIR/../../dist/target/ozone-$OZONE_VERSION"
+DIST_DIR="${OZONE_ROOT}/hadoop-ozone/dist/target/ozone-$OZONE_VERSION"
 
 if [ ! -d "$DIST_DIR" ]; then
   echo "Distribution dir is missing. Doing a full build"
@@ -36,15 +41,42 @@ fi
 
 mkdir -p "$REPORT_DIR"
 
-export OZONE_ACCEPTANCE_SUITE
+if [[ "${OZONE_ACCEPTANCE_SUITE}" == "s3a" ]]; then
+  OZONE_ACCEPTANCE_TEST_TYPE="maven"
+
+  if [[ -z "${HADOOP_AWS_DIR}" ]]; then
+    HADOOP_VERSION=$(mvn help:evaluate -Dexpression=hadoop.version -q -DforceStdout)
+    export HADOOP_AWS_DIR=${OZONE_ROOT}/target/hadoop-src
+  fi
+
+  download_hadoop_aws "${HADOOP_AWS_DIR}"
+fi
+
+if [[ "${OZONE_ACCEPTANCE_TEST_TYPE}" == "robot" ]]; then
+  install_virtualenv
+  install_robot
+fi
+
+export OZONE_ACCEPTANCE_SUITE OZONE_ACCEPTANCE_TEST_TYPE
 
 cd "$DIST_DIR/compose" || exit 1
 ./test-all.sh 2>&1 | tee "${REPORT_DIR}/output.log"
 RES=$?
 
-cp -rv result/* "$REPORT_DIR/"
-cp "$REPORT_DIR/log.html" "$REPORT_DIR/summary.html"
-find "$REPORT_DIR" -type f -empty -print0 | xargs -0 rm -v
-grep -A1 FAIL "${REPORT_DIR}/output.log" | grep -v '^Output' > "${REPORT_DIR}/summary.txt"
+if [[ "${OZONE_ACCEPTANCE_TEST_TYPE}" == "maven" ]]; then
+  pushd result
+  source "${DIR}/_mvn_unit_report.sh"
+  find . -name junit -print0 | xargs -r -0 rm -frv
+  cp -rv * "${REPORT_DIR}"/
+  popd
+else
+  cp -rv result/* "$REPORT_DIR/"
+  if [[ -f "${REPORT_DIR}/log.html" ]]; then
+    cp "$REPORT_DIR/log.html" "$REPORT_DIR/summary.html"
+  fi
+  grep -A1 FAIL "${REPORT_DIR}/output.log" | grep -v '^Output' > "${REPORT_DIR}/summary.txt"
+fi
+
+find "$REPORT_DIR" -type f -empty -not -name summary.txt -print0 | xargs -r -0 rm -v
 
 exit $RES
diff --git a/hadoop-ozone/dist/src/main/compose/common/s3a-test.sh b/hadoop-ozone/dist/src/main/compose/common/s3a-test.sh
new file mode 100644
index 00000000000..85dbc5feced
--- /dev/null
+++ b/hadoop-ozone/dist/src/main/compose/common/s3a-test.sh
@@ -0,0 +1,112 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This script runs S3A contract tests against various bucket types on
+# a Docker Compose-based Ozone cluster.
+# Requires HADOOP_AWS_DIR to point the directory containing hadoop-aws sources.
+
+if [[ -z ${HADOOP_AWS_DIR} ]] || [[ ! -e ${HADOOP_AWS_DIR} ]]; then
+  echo "Skipping S3A tests due to missing HADOOP_AWS_DIR (directory with hadoop-aws sources)" >&2
+  exit
+fi
+
+# shellcheck source=/dev/null
+source "$COMPOSE_DIR/../testlib.sh"
+
+## @description Run S3A contract tests against Ozone.
+## @param Ozone S3 bucket
+execute_s3a_tests() {
+  local bucket="$1"
+
+  pushd "${HADOOP_AWS_DIR}"
+
+  # S3A contract tests are enabled by presence of `auth-keys.xml`.
+  # https://hadoop.apache.org/docs/r3.3.6/hadoop-aws/tools/hadoop-aws/testing.html#Setting_up_the_tests
+  cat > src/test/resources/auth-keys.xml <<-EOF
+  <configuration>
+
+    <property>
+      <name>fs.s3a.endpoint</name>
+      <value>http://localhost:9878</value>
+    </property>
+
+    <property>
+      <name>test.fs.s3a.endpoint</name>
+      <value>http://localhost:9878</value>
+    </property>
+
+    <property>
+      <name>fs.contract.test.fs.s3a</name>
+      <value>s3a://${bucket}/</value>
+    </property>
+
+    <property>
+      <name>test.fs.s3a.name</name>
+      <value>s3a://${bucket}/</value>
+    </property>
+
+    <property>
+      <name>test.fs.s3a.sts.enabled</name>
+      <value>false</value>
+    </property>
+
+    <property>
+      <name>fs.s3a.path.style.access</name>
+      <value>true</value>
+    </property>
+
+    <property>
+      <name>fs.s3a.directory.marker.retention</name>
+      <value>keep</value>
+    </property>
+
+  </configuration>
+EOF
+
+  # Some tests are skipped due to known issues.
+  # - ITestS3AContractDistCp: HDDS-10616
+  # - ITestS3AContractEtag, ITestS3AContractRename: HDDS-10615
+  # - ITestS3AContractGetFileStatusV1List: HDDS-10617
+  # - ITestS3AContractMkdir: HDDS-10572
+  mvn -B -V --fail-never --no-transfer-progress \
+    -Dtest='ITestS3AContract*, !ITestS3AContractDistCp, !ITestS3AContractEtag, !ITestS3AContractGetFileStatusV1List, !ITestS3AContractMkdir, !ITestS3AContractRename' \
+    clean test
+
+  local target="${RESULT_DIR}/junit/${bucket}/target"
+  mkdir -p "${target}"
+  mv -iv target/surefire-reports "${target}"/
+  popd
+}
+
+start_docker_env
+
+if [[ ${SECURITY_ENABLED} == "true" ]]; then
+  execute_command_in_container s3g kinit -kt /etc/security/keytabs/testuser.keytab "testuser/s3g@EXAMPLE.COM"
+  access=$(execute_command_in_container s3g ozone s3 getsecret -e)
+  eval "$access"
+else
+  export AWS_ACCESS_KEY_ID="s3a-contract"
+  export AWS_SECRET_ACCESS_KEY="unsecure"
+fi
+
+execute_command_in_container s3g ozone sh bucket create --layout OBJECT_STORE /s3v/obs-bucket
+execute_command_in_container s3g ozone sh bucket create --layout LEGACY /s3v/leg-bucket
+execute_command_in_container s3g ozone sh bucket create --layout FILE_SYSTEM_OPTIMIZED /s3v/fso-bucket
+
+for bucket in obs-bucket leg-bucket fso-bucket; do
+  execute_s3a_tests "$bucket"
+done
diff --git a/hadoop-ozone/dist/src/main/compose/ozone/test-s3a.sh b/hadoop-ozone/dist/src/main/compose/ozone/test-s3a.sh
new file mode 100644
index 00000000000..c277e71a4bf
--- /dev/null
+++ b/hadoop-ozone/dist/src/main/compose/ozone/test-s3a.sh
@@ -0,0 +1,25 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+#suite:s3a
+
+COMPOSE_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
+export COMPOSE_DIR
+
+export SECURITY_ENABLED=false
+
+source "$COMPOSE_DIR/../common/s3a-test.sh"
diff --git a/hadoop-ozone/dist/src/main/compose/ozonesecure-ha/test-s3a.sh b/hadoop-ozone/dist/src/main/compose/ozonesecure-ha/test-s3a.sh
new file mode 100644
index 00000000000..78b8b51d9d8
--- /dev/null
+++ b/hadoop-ozone/dist/src/main/compose/ozonesecure-ha/test-s3a.sh
@@ -0,0 +1,27 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+#suite:s3a
+
+COMPOSE_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
+export COMPOSE_DIR
+
+export SECURITY_ENABLED=true
+export OM_SERVICE_ID="omservice"
+export SCM=scm1.org
+
+source "$COMPOSE_DIR/../common/s3a-test.sh"
diff --git a/hadoop-ozone/dist/src/main/compose/test-all.sh b/hadoop-ozone/dist/src/main/compose/test-all.sh
index a998690032a..85294b6b793 100755
--- a/hadoop-ozone/dist/src/main/compose/test-all.sh
+++ b/hadoop-ozone/dist/src/main/compose/test-all.sh
@@ -27,6 +27,7 @@ rm "$ALL_RESULT_DIR"/* || true
 
 source "$SCRIPT_DIR"/testlib.sh
 
+: ${OZONE_ACCEPTANCE_TEST_TYPE:="robot"}
 : ${OZONE_WITH_COVERAGE:="false"}
 
 if [[ "${OZONE_WITH_COVERAGE}" == "true" ]]; then
@@ -46,7 +47,9 @@ if [[ "${OZONE_WITH_COVERAGE}" == "true" ]]; then
   cp /tmp/jacoco-combined.exec "$SCRIPT_DIR"/result
 fi
 
-generate_report "acceptance" "${ALL_RESULT_DIR}" "${XUNIT_RESULT_DIR}"
-
+if [[ "${OZONE_ACCEPTANCE_TEST_TYPE}" == "robot" ]]; then
+  # does not apply to JUnit tests run via Maven
+  generate_report "acceptance" "${ALL_RESULT_DIR}" "${XUNIT_RESULT_DIR}"
+fi
 
 exit $RESULT
diff --git a/hadoop-ozone/dist/src/main/compose/testlib.sh b/hadoop-ozone/dist/src/main/compose/testlib.sh
index 860c4f51640..5ac3d09d59a 100755
--- a/hadoop-ozone/dist/src/main/compose/testlib.sh
+++ b/hadoop-ozone/dist/src/main/compose/testlib.sh
@@ -405,12 +405,12 @@ copy_results() {
     target_dir="${target_dir}/${test_script_name}"
   fi
 
-  if [[ -n "$(find "${result_dir}" -name "*.xml")" ]]; then
+  if command -v rebot > /dev/null 2>&1 && [[ -n "$(find "${result_dir}" -name "*.xml")" ]]; then
     rebot --nostatusrc -N "${test_name}" -l NONE -r NONE -o "${all_result_dir}/${test_name}.xml" "${result_dir}"/*.xml \
       && rm -fv "${result_dir}"/*.xml "${result_dir}"/log.html "${result_dir}"/report.html
   fi
 
-  mkdir -p "${target_dir}"
+  mkdir -pv "${target_dir}"
   mv -v "${result_dir}"/* "${target_dir}"/
 }
 