Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

HDDS-8450. Dedicated acceptance test suite for s3a #6458

Merged
merged 9 commits into from
Apr 12, 2024
15 changes: 15 additions & 0 deletions hadoop-ozone/dev-support/checks/_lib.sh
Original file line number Diff line number Diff line change
Expand Up @@ -149,3 +149,18 @@ install_spotbugs() {
_install_spotbugs() {
  # Fetch the SpotBugs 3.1.12 release tarball and unpack it into the
  # current working directory.
  local url='https://repo.maven.apache.org/maven2/com/github/spotbugs/spotbugs/3.1.12/spotbugs-3.1.12.tgz'
  curl -LSs "${url}" | tar -xz -f -
}

download_hadoop_aws() {
  # Download and unpack the hadoop-aws source tree (used by the S3A
  # acceptance tests) into the given directory, unless already present.
  #
  # Arguments:
  #   $1 - target directory for Hadoop AWS sources
  # Requires HADOOP_VERSION to be set when a download is actually needed.
  # Returns non-zero on a missing argument or a download/extract failure.
  local dir="$1"

  if [[ -z ${dir} ]]; then
    echo "Required argument: target directory for Hadoop AWS sources" >&2
    return 1
  fi

  if [[ ! -e "${dir}" ]] || [[ ! -d "${dir}"/src/test/resources ]]; then
    mkdir -p "${dir}"
    if [[ ! -f "${dir}.tar.gz" ]]; then
      # Download to a temporary name first, so an interrupted or failed
      # download does not leave a partial "${dir}.tar.gz" that later runs
      # would mistake for a complete archive.
      curl -LSs -o "${dir}.tar.gz.tmp" "https://archive.apache.org/dist/hadoop/common/hadoop-${HADOOP_VERSION}/hadoop-${HADOOP_VERSION}-src.tar.gz" \
        && mv "${dir}.tar.gz.tmp" "${dir}.tar.gz" \
        || { rm -f "${dir}.tar.gz.tmp"; return 1; }
    fi
    # Extract only the hadoop-aws module, dropping the leading path
    # components of the source archive.
    tar -x -z -C "${dir}" --strip-components=3 -f "${dir}.tar.gz" --wildcards 'hadoop-*-src/hadoop-tools/hadoop-aws' || return 1
  fi
}
4 changes: 2 additions & 2 deletions hadoop-ozone/dev-support/checks/_mvn_unit_report.sh
Original file line number Diff line number Diff line change
Expand Up @@ -81,8 +81,8 @@ for failed_test in $(< ${REPORT_DIR}/summary.txt); do
\( -name "${failed_test}.txt" -or -name "${failed_test}-output.txt" -or -name "TEST-${failed_test}.xml" \)); do
dir=$(dirname "${file}")
dest_dir=$(_realpath --relative-to="${PWD}" "${dir}/../..") || continue
mkdir -p "${REPORT_DIR}/${dest_dir}"
mv "${file}" "${REPORT_DIR}/${dest_dir}"/
mkdir -pv "${REPORT_DIR}/${dest_dir}"
mv -v "${file}" "${REPORT_DIR}/${dest_dir}"/
done
done

Expand Down
52 changes: 42 additions & 10 deletions hadoop-ozone/dev-support/checks/acceptance.sh
Original file line number Diff line number Diff line change
Expand Up @@ -19,15 +19,20 @@ set -u -o pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
cd "$DIR/../../.." || exit 1

source "${DIR}/_lib.sh"
OZONE_ROOT=$(pwd -P)
errose28 marked this conversation as resolved.
Show resolved Hide resolved

: ${HADOOP_AWS_DIR:=""}
: ${OZONE_ACCEPTANCE_SUITE:=""}
: ${OZONE_TEST_SELECTOR:=""}
: ${OZONE_ACCEPTANCE_TEST_TYPE:="robot"}
: ${OZONE_WITH_COVERAGE:="false"}

install_virtualenv
install_robot
source "${DIR}/_lib.sh"

REPORT_DIR=${OUTPUT_DIR:-"$DIR/../../../target/acceptance"}
REPORT_DIR=${OUTPUT_DIR:-"${OZONE_ROOT}/target/acceptance"}

OZONE_VERSION=$(mvn help:evaluate -Dexpression=ozone.version -q -DforceStdout)
DIST_DIR="$DIR/../../dist/target/ozone-$OZONE_VERSION"
DIST_DIR="${OZONE_ROOT}/hadoop-ozone/dist/target/ozone-$OZONE_VERSION"

if [ ! -d "$DIST_DIR" ]; then
echo "Distribution dir is missing. Doing a full build"
Expand All @@ -36,15 +41,42 @@ fi

mkdir -p "$REPORT_DIR"

export OZONE_ACCEPTANCE_SUITE
if [[ "${OZONE_ACCEPTANCE_SUITE}" == "s3a" ]]; then
OZONE_ACCEPTANCE_TEST_TYPE="maven"

if [[ -z "${HADOOP_AWS_DIR}" ]]; then
HADOOP_VERSION=$(mvn help:evaluate -Dexpression=hadoop.version -q -DforceStdout)
export HADOOP_AWS_DIR=${OZONE_ROOT}/target/hadoop-src
fi

download_hadoop_aws "${HADOOP_AWS_DIR}"
fi

if [[ "${OZONE_ACCEPTANCE_TEST_TYPE}" == "robot" ]]; then
install_virtualenv
install_robot
fi

export OZONE_ACCEPTANCE_SUITE OZONE_ACCEPTANCE_TEST_TYPE

cd "$DIST_DIR/compose" || exit 1
./test-all.sh 2>&1 | tee "${REPORT_DIR}/output.log"
RES=$?
cp -rv result/* "$REPORT_DIR/"
cp "$REPORT_DIR/log.html" "$REPORT_DIR/summary.html"
find "$REPORT_DIR" -type f -empty -print0 | xargs -0 rm -v

grep -A1 FAIL "${REPORT_DIR}/output.log" | grep -v '^Output' > "${REPORT_DIR}/summary.txt"
if [[ "${OZONE_ACCEPTANCE_TEST_TYPE}" == "maven" ]]; then
pushd result
source "${DIR}/_mvn_unit_report.sh"
find . -name junit -print0 | xargs -r -0 rm -frv
cp -rv * "${REPORT_DIR}"/
popd
else
cp -rv result/* "$REPORT_DIR/"
if [[ -f "${REPORT_DIR}/log.html" ]]; then
cp "$REPORT_DIR/log.html" "$REPORT_DIR/summary.html"
fi
grep -A1 FAIL "${REPORT_DIR}/output.log" | grep -v '^Output' > "${REPORT_DIR}/summary.txt"
fi

find "$REPORT_DIR" -type f -empty -not -name summary.txt -print0 | xargs -0 rm -v

exit $RES
112 changes: 112 additions & 0 deletions hadoop-ozone/dist/src/main/compose/common/s3a-test.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,112 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# This script runs S3A contract tests against various bucket types on
# a Docker Compose-based Ozone cluster.
# Requires HADOOP_AWS_DIR to point to the directory containing the hadoop-aws sources.

# Bail out when the hadoop-aws source tree is unavailable.  A plain `exit`
# (no status) preserves the status of the preceding `echo` (0), so the
# suite is skipped without failing the overall acceptance run.
if [[ -z ${HADOOP_AWS_DIR} ]] || [[ ! -e ${HADOOP_AWS_DIR} ]]; then
echo "Skipping S3A tests due to missing HADOOP_AWS_DIR (directory with hadoop-aws sources)" >&2
exit
fi

# Pull in shared compose-test helpers (start_docker_env,
# execute_command_in_container, ...); COMPOSE_DIR is exported by the caller.
# shellcheck source=/dev/null
source "$COMPOSE_DIR/../testlib.sh"

## @description Run S3A contract tests against Ozone.
## @param Ozone S3 bucket
execute_s3a_tests() {
  local bucket="$1"

  # Fail early instead of running Maven in the wrong directory.
  pushd "${HADOOP_AWS_DIR}" || return 1

  # S3A contract tests are enabled by presence of `auth-keys.xml`.
  # https://hadoop.apache.org/docs/r3.3.6/hadoop-aws/tools/hadoop-aws/testing.html#Setting_up_the_tests
  # Both fs.s3a.* and test.fs.s3a.* variants are set; path-style access is
  # required because S3 Gateway is addressed by host:port, not virtual host.
  cat > src/test/resources/auth-keys.xml <<-EOF
<configuration>

<property>
<name>fs.s3a.endpoint</name>
<value>http://localhost:9878</value>
</property>

<property>
<name>test.fs.s3a.endpoint</name>
<value>http://localhost:9878</value>
</property>

<property>
<name>fs.contract.test.fs.s3a</name>
<value>s3a://${bucket}/</value>
</property>

<property>
<name>test.fs.s3a.name</name>
<value>s3a://${bucket}/</value>
</property>

<property>
<name>test.fs.s3a.sts.enabled</name>
<value>false</value>
</property>

<property>
<name>fs.s3a.path.style.access</name>
<value>true</value>
</property>

<property>
<name>fs.s3a.directory.marker.retention</name>
<value>keep</value>
</property>

</configuration>
EOF

  # Some tests are skipped due to known issues.
  # - ITestS3AContractDistCp: HDDS-10616
  # - ITestS3AContractEtag, ITestS3AContractRename: HDDS-10615
  # - ITestS3AContractGetFileStatusV1List: HDDS-10617
  # - ITestS3AContractMkdir: HDDS-10572
  # --fail-never: run the full suite and collect reports even on failures.
  mvn -B -V --fail-never --no-transfer-progress \
    -Dtest='ITestS3AContract*, !ITestS3AContractDistCp, !ITestS3AContractEtag, !ITestS3AContractGetFileStatusV1List, !ITestS3AContractMkdir, !ITestS3AContractRename' \
    clean test

  # Preserve surefire reports per bucket for the acceptance report step.
  local target="${RESULT_DIR}/junit/${bucket}/target"
  mkdir -p "${target}"
  mv -iv target/surefire-reports "${target}"/
  popd
}

# Spin up the Docker Compose cluster for the current COMPOSE_DIR.
start_docker_env

# Obtain S3 credentials: with security enabled, kinit and fetch a real
# secret from the cluster; otherwise any non-empty key pair is accepted.
if [[ ${SECURITY_ENABLED} == "true" ]]; then
execute_command_in_container s3g kinit -kt /etc/security/keytabs/testuser.keytab "testuser/s3g@EXAMPLE.COM"
access=$(execute_command_in_container s3g ozone s3 getsecret -e)
# NOTE(review): `getsecret -e` presumably emits export statements for the
# AWS credential variables, since its output is eval'd here — confirm.
eval "$access"
else
export AWS_ACCESS_KEY_ID="s3a-contract"
export AWS_SECRET_ACCESS_KEY="unsecure"
fi

# Create one bucket per supported bucket layout in the default s3v volume.
execute_command_in_container s3g ozone sh bucket create --layout OBJECT_STORE /s3v/obs-bucket
execute_command_in_container s3g ozone sh bucket create --layout LEGACY /s3v/leg-bucket
execute_command_in_container s3g ozone sh bucket create --layout FILE_SYSTEM_OPTIMIZED /s3v/fso-bucket

# Run the S3A contract suite against each bucket layout.
for bucket in obs-bucket leg-bucket fso-bucket; do
execute_s3a_tests "$bucket"
done
25 changes: 25 additions & 0 deletions hadoop-ozone/dist/src/main/compose/ozone/test-s3a.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

#suite:s3a

# Resolve the directory containing this script; compose files live here.
script_dir="$(dirname "${BASH_SOURCE[0]}")"
COMPOSE_DIR="$(cd "${script_dir}" >/dev/null 2>&1 && pwd)"
export COMPOSE_DIR

# Run the S3A suite against an unsecured cluster.
SECURITY_ENABLED=false
export SECURITY_ENABLED

# Delegate to the shared S3A contract-test driver.
source "${COMPOSE_DIR}/../common/s3a-test.sh"
27 changes: 27 additions & 0 deletions hadoop-ozone/dist/src/main/compose/ozonesecure-ha/test-s3a.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

#suite:s3a

# Resolve the directory containing this script; compose files live here.
script_dir="$(dirname "${BASH_SOURCE[0]}")"
COMPOSE_DIR="$(cd "${script_dir}" >/dev/null 2>&1 && pwd)"
export COMPOSE_DIR

# Run the S3A suite against a secure (Kerberized) HA cluster.
SECURITY_ENABLED=true
export SECURITY_ENABLED

OM_SERVICE_ID="omservice"
export OM_SERVICE_ID

SCM=scm1.org
export SCM

# Delegate to the shared S3A contract-test driver.
source "${COMPOSE_DIR}/../common/s3a-test.sh"
7 changes: 5 additions & 2 deletions hadoop-ozone/dist/src/main/compose/test-all.sh
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ rm "$ALL_RESULT_DIR"/* || true

source "$SCRIPT_DIR"/testlib.sh

: ${OZONE_ACCEPTANCE_TEST_TYPE:="robot"}
: ${OZONE_WITH_COVERAGE:="false"}

if [[ "${OZONE_WITH_COVERAGE}" == "true" ]]; then
Expand All @@ -46,7 +47,9 @@ if [[ "${OZONE_WITH_COVERAGE}" == "true" ]]; then
cp /tmp/jacoco-combined.exec "$SCRIPT_DIR"/result
fi

generate_report "acceptance" "${ALL_RESULT_DIR}" "${XUNIT_RESULT_DIR}"

if [[ "${OZONE_ACCEPTANCE_TEST_TYPE}" == "robot" ]]; then
# does not apply to JUnit tests run via Maven
generate_report "acceptance" "${ALL_RESULT_DIR}" "${XUNIT_RESULT_DIR}"
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Maybe add a comment or rename this method to clarify that this method only applies to tests run with Robot.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Generalized s3a-specific condition to check acceptance test type (robot or maven). We may be adding more Maven-based tests in the future.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Although the overall job passes, there is still a failure because generate_report is being called (not sure why this doesn't fail the s3a job):

Robot framework is not installed, the reports cannot be generated (sudo pip install robotframework).
...
Error: Process completed with exit code 1.

Looks like OZONE_ACCEPTANCE_TEST_TYPE is not exported in acceptance.sh and not visible in this subshell.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Side note, will this also be a problem if devs run test-all.sh manually to run all the tests locally?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Thanks for pointing out the problem with OZONE_ACCEPTANCE_TEST_TYPE.

will this also be a problem if devs run test-all.sh manually to run all the tests locally?

When running test-all.sh directly, installing Robot is the developer's responsibility (it was already required before this change).

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Right, my question is more general: what is the expected way to run all acceptance tests locally and manually? Ideally running test-all.sh (or maybe acceptance.sh) should kick off all the acceptance tests locally. After this change, running either of these will only run the robot tests, unless the user knows which variables to export to their shell before running.

Maybe that's ok for now and we can handle that in a follow up task, since there's probably not many people running all the tests this way.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

what is the expected way to run all acceptance tests locally and manually?
...
After this change, running either of these will only run the robot tests

Actually, they will also try to run s3a tests, but that part will fail, since Hadoop sources will be missing.

We can avoid that by further separating the two kinds of tests.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'm ok to handle this in a follow up task if you want. It works fine in CI and I don't think many people are running the tests this way.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Slightly changed s3a-test.sh to not fail, rather skip the test if HADOOP_AWS_DIR is missing. This way current behavior is retained when running acceptance.sh or test-all.sh without any test filters.

fi

exit $RESULT
4 changes: 2 additions & 2 deletions hadoop-ozone/dist/src/main/compose/testlib.sh
Original file line number Diff line number Diff line change
Expand Up @@ -405,12 +405,12 @@ copy_results() {
target_dir="${target_dir}/${test_script_name}"
fi

if [[ -n "$(find "${result_dir}" -name "*.xml")" ]]; then
if command -v rebot > /dev/null 2>&1 && [[ -n "$(find "${result_dir}" -name "*.xml")" ]]; then
rebot --nostatusrc -N "${test_name}" -l NONE -r NONE -o "${all_result_dir}/${test_name}.xml" "${result_dir}"/*.xml \
&& rm -fv "${result_dir}"/*.xml "${result_dir}"/log.html "${result_dir}"/report.html
fi

mkdir -p "${target_dir}"
mkdir -pv "${target_dir}"
mv -v "${result_dir}"/* "${target_dir}"/
}

Expand Down