Merge branch 'master' of github.com:ceekay47/ozone into HDDS-10206
ceekay committed Feb 13, 2024
2 parents 8b16a91 + bacb184 commit 140ec36
Showing 461 changed files with 5,454 additions and 8,092 deletions.
2 changes: 2 additions & 0 deletions .github/dependabot.yml
@@ -38,5 +38,7 @@ updates:
directory: "/"
schedule:
interval: "weekly"
day: "saturday"
time: "07:00" # UTC
pull-request-branch-name:
separator: "-"
45 changes: 12 additions & 33 deletions .github/workflows/intermittent-test-check.yml
@@ -34,7 +34,7 @@ on:
required: true
splits:
description: Number of splits
default: 2
default: 10
required: true
fail-fast:
description: Stop after first failure
@@ -52,34 +52,16 @@ jobs:
runs-on: ubuntu-20.04
outputs:
matrix: ${{steps.generate.outputs.matrix}}
test_type: ${{steps.check-test-existence.outputs.test_type}}
steps:
- uses: actions/checkout@v4
with:
ref: ${{ github.event.inputs.ref }}
- name: Check for Test File
id: check-test-existence
run: |
filename="$TEST_CLASS.java"
found_file=$(find . -name "$filename" -type f -print -quit)
test_type=unit
if [ -n "$found_file" ]; then
echo "File path : $found_file"
if [[ "$found_file" == *"integration-test"* ]]; then
test_type=integration
fi
echo "Test file $filename found. Continuing.."
else
echo "Test file $filename not found.Stopping!"
exit 1
fi
echo "test_type=$test_type" >> $GITHUB_OUTPUT
- id: generate
name: Generate test matrix
run: |
splits=()
for ((i = 1; i <= ${{ github.event.inputs.splits }}; i++)); do
splits+=("$i")
splits+=("$i")
done
printf -v x "%s," "${splits[@]}"
split_matrix="[${x%,}]"
@@ -157,31 +139,28 @@ jobs:
export OZONE_REPO_CACHED=true
fi
test_type=${{ needs.prepare-job.outputs.test_type }}
args="-DexcludedGroups=unhealthy"
if [ "$test_type" = "integration" ]; then
args="$args -pl :ozone-integration-test,:mini-chaos-tests"
fi
args="-DexcludedGroups=native|slow|unhealthy"
if [ "$TEST_METHOD" = "ALL" ]; then
echo "Running all tests from $TEST_CLASS"
hadoop-ozone/dev-support/checks/junit.sh $args -Dtest=$TEST_CLASS
echo "Running all tests from $TEST_CLASS"
set -x
hadoop-ozone/dev-support/checks/junit.sh $args -Dtest="$TEST_CLASS,Abstract*Test*\$*"
else
echo "Running test: $TEST_METHOD from $TEST_CLASS"
hadoop-ozone/dev-support/checks/junit.sh $args -Dtest=$TEST_CLASS#$TEST_METHOD
echo "Running test: $TEST_METHOD from $TEST_CLASS"
set -x
hadoop-ozone/dev-support/checks/junit.sh $args -Dtest="$TEST_CLASS#$TEST_METHOD,Abstract*Test*\$*"
fi
continue-on-error: true
env:
CHECK: ${{ needs.prepare-job.outputs.test_type }}
GRADLE_ENTERPRISE_ACCESS_KEY: ${{ secrets.GE_ACCESS_TOKEN }}
- name: Summary of failures
run: hadoop-ozone/dev-support/checks/_summary.sh target/${{ needs.prepare-job.outputs.test_type }}/summary.txt
run: hadoop-ozone/dev-support/checks/_summary.sh target/unit/summary.txt
if: ${{ !cancelled() }}
- name: Archive build results
uses: actions/upload-artifact@v4
if: always()
with:
name: result-${{ env.TEST_CLASS }}-split-${{ matrix.split }}
path: target/${{ needs.prepare-job.outputs.test_type }}
name: result-${{ github.run_id }}-${{ github.run_number }}-${{ matrix.split }}
path: target/unit
count-failures:
if: ${{ always() }}
needs: run-test
12 changes: 12 additions & 0 deletions dev-support/ci/selective_ci_checks.bats
@@ -57,6 +57,18 @@ load bats-assert/load.bash
assert_output -p needs-kubernetes-tests=false
}

@test "dashboard only" {
run dev-support/ci/selective_ci_checks.sh 039dea9

assert_output -p 'basic-checks=["rat"]'
assert_output -p needs-build=false
assert_output -p needs-compile=false
assert_output -p needs-compose-tests=false
assert_output -p needs-dependency-check=false
assert_output -p needs-integration-tests=false
assert_output -p needs-kubernetes-tests=false
}

@test "compose and robot" {
run dev-support/ci/selective_ci_checks.sh b83039eef

2 changes: 2 additions & 0 deletions dev-support/ci/selective_ci_checks.sh
@@ -233,6 +233,7 @@ function get_count_compose_files() {
local ignore_array=(
"^hadoop-ozone/dist/src/main/k8s"
"^hadoop-ozone/dist/src/main/license"
"^hadoop-ozone/dist/src/main/compose/common/grafana/dashboards"
"\.md$"
)
filter_changed_files true
@@ -494,6 +495,7 @@ function get_count_misc_files() {
"\.md$"
"findbugsExcludeFile.xml"
"/NOTICE$"
"^hadoop-ozone/dist/src/main/compose/common/grafana/dashboards"
)
local ignore_array=(
"^.github/workflows/post-commit.yml"
@@ -134,7 +134,7 @@ public class BlockDataStreamOutput implements ByteBufferStreamOutput {
private final DataStreamOutput out;
private CompletableFuture<DataStreamReply> dataStreamCloseReply;
private List<CompletableFuture<DataStreamReply>> futures = new ArrayList<>();
private final long syncSize = 0; // TODO: disk sync is disabled for now
private static final long SYNC_SIZE = 0; // TODO: disk sync is disabled for now
private long syncPosition = 0;
private StreamBuffer currentBuffer;
private XceiverClientMetrics metrics;
@@ -630,9 +630,9 @@ public boolean isClosed() {
}

private boolean needSync(long position) {
if (syncSize > 0) {
if (SYNC_SIZE > 0) {
// TODO: or position >= fileLength
if (position - syncPosition >= syncSize) {
if (position - syncPosition >= SYNC_SIZE) {
syncPosition = position;
return true;
}
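(Editorial note: the change above promotes the never-reassigned syncSize field to a static final constant, matching the Java naming convention for constants. A minimal sketch of the resulting pattern; the enclosing class here is hypothetical:)

    // A value that never changes becomes a static final constant, and
    // needSync() short-circuits while it stays 0 (disk sync disabled).
    final class SyncPolicy {
        private static final long SYNC_SIZE = 0; // disk sync is disabled for now
        private long syncPosition = 0;

        boolean needSync(long position) {
            if (SYNC_SIZE > 0 && position - syncPosition >= SYNC_SIZE) {
                syncPosition = position;
                return true;
            }
            return false;
        }
    }
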
@@ -111,6 +111,7 @@ void releaseBuffer(ChunkBuffer chunkBuffer) {
}

public void clearBufferPool() {
bufferList.forEach(ChunkBuffer::close);
bufferList.clear();
currentBufferIndex = -1;
}
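(Editorial note: the one-line fix above closes each pooled ChunkBuffer before the list is cleared, so the underlying buffers are actually released rather than merely dereferenced. A generic sketch of the idea, using AutoCloseable as a stand-in for ChunkBuffer:)

    import java.util.ArrayList;
    import java.util.List;

    // Clearing a pool without closing its members leaks whatever those
    // members hold (e.g. direct byte buffers): close first, then clear.
    final class ResourcePool<T extends AutoCloseable> {
        private final List<T> items = new ArrayList<>();

        void add(T item) {
            items.add(item);
        }

        void clear() throws Exception {
            for (T item : items) {
                item.close();   // release underlying resources first
            }
            items.clear();      // then drop the references
        }
    }
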
@@ -50,7 +50,6 @@
import static org.junit.jupiter.api.Assertions.assertSame;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;

/**
* This test class verifies the parsing of SCM endpoint config settings. The
@@ -228,17 +227,13 @@ public void testVerifyResourceName() {
invalidNames.add(tooShort);

for (String name : invalidNames) {
try {
HddsClientUtils.verifyResourceName(name);
fail("Did not reject invalid string [" + name + "] as a name");
} catch (IllegalArgumentException e) {
// throwing up on an invalid name. we're good
}
assertThrows(IllegalArgumentException.class, () -> HddsClientUtils.verifyResourceName(name),
"Did not reject invalid string [" + name + "] as a name");
}
}

@Test
public void testVerifyKeyName() {
void testVerifyKeyName() throws IllegalArgumentException {
List<String> invalidNames = new ArrayList<>();
invalidNames.add("#");
invalidNames.add("ab^cd");
@@ -257,12 +252,8 @@


for (String name : invalidNames) {
try {
HddsClientUtils.verifyKeyName(name);
fail("Did not reject invalid string [" + name + "] as a name");
} catch (IllegalArgumentException e) {
// throwing up on an invalid name. it's working.
}
assertThrows(IllegalArgumentException.class, () -> HddsClientUtils.verifyKeyName(name),
"Did not reject invalid string [" + name + "] as a name");
}

List<String> validNames = new ArrayList<>();
@@ -284,13 +275,7 @@
validNames.add("dollar$");

for (String name : validNames) {
try {
HddsClientUtils.verifyKeyName(name);
// not throwing up on a valid name. it's working.
} catch (IllegalArgumentException e) {
// throwing up on an valid name. it's not working.
fail("Rejected valid string [" + name + "] as a name");
}
HddsClientUtils.verifyKeyName(name);
}
}

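(Editorial note: both loops above replace hand-rolled try/fail/catch blocks with JUnit 5's assertThrows, which fails automatically when the lambda does not throw. A self-contained sketch of the pattern; verifyName() is a hypothetical stand-in for HddsClientUtils.verifyResourceName and verifyKeyName:)

    import static org.junit.jupiter.api.Assertions.assertThrows;

    import java.util.List;
    import org.junit.jupiter.api.Test;

    class NameValidationTest {
        @Test
        void rejectsInvalidNames() {
            for (String name : List.of("#", "ab^cd")) {
                // Fails with the given message if no exception is thrown,
                // and also if an exception of a different type is thrown.
                assertThrows(IllegalArgumentException.class,
                    () -> verifyName(name),
                    "Did not reject invalid string [" + name + "] as a name");
            }
        }

        // Hypothetical validator standing in for the real HddsClientUtils method.
        private static void verifyName(String name) {
            if (name.contains("#") || name.contains("^")) {
                throw new IllegalArgumentException("invalid name: " + name);
            }
        }
    }
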
@@ -20,10 +20,10 @@

import java.io.IOException;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.atomic.AtomicInteger;

import org.apache.commons.lang3.RandomUtils;
import org.apache.hadoop.hdds.client.BlockID;
import org.apache.hadoop.hdds.protocol.DatanodeDetails;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ChecksumType;
@@ -42,9 +42,10 @@
import org.apache.hadoop.hdds.scm.XceiverClientSpi;
import org.apache.hadoop.hdds.scm.pipeline.MockPipeline;
import org.apache.hadoop.hdds.scm.pipeline.Pipeline;

import org.apache.hadoop.ozone.OzoneConsts;
import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.Mockito.any;
@@ -56,37 +57,28 @@
* <p>
* Compares bytes written to the stream and received in the ChunkWriteRequests.
*/
public class TestBlockOutputStreamCorrectness {

private static final long SEED = 18480315L;
class TestBlockOutputStreamCorrectness {

private int writeUnitSize = 1;
private static final int DATA_SIZE = 256 * (int) OzoneConsts.MB;
private static final byte[] DATA = RandomUtils.nextBytes(DATA_SIZE);

@Test
public void test() throws IOException {
@ParameterizedTest
@ValueSource(ints = { 1, 1024, 1024 * 1024 })
void test(final int writeSize) throws IOException {
assertEquals(0, DATA_SIZE % writeSize);

final BufferPool bufferPool = new BufferPool(4 * 1024 * 1024, 32 / 4);

for (int block = 0; block < 10; block++) {
BlockOutputStream outputStream =
createBlockOutputStream(bufferPool);

Random random = new Random(SEED);

int max = 256 * 1024 * 1024 / writeUnitSize;

byte[] writeBuffer = new byte[writeUnitSize];
for (int t = 0; t < max; t++) {
if (writeUnitSize > 1) {
for (int i = 0; i < writeBuffer.length; i++) {
writeBuffer[i] = (byte) random.nextInt();
try (BlockOutputStream outputStream = createBlockOutputStream(bufferPool)) {
for (int i = 0; i < DATA_SIZE / writeSize; i++) {
if (writeSize > 1) {
outputStream.write(DATA, i * writeSize, writeSize);
} else {
outputStream.write(DATA[i]);
}
outputStream.write(writeBuffer, 0, writeBuffer.length);
} else {
outputStream.write((byte) random.nextInt());
}
}
outputStream.close();
}
}

@@ -126,9 +118,8 @@ private static class MockXceiverClientSpi extends XceiverClientSpi {

private final Pipeline pipeline;

private final Random expectedRandomStream = new Random(SEED);

private final AtomicInteger counter = new AtomicInteger();
private int i;

MockXceiverClientSpi(Pipeline pipeline) {
super();
@@ -175,8 +166,8 @@ public XceiverClientReply sendCommandAsync(
ByteString data = request.getWriteChunk().getData();
final byte[] writePayload = data.toByteArray();
for (byte b : writePayload) {
byte expectedByte = (byte) expectedRandomStream.nextInt();
assertEquals(expectedByte, b);
assertEquals(DATA[i], b);
++i;
}
break;
default:
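(Editorial note: the rewrite above turns one hard-coded write-unit size into a @ParameterizedTest over several sizes, shares a single randomly generated data array across invocations, and has the mock verify received bytes against that array instead of replaying a seeded Random. A reduced sketch of the JUnit 5 mechanics, with the actual stream wiring elided; sizes and class names here are illustrative only:)

    import static org.junit.jupiter.api.Assertions.assertEquals;

    import org.apache.commons.lang3.RandomUtils;
    import org.junit.jupiter.params.ParameterizedTest;
    import org.junit.jupiter.params.provider.ValueSource;

    class WriteSizeTest {
        private static final int DATA_SIZE = 1024;
        private static final byte[] DATA = RandomUtils.nextBytes(DATA_SIZE);

        @ParameterizedTest
        @ValueSource(ints = {1, 16, 256})
        void coversAllDataInUnitsOf(int writeSize) {
            assertEquals(0, DATA_SIZE % writeSize); // each size must divide the data
            int written = 0;
            for (int i = 0; i < DATA_SIZE / writeSize; i++) {
                // stand-in for outputStream.write(DATA, i * writeSize, writeSize)
                written += writeSize;
            }
            assertEquals(DATA_SIZE, written);
        }
    }
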
6 changes: 6 additions & 0 deletions hadoop-hdds/common/pom.xml
@@ -181,6 +181,12 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
<artifactId>grpc-api</artifactId>
<version>${io.grpc.version}</version>
<scope>compile</scope>
<exclusions>
<exclusion>
<groupId>com.google.code.findbugs</groupId>
<artifactId>jsr305</artifactId>
</exclusion>
</exclusions>
</dependency>

<!-- Test dependencies -->
@@ -392,4 +392,9 @@ private HddsConfigKeys() {

public static final String OZONE_AUDIT_LOG_DEBUG_CMD_LIST_DNAUDIT =
"ozone.audit.log.debug.cmd.list.dnaudit";

public static final String HDDS_DATANODE_SLOW_OP_WARNING_THRESHOLD_KEY =
"hdds.datanode.slow.op.warning.threshold";
public static final String HDDS_DATANODE_SLOW_OP_WARNING_THRESHOLD_DEFAULT =
"500ms";
}
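(Editorial note: the two new constants define a datanode-side threshold for flagging slow operations, defaulting to "500ms". A sketch of how such a duration key is typically consumed, assuming Hadoop's Configuration.getTimeDuration, which parses suffixed values like "500ms"; the enclosing class and wiring are hypothetical:)

    import java.util.concurrent.TimeUnit;
    import org.apache.hadoop.conf.Configuration;

    final class SlowOpThreshold {
        static long thresholdMillis(Configuration conf) {
            // 500L mirrors the "500ms" default above; values set in the
            // configuration may carry a unit suffix such as "500ms".
            return conf.getTimeDuration(
                "hdds.datanode.slow.op.warning.threshold",
                500L,
                TimeUnit.MILLISECONDS);
        }
    }
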
@@ -17,7 +17,6 @@
*/
package org.apache.hadoop.hdds;

import java.util.UUID;
import java.util.concurrent.atomic.AtomicLong;

/**
@@ -41,13 +40,4 @@ public static long getLongId() {
return LONG_COUNTER.incrementAndGet();
}

/**
* Returns a uuid.
*
* @return UUID.
*/
public static UUID getUUId() {
return UUID.randomUUID();
}

}
(Diff truncated: 461 changed files in total; the remaining files are not shown here.)
