Skip to content

Commit

Permalink
Merge branch 'main' into FixTrafficLimiting
Browse files Browse the repository at this point in the history
  • Loading branch information
gregschohn committed Mar 1, 2024
2 parents 51ad945 + 7987740 commit 0beb25a
Show file tree
Hide file tree
Showing 38 changed files with 334 additions and 411 deletions.
4 changes: 3 additions & 1 deletion .github/workflows/e2eTest.yml
Original file line number Diff line number Diff line change
Expand Up @@ -30,8 +30,10 @@ jobs:
cache-read-only: false

- name: Start Docker Solution
run: ./gradlew dockerSolution:ComposeUp -x test
run: ./gradlew dockerSolution:ComposeUp -x test --scan --info --stacktrace
working-directory: TrafficCapture
env:
OS_MIGRATIONS_GRADLE_SCAN_TOS_AGREE_AND_ENABLED: ''

- name: Set up Python
uses: actions/setup-python@v5
Expand Down
8 changes: 6 additions & 2 deletions .github/workflows/gradle-build-and-test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -24,12 +24,16 @@ jobs:
gradle-home-cache-cleanup: true

- name: Run Gradle Build
run: ./gradlew build -x test --info
run: ./gradlew build -x test --scan --info --stacktrace
working-directory: TrafficCapture
env:
OS_MIGRATIONS_GRADLE_SCAN_TOS_AGREE_AND_ENABLED: ''

- name: Run Tests with Coverage
run: ./gradlew jacocoTestReport --info
run: ./gradlew test jacocoTestReport --scan --info --stacktrace
working-directory: TrafficCapture
env:
OS_MIGRATIONS_GRADLE_SCAN_TOS_AGREE_AND_ENABLED: ''

- name: Upload to Codecov
uses: codecov/codecov-action@v4
Expand Down
49 changes: 48 additions & 1 deletion TrafficCapture/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,8 @@
- [Traffic Capture Proxy Server](#traffic-capture-proxy-server)
- [Traffic Replayer](#traffic-replayer)
- [Capture Kafka Offloader](#capture-kafka-offloader)
- [Building](#building)
- [Gradle Scans](#gradle-scans)
- [Publishing](#publishing)

## Overview
Expand Down Expand Up @@ -137,6 +139,44 @@ The Capture Kafka Offloader will act as a Kafka Producer for offloading captured

Learn more about its functionality and setup here: [Capture Kafka Offloader](captureKafkaOffloader/README.md)

## Building

The building process for this project is streamlined through the use of Gradle. This section outlines the necessary steps to build the project and execute tests effectively.

To compile the project and execute unit tests, use the following command:

```sh
./gradlew build
```

This command compiles the source code and runs the quick unit tests, ensuring the project is correctly assembled and functional.

For a comprehensive test run, including both quick unit tests and more extensive slow tests, execute:

```sh
./gradlew test slowTest --rerun
```

This command initiates all tests, ensuring thorough validation of the project. The `--rerun` option forces the specified tasks to execute again even if their outputs are already cached from a previous build.

### Gradle Scans

Gradle Scans offer a more intuitive understanding of build outputs. To enable Gradle Scans for enhanced output analysis, append `--scan` to your Gradle command.
This action requires acceptance of the Gradle Scan terms of service.
To automate this acceptance and enable scans by default, define the `OS_MIGRATIONS_GRADLE_SCAN_TOS_AGREE_AND_ENABLED` environment variable (its value may be empty — the build only checks that the variable is set):

```sh
export OS_MIGRATIONS_GRADLE_SCAN_TOS_AGREE_AND_ENABLED=
```

For persistent configuration in Zsh:

```sh
echo 'export OS_MIGRATIONS_GRADLE_SCAN_TOS_AGREE_AND_ENABLED=' >> ~/.zshrc
```

Access your detailed build reports by following the link provided at the end of your Gradle command's output.

## Publishing

This project can be published to a local maven repository with:
Expand Down Expand Up @@ -172,7 +212,14 @@ jsonMessageTransformers
nettyWireLogging
openSearch23PlusTargetTransformerProvider
replayerPlugins
testUtilities
trafficCaptureProxyServer
trafficCaptureProxyServerTest
trafficReplayer
```
```

To include a testFixture dependency, define the import like

```groovy
testImplementation testFixtures('org.opensearch.migrations.trafficcapture:trafficReplayer:0.1.0-SNAPSHOT')
```
6 changes: 1 addition & 5 deletions TrafficCapture/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -15,18 +15,14 @@ subprojects {
def excludedProjects = [
'buildSrc',
'dockerSolution',
// TODO: Get testFixtures exported to Maven
'testUtilities',
]
if (!(project.name in excludedProjects)) {
publishing {
publications {
mavenJava(MavenPublication) {
artifactId = project.name

from components.java

groupId = 'org.opensearch.migrations.trafficcapture'
group = 'org.opensearch.migrations.trafficcapture'
version = '0.1.0-SNAPSHOT'

// Suppress POM metadata warnings for test fixtures
Expand Down
2 changes: 1 addition & 1 deletion TrafficCapture/dockerSolution/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,7 @@ dockerCompose {
"${project.getProperty('otel-collector')}" :
"otel-prometheus-jaeger-opensearch.yml"),
"${extensionsDir}" + (project.hasProperty("multiProxy") ? "proxy-multi.yml" : "proxy-single.yml")
]
]
}

task buildDockerImages {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -82,21 +82,23 @@ services:
condition: service_started
opensearchtarget:
condition: service_started
command: /bin/sh -c "/runJavaWithClasspath.sh org.opensearch.migrations.replay.TrafficReplayer https://opensearchtarget:9200 --auth-header-value Basic\\ YWRtaW46YWRtaW4= --insecure --kafka-traffic-brokers kafka:9092 --kafka-traffic-topic logging-traffic-topic --kafka-traffic-group-id default-logging-group --otelCollectorEndpoint http://otel-collector:4317"
command: /bin/sh -c "/runJavaWithClasspath.sh org.opensearch.migrations.replay.TrafficReplayer https://opensearchtarget:9200 --auth-header-value Basic\\ YWRtaW46bXlTdHJvbmdQYXNzd29yZDEyMyE= --insecure --kafka-traffic-brokers kafka:9092 --kafka-traffic-topic logging-traffic-topic --kafka-traffic-group-id default-logging-group --otelCollectorEndpoint http://otel-collector:4317"

opensearchtarget:
image: 'opensearchproject/opensearch:2.11.0'
image: 'opensearchproject/opensearch:latest'
environment:
- discovery.type=single-node
- OPENSEARCH_INITIAL_ADMIN_PASSWORD=myStrongPassword123!
networks:
- migrations
ports:
- "29200:9200"

opensearchanalytics:
image: 'opensearchproject/opensearch:2.11.0'
image: 'opensearchproject/opensearch:latest'
environment:
- discovery.type=single-node
- OPENSEARCH_INITIAL_ADMIN_PASSWORD=myStrongPassword123!
networks:
- migrations
ports:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ if [ -n "$MIGRATION_DOMAIN_ENDPOINT" ]; then
target_auth_user_and_pass="admin:Admin123!"
else
target_endpoint="https://opensearchtarget:9200"
target_auth_user_and_pass="admin:admin"
target_auth_user_and_pass="admin:myStrongPassword123!"
fi

usage() {
Expand Down
1 change: 1 addition & 0 deletions TrafficCapture/gradlew
Original file line number Diff line number Diff line change
Expand Up @@ -241,4 +241,5 @@ eval "set -- $(
tr '\n' ' '
)" '"$@"'


exec "$JAVACMD" "$@"
14 changes: 14 additions & 0 deletions TrafficCapture/settings.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,10 @@
* in the user manual at https://docs.gradle.org/8.0.2/userguide/multi_project_builds.html
*/

plugins {
id("com.gradle.enterprise") version("3.16.2")
}

void addSubProjects(String path, File dir) {
if (dir.isDirectory() == false) return;
if (dir.name == 'buildSrc') return;
Expand Down Expand Up @@ -37,3 +41,13 @@ include('captureKafkaOffloader',
)

addSubProjects('', new File(rootProject.projectDir,'replayerPlugins'))

if (System.getenv().containsKey("OS_MIGRATIONS_GRADLE_SCAN_TOS_AGREE_AND_ENABLED")) {
gradleEnterprise {
buildScan {
publishAlways()
termsOfServiceUrl = "https://gradle.com/terms-of-service"
termsOfServiceAgree = "yes"
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -55,94 +55,94 @@ public class CaptureProxy {
private static final String HTTPS_CONFIG_PREFIX = "plugins.security.ssl.http.";
public static final String DEFAULT_KAFKA_CLIENT_ID = "HttpCaptureProxyProducer";

static class Parameters {
public static class Parameters {
@Parameter(required = false,
names = {"--traceDirectory"},
arity = 1,
description = "Directory to store trace files in.")
String traceDirectory;
public String traceDirectory;
@Parameter(required = false,
names = {"--noCapture"},
arity = 0,
description = "If enabled, Does NOT capture traffic to ANY sink.")
boolean noCapture;
public boolean noCapture;
@Parameter(required = false,
names = {"--kafkaConfigFile"},
arity = 1,
description = "Kafka properties file for additional client customization.")
String kafkaPropertiesFile;
public String kafkaPropertiesFile;
@Parameter(required = false,
names = {"--kafkaClientId"},
arity = 1,
description = "clientId to use for interfacing with Kafka.")
String kafkaClientId = DEFAULT_KAFKA_CLIENT_ID;
public String kafkaClientId = DEFAULT_KAFKA_CLIENT_ID;
@Parameter(required = false,
names = {"--kafkaConnection"},
arity = 1,
description = "Sequence of <HOSTNAME:PORT> values delimited by ','.")
String kafkaConnection;
public String kafkaConnection;
@Parameter(required = false,
names = {"--enableMSKAuth"},
arity = 0,
description = "Enables SASL Kafka properties required for connecting to MSK with IAM auth.")
boolean mskAuthEnabled = false;
public boolean mskAuthEnabled = false;
@Parameter(required = false,
names = {"--sslConfigFile"},
arity = 1,
description = "YAML configuration of the HTTPS settings. When this is not set, the proxy will not use TLS.")
String sslConfigFilePath;
public String sslConfigFilePath;
@Parameter(required = false,
names = {"--maxTrafficBufferSize"},
arity = 1,
description = "The maximum number of bytes that will be written to a single TrafficStream object.")
int maximumTrafficStreamSize = 1024*1024;
public int maximumTrafficStreamSize = 1024*1024;
@Parameter(required = false,
names = {"--insecureDestination"},
arity = 0,
description = "Do not check the destination server's certificate")
boolean allowInsecureConnectionsToBackside;
public boolean allowInsecureConnectionsToBackside;
@Parameter(required = true,
names = {"--destinationUri"},
arity = 1,
description = "URI of the server that the proxy is capturing traffic for.")
String backsideUriString;
public String backsideUriString;
@Parameter(required = true,
names = {"--listenPort"},
arity = 1,
description = "Exposed port for clients to connect to this proxy.")
int frontsidePort = 0;
public int frontsidePort = 0;
@Parameter(required = false,
names = {"--numThreads"},
arity = 1,
description = "How many threads netty should create in its event loop group")
int numThreads = 1;
public int numThreads = 1;
@Parameter(required = false,
names = {"--destinationConnectionPoolSize"},
arity = 1,
description = "Number of socket connections that should be maintained to the destination server " +
"to reduce the perceived latency to clients. Each thread will have its own cache, so the " +
"total number of outstanding warm connections will be multiplied by numThreads.")
int destinationConnectionPoolSize = 0;
public int destinationConnectionPoolSize = 0;
@Parameter(required = false,
names = {"--destinationConnectionPoolTimeout"},
arity = 1,
description = "Of the socket connections maintained by the destination connection pool, " +
"how long after connection should the be recycled " +
"(closed with a new connection taking its place)")
String destinationConnectionPoolTimeout = "PT30S";
public String destinationConnectionPoolTimeout = "PT30S";
@Parameter(required = false,
names = {"--otelCollectorEndpoint"},
arity = 1,
description = "Endpoint (host:port) for the OpenTelemetry Collector to which metrics logs should be forwarded." +
"If this is not provided, metrics will not be sent to a collector.")
String otelCollectorEndpoint;
public String otelCollectorEndpoint;
@Parameter(required = false,
names = "--suppressCaptureForHeaderMatch",
arity = 2,
description = "The header name (which will be interpreted in a case-insensitive manner) and a regex " +
"pattern. When the incoming request has a header that matches the regex, it will be passed " +
"through to the service but will NOT be captured. E.g. user-agent 'healthcheck'.")
private List<String> suppressCaptureHeaderPairs = new ArrayList<>();
public List<String> suppressCaptureHeaderPairs = new ArrayList<>();
}

static Parameters parseArgs(String[] args) {
Expand All @@ -167,7 +167,7 @@ static Parameters parseArgs(String[] args) {
}

@SneakyThrows
private static Settings getSettings(@NonNull String configFile) {
protected static Settings getSettings(@NonNull String configFile) {
var builder = Settings.builder();
try (var lines = Files.lines(Paths.get(configFile))) {
lines
Expand All @@ -184,7 +184,7 @@ private static Settings getSettings(@NonNull String configFile) {
return builder.build();
}

private static IConnectionCaptureFactory<Object> getNullConnectionCaptureFactory() {
protected static IConnectionCaptureFactory<Object> getNullConnectionCaptureFactory() {
System.err.println("No trace log directory specified. Logging to /dev/null");
return ctx -> new StreamChannelConnectionCaptureSerializer<>(null, ctx.getConnectionId(),
new StreamLifecycleManager<>() {
Expand All @@ -201,7 +201,7 @@ public CompletableFuture<Object> closeStream(CodedOutputStreamHolder outputStrea
});
}

private static String getNodeId() {
protected static String getNodeId() {
return UUID.randomUUID().toString();
}

Expand Down Expand Up @@ -234,7 +234,7 @@ static Properties buildKafkaProperties(Parameters params) throws IOException {
return kafkaProps;
}

private static IConnectionCaptureFactory
protected static IConnectionCaptureFactory
getConnectionCaptureFactory(Parameters params, RootCaptureContext rootContext) throws IOException {
var nodeId = getNodeId();
// Resist the urge for now though until it comes in as a request/need.
Expand All @@ -252,7 +252,7 @@ static Properties buildKafkaProperties(Parameters params) throws IOException {

// Utility method for converting uri string to an actual URI object. Similar logic is placed in the trafficReplayer
// module: TrafficReplayer.java
private static URI convertStringToUri(String uriString) {
protected static URI convertStringToUri(String uriString) {
URI serverUri;
try {
serverUri = new URI(uriString);
Expand All @@ -274,7 +274,7 @@ private static URI convertStringToUri(String uriString) {
return serverUri;
}

private static SslContext loadBacksideSslContext(URI serverUri, boolean allowInsecureConnections) throws
protected static SslContext loadBacksideSslContext(URI serverUri, boolean allowInsecureConnections) throws
SSLException {
if (serverUri.getScheme().equalsIgnoreCase("https")) {
var sslContextBuilder = SslContextBuilder.forClient();
Expand All @@ -287,7 +287,7 @@ private static SslContext loadBacksideSslContext(URI serverUri, boolean allowIns
}
}

private static Map<String, String> convertPairListToMap(List<String> list) {
protected static Map<String, String> convertPairListToMap(List<String> list) {
var map = new TreeMap<String, String>();
for (int i=0; i<list.size(); i+=2) {
map.put(list.get(i), list.get(i+1));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,10 +16,10 @@
import java.util.function.Supplier;

public class NettyScanningHttpProxy {
private final int proxyPort;
private Channel mainChannel;
private EventLoopGroup workerGroup;
private EventLoopGroup bossGroup;
protected final int proxyPort;
protected Channel mainChannel;
protected EventLoopGroup workerGroup;
protected EventLoopGroup bossGroup;

public NettyScanningHttpProxy(int proxyPort) {
this.proxyPort = proxyPort;
Expand Down
Loading

0 comments on commit 0beb25a

Please sign in to comment.