Add examples enabling SASL with JAAS in Kafka (#7763)
Fixes #7441
eddumelendez committed Nov 6, 2023
Commit 666ec53 (parent fa23ae4)
1 changed file (KafkaContainerTest.java) with 166 additions and 18 deletions.
@@ -1,6 +1,7 @@
 package org.testcontainers.containers;
 
 import com.google.common.collect.ImmutableMap;
+import lombok.SneakyThrows;
 import org.apache.kafka.clients.admin.AdminClient;
 import org.apache.kafka.clients.admin.AdminClientConfig;
 import org.apache.kafka.clients.admin.NewTopic;
@@ -11,8 +12,12 @@
 import org.apache.kafka.clients.producer.KafkaProducer;
 import org.apache.kafka.clients.producer.ProducerConfig;
 import org.apache.kafka.clients.producer.ProducerRecord;
+import org.apache.kafka.common.config.SaslConfigs;
+import org.apache.kafka.common.errors.SaslAuthenticationException;
+import org.apache.kafka.common.errors.TopicAuthorizationException;
 import org.apache.kafka.common.serialization.StringDeserializer;
 import org.apache.kafka.common.serialization.StringSerializer;
+import org.awaitility.Awaitility;
 import org.junit.Test;
 import org.rnorth.ducttape.unreliables.Unreliables;
 import org.testcontainers.Testcontainers;
@@ -22,10 +27,12 @@
 import java.time.Duration;
 import java.util.Collection;
 import java.util.Collections;
+import java.util.Properties;
 import java.util.UUID;
 import java.util.concurrent.TimeUnit;
 
 import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
 import static org.assertj.core.api.Assertions.tuple;
 
 public class KafkaContainerTest {
@@ -38,6 +45,15 @@ public class KafkaContainerTest {
         "confluentinc/cp-zookeeper:4.0.0"
     );
 
+    private final ImmutableMap<String, String> properties = ImmutableMap.of(
+        AdminClientConfig.SECURITY_PROTOCOL_CONFIG,
+        "SASL_PLAINTEXT",
+        SaslConfigs.SASL_MECHANISM,
+        "PLAIN",
+        SaslConfigs.SASL_JAAS_CONFIG,
+        "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"admin\" password=\"admin\";"
+    );
+
     @Test
     public void testUsage() throws Exception {
         try (KafkaContainer kafka = new KafkaContainer(KAFKA_TEST_IMAGE)) {
@@ -220,34 +236,166 @@ public void testUsageWithListener() throws Exception {
         }
     }
 
-    protected void testKafkaFunctionality(String bootstrapServers) throws Exception {
-        testKafkaFunctionality(bootstrapServers, 1, 1);
+    @SneakyThrows
+    @Test
+    public void shouldConfigureAuthenticationWithSaslUsingJaas() {
+        try (
+            KafkaContainer kafka = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:6.2.1"))
+                .withEnv("KAFKA_LISTENER_SECURITY_PROTOCOL_MAP", "PLAINTEXT:SASL_PLAINTEXT,BROKER:SASL_PLAINTEXT")
+                .withEnv("KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL", "PLAIN")
+                .withEnv("KAFKA_LISTENER_NAME_PLAINTEXT_SASL_ENABLED_MECHANISMS", "PLAIN")
+                .withEnv("KAFKA_LISTENER_NAME_BROKER_SASL_ENABLED_MECHANISMS", "PLAIN")
+                .withEnv("KAFKA_LISTENER_NAME_BROKER_PLAIN_SASL_JAAS_CONFIG", getJaasConfig())
+                .withEnv("KAFKA_LISTENER_NAME_PLAINTEXT_PLAIN_SASL_JAAS_CONFIG", getJaasConfig())
+        ) {
+            kafka.start();
+
+            testSecureKafkaFunctionality(kafka.getBootstrapServers());
+        }
     }
 
-    protected void testKafkaFunctionality(String bootstrapServers, int partitions, int rf) throws Exception {
+    @SneakyThrows
+    @Test
+    public void enableSaslWithUnsuccessfulTopicCreation() {
         try (
+            KafkaContainer kafka = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:6.2.1"))
+                .withEnv("KAFKA_LISTENER_SECURITY_PROTOCOL_MAP", "PLAINTEXT:SASL_PLAINTEXT,BROKER:SASL_PLAINTEXT")
+                .withEnv("KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL", "PLAIN")
+                .withEnv("KAFKA_LISTENER_NAME_PLAINTEXT_SASL_ENABLED_MECHANISMS", "PLAIN")
+                .withEnv("KAFKA_LISTENER_NAME_BROKER_SASL_ENABLED_MECHANISMS", "PLAIN")
+                .withEnv("KAFKA_LISTENER_NAME_BROKER_PLAIN_SASL_JAAS_CONFIG", getJaasConfig())
+                .withEnv("KAFKA_LISTENER_NAME_PLAINTEXT_PLAIN_SASL_JAAS_CONFIG", getJaasConfig())
+                .withEnv("KAFKA_AUTHORIZER_CLASS_NAME", "kafka.security.authorizer.AclAuthorizer")
+                .withEnv("KAFKA_SUPER_USERS", "User:admin")
+        ) {
+            kafka.start();
+
             AdminClient adminClient = AdminClient.create(
-                ImmutableMap.of(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers)
+                ImmutableMap.of(
+                    AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG,
+                    kafka.getBootstrapServers(),
+                    AdminClientConfig.SECURITY_PROTOCOL_CONFIG,
+                    "SASL_PLAINTEXT",
+                    SaslConfigs.SASL_MECHANISM,
+                    "PLAIN",
+                    SaslConfigs.SASL_JAAS_CONFIG,
+                    "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"test\" password=\"secret\";"
+                )
             );
-            KafkaProducer<String, String> producer = new KafkaProducer<>(
+
+            String topicName = "messages-" + UUID.randomUUID();
+            Collection<NewTopic> topics = Collections.singletonList(new NewTopic(topicName, 1, (short) 1));
+
+            Awaitility
+                .await()
+                .untilAsserted(() -> {
+                    assertThatThrownBy(() -> adminClient.createTopics(topics).all().get(30, TimeUnit.SECONDS))
+                        .hasCauseInstanceOf(TopicAuthorizationException.class);
+                });
+        }
+    }
+
+    @SneakyThrows
+    @Test
+    public void enableSaslAndWithAuthenticationError() {
+        String jaasConfig = getJaasConfig();
+        try (
+            KafkaContainer kafka = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:6.2.1"))
+                .withEnv("KAFKA_LISTENER_SECURITY_PROTOCOL_MAP", "PLAINTEXT:SASL_PLAINTEXT,BROKER:SASL_PLAINTEXT")
+                .withEnv("KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL", "PLAIN")
+                .withEnv("KAFKA_LISTENER_NAME_PLAINTEXT_SASL_ENABLED_MECHANISMS", "PLAIN")
+                .withEnv("KAFKA_LISTENER_NAME_BROKER_SASL_ENABLED_MECHANISMS", "PLAIN")
+                .withEnv("KAFKA_LISTENER_NAME_BROKER_PLAIN_SASL_JAAS_CONFIG", jaasConfig)
+                .withEnv("KAFKA_LISTENER_NAME_PLAINTEXT_PLAIN_SASL_JAAS_CONFIG", jaasConfig)
+        ) {
+            kafka.start();
+
+            AdminClient adminClient = AdminClient.create(
                 ImmutableMap.of(
-                    ProducerConfig.BOOTSTRAP_SERVERS_CONFIG,
-                    bootstrapServers,
-                    ProducerConfig.CLIENT_ID_CONFIG,
-                    UUID.randomUUID().toString()
-                ),
+                    AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG,
+                    kafka.getBootstrapServers(),
+                    AdminClientConfig.SECURITY_PROTOCOL_CONFIG,
+                    "SASL_PLAINTEXT",
+                    SaslConfigs.SASL_MECHANISM,
+                    "PLAIN",
+                    SaslConfigs.SASL_JAAS_CONFIG,
+                    "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"test\" password=\"secretx\";"
+                )
+            );
+
+            String topicName = "messages-" + UUID.randomUUID();
+            Collection<NewTopic> topics = Collections.singletonList(new NewTopic(topicName, 1, (short) 1));
+
+            Awaitility
+                .await()
+                .untilAsserted(() -> {
+                    assertThatThrownBy(() -> adminClient.createTopics(topics).all().get(30, TimeUnit.SECONDS))
+                        .hasCauseInstanceOf(SaslAuthenticationException.class);
+                });
+        }
+    }
+
+    private static String getJaasConfig() {
+        String jaasConfig =
+            "org.apache.kafka.common.security.plain.PlainLoginModule required " +
+            "username=\"admin\" " +
+            "password=\"admin\" " +
+            "user_admin=\"admin\" " +
+            "user_test=\"secret\";";
+        return jaasConfig;
+    }
+
+    private void testKafkaFunctionality(String bootstrapServers) throws Exception {
+        testKafkaFunctionality(bootstrapServers, false, 1, 1);
+    }
+
+    private void testSecureKafkaFunctionality(String bootstrapServers) throws Exception {
+        testKafkaFunctionality(bootstrapServers, true, 1, 1);
+    }
+
+    private void testKafkaFunctionality(String bootstrapServers, boolean authenticated, int partitions, int rf)
+        throws Exception {
+        ImmutableMap<String, String> adminClientDefaultProperties = ImmutableMap.of(
+            AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG,
+            bootstrapServers
+        );
+        Properties adminClientProperties = new Properties();
+        adminClientProperties.putAll(adminClientDefaultProperties);
+
+        ImmutableMap<String, String> consumerDefaultProperties = ImmutableMap.of(
+            ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,
+            bootstrapServers,
+            ConsumerConfig.GROUP_ID_CONFIG,
+            "tc-" + UUID.randomUUID(),
+            ConsumerConfig.AUTO_OFFSET_RESET_CONFIG,
+            "earliest"
+        );
+        Properties consumerProperties = new Properties();
+        consumerProperties.putAll(consumerDefaultProperties);
+
+        ImmutableMap<String, String> producerDefaultProperties = ImmutableMap.of(
+            ProducerConfig.BOOTSTRAP_SERVERS_CONFIG,
+            bootstrapServers,
+            ProducerConfig.CLIENT_ID_CONFIG,
+            UUID.randomUUID().toString()
+        );
+        Properties producerProperties = new Properties();
+        producerProperties.putAll(producerDefaultProperties);
+
+        if (authenticated) {
+            adminClientProperties.putAll(this.properties);
+            consumerProperties.putAll(this.properties);
+            producerProperties.putAll(this.properties);
+        }
+        try (
+            AdminClient adminClient = AdminClient.create(adminClientProperties);
+            KafkaProducer<String, String> producer = new KafkaProducer<>(
+                producerProperties,
                 new StringSerializer(),
                 new StringSerializer()
             );
             KafkaConsumer<String, String> consumer = new KafkaConsumer<>(
-                ImmutableMap.of(
-                    ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,
-                    bootstrapServers,
-                    ConsumerConfig.GROUP_ID_CONFIG,
-                    "tc-" + UUID.randomUUID(),
-                    ConsumerConfig.AUTO_OFFSET_RESET_CONFIG,
-                    "earliest"
-                ),
+                consumerProperties,
                 new StringDeserializer(),
                 new StringDeserializer()
             );
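For readers who want to reuse this setup outside the test suite, here is a minimal sketch that pulls the container-side environment variables and the client-side SASL properties from the new tests into one self-contained example. The image tag confluentinc/cp-kafka:6.2.1, the broker JAAS string (including the user_<name>=<password> entries understood by PlainLoginModule), and the admin/admin credentials mirror the values in this commit; the SaslKafkaExample class name and the describeCluster() call are only illustrative and should be adapted to your own environment.

import com.google.common.collect.ImmutableMap;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.common.config.SaslConfigs;
import org.testcontainers.containers.KafkaContainer;
import org.testcontainers.utility.DockerImageName;

public class SaslKafkaExample {

    public static void main(String[] args) throws Exception {
        // Broker-side JAAS: "admin" is the broker/super user; each user_<name>=<password>
        // entry defines an account that clients may authenticate with.
        String jaasConfig =
            "org.apache.kafka.common.security.plain.PlainLoginModule required " +
            "username=\"admin\" " +
            "password=\"admin\" " +
            "user_admin=\"admin\" " +
            "user_test=\"secret\";";

        try (
            KafkaContainer kafka = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:6.2.1"))
                // Remap both advertised listeners from PLAINTEXT to SASL_PLAINTEXT.
                .withEnv("KAFKA_LISTENER_SECURITY_PROTOCOL_MAP", "PLAINTEXT:SASL_PLAINTEXT,BROKER:SASL_PLAINTEXT")
                .withEnv("KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL", "PLAIN")
                // Enable the PLAIN mechanism and provide the JAAS config for each listener.
                .withEnv("KAFKA_LISTENER_NAME_PLAINTEXT_SASL_ENABLED_MECHANISMS", "PLAIN")
                .withEnv("KAFKA_LISTENER_NAME_BROKER_SASL_ENABLED_MECHANISMS", "PLAIN")
                .withEnv("KAFKA_LISTENER_NAME_BROKER_PLAIN_SASL_JAAS_CONFIG", jaasConfig)
                .withEnv("KAFKA_LISTENER_NAME_PLAINTEXT_PLAIN_SASL_JAAS_CONFIG", jaasConfig)
        ) {
            kafka.start();

            // Every client (admin, producer, consumer) needs matching SASL settings.
            try (
                AdminClient adminClient = AdminClient.create(
                    ImmutableMap.of(
                        AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG,
                        kafka.getBootstrapServers(),
                        AdminClientConfig.SECURITY_PROTOCOL_CONFIG,
                        "SASL_PLAINTEXT",
                        SaslConfigs.SASL_MECHANISM,
                        "PLAIN",
                        SaslConfigs.SASL_JAAS_CONFIG,
                        "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"admin\" password=\"admin\";"
                    )
                )
            ) {
                System.out.println("cluster id: " + adminClient.describeCluster().clusterId().get());
            }
        }
    }
}

Because both the PLAINTEXT and BROKER listeners are remapped to SASL_PLAINTEXT, the inter-broker connection and external clients authenticate with the same PLAIN mechanism, which is why the commit sets the JAAS config for each listener name.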
