Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add connector management to the sasl example #332

Merged
merged 1 commit into from
Sep 17, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
38 changes: 37 additions & 1 deletion docker/sasl/docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -39,10 +39,46 @@ services:
KAFKA_MECHANISMS_INTER_BROKER_PROTOCOL: 'SASL_PLAINTEXT'
KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL: 'PLAIN'
KAFKA_ALLOW_EVERYONE_IF_NO_ACL_FOUND: 'true'
KAFKA_SUPER_USERS: 'User:kafka;User:ANONYMOUS'
KAFKA_OPTS: '-Djava.security.auth.login.config=/etc/kafka/kafka_server_jaas.conf'
KAFKA_LOG4J_LOGGERS: "kafka.authorizer.logger=DEBUG"

# Kafka Connect worker for the SASL example. Built from a local Dockerfile so
# the connector plugins used by the example are baked into the image.
kafka-connect:
  build:
    context: kafka-connect/
    dockerfile: Dockerfile
  container_name: kafka-connect
  depends_on:
    - zookeeper
    - kafka
    - schema-registry
  ports:
    # Quoted: compose port mappings should always be strings so the
    # host:container pair is never subject to YAML scalar typing.
    - '18083:18083'
  environment:
    CUB_CLASSPATH: '/usr/share/java/confluent-security/connect/*:/usr/share/java/kafka/*:/usr/share/java/cp-base-new/*'
    CLASSPATH: '/usr/share/java/kafka-connect-replicator/*:/usr/share/java/monitoring-interceptors/*'
    CONNECT_BOOTSTRAP_SERVERS: 'kafka:29092'
    # Quoted so the container receives the string "18083", not a YAML integer.
    CONNECT_REST_PORT: '18083'
    CONNECT_GROUP_ID: 'kafka-connect'
    CONNECT_CONFIG_STORAGE_TOPIC: '_kafka-connect-configs'
    CONNECT_OFFSET_STORAGE_TOPIC: '_kafka-connect-offsets'
    CONNECT_STATUS_STORAGE_TOPIC: '_kafka-connect-status'
    CONNECT_KEY_CONVERTER: 'io.confluent.connect.avro.AvroConverter'
    CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL: 'http://schema-registry:8081'
    CONNECT_VALUE_CONVERTER: 'io.confluent.connect.avro.AvroConverter'
    CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: 'http://schema-registry:8081'
    CONNECT_LOG4J_ROOT_LOGLEVEL: 'INFO'
    CONNECT_LOG4J_LOGGERS: 'org.apache.kafka.connect.runtime.rest=WARN,org.reflections=ERROR'
    # Single-broker example cluster: the Connect internal topics cannot have
    # a replication factor greater than 1.
    CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: '1'
    CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: '1'
    CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: '1'
    CONNECT_REST_ADVERTISED_HOST_NAME: 'kafka-connect'
    CONNECT_PLUGIN_PATH: '/usr/share/confluent-hub-components'
  volumes:
    - $PWD/scripts:/scripts
    - $PWD/connect-plugins:/usr/share/confluent-hub-components
    - $PWD/jars:/usr/share/confluent-hub-components/confluentinc-kafka-connect-jdbc/jars

schema-registry:
image: confluentinc/cp-schema-registry:${TAG}
hostname: schema-registry
Expand Down
8 changes: 8 additions & 0 deletions docker/sasl/kafka-connect/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
# Kafka Connect worker image for the SASL example, extended with the
# connector plugins installed via confluent-hub below.
FROM confluentinc/cp-server-connect:6.2.0

# Load plugins from both the stock java dir and the confluent-hub install dir.
ENV CONNECT_PLUGIN_PATH="/usr/share/java,/usr/share/confluent-hub-components"

# confluent-hub needs write access to the plugin directories.
USER root

# Install the datagen and JDBC connectors (pinned versions) without prompting.
RUN confluent-hub install --no-prompt confluentinc/kafka-connect-datagen:0.5.0 \
&& confluent-hub install --no-prompt confluentinc/kafka-connect-jdbc:10.2.1
2 changes: 1 addition & 1 deletion docker/sasl/kafka/kafka.properties
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
inter.broker.listener.name=SASL_PLAINTEXT
super.users=User:kafka;User:ANONYMOUS
mechanisms.inter.broker.protocol=SASL_PLAINTEXT
group.initial.rebalance.delay.ms=100
auto.create.topics.enable=true
Expand Down
9 changes: 9 additions & 0 deletions example/connectors/sink-jdbc.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
{
  "name": "sink-jdbc",
  "config": {
    "connector.class": "FileStreamSink",
    "tasks.max": "1",
    "file": "/tmp/test.txt",
    "topics": "connect-test"
  }
}
9 changes: 9 additions & 0 deletions example/connectors/source-jdbc.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
{
"name": "source-jdbc",
"config": {
"connector.class": "FileStreamSource",
"tasks.max": "1",
"file": "/tmp/test.txt",
"topic": "connect-test"
}
}
93 changes: 93 additions & 0 deletions example/descriptor-connect.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,93 @@
---
# Topology descriptor for the SASL example, exercising Kafka Connect
# artefact management alongside topic/ACL definitions.
# NOTE(review): context/company/env/source appear to form the naming-prefix
# hierarchy for topics — confirm against the topology-builder schema.
context: "context"
company: "company"
env: "env"
source: "source"
projects:
- name: "projectA"
# Principals granted consume access to this project's topics.
consumers:
- principal: "User:App0"
- principal: "User:App1"
# Principals granted produce access to this project's topics.
producers:
- principal: "User:App3"
- principal: "User:App4"
# Kafka Streams applications with explicit read/write topic lists.
streams:
- principal: "User:Streams0"
topics:
read:
- "topicA"
- "topicB"
write:
- "topicC"
- "topicD"
connectors:
# Connector instances to deploy: each artefact pairs a connector config
# file (path, relative to this descriptor) with a target Connect server
# label declared in the builder properties (platform.servers.connect.*).
artifacts:
- path: "connectors/source-jdbc.json"
server: "connect"
name: "source-jdbc"
- path: "connectors/sink-jdbc.json"
server: "connect"
name: "sink-jdbc"
# ACLs for Connect worker principals (group, internal topics, and the
# topics the workers read/write).
access_control:
- principal: "User:Connect1"
cluster_id: "foo"
group: "group"
status_topic: "status"
offset_topic: "offset"
configs_topic: "configs"
topics:
read:
- "topicA"
- "topicB"
- principal: "User:Connect2"
topics:
write:
- "topicC"
- "topicD"
topics:
# replication.factor/num.partitions are strings on purpose: quoting keeps
# them from being parsed as YAML integers.
- name: "foo"
config:
replication.factor: "1"
num.partitions: "1"
- name: "bar"
dataType: "avro"
config:
replication.factor: "1"
num.partitions: "1"
- name: "projectB"
topics:
- dataType: "avro"
name: "bar"
config:
replication.factor: "1"
num.partitions: "1"
- name: "projectC"
streams:
- principal: "User:App0"
applicationId: "streamsApplicationId"
topics:
read:
- "topicE"
write:
- "topicF"
topics:
- name: "topicE"
config:
replication.factor: "1"
num.partitions: "1"
- name: "topicF"
config:
replication.factor: "1"
num.partitions: "1"
# Platform components (schema registry, control center) and their ACL
# principals/backing topics.
platform:
schema_registry:
instances:
- principal: "User:SchemaRegistry01"
topic: "foo"
group: "bar"
- principal: "User:SchemaRegistry02"
topic: "zet"
control_center:
instances:
- principal: "User:ControlCenter"
appId: "controlcenter"
4 changes: 3 additions & 1 deletion example/topology-builder-sasl-plain.properties
Original file line number Diff line number Diff line change
Expand Up @@ -5,4 +5,6 @@ sasl.jaas.config=org.apache.kafka.common.security.plain.PlainLoginModule require
password="kafka";
#topology.validations.0=com.purbon.kafka.topology.validation.topology.CamelCaseNameFormatValidation
#topology.validations.1=com.purbon.kafka.topology.validation.topic.PartitionNumberValidation
#confluent.schema.registry.url="http://localhost:8082"
platform.servers.connect.0=connect:http://localhost:18083
topology.state.cluster.enabled=false
Original file line number Diff line number Diff line change
Expand Up @@ -64,4 +64,9 @@ public String status(String connectorName) throws IOException {
/**
 * Pauses the named connector through the Connect REST API
 * (PUT /connectors/{name}/pause).
 *
 * @param connectorName the connector to pause
 * @throws IOException propagated from the underlying PUT request
 */
public void pause(String connectorName) throws IOException {
    doPut(String.format("/connectors/%s/pause", connectorName));
}

/** Identifies this client by the Connect server it targets. */
@Override
public String toString() {
    return String.format("KConnectApiClient{%s}", server);
}
}
15 changes: 15 additions & 0 deletions src/main/java/com/purbon/kafka/topology/model/Artefact.java
Original file line number Diff line number Diff line change
Expand Up @@ -50,4 +50,19 @@ public int hashCode() {
return Objects.hash(getName().toLowerCase());
}
}

/**
 * Debug representation listing the artefact's path, server label and name.
 */
@Override
public String toString() {
    return String.format(
        "Artefact{path='%s', serverLabel='%s', name='%s'}", path, serverLabel, name);
}
}