diff --git a/airbyte-integrations/connectors/source-bigquery/acceptance-test-config.yml b/airbyte-integrations/connectors/source-bigquery/acceptance-test-config.yml
new file mode 100644
index 000000000000..d634ced570a5
--- /dev/null
+++ b/airbyte-integrations/connectors/source-bigquery/acceptance-test-config.yml
@@ -0,0 +1,7 @@
+# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference)
+# for more information about how to configure these tests
+connector_image: airbyte/source-bigquery:dev
+tests:
+ spec:
+ - spec_path: "src/test-integration/resources/expected_spec.json"
+ config_path: "src/test-integration/resources/dummy_config.json"
diff --git a/airbyte-integrations/connectors/source-bigquery/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-bigquery/acceptance-test-docker.sh
new file mode 100644
index 000000000000..ba0ab2874b98
--- /dev/null
+++ b/airbyte-integrations/connectors/source-bigquery/acceptance-test-docker.sh
@@ -0,0 +1,15 @@
+#!/usr/bin/env sh
+
+# Build latest connector image
+docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev
+
+# Pull latest acctest image
+docker pull airbyte/source-acceptance-test:latest
+
+# Run
+docker run --rm -it \
+ -v /var/run/docker.sock:/var/run/docker.sock \
+ -v /tmp:/tmp \
+ -v $(pwd):/test_input \
+ airbyte/source-acceptance-test \
+ --acceptance-test-config /test_input
diff --git a/airbyte-integrations/connectors/source-bigquery/build.gradle b/airbyte-integrations/connectors/source-bigquery/build.gradle
index be8411702ae5..5c6168f9a971 100644
--- a/airbyte-integrations/connectors/source-bigquery/build.gradle
+++ b/airbyte-integrations/connectors/source-bigquery/build.gradle
@@ -2,6 +2,7 @@ plugins {
id 'application'
id 'airbyte-docker'
id 'airbyte-integration-test-java'
+ id 'airbyte-source-acceptance-test'
}
application {
diff --git a/airbyte-integrations/connectors/source-bigquery/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-bigquery/integration_tests/acceptance.py
new file mode 100644
index 000000000000..1302b2f57e10
--- /dev/null
+++ b/airbyte-integrations/connectors/source-bigquery/integration_tests/acceptance.py
@@ -0,0 +1,16 @@
+#
+# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
+#
+
+
+import pytest
+
+pytest_plugins = ("source_acceptance_test.plugin",)
+
+
+@pytest.fixture(scope="session", autouse=True)
+def connector_setup():
+ """This fixture is a placeholder for external resources that acceptance tests might require."""
+ # TODO: set up test dependencies if needed. Otherwise, remove the TODO comments
+ yield
+ # TODO: clean up test dependencies
diff --git a/airbyte-integrations/connectors/source-bigquery/src/test-integration/resources/dummy_config.json b/airbyte-integrations/connectors/source-bigquery/src/test-integration/resources/dummy_config.json
new file mode 100644
index 000000000000..3086a588fab5
--- /dev/null
+++ b/airbyte-integrations/connectors/source-bigquery/src/test-integration/resources/dummy_config.json
@@ -0,0 +1,5 @@
+{
+ "dataset_id": "dataset",
+ "project_id": "project",
+ "credentials_json": "credentials"
+}
diff --git a/airbyte-integrations/connectors/source-bigquery/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-bigquery/src/test-integration/resources/expected_spec.json
new file mode 100644
index 000000000000..ac749a77def8
--- /dev/null
+++ b/airbyte-integrations/connectors/source-bigquery/src/test-integration/resources/expected_spec.json
@@ -0,0 +1,32 @@
+{
+ "documentationUrl": "https://docs.airbyte.com/integrations/sources/bigquery",
+ "supportsIncremental": true,
+ "supportsNormalization": true,
+ "supportsDBT": true,
+ "supported_destination_sync_modes": [],
+ "supported_sync_modes": ["overwrite", "append", "append_dedup"],
+ "connectionSpecification": {
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "BigQuery Source Spec",
+ "type": "object",
+ "required": ["project_id", "credentials_json"],
+ "properties": {
+ "project_id": {
+ "type": "string",
+ "description": "The GCP project ID for the project containing the target BigQuery dataset.",
+ "title": "Project ID"
+ },
+ "dataset_id": {
+ "type": "string",
+ "description": "The dataset ID to search for tables and views. If you are only loading data from one dataset, setting this option could result in much faster schema discovery.",
+ "title": "Default Dataset ID"
+ },
+ "credentials_json": {
+ "type": "string",
+ "description": "The contents of your Service Account Key JSON file. See the docs for more information on how to obtain this key.",
+ "title": "Credentials JSON",
+ "airbyte_secret": true
+ }
+ }
+ }
+}
diff --git a/airbyte-integrations/connectors/source-clickhouse/acceptance-test-config.yml b/airbyte-integrations/connectors/source-clickhouse/acceptance-test-config.yml
new file mode 100644
index 000000000000..c1cb0b594e5b
--- /dev/null
+++ b/airbyte-integrations/connectors/source-clickhouse/acceptance-test-config.yml
@@ -0,0 +1,7 @@
+# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference)
+# for more information about how to configure these tests
+connector_image: airbyte/source-clickhouse:dev
+tests:
+ spec:
+ - spec_path: "src/test-integration/resources/expected_spec.json"
+ config_path: "src/test-integration/resources/dummy_config.json"
diff --git a/airbyte-integrations/connectors/source-clickhouse/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-clickhouse/acceptance-test-docker.sh
new file mode 100644
index 000000000000..ba0ab2874b98
--- /dev/null
+++ b/airbyte-integrations/connectors/source-clickhouse/acceptance-test-docker.sh
@@ -0,0 +1,15 @@
+#!/usr/bin/env sh
+
+# Build latest connector image
+docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev
+
+# Pull latest acctest image
+docker pull airbyte/source-acceptance-test:latest
+
+# Run
+docker run --rm -it \
+ -v /var/run/docker.sock:/var/run/docker.sock \
+ -v /tmp:/tmp \
+ -v $(pwd):/test_input \
+ airbyte/source-acceptance-test \
+ --acceptance-test-config /test_input
diff --git a/airbyte-integrations/connectors/source-clickhouse/build.gradle b/airbyte-integrations/connectors/source-clickhouse/build.gradle
index e15b1e743883..1e9f4cc21a28 100644
--- a/airbyte-integrations/connectors/source-clickhouse/build.gradle
+++ b/airbyte-integrations/connectors/source-clickhouse/build.gradle
@@ -2,6 +2,7 @@ plugins {
id 'application'
id 'airbyte-docker'
id 'airbyte-integration-test-java'
+ id 'airbyte-source-acceptance-test'
}
application {
diff --git a/airbyte-integrations/connectors/source-clickhouse/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-clickhouse/integration_tests/acceptance.py
new file mode 100644
index 000000000000..1302b2f57e10
--- /dev/null
+++ b/airbyte-integrations/connectors/source-clickhouse/integration_tests/acceptance.py
@@ -0,0 +1,16 @@
+#
+# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
+#
+
+
+import pytest
+
+pytest_plugins = ("source_acceptance_test.plugin",)
+
+
+@pytest.fixture(scope="session", autouse=True)
+def connector_setup():
+ """This fixture is a placeholder for external resources that acceptance tests might require."""
+ # TODO: set up test dependencies if needed. Otherwise, remove the TODO comments
+ yield
+ # TODO: clean up test dependencies
diff --git a/airbyte-integrations/connectors/source-clickhouse/src/test-integration/resources/dummy_config.json b/airbyte-integrations/connectors/source-clickhouse/src/test-integration/resources/dummy_config.json
new file mode 100644
index 000000000000..49e2346d457f
--- /dev/null
+++ b/airbyte-integrations/connectors/source-clickhouse/src/test-integration/resources/dummy_config.json
@@ -0,0 +1,6 @@
+{
+ "host": "default",
+ "port": 8123,
+ "database": "default",
+ "username": "default"
+}
diff --git a/airbyte-integrations/connectors/source-clickhouse/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-clickhouse/src/test-integration/resources/expected_spec.json
new file mode 100644
index 000000000000..16411f19eccc
--- /dev/null
+++ b/airbyte-integrations/connectors/source-clickhouse/src/test-integration/resources/expected_spec.json
@@ -0,0 +1,177 @@
+{
+ "documentationUrl": "https://docs.airbyte.com/integrations/destinations/clickhouse",
+ "connectionSpecification": {
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "ClickHouse Source Spec",
+ "type": "object",
+ "required": ["host", "port", "database", "username"],
+ "properties": {
+ "host": {
+ "description": "The host endpoint of the Clickhouse cluster.",
+ "title": "Host",
+ "type": "string",
+ "order": 0
+ },
+ "port": {
+ "description": "The port of the database.",
+ "title": "Port",
+ "type": "integer",
+ "minimum": 0,
+ "maximum": 65536,
+ "default": 8123,
+ "examples": ["8123"],
+ "order": 1
+ },
+ "database": {
+ "description": "The name of the database.",
+ "title": "Database",
+ "type": "string",
+ "examples": ["default"],
+ "order": 2
+ },
+ "username": {
+ "description": "The username which is used to access the database.",
+ "title": "Username",
+ "type": "string",
+ "order": 3
+ },
+ "password": {
+ "description": "The password associated with this username.",
+ "title": "Password",
+ "type": "string",
+ "airbyte_secret": true,
+ "order": 4
+ },
+ "jdbc_url_params": {
+ "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (Eg. key1=value1&key2=value2&key3=value3). For more information read about JDBC URL parameters.",
+ "title": "JDBC URL Parameters (Advanced)",
+ "type": "string",
+ "order": 5
+ },
+ "ssl": {
+ "title": "SSL Connection",
+ "description": "Encrypt data using SSL.",
+ "type": "boolean",
+ "default": true,
+ "order": 6
+ },
+ "tunnel_method": {
+ "type": "object",
+ "title": "SSH Tunnel Method",
+ "description": "Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.",
+ "oneOf": [
+ {
+ "title": "No Tunnel",
+ "required": ["tunnel_method"],
+ "properties": {
+ "tunnel_method": {
+ "description": "No ssh tunnel needed to connect to database",
+ "type": "string",
+ "const": "NO_TUNNEL",
+ "order": 0
+ }
+ }
+ },
+ {
+ "title": "SSH Key Authentication",
+ "required": [
+ "tunnel_method",
+ "tunnel_host",
+ "tunnel_port",
+ "tunnel_user",
+ "ssh_key"
+ ],
+ "properties": {
+ "tunnel_method": {
+ "description": "Connect through a jump server tunnel host using username and ssh key",
+ "type": "string",
+ "const": "SSH_KEY_AUTH",
+ "order": 0
+ },
+ "tunnel_host": {
+ "title": "SSH Tunnel Jump Server Host",
+ "description": "Hostname of the jump server host that allows inbound ssh tunnel.",
+ "type": "string",
+ "order": 1
+ },
+ "tunnel_port": {
+ "title": "SSH Connection Port",
+ "description": "Port on the proxy/jump server that accepts inbound ssh connections.",
+ "type": "integer",
+ "minimum": 0,
+ "maximum": 65536,
+ "default": 22,
+ "examples": ["22"],
+ "order": 2
+ },
+ "tunnel_user": {
+ "title": "SSH Login Username",
+ "description": "OS-level username for logging into the jump server host.",
+ "type": "string",
+ "order": 3
+ },
+ "ssh_key": {
+ "title": "SSH Private Key",
+ "description": "OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )",
+ "type": "string",
+ "airbyte_secret": true,
+ "multiline": true,
+ "order": 4
+ }
+ }
+ },
+ {
+ "title": "Password Authentication",
+ "required": [
+ "tunnel_method",
+ "tunnel_host",
+ "tunnel_port",
+ "tunnel_user",
+ "tunnel_user_password"
+ ],
+ "properties": {
+ "tunnel_method": {
+ "description": "Connect through a jump server tunnel host using username and password authentication",
+ "type": "string",
+ "const": "SSH_PASSWORD_AUTH",
+ "order": 0
+ },
+ "tunnel_host": {
+ "title": "SSH Tunnel Jump Server Host",
+ "description": "Hostname of the jump server host that allows inbound ssh tunnel.",
+ "type": "string",
+ "order": 1
+ },
+ "tunnel_port": {
+ "title": "SSH Connection Port",
+ "description": "Port on the proxy/jump server that accepts inbound ssh connections.",
+ "type": "integer",
+ "minimum": 0,
+ "maximum": 65536,
+ "default": 22,
+ "examples": ["22"],
+ "order": 2
+ },
+ "tunnel_user": {
+ "title": "SSH Login Username",
+ "description": "OS-level username for logging into the jump server host",
+ "type": "string",
+ "order": 3
+ },
+ "tunnel_user_password": {
+ "title": "Password",
+ "description": "OS-level password for logging into the jump server host",
+ "type": "string",
+ "airbyte_secret": true,
+ "order": 4
+ }
+ }
+ }
+ ]
+ }
+ }
+ },
+ "supportsNormalization": false,
+ "supportsDBT": false,
+ "supported_destination_sync_modes": []
+}
diff --git a/airbyte-integrations/connectors/source-cockroachdb/acceptance-test-config.yml b/airbyte-integrations/connectors/source-cockroachdb/acceptance-test-config.yml
new file mode 100644
index 000000000000..be8bdf914af3
--- /dev/null
+++ b/airbyte-integrations/connectors/source-cockroachdb/acceptance-test-config.yml
@@ -0,0 +1,7 @@
+# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference)
+# for more information about how to configure these tests
+connector_image: airbyte/source-cockroachdb:dev
+tests:
+ spec:
+ - spec_path: "src/test-integration/resources/expected_spec.json"
+ config_path: "src/test-integration/resources/dummy_config.json"
diff --git a/airbyte-integrations/connectors/source-cockroachdb/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-cockroachdb/acceptance-test-docker.sh
new file mode 100644
index 000000000000..ba0ab2874b98
--- /dev/null
+++ b/airbyte-integrations/connectors/source-cockroachdb/acceptance-test-docker.sh
@@ -0,0 +1,15 @@
+#!/usr/bin/env sh
+
+# Build latest connector image
+docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev
+
+# Pull latest acctest image
+docker pull airbyte/source-acceptance-test:latest
+
+# Run
+docker run --rm -it \
+ -v /var/run/docker.sock:/var/run/docker.sock \
+ -v /tmp:/tmp \
+ -v $(pwd):/test_input \
+ airbyte/source-acceptance-test \
+ --acceptance-test-config /test_input
diff --git a/airbyte-integrations/connectors/source-cockroachdb/build.gradle b/airbyte-integrations/connectors/source-cockroachdb/build.gradle
index 1a73d68242aa..67e77014637f 100644
--- a/airbyte-integrations/connectors/source-cockroachdb/build.gradle
+++ b/airbyte-integrations/connectors/source-cockroachdb/build.gradle
@@ -2,6 +2,7 @@ plugins {
id 'application'
id 'airbyte-docker'
id 'airbyte-integration-test-java'
+ id 'airbyte-source-acceptance-test'
}
application {
diff --git a/airbyte-integrations/connectors/source-cockroachdb/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-cockroachdb/integration_tests/acceptance.py
new file mode 100644
index 000000000000..1302b2f57e10
--- /dev/null
+++ b/airbyte-integrations/connectors/source-cockroachdb/integration_tests/acceptance.py
@@ -0,0 +1,16 @@
+#
+# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
+#
+
+
+import pytest
+
+pytest_plugins = ("source_acceptance_test.plugin",)
+
+
+@pytest.fixture(scope="session", autouse=True)
+def connector_setup():
+ """This fixture is a placeholder for external resources that acceptance tests might require."""
+ # TODO: set up test dependencies if needed. Otherwise, remove the TODO comments
+ yield
+ # TODO: clean up test dependencies
diff --git a/airbyte-integrations/connectors/source-cockroachdb/src/test-integration/resources/dummy_config.json b/airbyte-integrations/connectors/source-cockroachdb/src/test-integration/resources/dummy_config.json
new file mode 100644
index 000000000000..49e2346d457f
--- /dev/null
+++ b/airbyte-integrations/connectors/source-cockroachdb/src/test-integration/resources/dummy_config.json
@@ -0,0 +1,6 @@
+{
+ "host": "default",
+ "port": 8123,
+ "database": "default",
+ "username": "default"
+}
diff --git a/airbyte-integrations/connectors/source-cockroachdb/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-cockroachdb/src/test-integration/resources/expected_spec.json
new file mode 100644
index 000000000000..953149a5ef73
--- /dev/null
+++ b/airbyte-integrations/connectors/source-cockroachdb/src/test-integration/resources/expected_spec.json
@@ -0,0 +1,62 @@
+{
+ "documentationUrl": "https://docs.airbyte.com/integrations/sources/cockroachdb",
+ "connectionSpecification": {
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "Cockroach Source Spec",
+ "type": "object",
+ "required": ["host", "port", "database", "username"],
+ "properties": {
+ "host": {
+ "title": "Host",
+ "description": "Hostname of the database.",
+ "type": "string",
+ "order": 0
+ },
+ "port": {
+ "title": "Port",
+ "description": "Port of the database.",
+ "type": "integer",
+ "minimum": 0,
+ "maximum": 65536,
+ "default": 5432,
+ "examples": ["5432"],
+ "order": 1
+ },
+ "database": {
+ "title": "DB Name",
+ "description": "Name of the database.",
+ "type": "string",
+ "order": 2
+ },
+ "username": {
+ "title": "User",
+ "description": "Username to use to access the database.",
+ "type": "string",
+ "order": 3
+ },
+ "password": {
+ "title": "Password",
+ "description": "Password associated with the username.",
+ "type": "string",
+ "airbyte_secret": true,
+ "order": 4
+ },
+ "jdbc_url_params": {
+ "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (Eg. key1=value1&key2=value2&key3=value3). For more information read about JDBC URL parameters.",
+ "title": "JDBC URL Parameters (Advanced)",
+ "type": "string",
+ "order": 5
+ },
+ "ssl": {
+ "title": "Connect using SSL",
+ "description": "Encrypt client/server communications for increased security.",
+ "type": "boolean",
+ "default": false,
+ "order": 6
+ }
+ }
+ },
+ "supportsNormalization": false,
+ "supportsDBT": false,
+ "supported_destination_sync_modes": []
+}
diff --git a/airbyte-integrations/connectors/source-db2/acceptance-test-config.yml b/airbyte-integrations/connectors/source-db2/acceptance-test-config.yml
index 5106bfe080ca..a0707f64ca70 100644
--- a/airbyte-integrations/connectors/source-db2/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-db2/acceptance-test-config.yml
@@ -3,4 +3,5 @@
connector_image: airbyte/source-db2:dev
tests:
spec:
- - spec_path: "src/main/resources/spec.json"
+ - spec_path: "src/test-integration/resources/expected_spec.json"
+ config_path: "src/test-integration/resources/dummy_config.json"
diff --git a/airbyte-integrations/connectors/source-db2/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-db2/acceptance-test-docker.sh
new file mode 100644
index 000000000000..ba0ab2874b98
--- /dev/null
+++ b/airbyte-integrations/connectors/source-db2/acceptance-test-docker.sh
@@ -0,0 +1,15 @@
+#!/usr/bin/env sh
+
+# Build latest connector image
+docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev
+
+# Pull latest acctest image
+docker pull airbyte/source-acceptance-test:latest
+
+# Run
+docker run --rm -it \
+ -v /var/run/docker.sock:/var/run/docker.sock \
+ -v /tmp:/tmp \
+ -v $(pwd):/test_input \
+ airbyte/source-acceptance-test \
+ --acceptance-test-config /test_input
diff --git a/airbyte-integrations/connectors/source-db2/build.gradle b/airbyte-integrations/connectors/source-db2/build.gradle
index 34d5d340fdc2..6798912d60f7 100644
--- a/airbyte-integrations/connectors/source-db2/build.gradle
+++ b/airbyte-integrations/connectors/source-db2/build.gradle
@@ -2,6 +2,7 @@ plugins {
id 'application'
id 'airbyte-docker'
id 'airbyte-integration-test-java'
+ id 'airbyte-source-acceptance-test'
}
application {
diff --git a/airbyte-integrations/connectors/source-db2/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-db2/integration_tests/acceptance.py
new file mode 100644
index 000000000000..1302b2f57e10
--- /dev/null
+++ b/airbyte-integrations/connectors/source-db2/integration_tests/acceptance.py
@@ -0,0 +1,16 @@
+#
+# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
+#
+
+
+import pytest
+
+pytest_plugins = ("source_acceptance_test.plugin",)
+
+
+@pytest.fixture(scope="session", autouse=True)
+def connector_setup():
+ """This fixture is a placeholder for external resources that acceptance tests might require."""
+ # TODO: set up test dependencies if needed. Otherwise, remove the TODO comments
+ yield
+ # TODO: clean up test dependencies
diff --git a/airbyte-integrations/connectors/source-db2/src/main/resources/spec.json b/airbyte-integrations/connectors/source-db2/src/main/resources/spec.json
index 45235a6e0cd6..092944df3c4a 100644
--- a/airbyte-integrations/connectors/source-db2/src/main/resources/spec.json
+++ b/airbyte-integrations/connectors/source-db2/src/main/resources/spec.json
@@ -56,9 +56,7 @@
"properties": {
"encryption_method": {
"type": "string",
- "const": "unencrypted",
- "enum": ["unencrypted"],
- "default": "unencrypted"
+ "const": "unencrypted"
}
}
},
@@ -69,9 +67,7 @@
"properties": {
"encryption_method": {
"type": "string",
- "const": "encrypted_verify_certificate",
- "enum": ["encrypted_verify_certificate"],
- "default": "encrypted_verify_certificate"
+ "const": "encrypted_verify_certificate"
},
"ssl_certificate": {
"title": "SSL PEM file",
diff --git a/airbyte-integrations/connectors/source-db2/src/test-integration/resources/dummy_config.json b/airbyte-integrations/connectors/source-db2/src/test-integration/resources/dummy_config.json
new file mode 100644
index 000000000000..47e237f97604
--- /dev/null
+++ b/airbyte-integrations/connectors/source-db2/src/test-integration/resources/dummy_config.json
@@ -0,0 +1,11 @@
+{
+ "host": "hhh",
+ "port": 8123,
+ "db": "ddd",
+ "username": "uuu",
+ "password": "ppp",
+ "encryption": {
+ "encryption_method": "encrypted_verify_certificate",
+ "ssl_certificate": "sss"
+ }
+}
diff --git a/airbyte-integrations/connectors/source-db2/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-db2/src/test-integration/resources/expected_spec.json
new file mode 100644
index 000000000000..a7a07e530389
--- /dev/null
+++ b/airbyte-integrations/connectors/source-db2/src/test-integration/resources/expected_spec.json
@@ -0,0 +1,94 @@
+{
+ "documentationUrl": "https://docs.airbyte.com/integrations/sources/db2",
+ "connectionSpecification": {
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "IBM Db2 Source Spec",
+ "type": "object",
+ "required": ["host", "port", "db", "username", "password", "encryption"],
+ "properties": {
+ "host": {
+ "description": "Host of the Db2.",
+ "type": "string",
+ "order": 0
+ },
+ "port": {
+ "description": "Port of the database.",
+ "type": "integer",
+ "minimum": 0,
+ "maximum": 65536,
+ "default": 8123,
+ "examples": ["8123"],
+ "order": 1
+ },
+ "db": {
+ "description": "Name of the database.",
+ "type": "string",
+ "examples": ["default"],
+ "order": 2
+ },
+ "username": {
+ "description": "Username to use to access the database.",
+ "type": "string",
+ "order": 3
+ },
+ "password": {
+ "description": "Password associated with the username.",
+ "type": "string",
+ "airbyte_secret": true,
+ "order": 4
+ },
+ "jdbc_url_params": {
+ "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).",
+ "title": "JDBC URL Params",
+ "type": "string",
+ "order": 5
+ },
+ "encryption": {
+ "title": "Encryption",
+ "type": "object",
+ "description": "Encryption method to use when communicating with the database",
+ "order": 6,
+ "oneOf": [
+ {
+ "title": "Unencrypted",
+ "description": "Data transfer will not be encrypted.",
+ "required": ["encryption_method"],
+ "properties": {
+ "encryption_method": {
+ "type": "string",
+ "const": "unencrypted"
+ }
+ }
+ },
+ {
+ "title": "TLS Encrypted (verify certificate)",
+ "description": "Verify and use the cert provided by the server.",
+ "required": ["encryption_method", "ssl_certificate"],
+ "properties": {
+ "encryption_method": {
+ "type": "string",
+ "const": "encrypted_verify_certificate"
+ },
+ "ssl_certificate": {
+ "title": "SSL PEM file",
+ "description": "Privacy Enhanced Mail (PEM) files are concatenated certificate containers frequently used in certificate installations",
+ "type": "string",
+ "airbyte_secret": true,
+ "multiline": true
+ },
+ "key_store_password": {
+ "title": "Key Store Password. This field is optional. If you do not fill in this field, the password will be randomly generated.",
+ "description": "Key Store Password",
+ "type": "string",
+ "airbyte_secret": true
+ }
+ }
+ }
+ ]
+ }
+ }
+ },
+ "supportsNormalization": false,
+ "supportsDBT": false,
+ "supported_destination_sync_modes": []
+}
diff --git a/airbyte-integrations/connectors/source-elasticsearch/acceptance-test-config.yml b/airbyte-integrations/connectors/source-elasticsearch/acceptance-test-config.yml
index 7f499fdefc40..73ee3ce32d37 100644
--- a/airbyte-integrations/connectors/source-elasticsearch/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-elasticsearch/acceptance-test-config.yml
@@ -1,6 +1,7 @@
# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference)
# for more information about how to configure these tests
-connector_image: airbyte/source-elasticsearch
+connector_image: airbyte/source-elasticsearch:dev
tests:
spec:
- - spec_path: "src/test/resources/expected_spec.json"
+ - spec_path: "src/test-integration/resources/expected_spec.json"
+ config_path: "src/test-integration/resources/dummy_config.json"
diff --git a/airbyte-integrations/connectors/source-elasticsearch/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-elasticsearch/acceptance-test-docker.sh
new file mode 100644
index 000000000000..ba0ab2874b98
--- /dev/null
+++ b/airbyte-integrations/connectors/source-elasticsearch/acceptance-test-docker.sh
@@ -0,0 +1,15 @@
+#!/usr/bin/env sh
+
+# Build latest connector image
+docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev
+
+# Pull latest acctest image
+docker pull airbyte/source-acceptance-test:latest
+
+# Run
+docker run --rm -it \
+ -v /var/run/docker.sock:/var/run/docker.sock \
+ -v /tmp:/tmp \
+ -v $(pwd):/test_input \
+ airbyte/source-acceptance-test \
+ --acceptance-test-config /test_input
diff --git a/airbyte-integrations/connectors/source-elasticsearch/build.gradle b/airbyte-integrations/connectors/source-elasticsearch/build.gradle
index dbd991036297..fff8a6d3fe05 100644
--- a/airbyte-integrations/connectors/source-elasticsearch/build.gradle
+++ b/airbyte-integrations/connectors/source-elasticsearch/build.gradle
@@ -2,6 +2,7 @@ plugins {
id 'application'
id 'airbyte-docker'
id 'airbyte-integration-test-java'
+ id 'airbyte-source-acceptance-test'
}
application {
diff --git a/airbyte-integrations/connectors/source-elasticsearch/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-elasticsearch/integration_tests/acceptance.py
new file mode 100644
index 000000000000..1302b2f57e10
--- /dev/null
+++ b/airbyte-integrations/connectors/source-elasticsearch/integration_tests/acceptance.py
@@ -0,0 +1,16 @@
+#
+# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
+#
+
+
+import pytest
+
+pytest_plugins = ("source_acceptance_test.plugin",)
+
+
+@pytest.fixture(scope="session", autouse=True)
+def connector_setup():
+ """This fixture is a placeholder for external resources that acceptance tests might require."""
+ # TODO: set up test dependencies if needed. Otherwise, remove the TODO comments
+ yield
+ # TODO: clean up test dependencies
diff --git a/airbyte-integrations/connectors/source-elasticsearch/src/main/resources/spec.json b/airbyte-integrations/connectors/source-elasticsearch/src/main/resources/spec.json
index fba748601133..71ad637a0c4f 100644
--- a/airbyte-integrations/connectors/source-elasticsearch/src/main/resources/spec.json
+++ b/airbyte-integrations/connectors/source-elasticsearch/src/main/resources/spec.json
@@ -5,7 +5,7 @@
"title": "Elasticsearch Connection Configuration",
"type": "object",
"required": ["endpoint"],
- "additionalProperties": false,
+ "additionalProperties": true,
"properties": {
"endpoint": {
"title": "Server Endpoint",
@@ -19,7 +19,7 @@
"oneOf": [
{
"title": "None",
- "additionalProperties": false,
+ "additionalProperties": true,
"description": "No authentication will be used",
"required": ["method"],
"properties": {
@@ -31,7 +31,7 @@
},
{
"title": "Api Key/Secret",
- "additionalProperties": false,
+ "additionalProperties": true,
"description": "Use a api key and secret combination to authenticate",
"required": ["method", "apiKeyId", "apiKeySecret"],
"properties": {
@@ -54,7 +54,7 @@
},
{
"title": "Username/Password",
- "additionalProperties": false,
+ "additionalProperties": true,
"description": "Basic auth header with a username and password",
"required": ["method", "username", "password"],
"properties": {
diff --git a/airbyte-integrations/connectors/source-elasticsearch/src/test-integration/resources/dummy_config.json b/airbyte-integrations/connectors/source-elasticsearch/src/test-integration/resources/dummy_config.json
new file mode 100644
index 000000000000..c945791cea37
--- /dev/null
+++ b/airbyte-integrations/connectors/source-elasticsearch/src/test-integration/resources/dummy_config.json
@@ -0,0 +1,3 @@
+{
+ "endpoint": "default"
+}
diff --git a/airbyte-integrations/connectors/source-elasticsearch/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-elasticsearch/src/test-integration/resources/expected_spec.json
new file mode 100644
index 000000000000..a6245dc39f1c
--- /dev/null
+++ b/airbyte-integrations/connectors/source-elasticsearch/src/test-integration/resources/expected_spec.json
@@ -0,0 +1,85 @@
+{
+ "documentationUrl": "https://docs.airbyte.com/integrations/source/elasticsearch",
+ "connectionSpecification": {
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "Elasticsearch Connection Configuration",
+ "type": "object",
+ "required": ["endpoint"],
+ "additionalProperties": true,
+ "properties": {
+ "endpoint": {
+ "title": "Server Endpoint",
+ "type": "string",
+ "description": "The full url of the Elasticsearch server"
+ },
+ "authenticationMethod": {
+ "title": "Authentication Method",
+ "type": "object",
+ "description": "The type of authentication to be used",
+ "oneOf": [
+ {
+ "title": "None",
+ "additionalProperties": true,
+ "description": "No authentication will be used",
+ "required": ["method"],
+ "properties": {
+ "method": {
+ "type": "string",
+ "const": "none"
+ }
+ }
+ },
+ {
+ "title": "Api Key/Secret",
+ "additionalProperties": true,
+ "description": "Use a api key and secret combination to authenticate",
+ "required": ["method", "apiKeyId", "apiKeySecret"],
+ "properties": {
+ "method": {
+ "type": "string",
+ "const": "secret"
+ },
+ "apiKeyId": {
+ "title": "API Key ID",
+ "description": "The Key ID to used when accessing an enterprise Elasticsearch instance.",
+ "type": "string"
+ },
+ "apiKeySecret": {
+ "title": "API Key Secret",
+ "description": "The secret associated with the API Key ID.",
+ "type": "string",
+ "airbyte_secret": true
+ }
+ }
+ },
+ {
+ "title": "Username/Password",
+ "additionalProperties": true,
+ "description": "Basic auth header with a username and password",
+ "required": ["method", "username", "password"],
+ "properties": {
+ "method": {
+ "type": "string",
+ "const": "basic"
+ },
+ "username": {
+ "title": "Username",
+ "description": "Basic auth username to access a secure Elasticsearch server",
+ "type": "string"
+ },
+ "password": {
+ "title": "Password",
+ "description": "Basic auth password to access a secure Elasticsearch server",
+ "type": "string",
+ "airbyte_secret": true
+ }
+ }
+ }
+ ]
+ }
+ }
+ },
+ "supportsNormalization": false,
+ "supportsDBT": false,
+ "supported_destination_sync_modes": []
+}
diff --git a/airbyte-integrations/connectors/source-elasticsearch/src/test/resources/expected_spec.json b/airbyte-integrations/connectors/source-elasticsearch/src/test/resources/expected_spec.json
index fba748601133..71ad637a0c4f 100644
--- a/airbyte-integrations/connectors/source-elasticsearch/src/test/resources/expected_spec.json
+++ b/airbyte-integrations/connectors/source-elasticsearch/src/test/resources/expected_spec.json
@@ -5,7 +5,7 @@
"title": "Elasticsearch Connection Configuration",
"type": "object",
"required": ["endpoint"],
- "additionalProperties": false,
+ "additionalProperties": true,
"properties": {
"endpoint": {
"title": "Server Endpoint",
@@ -19,7 +19,7 @@
"oneOf": [
{
"title": "None",
- "additionalProperties": false,
+ "additionalProperties": true,
"description": "No authentication will be used",
"required": ["method"],
"properties": {
@@ -31,7 +31,7 @@
},
{
"title": "Api Key/Secret",
- "additionalProperties": false,
+ "additionalProperties": true,
"description": "Use a api key and secret combination to authenticate",
"required": ["method", "apiKeyId", "apiKeySecret"],
"properties": {
@@ -54,7 +54,7 @@
},
{
"title": "Username/Password",
- "additionalProperties": false,
+ "additionalProperties": true,
"description": "Basic auth header with a username and password",
"required": ["method", "username", "password"],
"properties": {
diff --git a/airbyte-integrations/connectors/source-jdbc/acceptance-test-config.yml b/airbyte-integrations/connectors/source-jdbc/acceptance-test-config.yml
new file mode 100644
index 000000000000..9a4392c5347b
--- /dev/null
+++ b/airbyte-integrations/connectors/source-jdbc/acceptance-test-config.yml
@@ -0,0 +1,7 @@
+# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference)
+# for more information about how to configure these tests
+connector_image: airbyte/source-jdbc:dev
+tests:
+ spec:
+ - spec_path: "src/test-integration/resources/expected_spec.json"
+ config_path: "src/test-integration/resources/dummy_config.json"
diff --git a/airbyte-integrations/connectors/source-jdbc/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-jdbc/acceptance-test-docker.sh
new file mode 100644
index 000000000000..ba0ab2874b98
--- /dev/null
+++ b/airbyte-integrations/connectors/source-jdbc/acceptance-test-docker.sh
@@ -0,0 +1,15 @@
+#!/usr/bin/env sh
+
+# Build latest connector image
+docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev
+
+# Pull latest acctest image
+docker pull airbyte/source-acceptance-test:latest
+
+# Run
+docker run --rm -it \
+ -v /var/run/docker.sock:/var/run/docker.sock \
+ -v /tmp:/tmp \
+ -v $(pwd):/test_input \
+ airbyte/source-acceptance-test \
+ --acceptance-test-config /test_input
diff --git a/airbyte-integrations/connectors/source-jdbc/build.gradle b/airbyte-integrations/connectors/source-jdbc/build.gradle
index 2e9393f32335..4daa6755626b 100644
--- a/airbyte-integrations/connectors/source-jdbc/build.gradle
+++ b/airbyte-integrations/connectors/source-jdbc/build.gradle
@@ -2,6 +2,7 @@ plugins {
id 'application'
id 'airbyte-docker'
id 'airbyte-integration-test-java'
+ id 'airbyte-source-acceptance-test'
id "java-library"
// https://docs.gradle.org/current/userguide/java_testing.html#sec:java_test_fixtures
id "java-test-fixtures"
diff --git a/airbyte-integrations/connectors/source-jdbc/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-jdbc/integration_tests/acceptance.py
new file mode 100644
index 000000000000..1302b2f57e10
--- /dev/null
+++ b/airbyte-integrations/connectors/source-jdbc/integration_tests/acceptance.py
@@ -0,0 +1,16 @@
+#
+# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
+#
+
+
+import pytest
+
+pytest_plugins = ("source_acceptance_test.plugin",)
+
+
+@pytest.fixture(scope="session", autouse=True)
+def connector_setup():
+ """This fixture is a placeholder for external resources that acceptance test might require."""
+ # TODO: setup test dependencies if needed. otherwise remove the TODO comments
+ yield
+ # TODO: clean up test dependencies
diff --git a/airbyte-integrations/connectors/source-jdbc/src/test-integration/resources/dummy_config.json b/airbyte-integrations/connectors/source-jdbc/src/test-integration/resources/dummy_config.json
new file mode 100644
index 000000000000..892b30269c60
--- /dev/null
+++ b/airbyte-integrations/connectors/source-jdbc/src/test-integration/resources/dummy_config.json
@@ -0,0 +1,4 @@
+{
+ "username": "default",
+ "jdbc_url": "default"
+}
diff --git a/airbyte-integrations/connectors/source-jdbc/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-jdbc/src/test-integration/resources/expected_spec.json
new file mode 100644
index 000000000000..95e6b354ddcb
--- /dev/null
+++ b/airbyte-integrations/connectors/source-jdbc/src/test-integration/resources/expected_spec.json
@@ -0,0 +1,35 @@
+{
+ "documentationUrl": "https://docs.airbyte.com/integrations/sources/postgres",
+ "connectionSpecification": {
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "JDBC Source Spec",
+ "type": "object",
+ "required": ["username", "jdbc_url"],
+ "properties": {
+ "username": {
+ "title": "Username",
+ "description": "The username which is used to access the database.",
+ "type": "string"
+ },
+ "password": {
+ "title": "Password",
+ "description": "The password associated with this username.",
+ "type": "string",
+ "airbyte_secret": true
+ },
+ "jdbc_url": {
+ "title": "JDBC URL",
+ "description": "JDBC formatted URL. See the standard here.",
+ "type": "string"
+ },
+ "jdbc_url_params": {
+ "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).",
+ "title": "JDBC URL Params",
+ "type": "string"
+ }
+ }
+ },
+ "supportsNormalization": false,
+ "supportsDBT": false,
+ "supported_destination_sync_modes": []
+}
diff --git a/airbyte-integrations/connectors/source-kafka/acceptance-test-config.yml b/airbyte-integrations/connectors/source-kafka/acceptance-test-config.yml
new file mode 100644
index 000000000000..6bebc5793b0a
--- /dev/null
+++ b/airbyte-integrations/connectors/source-kafka/acceptance-test-config.yml
@@ -0,0 +1,7 @@
+# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference)
+# for more information about how to configure these tests
+connector_image: airbyte/source-kafka:dev
+tests:
+ spec:
+ - spec_path: "src/test-integration/resources/expected_spec.json"
+ config_path: "src/test-integration/resources/dummy_config.json"
\ No newline at end of file
diff --git a/airbyte-integrations/connectors/source-kafka/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-kafka/acceptance-test-docker.sh
new file mode 100644
index 000000000000..ba0ab2874b98
--- /dev/null
+++ b/airbyte-integrations/connectors/source-kafka/acceptance-test-docker.sh
@@ -0,0 +1,15 @@
+#!/usr/bin/env sh
+
+# Build latest connector image
+docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev
+
+# Pull latest acctest image
+docker pull airbyte/source-acceptance-test:latest
+
+# Run
+docker run --rm -it \
+ -v /var/run/docker.sock:/var/run/docker.sock \
+ -v /tmp:/tmp \
+ -v $(pwd):/test_input \
+ airbyte/source-acceptance-test \
+ --acceptance-test-config /test_input
diff --git a/airbyte-integrations/connectors/source-kafka/build.gradle b/airbyte-integrations/connectors/source-kafka/build.gradle
index 6ef80b0db86e..d06782d15150 100644
--- a/airbyte-integrations/connectors/source-kafka/build.gradle
+++ b/airbyte-integrations/connectors/source-kafka/build.gradle
@@ -2,6 +2,7 @@ plugins {
id 'application'
id 'airbyte-docker'
id 'airbyte-integration-test-java'
+ id 'airbyte-source-acceptance-test'
}
application {
diff --git a/airbyte-integrations/connectors/source-kafka/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-kafka/integration_tests/acceptance.py
new file mode 100644
index 000000000000..1302b2f57e10
--- /dev/null
+++ b/airbyte-integrations/connectors/source-kafka/integration_tests/acceptance.py
@@ -0,0 +1,16 @@
+#
+# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
+#
+
+
+import pytest
+
+pytest_plugins = ("source_acceptance_test.plugin",)
+
+
+@pytest.fixture(scope="session", autouse=True)
+def connector_setup():
+ """This fixture is a placeholder for external resources that acceptance test might require."""
+ # TODO: setup test dependencies if needed. otherwise remove the TODO comments
+ yield
+ # TODO: clean up test dependencies
diff --git a/airbyte-integrations/connectors/source-kafka/src/main/resources/spec.json b/airbyte-integrations/connectors/source-kafka/src/main/resources/spec.json
index 60ddd5e0c343..5a0bdcbcb8c0 100644
--- a/airbyte-integrations/connectors/source-kafka/src/main/resources/spec.json
+++ b/airbyte-integrations/connectors/source-kafka/src/main/resources/spec.json
@@ -9,7 +9,7 @@
"title": "Kafka Source Spec",
"type": "object",
"required": ["bootstrap_servers", "subscription", "protocol"],
- "additionalProperties": false,
+ "additionalProperties": true,
"properties": {
"MessageFormat": {
"title": "MessageFormat",
@@ -21,8 +21,7 @@
"properties": {
"deserialization_type": {
"type": "string",
- "enum": ["JSON"],
- "default": "JSON"
+ "const": "JSON"
}
}
},
@@ -30,9 +29,7 @@
"title": "AVRO",
"properties": {
"deserialization_type": {
- "type": "string",
- "enum": ["AVRO"],
- "default": "AVRO"
+ "const": "AVRO"
},
"deserialization_strategy": {
"type": "string",
@@ -77,9 +74,7 @@
"subscription_type": {
"description": "Manually assign a list of partitions to this consumer. This interface does not allow for incremental assignment and will replace the previous assignment (if there is one).\nIf the given list of topic partitions is empty, it is treated the same as unsubscribe().",
"type": "string",
- "const": "assign",
- "enum": ["assign"],
- "default": "assign"
+ "const": "assign"
},
"topic_partitions": {
"title": "List of topic:partition Pairs",
@@ -95,9 +90,7 @@
"subscription_type": {
"description": "The Topic pattern from which the records will be read.",
"type": "string",
- "const": "subscribe",
- "enum": ["subscribe"],
- "default": "subscribe"
+ "const": "subscribe"
},
"topic_pattern": {
"title": "Topic Pattern",
@@ -143,8 +136,7 @@
"properties": {
"security_protocol": {
"type": "string",
- "enum": ["PLAINTEXT"],
- "default": "PLAINTEXT"
+ "const": "PLAINTEXT"
}
}
},
@@ -158,15 +150,13 @@
"properties": {
"security_protocol": {
"type": "string",
- "enum": ["SASL_PLAINTEXT"],
- "default": "SASL_PLAINTEXT"
+ "const": "SASL_PLAINTEXT"
},
"sasl_mechanism": {
"title": "SASL Mechanism",
"description": "The SASL mechanism used for client connections. This may be any mechanism for which a security provider is available.",
"type": "string",
- "default": "PLAIN",
- "enum": ["PLAIN"]
+ "const": "PLAIN"
},
"sasl_jaas_config": {
"title": "SASL JAAS Config",
@@ -187,8 +177,7 @@
"properties": {
"security_protocol": {
"type": "string",
- "enum": ["SASL_SSL"],
- "default": "SASL_SSL"
+ "const": "SASL_SSL"
},
"sasl_mechanism": {
"title": "SASL Mechanism",
diff --git a/airbyte-integrations/connectors/source-kafka/src/test-integration/resources/dummy_config.json b/airbyte-integrations/connectors/source-kafka/src/test-integration/resources/dummy_config.json
new file mode 100644
index 000000000000..520d509e9154
--- /dev/null
+++ b/airbyte-integrations/connectors/source-kafka/src/test-integration/resources/dummy_config.json
@@ -0,0 +1,10 @@
+{
+ "bootstrap_servers": "default",
+ "subscription": {
+ "subscription_type": "assign",
+ "topic_partitions": "default"
+ },
+ "protocol": {
+ "security_protocol": "PLAINTEXT"
+ }
+}
diff --git a/airbyte-integrations/connectors/source-kafka/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-kafka/src/test-integration/resources/expected_spec.json
new file mode 100644
index 000000000000..3eae6e008ad7
--- /dev/null
+++ b/airbyte-integrations/connectors/source-kafka/src/test-integration/resources/expected_spec.json
@@ -0,0 +1,275 @@
+{
+ "documentationUrl": "https://docs.airbyte.com/integrations/sources/kafka",
+ "connectionSpecification": {
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "Kafka Source Spec",
+ "type": "object",
+ "required": ["bootstrap_servers", "subscription", "protocol"],
+ "additionalProperties": true,
+ "properties": {
+ "MessageFormat": {
+ "title": "MessageFormat",
+ "type": "object",
+ "description": "The serialization used based on this ",
+ "oneOf": [
+ {
+ "title": "JSON",
+ "properties": {
+ "deserialization_type": {
+ "type": "string",
+ "const": "JSON"
+ }
+ }
+ },
+ {
+ "title": "AVRO",
+ "properties": {
+ "deserialization_type": {
+ "const": "AVRO"
+ },
+ "deserialization_strategy": {
+ "type": "string",
+ "enum": [
+ "TopicNameStrategy",
+ "RecordNameStrategy",
+ "TopicRecordNameStrategy"
+ ],
+ "default": "TopicNameStrategy"
+ },
+ "schema_registry_url": {
+ "type": "string",
+ "examples": ["http://localhost:8081"]
+ },
+ "schema_registry_username": {
+ "type": "string",
+ "default": ""
+ },
+ "schema_registry_password": {
+ "type": "string",
+ "default": ""
+ }
+ }
+ }
+ ]
+ },
+ "bootstrap_servers": {
+ "title": "Bootstrap Servers",
+        "description": "A list of host/port pairs to use for establishing the initial connection to the Kafka cluster. The client will make use of all servers irrespective of which servers are specified here for bootstrapping—this list only impacts the initial hosts used to discover the full set of servers. This list should be in the form host1:port1,host2:port2,.... Since these servers are just used for the initial connection to discover the full cluster membership (which may change dynamically), this list need not contain the full set of servers (you may want more than one, though, in case a server is down).",
+ "type": "string",
+ "examples": ["kafka-broker1:9092,kafka-broker2:9092"]
+ },
+ "subscription": {
+ "title": "Subscription Method",
+ "type": "object",
+ "description": "You can choose to manually assign a list of partitions, or subscribe to all topics matching specified pattern to get dynamically assigned partitions.",
+ "oneOf": [
+ {
+ "title": "Manually assign a list of partitions",
+ "required": ["subscription_type", "topic_partitions"],
+ "properties": {
+ "subscription_type": {
+ "description": "Manually assign a list of partitions to this consumer. This interface does not allow for incremental assignment and will replace the previous assignment (if there is one).\nIf the given list of topic partitions is empty, it is treated the same as unsubscribe().",
+ "type": "string",
+ "const": "assign"
+ },
+ "topic_partitions": {
+ "title": "List of topic:partition Pairs",
+ "type": "string",
+ "examples": ["sample.topic:0, sample.topic:1"]
+ }
+ }
+ },
+ {
+ "title": "Subscribe to all topics matching specified pattern",
+ "required": ["subscription_type", "topic_pattern"],
+ "properties": {
+ "subscription_type": {
+ "description": "The Topic pattern from which the records will be read.",
+ "type": "string",
+ "const": "subscribe"
+ },
+ "topic_pattern": {
+ "title": "Topic Pattern",
+ "type": "string",
+ "examples": ["sample.topic"]
+ }
+ }
+ }
+ ]
+ },
+ "test_topic": {
+ "title": "Test Topic",
+ "description": "The Topic to test in case the Airbyte can consume messages.",
+ "type": "string",
+ "examples": ["test.topic"]
+ },
+ "group_id": {
+ "title": "Group ID",
+ "description": "The Group ID is how you distinguish different consumer groups.",
+ "type": "string",
+ "examples": ["group.id"]
+ },
+ "max_poll_records": {
+ "title": "Max Poll Records",
+ "description": "The maximum number of records returned in a single call to poll(). Note, that max_poll_records does not impact the underlying fetching behavior. The consumer will cache the records from each fetch request and returns them incrementally from each poll.",
+ "type": "integer",
+ "default": 500
+ },
+ "polling_time": {
+ "title": "Polling Time",
+ "description": "Amount of time Kafka connector should try to poll for messages.",
+ "type": "integer",
+ "default": 100
+ },
+ "protocol": {
+ "title": "Protocol",
+ "type": "object",
+ "description": "The Protocol used to communicate with brokers.",
+ "oneOf": [
+ {
+ "title": "PLAINTEXT",
+ "required": ["security_protocol"],
+ "properties": {
+ "security_protocol": {
+ "type": "string",
+ "const": "PLAINTEXT"
+ }
+ }
+ },
+ {
+ "title": "SASL PLAINTEXT",
+ "required": [
+ "security_protocol",
+ "sasl_mechanism",
+ "sasl_jaas_config"
+ ],
+ "properties": {
+ "security_protocol": {
+ "type": "string",
+ "const": "SASL_PLAINTEXT"
+ },
+ "sasl_mechanism": {
+ "title": "SASL Mechanism",
+ "description": "The SASL mechanism used for client connections. This may be any mechanism for which a security provider is available.",
+ "type": "string",
+ "const": "PLAIN"
+ },
+ "sasl_jaas_config": {
+ "title": "SASL JAAS Config",
+ "description": "The JAAS login context parameters for SASL connections in the format used by JAAS configuration files.",
+ "type": "string",
+ "default": "",
+ "airbyte_secret": true
+ }
+ }
+ },
+ {
+ "title": "SASL SSL",
+ "required": [
+ "security_protocol",
+ "sasl_mechanism",
+ "sasl_jaas_config"
+ ],
+ "properties": {
+ "security_protocol": {
+ "type": "string",
+ "const": "SASL_SSL"
+ },
+ "sasl_mechanism": {
+ "title": "SASL Mechanism",
+ "description": "The SASL mechanism used for client connections. This may be any mechanism for which a security provider is available.",
+ "type": "string",
+ "default": "GSSAPI",
+ "enum": [
+ "GSSAPI",
+ "OAUTHBEARER",
+ "SCRAM-SHA-256",
+ "SCRAM-SHA-512",
+ "PLAIN"
+ ]
+ },
+ "sasl_jaas_config": {
+ "title": "SASL JAAS Config",
+ "description": "The JAAS login context parameters for SASL connections in the format used by JAAS configuration files.",
+ "type": "string",
+ "default": "",
+ "airbyte_secret": true
+ }
+ }
+ }
+ ]
+ },
+ "client_id": {
+ "title": "Client ID",
+ "description": "An ID string to pass to the server when making requests. The purpose of this is to be able to track the source of requests beyond just ip/port by allowing a logical application name to be included in server-side request logging.",
+ "type": "string",
+ "examples": ["airbyte-consumer"]
+ },
+ "enable_auto_commit": {
+ "title": "Enable Auto Commit",
+ "description": "If true, the consumer's offset will be periodically committed in the background.",
+ "type": "boolean",
+ "default": true
+ },
+ "auto_commit_interval_ms": {
+ "title": "Auto Commit Interval, ms",
+ "description": "The frequency in milliseconds that the consumer offsets are auto-committed to Kafka if enable.auto.commit is set to true.",
+ "type": "integer",
+ "default": 5000
+ },
+ "client_dns_lookup": {
+ "title": "Client DNS Lookup",
+ "description": "Controls how the client uses DNS lookups. If set to use_all_dns_ips, connect to each returned IP address in sequence until a successful connection is established. After a disconnection, the next IP is used. Once all IPs have been used once, the client resolves the IP(s) from the hostname again. If set to resolve_canonical_bootstrap_servers_only, resolve each bootstrap address into a list of canonical names. After the bootstrap phase, this behaves the same as use_all_dns_ips. If set to default (deprecated), attempt to connect to the first IP address returned by the lookup, even if the lookup returns multiple IP addresses.",
+ "type": "string",
+ "default": "use_all_dns_ips",
+ "enum": [
+ "default",
+ "use_all_dns_ips",
+ "resolve_canonical_bootstrap_servers_only"
+ ]
+ },
+ "retry_backoff_ms": {
+ "title": "Retry Backoff, ms",
+ "description": "The amount of time to wait before attempting to retry a failed request to a given topic partition. This avoids repeatedly sending requests in a tight loop under some failure scenarios.",
+ "type": "integer",
+ "default": 100
+ },
+ "request_timeout_ms": {
+ "title": "Request Timeout, ms",
+ "description": "The configuration controls the maximum amount of time the client will wait for the response of a request. If the response is not received before the timeout elapses the client will resend the request if necessary or fail the request if retries are exhausted.",
+ "type": "integer",
+ "default": 30000
+ },
+ "receive_buffer_bytes": {
+ "title": "Receive Buffer, bytes",
+ "description": "The size of the TCP receive buffer (SO_RCVBUF) to use when reading data. If the value is -1, the OS default will be used.",
+ "type": "integer",
+ "default": 32768
+ },
+ "auto_offset_reset": {
+ "title": "Auto Offset Reset",
+ "description": "What to do when there is no initial offset in Kafka or if the current offset does not exist any more on the server - earliest: automatically reset the offset to the earliest offset, latest: automatically reset the offset to the latest offset, none: throw exception to the consumer if no previous offset is found for the consumer's group, anything else: throw exception to the consumer.",
+ "type": "string",
+ "default": "latest",
+ "enum": ["latest", "earliest", "none"]
+ },
+ "repeated_calls": {
+ "title": "Repeated Calls",
+ "description": "The number of repeated calls to poll() if no messages were received.",
+ "type": "integer",
+ "default": 3
+ },
+ "max_records_process": {
+ "title": "Maximum Records",
+ "description": "The Maximum to be processed per execution",
+ "type": "integer",
+ "default": 100000
+ }
+ }
+ },
+ "supportsIncremental": true,
+ "supportsNormalization": false,
+ "supportsDBT": false,
+ "supported_destination_sync_modes": [],
+ "supported_source_sync_modes": ["append"]
+}
diff --git a/airbyte-integrations/connectors/source-mongodb-v2/acceptance-test-config.yml b/airbyte-integrations/connectors/source-mongodb-v2/acceptance-test-config.yml
new file mode 100644
index 000000000000..6402c3beba5f
--- /dev/null
+++ b/airbyte-integrations/connectors/source-mongodb-v2/acceptance-test-config.yml
@@ -0,0 +1,7 @@
+# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference)
+# for more information about how to configure these tests
+connector_image: airbyte/source-mongodb-v2:dev
+tests:
+ spec:
+ - spec_path: "src/test-integration/resources/expected_spec.json"
+ config_path: "src/test-integration/resources/dummy_config.json"
diff --git a/airbyte-integrations/connectors/source-mongodb-v2/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-mongodb-v2/acceptance-test-docker.sh
new file mode 100644
index 000000000000..ba0ab2874b98
--- /dev/null
+++ b/airbyte-integrations/connectors/source-mongodb-v2/acceptance-test-docker.sh
@@ -0,0 +1,15 @@
+#!/usr/bin/env sh
+
+# Build latest connector image
+docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev
+
+# Pull latest acctest image
+docker pull airbyte/source-acceptance-test:latest
+
+# Run
+docker run --rm -it \
+ -v /var/run/docker.sock:/var/run/docker.sock \
+ -v /tmp:/tmp \
+ -v $(pwd):/test_input \
+ airbyte/source-acceptance-test \
+ --acceptance-test-config /test_input
diff --git a/airbyte-integrations/connectors/source-mongodb-v2/build.gradle b/airbyte-integrations/connectors/source-mongodb-v2/build.gradle
index 6b776c6192dc..ab5675cd3211 100644
--- a/airbyte-integrations/connectors/source-mongodb-v2/build.gradle
+++ b/airbyte-integrations/connectors/source-mongodb-v2/build.gradle
@@ -2,6 +2,7 @@ plugins {
id 'application'
id 'airbyte-docker'
id 'airbyte-integration-test-java'
+ id 'airbyte-source-acceptance-test'
}
application {
diff --git a/airbyte-integrations/connectors/source-mongodb-v2/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-mongodb-v2/integration_tests/acceptance.py
new file mode 100644
index 000000000000..1302b2f57e10
--- /dev/null
+++ b/airbyte-integrations/connectors/source-mongodb-v2/integration_tests/acceptance.py
@@ -0,0 +1,16 @@
+#
+# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
+#
+
+
+import pytest
+
+pytest_plugins = ("source_acceptance_test.plugin",)
+
+
+@pytest.fixture(scope="session", autouse=True)
+def connector_setup():
+ """This fixture is a placeholder for external resources that acceptance test might require."""
+ # TODO: setup test dependencies if needed. otherwise remove the TODO comments
+ yield
+ # TODO: clean up test dependencies
diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/main/resources/spec.json b/airbyte-integrations/connectors/source-mongodb-v2/src/main/resources/spec.json
index fc7959b42274..2f535f07687a 100644
--- a/airbyte-integrations/connectors/source-mongodb-v2/src/main/resources/spec.json
+++ b/airbyte-integrations/connectors/source-mongodb-v2/src/main/resources/spec.json
@@ -20,8 +20,7 @@
"properties": {
"instance": {
"type": "string",
- "enum": ["standalone"],
- "default": "standalone"
+ "const": "standalone"
},
"host": {
"title": "Host",
@@ -54,8 +53,7 @@
"properties": {
"instance": {
"type": "string",
- "enum": ["replica"],
- "default": "replica"
+ "const": "replica"
},
"server_addresses": {
"title": "Server Addresses",
@@ -74,13 +72,12 @@
},
{
"title": "MongoDB Atlas",
- "additionalProperties": false,
+ "additionalProperties": true,
"required": ["instance", "cluster_url"],
"properties": {
"instance": {
"type": "string",
- "enum": ["atlas"],
- "default": "atlas"
+ "const": "atlas"
},
"cluster_url": {
"title": "Cluster URL",
diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/test-integration/resources/dummy_config.json b/airbyte-integrations/connectors/source-mongodb-v2/src/test-integration/resources/dummy_config.json
new file mode 100644
index 000000000000..0216ddfd3c03
--- /dev/null
+++ b/airbyte-integrations/connectors/source-mongodb-v2/src/test-integration/resources/dummy_config.json
@@ -0,0 +1,3 @@
+{
+ "database": "default"
+}
diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-mongodb-v2/src/test-integration/resources/expected_spec.json
new file mode 100644
index 000000000000..b39746df263b
--- /dev/null
+++ b/airbyte-integrations/connectors/source-mongodb-v2/src/test-integration/resources/expected_spec.json
@@ -0,0 +1,124 @@
+{
+ "documentationUrl": "https://docs.airbyte.com/integrations/sources/mongodb-v2",
+ "changelogUrl": "https://docs.airbyte.com/integrations/sources/mongodb-v2",
+ "connectionSpecification": {
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "MongoDb Source Spec",
+ "type": "object",
+ "required": ["database"],
+ "additionalProperties": true,
+ "properties": {
+ "instance_type": {
+ "type": "object",
+ "title": "MongoDb Instance Type",
+ "description": "The MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.",
+ "order": 0,
+ "oneOf": [
+ {
+ "title": "Standalone MongoDb Instance",
+ "required": ["instance", "host", "port"],
+ "properties": {
+ "instance": {
+ "type": "string",
+ "const": "standalone"
+ },
+ "host": {
+ "title": "Host",
+ "type": "string",
+ "description": "The host name of the Mongo database.",
+ "order": 0
+ },
+ "port": {
+ "title": "Port",
+ "type": "integer",
+ "description": "The port of the Mongo database.",
+ "minimum": 0,
+ "maximum": 65536,
+ "default": 27017,
+ "examples": ["27017"],
+ "order": 1
+ },
+ "tls": {
+ "title": "TLS Connection",
+ "type": "boolean",
+ "description": "Indicates whether TLS encryption protocol will be used to connect to MongoDB. It is recommended to use TLS connection if possible. For more information see documentation.",
+ "default": false,
+ "order": 2
+ }
+ }
+ },
+ {
+ "title": "Replica Set",
+ "required": ["instance", "server_addresses"],
+ "properties": {
+ "instance": {
+ "type": "string",
+ "const": "replica"
+ },
+ "server_addresses": {
+ "title": "Server Addresses",
+ "type": "string",
+ "description": "The members of a replica set. Please specify `host`:`port` of each member separated by comma.",
+ "examples": ["host1:27017,host2:27017,host3:27017"],
+ "order": 0
+ },
+ "replica_set": {
+ "title": "Replica Set",
+ "type": "string",
+ "description": "A replica set in MongoDB is a group of mongod processes that maintain the same data set.",
+ "order": 1
+ }
+ }
+ },
+ {
+ "title": "MongoDB Atlas",
+ "additionalProperties": true,
+ "required": ["instance", "cluster_url"],
+ "properties": {
+ "instance": {
+ "type": "string",
+ "const": "atlas"
+ },
+ "cluster_url": {
+ "title": "Cluster URL",
+ "type": "string",
+ "description": "The URL of a cluster to connect to.",
+ "order": 0
+ }
+ }
+ }
+ ]
+ },
+ "database": {
+ "title": "Database Name",
+ "type": "string",
+ "description": "The database you want to replicate.",
+ "order": 1
+ },
+ "user": {
+ "title": "User",
+ "type": "string",
+ "description": "The username which is used to access the database.",
+ "order": 2
+ },
+ "password": {
+ "title": "Password",
+ "type": "string",
+ "description": "The password associated with this username.",
+ "airbyte_secret": true,
+ "order": 3
+ },
+ "auth_source": {
+ "title": "Authentication Source",
+ "type": "string",
+ "description": "The authentication source where the user information is stored.",
+ "default": "admin",
+ "examples": ["admin"],
+ "order": 4
+ }
+ }
+ },
+ "supportsNormalization": false,
+ "supportsDBT": false,
+ "supported_destination_sync_modes": []
+}
diff --git a/airbyte-integrations/connectors/source-mssql/acceptance-test-config.yml b/airbyte-integrations/connectors/source-mssql/acceptance-test-config.yml
index 5aa3dde87597..2b3f94e1b3a8 100644
--- a/airbyte-integrations/connectors/source-mssql/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-mssql/acceptance-test-config.yml
@@ -3,4 +3,7 @@
connector_image: airbyte/source-mssql:dev
tests:
spec:
- - spec_path: "src/main/resources/spec.json"
+ - spec_path: "src/test-integration/resources/expected_spec.json"
+ config_path: "src/test-integration/resources/dummy_config.json"
+ backward_compatibility_tests_config:
+ disable_for_version: "0.4.25"
diff --git a/airbyte-integrations/connectors/source-mssql/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-mssql/acceptance-test-docker.sh
new file mode 100644
index 000000000000..ba0ab2874b98
--- /dev/null
+++ b/airbyte-integrations/connectors/source-mssql/acceptance-test-docker.sh
@@ -0,0 +1,15 @@
+#!/usr/bin/env sh
+
+# Build latest connector image
+docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev
+
+# Pull latest acctest image
+docker pull airbyte/source-acceptance-test:latest
+
+# Run
+docker run --rm -it \
+ -v /var/run/docker.sock:/var/run/docker.sock \
+ -v /tmp:/tmp \
+ -v $(pwd):/test_input \
+ airbyte/source-acceptance-test \
+ --acceptance-test-config /test_input
diff --git a/airbyte-integrations/connectors/source-mssql/build.gradle b/airbyte-integrations/connectors/source-mssql/build.gradle
index a1bceecbf7a0..55f5f5a313c3 100644
--- a/airbyte-integrations/connectors/source-mssql/build.gradle
+++ b/airbyte-integrations/connectors/source-mssql/build.gradle
@@ -3,6 +3,7 @@ plugins {
id 'airbyte-docker'
id 'airbyte-integration-test-java'
id 'airbyte-performance-test-java'
+ id 'airbyte-source-acceptance-test'
}
application {
diff --git a/airbyte-integrations/connectors/source-mssql/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-mssql/integration_tests/acceptance.py
new file mode 100644
index 000000000000..1302b2f57e10
--- /dev/null
+++ b/airbyte-integrations/connectors/source-mssql/integration_tests/acceptance.py
@@ -0,0 +1,16 @@
+#
+# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
+#
+
+
+import pytest
+
+pytest_plugins = ("source_acceptance_test.plugin",)
+
+
+@pytest.fixture(scope="session", autouse=True)
+def connector_setup():
+ """This fixture is a placeholder for external resources that acceptance test might require."""
+ # TODO: setup test dependencies if needed. otherwise remove the TODO comments
+ yield
+ # TODO: clean up test dependencies
diff --git a/airbyte-integrations/connectors/source-mssql/src/main/resources/spec.json b/airbyte-integrations/connectors/source-mssql/src/main/resources/spec.json
index 674f9342317d..35b192d2c4de 100644
--- a/airbyte-integrations/connectors/source-mssql/src/main/resources/spec.json
+++ b/airbyte-integrations/connectors/source-mssql/src/main/resources/spec.json
@@ -72,9 +72,7 @@
"properties": {
"ssl_method": {
"type": "string",
- "const": "unencrypted",
- "enum": ["unencrypted"],
- "default": "unencrypted"
+ "const": "unencrypted"
}
}
},
@@ -85,9 +83,7 @@
"properties": {
"ssl_method": {
"type": "string",
- "const": "encrypted_trust_server_certificate",
- "enum": ["encrypted_trust_server_certificate"],
- "default": "encrypted_trust_server_certificate"
+ "const": "encrypted_trust_server_certificate"
}
}
},
@@ -98,9 +94,7 @@
"properties": {
"ssl_method": {
"type": "string",
- "const": "encrypted_verify_certificate",
- "enum": ["encrypted_verify_certificate"],
- "default": "encrypted_verify_certificate"
+ "const": "encrypted_verify_certificate"
},
"hostNameInCertificate": {
"title": "Host Name In Certificate",
@@ -127,8 +121,6 @@
"method": {
"type": "string",
"const": "STANDARD",
- "enum": ["STANDARD"],
- "default": "STANDARD",
"order": 0
}
}
@@ -141,8 +133,6 @@
"method": {
"type": "string",
"const": "CDC",
- "enum": ["CDC"],
- "default": "CDC",
"order": 0
},
"data_to_sync": {
@@ -160,15 +150,6 @@
"enum": ["Snapshot", "Read Committed"],
"description": "Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. If you choose the \"Snapshot\" level, you must enable the snapshot isolation mode on the database.",
"order": 2
- },
- "initial_waiting_seconds": {
- "type": "integer",
- "title": "Initial Waiting Time in Seconds (Advanced)",
- "description": "The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.",
- "default": 300,
- "min": 120,
- "max": 1200,
- "order": 3
}
}
}
diff --git a/airbyte-integrations/connectors/source-mssql/src/test-integration/resources/dummy_config.json b/airbyte-integrations/connectors/source-mssql/src/test-integration/resources/dummy_config.json
new file mode 100644
index 000000000000..560e55333378
--- /dev/null
+++ b/airbyte-integrations/connectors/source-mssql/src/test-integration/resources/dummy_config.json
@@ -0,0 +1,6 @@
+{
+ "host": "default",
+ "port": 5555,
+ "database": "default",
+ "username": "default"
+}
diff --git a/airbyte-integrations/connectors/source-mssql/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-mssql/src/test-integration/resources/expected_spec.json
new file mode 100644
index 000000000000..4a5352b013bd
--- /dev/null
+++ b/airbyte-integrations/connectors/source-mssql/src/test-integration/resources/expected_spec.json
@@ -0,0 +1,277 @@
+{
+  "documentationUrl": "https://docs.airbyte.com/integrations/sources/mssql",
+ "connectionSpecification": {
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "MSSQL Source Spec",
+ "type": "object",
+ "required": ["host", "port", "database", "username"],
+ "properties": {
+ "host": {
+ "description": "The hostname of the database.",
+ "title": "Host",
+ "type": "string",
+ "order": 0
+ },
+ "port": {
+ "description": "The port of the database.",
+ "title": "Port",
+ "type": "integer",
+ "minimum": 0,
+ "maximum": 65536,
+ "examples": ["1433"],
+ "order": 1
+ },
+ "database": {
+ "description": "The name of the database.",
+ "title": "Database",
+ "type": "string",
+ "examples": ["master"],
+ "order": 2
+ },
+ "schemas": {
+ "title": "Schemas",
+ "description": "The list of schemas to sync from. Defaults to user. Case sensitive.",
+ "type": "array",
+ "items": {
+ "type": "string"
+ },
+ "minItems": 0,
+ "uniqueItems": true,
+ "default": ["dbo"],
+ "order": 3
+ },
+ "username": {
+ "description": "The username which is used to access the database.",
+ "title": "Username",
+ "type": "string",
+ "order": 4
+ },
+ "password": {
+ "description": "The password associated with the username.",
+ "title": "Password",
+ "type": "string",
+ "airbyte_secret": true,
+ "order": 5
+ },
+ "jdbc_url_params": {
+ "title": "JDBC URL Params",
+ "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).",
+ "type": "string",
+ "order": 6
+ },
+ "ssl_method": {
+ "title": "SSL Method",
+ "type": "object",
+ "description": "The encryption method which is used when communicating with the database.",
+ "order": 7,
+ "oneOf": [
+ {
+ "title": "Unencrypted",
+ "description": "Data transfer will not be encrypted.",
+ "required": ["ssl_method"],
+ "properties": {
+ "ssl_method": {
+ "type": "string",
+ "const": "unencrypted"
+ }
+ }
+ },
+ {
+ "title": "Encrypted (trust server certificate)",
+ "description": "Use the certificate provided by the server without verification. (For testing purposes only!)",
+ "required": ["ssl_method"],
+ "properties": {
+ "ssl_method": {
+ "type": "string",
+ "const": "encrypted_trust_server_certificate"
+ }
+ }
+ },
+ {
+ "title": "Encrypted (verify certificate)",
+ "description": "Verify and use the certificate provided by the server.",
+ "required": ["ssl_method", "trustStoreName", "trustStorePassword"],
+ "properties": {
+ "ssl_method": {
+ "type": "string",
+ "const": "encrypted_verify_certificate"
+ },
+ "hostNameInCertificate": {
+ "title": "Host Name In Certificate",
+ "type": "string",
+ "description": "Specifies the host name of the server. The value of this property must match the subject property of the certificate.",
+ "order": 7
+ }
+ }
+ }
+ ]
+ },
+ "replication_method": {
+ "type": "object",
+ "title": "Replication Method",
+ "description": "The replication method used for extracting data from the database. STANDARD replication requires no setup on the DB side but will not be able to represent deletions incrementally. CDC uses {TBC} to detect inserts, updates, and deletes. This needs to be configured on the source database itself.",
+ "default": "STANDARD",
+ "order": 8,
+ "oneOf": [
+ {
+ "title": "Standard",
+ "description": "Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally.",
+ "required": ["method"],
+ "properties": {
+ "method": {
+ "type": "string",
+ "const": "STANDARD",
+ "order": 0
+ }
+ }
+ },
+ {
+ "title": "Logical Replication (CDC)",
+ "description": "CDC uses {TBC} to detect inserts, updates, and deletes. This needs to be configured on the source database itself.",
+ "required": ["method"],
+ "properties": {
+ "method": {
+ "type": "string",
+ "const": "CDC",
+ "order": 0
+ },
+ "data_to_sync": {
+ "title": "Data to Sync",
+ "type": "string",
+ "default": "Existing and New",
+ "enum": ["Existing and New", "New Changes Only"],
+ "description": "What data should be synced under the CDC. \"Existing and New\" will read existing data as a snapshot, and sync new changes through CDC. \"New Changes Only\" will skip the initial snapshot, and only sync new changes through CDC.",
+ "order": 1
+ },
+ "snapshot_isolation": {
+ "title": "Initial Snapshot Isolation Level",
+ "type": "string",
+ "default": "Snapshot",
+ "enum": ["Snapshot", "Read Committed"],
+ "description": "Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. If you choose the \"Snapshot\" level, you must enable the snapshot isolation mode on the database.",
+ "order": 2
+ }
+ }
+ }
+ ]
+ },
+ "tunnel_method": {
+ "type": "object",
+ "title": "SSH Tunnel Method",
+ "description": "Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.",
+ "oneOf": [
+ {
+ "title": "No Tunnel",
+ "required": ["tunnel_method"],
+ "properties": {
+ "tunnel_method": {
+ "description": "No ssh tunnel needed to connect to database",
+ "type": "string",
+ "const": "NO_TUNNEL",
+ "order": 0
+ }
+ }
+ },
+ {
+ "title": "SSH Key Authentication",
+ "required": [
+ "tunnel_method",
+ "tunnel_host",
+ "tunnel_port",
+ "tunnel_user",
+ "ssh_key"
+ ],
+ "properties": {
+ "tunnel_method": {
+ "description": "Connect through a jump server tunnel host using username and ssh key",
+ "type": "string",
+ "const": "SSH_KEY_AUTH",
+ "order": 0
+ },
+ "tunnel_host": {
+ "title": "SSH Tunnel Jump Server Host",
+ "description": "Hostname of the jump server host that allows inbound ssh tunnel.",
+ "type": "string",
+ "order": 1
+ },
+ "tunnel_port": {
+ "title": "SSH Connection Port",
+ "description": "Port on the proxy/jump server that accepts inbound ssh connections.",
+ "type": "integer",
+ "minimum": 0,
+ "maximum": 65536,
+ "default": 22,
+ "examples": ["22"],
+ "order": 2
+ },
+ "tunnel_user": {
+ "title": "SSH Login Username",
+ "description": "OS-level username for logging into the jump server host.",
+ "type": "string",
+ "order": 3
+ },
+ "ssh_key": {
+ "title": "SSH Private Key",
+ "description": "OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )",
+ "type": "string",
+ "airbyte_secret": true,
+ "multiline": true,
+ "order": 4
+ }
+ }
+ },
+ {
+ "title": "Password Authentication",
+ "required": [
+ "tunnel_method",
+ "tunnel_host",
+ "tunnel_port",
+ "tunnel_user",
+ "tunnel_user_password"
+ ],
+ "properties": {
+ "tunnel_method": {
+ "description": "Connect through a jump server tunnel host using username and password authentication",
+ "type": "string",
+ "const": "SSH_PASSWORD_AUTH",
+ "order": 0
+ },
+ "tunnel_host": {
+ "title": "SSH Tunnel Jump Server Host",
+ "description": "Hostname of the jump server host that allows inbound ssh tunnel.",
+ "type": "string",
+ "order": 1
+ },
+ "tunnel_port": {
+ "title": "SSH Connection Port",
+ "description": "Port on the proxy/jump server that accepts inbound ssh connections.",
+ "type": "integer",
+ "minimum": 0,
+ "maximum": 65536,
+ "default": 22,
+ "examples": ["22"],
+ "order": 2
+ },
+ "tunnel_user": {
+ "title": "SSH Login Username",
+ "description": "OS-level username for logging into the jump server host",
+ "type": "string",
+ "order": 3
+ },
+ "tunnel_user_password": {
+ "title": "Password",
+ "description": "OS-level password for logging into the jump server host",
+ "type": "string",
+ "airbyte_secret": true,
+ "order": 4
+ }
+ }
+ }
+ ]
+ }
+ }
+ },
+ "supportsNormalization": false,
+ "supportsDBT": false,
+ "supported_destination_sync_modes": []
+}
diff --git a/airbyte-integrations/connectors/source-mysql/acceptance-test-config.yml b/airbyte-integrations/connectors/source-mysql/acceptance-test-config.yml
index cf854c42e505..4653b50be581 100644
--- a/airbyte-integrations/connectors/source-mysql/acceptance-test-config.yml
+++ b/airbyte-integrations/connectors/source-mysql/acceptance-test-config.yml
@@ -3,4 +3,5 @@
connector_image: airbyte/source-mysql:dev
tests:
spec:
- - spec_path: "src/main/resources/spec.json"
+ - spec_path: "src/test-integration/resources/expected_spec.json"
+ config_path: "src/test-integration/resources/dummy_config.json"
diff --git a/airbyte-integrations/connectors/source-mysql/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-mysql/acceptance-test-docker.sh
new file mode 100644
index 000000000000..ba0ab2874b98
--- /dev/null
+++ b/airbyte-integrations/connectors/source-mysql/acceptance-test-docker.sh
@@ -0,0 +1,15 @@
+#!/usr/bin/env sh
+
+# Build latest connector image
+docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev
+
+# Pull latest acctest image
+docker pull airbyte/source-acceptance-test:latest
+
+# Run
+docker run --rm -it \
+ -v /var/run/docker.sock:/var/run/docker.sock \
+ -v /tmp:/tmp \
+ -v $(pwd):/test_input \
+ airbyte/source-acceptance-test \
+ --acceptance-test-config /test_input
diff --git a/airbyte-integrations/connectors/source-mysql/build.gradle b/airbyte-integrations/connectors/source-mysql/build.gradle
index 568ccfd0eee0..d738922c4714 100644
--- a/airbyte-integrations/connectors/source-mysql/build.gradle
+++ b/airbyte-integrations/connectors/source-mysql/build.gradle
@@ -3,6 +3,7 @@ plugins {
id 'airbyte-docker'
id 'airbyte-integration-test-java'
id 'airbyte-performance-test-java'
+ id 'airbyte-source-acceptance-test'
}
application {
diff --git a/airbyte-integrations/connectors/source-mysql/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-mysql/integration_tests/acceptance.py
new file mode 100644
index 000000000000..1302b2f57e10
--- /dev/null
+++ b/airbyte-integrations/connectors/source-mysql/integration_tests/acceptance.py
@@ -0,0 +1,16 @@
+#
+# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
+#
+
+
+import pytest
+
+pytest_plugins = ("source_acceptance_test.plugin",)
+
+
+@pytest.fixture(scope="session", autouse=True)
+def connector_setup():
+ """This fixture is a placeholder for external resources that acceptance test might require."""
+ # TODO: setup test dependencies if needed. otherwise remove the TODO comments
+ yield
+ # TODO: clean up test dependencies
diff --git a/airbyte-integrations/connectors/source-mysql/src/main/resources/spec.json b/airbyte-integrations/connectors/source-mysql/src/main/resources/spec.json
index c1a6f48b4e87..40373023020a 100644
--- a/airbyte-integrations/connectors/source-mysql/src/main/resources/spec.json
+++ b/airbyte-integrations/connectors/source-mysql/src/main/resources/spec.json
@@ -68,8 +68,6 @@
"mode": {
"type": "string",
"const": "preferred",
- "enum": ["preferred"],
- "default": "preferred",
"order": 0
}
}
@@ -82,8 +80,6 @@
"mode": {
"type": "string",
"const": "required",
- "enum": ["required"],
- "default": "required",
"order": 0
}
}
@@ -96,8 +92,6 @@
"mode": {
"type": "string",
"const": "verify_ca",
- "enum": ["verify_ca"],
- "default": "verify_ca",
"order": 0
},
"ca_certificate": {
@@ -141,8 +135,6 @@
"mode": {
"type": "string",
"const": "verify_identity",
- "enum": ["verify_identity"],
- "default": "verify_identity",
"order": 0
},
"ca_certificate": {
@@ -194,8 +186,6 @@
"method": {
"type": "string",
"const": "STANDARD",
- "enum": ["STANDARD"],
- "default": "STANDARD",
"order": 0
}
}
@@ -208,8 +198,6 @@
"method": {
"type": "string",
"const": "CDC",
- "enum": ["CDC"],
- "default": "CDC",
"order": 0
},
"initial_waiting_seconds": {
diff --git a/airbyte-integrations/connectors/source-mysql/src/test-integration/resources/dummy_config.json b/airbyte-integrations/connectors/source-mysql/src/test-integration/resources/dummy_config.json
new file mode 100644
index 000000000000..e17733f16b23
--- /dev/null
+++ b/airbyte-integrations/connectors/source-mysql/src/test-integration/resources/dummy_config.json
@@ -0,0 +1,7 @@
+{
+ "host": "default",
+ "port": 5555,
+ "database": "default",
+ "username": "default",
+ "replication_method": { "method": "STANDARD" }
+}
diff --git a/airbyte-integrations/connectors/source-mysql/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-mysql/src/test-integration/resources/expected_spec.json
new file mode 100644
index 000000000000..a1ab35b8c56c
--- /dev/null
+++ b/airbyte-integrations/connectors/source-mysql/src/test-integration/resources/expected_spec.json
@@ -0,0 +1,341 @@
+{
+ "documentationUrl": "https://docs.airbyte.com/integrations/sources/mysql",
+ "connectionSpecification": {
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "MySql Source Spec",
+ "type": "object",
+ "required": ["host", "port", "database", "username", "replication_method"],
+ "properties": {
+ "host": {
+ "description": "The host name of the database.",
+ "title": "Host",
+ "type": "string",
+ "order": 0
+ },
+ "port": {
+ "description": "The port to connect to.",
+ "title": "Port",
+ "type": "integer",
+ "minimum": 0,
+ "maximum": 65536,
+ "default": 3306,
+ "examples": ["3306"],
+ "order": 1
+ },
+ "database": {
+ "description": "The database name.",
+ "title": "Database",
+ "type": "string",
+ "order": 2
+ },
+ "username": {
+ "description": "The username which is used to access the database.",
+ "title": "Username",
+ "type": "string",
+ "order": 3
+ },
+ "password": {
+ "description": "The password associated with the username.",
+ "title": "Password",
+ "type": "string",
+ "airbyte_secret": true,
+ "order": 4
+ },
+ "jdbc_url_params": {
+ "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3). For more information read about JDBC URL parameters.",
+ "title": "JDBC URL Parameters (Advanced)",
+ "type": "string",
+ "order": 5
+ },
+ "ssl": {
+ "title": "SSL Connection",
+ "description": "Encrypt data using SSL.",
+ "type": "boolean",
+ "default": true,
+ "order": 6
+ },
+ "ssl_mode": {
+ "title": "SSL modes",
+ "description": "SSL connection modes.