diff --git a/airbyte-integrations/connectors/source-bigquery/acceptance-test-config.yml b/airbyte-integrations/connectors/source-bigquery/acceptance-test-config.yml new file mode 100644 index 000000000000..d634ced570a5 --- /dev/null +++ b/airbyte-integrations/connectors/source-bigquery/acceptance-test-config.yml @@ -0,0 +1,7 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-bigquery:dev +tests: + spec: + - spec_path: "src/test-integration/resources/expected_spec.json" + config_path: "src/test-integration/resources/dummy_config.json" diff --git a/airbyte-integrations/connectors/source-bigquery/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-bigquery/acceptance-test-docker.sh new file mode 100644 index 000000000000..ba0ab2874b98 --- /dev/null +++ b/airbyte-integrations/connectors/source-bigquery/acceptance-test-docker.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input diff --git a/airbyte-integrations/connectors/source-bigquery/build.gradle b/airbyte-integrations/connectors/source-bigquery/build.gradle index be8411702ae5..5c6168f9a971 100644 --- a/airbyte-integrations/connectors/source-bigquery/build.gradle +++ b/airbyte-integrations/connectors/source-bigquery/build.gradle @@ -2,6 +2,7 @@ plugins { id 'application' id 'airbyte-docker' id 'airbyte-integration-test-java' + id 'airbyte-source-acceptance-test' } application { diff --git a/airbyte-integrations/connectors/source-bigquery/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-bigquery/integration_tests/acceptance.py new file mode 100644 index 000000000000..1302b2f57e10 --- /dev/null +++ b/airbyte-integrations/connectors/source-bigquery/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. 
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-bigquery/src/test-integration/resources/dummy_config.json b/airbyte-integrations/connectors/source-bigquery/src/test-integration/resources/dummy_config.json new file mode 100644 index 000000000000..3086a588fab5 --- /dev/null +++ b/airbyte-integrations/connectors/source-bigquery/src/test-integration/resources/dummy_config.json @@ -0,0 +1,5 @@ +{ + "dataset_id": "dataset", + "project_id": "project", + "credentials_json": "credentials" +} diff --git a/airbyte-integrations/connectors/source-bigquery/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-bigquery/src/test-integration/resources/expected_spec.json new file mode 100644 index 000000000000..ac749a77def8 --- /dev/null +++ b/airbyte-integrations/connectors/source-bigquery/src/test-integration/resources/expected_spec.json @@ -0,0 +1,32 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/sources/bigquery", + "supportsIncremental": true, + "supportsNormalization": true, + "supportsDBT": true, + "supported_destination_sync_modes": [], + "supported_sync_modes": ["overwrite", "append", "append_dedup"], + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "BigQuery Source Spec", + "type": "object", + "required": ["project_id", "credentials_json"], + "properties": { + "project_id": { + "type": "string", + "description": "The GCP project ID for the project containing the target BigQuery dataset.", + "title": "Project ID" + }, + "dataset_id": { + "type": "string", + "description": "The dataset ID to search for tables and views. If you are only loading data from one dataset, setting this option could result in much faster schema discovery.", + "title": "Default Dataset ID" + }, + "credentials_json": { + "type": "string", + "description": "The contents of your Service Account Key JSON file. See the docs for more information on how to obtain this key.", + "title": "Credentials JSON", + "airbyte_secret": true + } + } + } +} diff --git a/airbyte-integrations/connectors/source-clickhouse/acceptance-test-config.yml b/airbyte-integrations/connectors/source-clickhouse/acceptance-test-config.yml new file mode 100644 index 000000000000..c1cb0b594e5b --- /dev/null +++ b/airbyte-integrations/connectors/source-clickhouse/acceptance-test-config.yml @@ -0,0 +1,7 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-clickhouse:dev +tests: + spec: + - spec_path: "src/test-integration/resources/expected_spec.json" + config_path: "src/test-integration/resources/dummy_config.json" diff --git a/airbyte-integrations/connectors/source-clickhouse/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-clickhouse/acceptance-test-docker.sh new file mode 100644 index 000000000000..ba0ab2874b98 --- /dev/null +++ b/airbyte-integrations/connectors/source-clickhouse/acceptance-test-docker.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . 
-t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input diff --git a/airbyte-integrations/connectors/source-clickhouse/build.gradle b/airbyte-integrations/connectors/source-clickhouse/build.gradle index e15b1e743883..1e9f4cc21a28 100644 --- a/airbyte-integrations/connectors/source-clickhouse/build.gradle +++ b/airbyte-integrations/connectors/source-clickhouse/build.gradle @@ -2,6 +2,7 @@ plugins { id 'application' id 'airbyte-docker' id 'airbyte-integration-test-java' + id 'airbyte-source-acceptance-test' } application { diff --git a/airbyte-integrations/connectors/source-clickhouse/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-clickhouse/integration_tests/acceptance.py new file mode 100644 index 000000000000..1302b2f57e10 --- /dev/null +++ b/airbyte-integrations/connectors/source-clickhouse/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-clickhouse/src/test-integration/resources/dummy_config.json b/airbyte-integrations/connectors/source-clickhouse/src/test-integration/resources/dummy_config.json new file mode 100644 index 000000000000..49e2346d457f --- /dev/null +++ b/airbyte-integrations/connectors/source-clickhouse/src/test-integration/resources/dummy_config.json @@ -0,0 +1,6 @@ +{ + "host": "default", + "port": 8123, + "database": "default", + "username": "default" +} diff --git a/airbyte-integrations/connectors/source-clickhouse/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-clickhouse/src/test-integration/resources/expected_spec.json new file mode 100644 index 000000000000..16411f19eccc --- /dev/null +++ b/airbyte-integrations/connectors/source-clickhouse/src/test-integration/resources/expected_spec.json @@ -0,0 +1,177 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/destinations/clickhouse", + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "ClickHouse Source Spec", + "type": "object", + "required": ["host", "port", "database", "username"], + "properties": { + "host": { + "description": "The host endpoint of the Clickhouse cluster.", + "title": "Host", + "type": "string", + "order": 0 + }, + "port": { + "description": "The port of the database.", + "title": "Port", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 8123, + "examples": ["8123"], + "order": 1 + }, + "database": { + "description": "The name of the database.", + "title": "Database", + "type": "string", + "examples": ["default"], + "order": 2 + }, + "username": { + "description": "The username which is used to access the database.", + "title": "Username", + "type": "string", + "order": 3 + }, + "password": { + "description": "The password associated with this username.", + 
"title": "Password", + "type": "string", + "airbyte_secret": true, + "order": 4 + }, + "jdbc_url_params": { + "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (Eg. key1=value1&key2=value2&key3=value3). For more information read about JDBC URL parameters.", + "title": "JDBC URL Parameters (Advanced)", + "type": "string", + "order": 5 + }, + "ssl": { + "title": "SSL Connection", + "description": "Encrypt data using SSL.", + "type": "boolean", + "default": true, + "order": 6 + }, + "tunnel_method": { + "type": "object", + "title": "SSH Tunnel Method", + "description": "Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.", + "oneOf": [ + { + "title": "No Tunnel", + "required": ["tunnel_method"], + "properties": { + "tunnel_method": { + "description": "No ssh tunnel needed to connect to database", + "type": "string", + "const": "NO_TUNNEL", + "order": 0 + } + } + }, + { + "title": "SSH Key Authentication", + "required": [ + "tunnel_method", + "tunnel_host", + "tunnel_port", + "tunnel_user", + "ssh_key" + ], + "properties": { + "tunnel_method": { + "description": "Connect through a jump server tunnel host using username and ssh key", + "type": "string", + "const": "SSH_KEY_AUTH", + "order": 0 + }, + "tunnel_host": { + "title": "SSH Tunnel Jump Server Host", + "description": "Hostname of the jump server host that allows inbound ssh tunnel.", + "type": "string", + "order": 1 + }, + "tunnel_port": { + "title": "SSH Connection Port", + "description": "Port on the proxy/jump server that accepts inbound ssh connections.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 22, + "examples": ["22"], + "order": 2 + }, + "tunnel_user": { + "title": "SSH Login Username", + "description": "OS-level username for logging into the jump server host.", + "type": "string", + "order": 3 + }, + "ssh_key": { + "title": "SSH Private Key", + "description": "OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )", + "type": "string", + "airbyte_secret": true, + "multiline": true, + "order": 4 + } + } + }, + { + "title": "Password Authentication", + "required": [ + "tunnel_method", + "tunnel_host", + "tunnel_port", + "tunnel_user", + "tunnel_user_password" + ], + "properties": { + "tunnel_method": { + "description": "Connect through a jump server tunnel host using username and password authentication", + "type": "string", + "const": "SSH_PASSWORD_AUTH", + "order": 0 + }, + "tunnel_host": { + "title": "SSH Tunnel Jump Server Host", + "description": "Hostname of the jump server host that allows inbound ssh tunnel.", + "type": "string", + "order": 1 + }, + "tunnel_port": { + "title": "SSH Connection Port", + "description": "Port on the proxy/jump server that accepts inbound ssh connections.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 22, + "examples": ["22"], + "order": 2 + }, + "tunnel_user": { + "title": "SSH Login Username", + "description": "OS-level username for logging into the jump server host", + "type": "string", + "order": 3 + }, + "tunnel_user_password": { + "title": "Password", + "description": "OS-level password for logging into the jump server host", + "type": "string", + "airbyte_secret": true, + "order": 4 + } + } + } + ] + } + } + }, + "supportsNormalization": false, + "supportsDBT": false, + 
"supported_destination_sync_modes": [] +} diff --git a/airbyte-integrations/connectors/source-cockroachdb/acceptance-test-config.yml b/airbyte-integrations/connectors/source-cockroachdb/acceptance-test-config.yml new file mode 100644 index 000000000000..be8bdf914af3 --- /dev/null +++ b/airbyte-integrations/connectors/source-cockroachdb/acceptance-test-config.yml @@ -0,0 +1,7 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-cockroachdb:dev +tests: + spec: + - spec_path: "src/test-integration/resources/expected_spec.json" + config_path: "src/test-integration/resources/dummy_config.json" diff --git a/airbyte-integrations/connectors/source-cockroachdb/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-cockroachdb/acceptance-test-docker.sh new file mode 100644 index 000000000000..ba0ab2874b98 --- /dev/null +++ b/airbyte-integrations/connectors/source-cockroachdb/acceptance-test-docker.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input diff --git a/airbyte-integrations/connectors/source-cockroachdb/build.gradle b/airbyte-integrations/connectors/source-cockroachdb/build.gradle index 1a73d68242aa..67e77014637f 100644 --- a/airbyte-integrations/connectors/source-cockroachdb/build.gradle +++ b/airbyte-integrations/connectors/source-cockroachdb/build.gradle @@ -2,6 +2,7 @@ plugins { id 'application' id 'airbyte-docker' id 'airbyte-integration-test-java' + id 'airbyte-source-acceptance-test' } application { diff --git a/airbyte-integrations/connectors/source-cockroachdb/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-cockroachdb/integration_tests/acceptance.py new file mode 100644 index 000000000000..1302b2f57e10 --- /dev/null +++ b/airbyte-integrations/connectors/source-cockroachdb/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. 
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-cockroachdb/src/test-integration/resources/dummy_config.json b/airbyte-integrations/connectors/source-cockroachdb/src/test-integration/resources/dummy_config.json new file mode 100644 index 000000000000..49e2346d457f --- /dev/null +++ b/airbyte-integrations/connectors/source-cockroachdb/src/test-integration/resources/dummy_config.json @@ -0,0 +1,6 @@ +{ + "host": "default", + "port": 8123, + "database": "default", + "username": "default" +} diff --git a/airbyte-integrations/connectors/source-cockroachdb/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-cockroachdb/src/test-integration/resources/expected_spec.json new file mode 100644 index 000000000000..953149a5ef73 --- /dev/null +++ b/airbyte-integrations/connectors/source-cockroachdb/src/test-integration/resources/expected_spec.json @@ -0,0 +1,62 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/sources/cockroachdb", + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Cockroach Source Spec", + "type": "object", + "required": ["host", "port", "database", "username"], + "properties": { + "host": { + "title": "Host", + "description": "Hostname of the database.", + "type": "string", + "order": 0 + }, + "port": { + "title": "Port", + "description": "Port of the database.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 5432, + "examples": ["5432"], + "order": 1 + }, + "database": { + "title": "DB Name", + "description": "Name of the database.", + "type": "string", + "order": 2 + }, + "username": { + "title": "User", + "description": "Username to use to access the database.", + "type": "string", + "order": 3 + }, + "password": { + "title": "Password", + "description": "Password associated with the username.", + "type": "string", + "airbyte_secret": true, + "order": 4 + }, + "jdbc_url_params": { + "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (Eg. key1=value1&key2=value2&key3=value3). 
For more information read about JDBC URL parameters.", + "title": "JDBC URL Parameters (Advanced)", + "type": "string", + "order": 5 + }, + "ssl": { + "title": "Connect using SSL", + "description": "Encrypt client/server communications for increased security.", + "type": "boolean", + "default": false, + "order": 6 + } + } + }, + "supportsNormalization": false, + "supportsDBT": false, + "supported_destination_sync_modes": [] +} diff --git a/airbyte-integrations/connectors/source-db2/acceptance-test-config.yml b/airbyte-integrations/connectors/source-db2/acceptance-test-config.yml index 5106bfe080ca..a0707f64ca70 100644 --- a/airbyte-integrations/connectors/source-db2/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-db2/acceptance-test-config.yml @@ -3,4 +3,5 @@ connector_image: airbyte/source-db2:dev tests: spec: - - spec_path: "src/main/resources/spec.json" + - spec_path: "src/test-integration/resources/expected_spec.json" + config_path: "src/test-integration/resources/dummy_config.json" diff --git a/airbyte-integrations/connectors/source-db2/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-db2/acceptance-test-docker.sh new file mode 100644 index 000000000000..ba0ab2874b98 --- /dev/null +++ b/airbyte-integrations/connectors/source-db2/acceptance-test-docker.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input diff --git a/airbyte-integrations/connectors/source-db2/build.gradle b/airbyte-integrations/connectors/source-db2/build.gradle index 34d5d340fdc2..6798912d60f7 100644 --- a/airbyte-integrations/connectors/source-db2/build.gradle +++ b/airbyte-integrations/connectors/source-db2/build.gradle @@ -2,6 +2,7 @@ plugins { id 'application' id 'airbyte-docker' id 'airbyte-integration-test-java' + id 'airbyte-source-acceptance-test' } application { diff --git a/airbyte-integrations/connectors/source-db2/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-db2/integration_tests/acceptance.py new file mode 100644 index 000000000000..1302b2f57e10 --- /dev/null +++ b/airbyte-integrations/connectors/source-db2/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. 
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-db2/src/main/resources/spec.json b/airbyte-integrations/connectors/source-db2/src/main/resources/spec.json index 45235a6e0cd6..092944df3c4a 100644 --- a/airbyte-integrations/connectors/source-db2/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/source-db2/src/main/resources/spec.json @@ -56,9 +56,7 @@ "properties": { "encryption_method": { "type": "string", - "const": "unencrypted", - "enum": ["unencrypted"], - "default": "unencrypted" + "const": "unencrypted" } } }, @@ -69,9 +67,7 @@ "properties": { "encryption_method": { "type": "string", - "const": "encrypted_verify_certificate", - "enum": ["encrypted_verify_certificate"], - "default": "encrypted_verify_certificate" + "const": "encrypted_verify_certificate" }, "ssl_certificate": { "title": "SSL PEM file", diff --git a/airbyte-integrations/connectors/source-db2/src/test-integration/resources/dummy_config.json b/airbyte-integrations/connectors/source-db2/src/test-integration/resources/dummy_config.json new file mode 100644 index 000000000000..47e237f97604 --- /dev/null +++ b/airbyte-integrations/connectors/source-db2/src/test-integration/resources/dummy_config.json @@ -0,0 +1,11 @@ +{ + "host": "hhh", + "port": 8123, + "db": "ddd", + "username": "uuu", + "password": "ppp", + "encryption": { + "encryption_method": "encrypted_verify_certificate", + "ssl_certificate": "sss" + } +} diff --git a/airbyte-integrations/connectors/source-db2/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-db2/src/test-integration/resources/expected_spec.json new file mode 100644 index 000000000000..a7a07e530389 --- /dev/null +++ b/airbyte-integrations/connectors/source-db2/src/test-integration/resources/expected_spec.json @@ -0,0 +1,94 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/sources/db2", + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "IBM Db2 Source Spec", + "type": "object", + "required": ["host", "port", "db", "username", "password", "encryption"], + "properties": { + "host": { + "description": "Host of the Db2.", + "type": "string", + "order": 0 + }, + "port": { + "description": "Port of the database.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 8123, + "examples": ["8123"], + "order": 1 + }, + "db": { + "description": "Name of the database.", + "type": "string", + "examples": ["default"], + "order": 2 + }, + "username": { + "description": "Username to use to access the database.", + "type": "string", + "order": 3 + }, + "password": { + "description": "Password associated with the username.", + "type": "string", + "airbyte_secret": true, + "order": 4 + }, + "jdbc_url_params": { + "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. 
(example: key1=value1&key2=value2&key3=value3).", + "title": "JDBC URL Params", + "type": "string", + "order": 5 + }, + "encryption": { + "title": "Encryption", + "type": "object", + "description": "Encryption method to use when communicating with the database", + "order": 6, + "oneOf": [ + { + "title": "Unencrypted", + "description": "Data transfer will not be encrypted.", + "required": ["encryption_method"], + "properties": { + "encryption_method": { + "type": "string", + "const": "unencrypted" + } + } + }, + { + "title": "TLS Encrypted (verify certificate)", + "description": "Verify and use the cert provided by the server.", + "required": ["encryption_method", "ssl_certificate"], + "properties": { + "encryption_method": { + "type": "string", + "const": "encrypted_verify_certificate" + }, + "ssl_certificate": { + "title": "SSL PEM file", + "description": "Privacy Enhanced Mail (PEM) files are concatenated certificate containers frequently used in certificate installations", + "type": "string", + "airbyte_secret": true, + "multiline": true + }, + "key_store_password": { + "title": "Key Store Password. This field is optional. If you do not fill in this field, the password will be randomly generated.", + "description": "Key Store Password", + "type": "string", + "airbyte_secret": true + } + } + } + ] + } + } + }, + "supportsNormalization": false, + "supportsDBT": false, + "supported_destination_sync_modes": [] +} diff --git a/airbyte-integrations/connectors/source-elasticsearch/acceptance-test-config.yml b/airbyte-integrations/connectors/source-elasticsearch/acceptance-test-config.yml index 7f499fdefc40..73ee3ce32d37 100644 --- a/airbyte-integrations/connectors/source-elasticsearch/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-elasticsearch/acceptance-test-config.yml @@ -1,6 +1,7 @@ # See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) # for more information about how to configure these tests -connector_image: airbyte/source-elasticsearch +connector_image: airbyte/source-elasticsearch:dev tests: spec: - - spec_path: "src/test/resources/expected_spec.json" + - spec_path: "src/test-integration/resources/expected_spec.json" + config_path: "src/test-integration/resources/dummy_config.json" diff --git a/airbyte-integrations/connectors/source-elasticsearch/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-elasticsearch/acceptance-test-docker.sh new file mode 100644 index 000000000000..ba0ab2874b98 --- /dev/null +++ b/airbyte-integrations/connectors/source-elasticsearch/acceptance-test-docker.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . 
-t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input diff --git a/airbyte-integrations/connectors/source-elasticsearch/build.gradle b/airbyte-integrations/connectors/source-elasticsearch/build.gradle index dbd991036297..fff8a6d3fe05 100644 --- a/airbyte-integrations/connectors/source-elasticsearch/build.gradle +++ b/airbyte-integrations/connectors/source-elasticsearch/build.gradle @@ -2,6 +2,7 @@ plugins { id 'application' id 'airbyte-docker' id 'airbyte-integration-test-java' + id 'airbyte-source-acceptance-test' } application { diff --git a/airbyte-integrations/connectors/source-elasticsearch/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-elasticsearch/integration_tests/acceptance.py new file mode 100644 index 000000000000..1302b2f57e10 --- /dev/null +++ b/airbyte-integrations/connectors/source-elasticsearch/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-elasticsearch/src/main/resources/spec.json b/airbyte-integrations/connectors/source-elasticsearch/src/main/resources/spec.json index fba748601133..71ad637a0c4f 100644 --- a/airbyte-integrations/connectors/source-elasticsearch/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/source-elasticsearch/src/main/resources/spec.json @@ -5,7 +5,7 @@ "title": "Elasticsearch Connection Configuration", "type": "object", "required": ["endpoint"], - "additionalProperties": false, + "additionalProperties": true, "properties": { "endpoint": { "title": "Server Endpoint", @@ -19,7 +19,7 @@ "oneOf": [ { "title": "None", - "additionalProperties": false, + "additionalProperties": true, "description": "No authentication will be used", "required": ["method"], "properties": { @@ -31,7 +31,7 @@ }, { "title": "Api Key/Secret", - "additionalProperties": false, + "additionalProperties": true, "description": "Use a api key and secret combination to authenticate", "required": ["method", "apiKeyId", "apiKeySecret"], "properties": { @@ -54,7 +54,7 @@ }, { "title": "Username/Password", - "additionalProperties": false, + "additionalProperties": true, "description": "Basic auth header with a username and password", "required": ["method", "username", "password"], "properties": { diff --git a/airbyte-integrations/connectors/source-elasticsearch/src/test-integration/resources/dummy_config.json b/airbyte-integrations/connectors/source-elasticsearch/src/test-integration/resources/dummy_config.json new file mode 100644 index 000000000000..c945791cea37 --- /dev/null +++ b/airbyte-integrations/connectors/source-elasticsearch/src/test-integration/resources/dummy_config.json @@ -0,0 +1,3 @@ +{ + "endpoint": "default" +} diff --git a/airbyte-integrations/connectors/source-elasticsearch/src/test-integration/resources/expected_spec.json 
b/airbyte-integrations/connectors/source-elasticsearch/src/test-integration/resources/expected_spec.json new file mode 100644 index 000000000000..a6245dc39f1c --- /dev/null +++ b/airbyte-integrations/connectors/source-elasticsearch/src/test-integration/resources/expected_spec.json @@ -0,0 +1,85 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/source/elasticsearch", + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Elasticsearch Connection Configuration", + "type": "object", + "required": ["endpoint"], + "additionalProperties": true, + "properties": { + "endpoint": { + "title": "Server Endpoint", + "type": "string", + "description": "The full url of the Elasticsearch server" + }, + "authenticationMethod": { + "title": "Authentication Method", + "type": "object", + "description": "The type of authentication to be used", + "oneOf": [ + { + "title": "None", + "additionalProperties": true, + "description": "No authentication will be used", + "required": ["method"], + "properties": { + "method": { + "type": "string", + "const": "none" + } + } + }, + { + "title": "Api Key/Secret", + "additionalProperties": true, + "description": "Use a api key and secret combination to authenticate", + "required": ["method", "apiKeyId", "apiKeySecret"], + "properties": { + "method": { + "type": "string", + "const": "secret" + }, + "apiKeyId": { + "title": "API Key ID", + "description": "The Key ID to used when accessing an enterprise Elasticsearch instance.", + "type": "string" + }, + "apiKeySecret": { + "title": "API Key Secret", + "description": "The secret associated with the API Key ID.", + "type": "string", + "airbyte_secret": true + } + } + }, + { + "title": "Username/Password", + "additionalProperties": true, + "description": "Basic auth header with a username and password", + "required": ["method", "username", "password"], + "properties": { + "method": { + "type": "string", + "const": "basic" + }, + "username": { + "title": "Username", + "description": "Basic auth username to access a secure Elasticsearch server", + "type": "string" + }, + "password": { + "title": "Password", + "description": "Basic auth password to access a secure Elasticsearch server", + "type": "string", + "airbyte_secret": true + } + } + } + ] + } + } + }, + "supportsNormalization": false, + "supportsDBT": false, + "supported_destination_sync_modes": [] +} diff --git a/airbyte-integrations/connectors/source-elasticsearch/src/test/resources/expected_spec.json b/airbyte-integrations/connectors/source-elasticsearch/src/test/resources/expected_spec.json index fba748601133..71ad637a0c4f 100644 --- a/airbyte-integrations/connectors/source-elasticsearch/src/test/resources/expected_spec.json +++ b/airbyte-integrations/connectors/source-elasticsearch/src/test/resources/expected_spec.json @@ -5,7 +5,7 @@ "title": "Elasticsearch Connection Configuration", "type": "object", "required": ["endpoint"], - "additionalProperties": false, + "additionalProperties": true, "properties": { "endpoint": { "title": "Server Endpoint", @@ -19,7 +19,7 @@ "oneOf": [ { "title": "None", - "additionalProperties": false, + "additionalProperties": true, "description": "No authentication will be used", "required": ["method"], "properties": { @@ -31,7 +31,7 @@ }, { "title": "Api Key/Secret", - "additionalProperties": false, + "additionalProperties": true, "description": "Use a api key and secret combination to authenticate", "required": ["method", "apiKeyId", "apiKeySecret"], "properties": { @@ -54,7 
+54,7 @@ }, { "title": "Username/Password", - "additionalProperties": false, + "additionalProperties": true, "description": "Basic auth header with a username and password", "required": ["method", "username", "password"], "properties": { diff --git a/airbyte-integrations/connectors/source-jdbc/acceptance-test-config.yml b/airbyte-integrations/connectors/source-jdbc/acceptance-test-config.yml new file mode 100644 index 000000000000..9a4392c5347b --- /dev/null +++ b/airbyte-integrations/connectors/source-jdbc/acceptance-test-config.yml @@ -0,0 +1,7 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-jdbc:dev +tests: + spec: + - spec_path: "src/test-integration/resources/expected_spec.json" + config_path: "src/test-integration/resources/dummy_config.json" diff --git a/airbyte-integrations/connectors/source-jdbc/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-jdbc/acceptance-test-docker.sh new file mode 100644 index 000000000000..ba0ab2874b98 --- /dev/null +++ b/airbyte-integrations/connectors/source-jdbc/acceptance-test-docker.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input diff --git a/airbyte-integrations/connectors/source-jdbc/build.gradle b/airbyte-integrations/connectors/source-jdbc/build.gradle index 2e9393f32335..4daa6755626b 100644 --- a/airbyte-integrations/connectors/source-jdbc/build.gradle +++ b/airbyte-integrations/connectors/source-jdbc/build.gradle @@ -2,6 +2,7 @@ plugins { id 'application' id 'airbyte-docker' id 'airbyte-integration-test-java' + id 'airbyte-source-acceptance-test' id "java-library" // https://docs.gradle.org/current/userguide/java_testing.html#sec:java_test_fixtures id "java-test-fixtures" diff --git a/airbyte-integrations/connectors/source-jdbc/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-jdbc/integration_tests/acceptance.py new file mode 100644 index 000000000000..1302b2f57e10 --- /dev/null +++ b/airbyte-integrations/connectors/source-jdbc/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. 
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-jdbc/src/test-integration/resources/dummy_config.json b/airbyte-integrations/connectors/source-jdbc/src/test-integration/resources/dummy_config.json new file mode 100644 index 000000000000..892b30269c60 --- /dev/null +++ b/airbyte-integrations/connectors/source-jdbc/src/test-integration/resources/dummy_config.json @@ -0,0 +1,4 @@ +{ + "username": "default", + "jdbc_url": "default" +} diff --git a/airbyte-integrations/connectors/source-jdbc/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-jdbc/src/test-integration/resources/expected_spec.json new file mode 100644 index 000000000000..95e6b354ddcb --- /dev/null +++ b/airbyte-integrations/connectors/source-jdbc/src/test-integration/resources/expected_spec.json @@ -0,0 +1,35 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/sources/postgres", + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "JDBC Source Spec", + "type": "object", + "required": ["username", "jdbc_url"], + "properties": { + "username": { + "title": "Username", + "description": "The username which is used to access the database.", + "type": "string" + }, + "password": { + "title": "Password", + "description": "The password associated with this username.", + "type": "string", + "airbyte_secret": true + }, + "jdbc_url": { + "title": "JDBC URL", + "description": "JDBC formatted URL. See the standard here.", + "type": "string" + }, + "jdbc_url_params": { + "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).", + "title": "JDBC URL Params", + "type": "string" + } + } + }, + "supportsNormalization": false, + "supportsDBT": false, + "supported_destination_sync_modes": [] +} diff --git a/airbyte-integrations/connectors/source-kafka/acceptance-test-config.yml b/airbyte-integrations/connectors/source-kafka/acceptance-test-config.yml new file mode 100644 index 000000000000..6bebc5793b0a --- /dev/null +++ b/airbyte-integrations/connectors/source-kafka/acceptance-test-config.yml @@ -0,0 +1,7 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-kafka:dev +tests: + spec: + - spec_path: "src/test-integration/resources/expected_spec.json" + config_path: "src/test-integration/resources/dummy_config.json" \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-kafka/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-kafka/acceptance-test-docker.sh new file mode 100644 index 000000000000..ba0ab2874b98 --- /dev/null +++ b/airbyte-integrations/connectors/source-kafka/acceptance-test-docker.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . 
-t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input diff --git a/airbyte-integrations/connectors/source-kafka/build.gradle b/airbyte-integrations/connectors/source-kafka/build.gradle index 6ef80b0db86e..d06782d15150 100644 --- a/airbyte-integrations/connectors/source-kafka/build.gradle +++ b/airbyte-integrations/connectors/source-kafka/build.gradle @@ -2,6 +2,7 @@ plugins { id 'application' id 'airbyte-docker' id 'airbyte-integration-test-java' + id 'airbyte-source-acceptance-test' } application { diff --git a/airbyte-integrations/connectors/source-kafka/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-kafka/integration_tests/acceptance.py new file mode 100644 index 000000000000..1302b2f57e10 --- /dev/null +++ b/airbyte-integrations/connectors/source-kafka/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-kafka/src/main/resources/spec.json b/airbyte-integrations/connectors/source-kafka/src/main/resources/spec.json index 60ddd5e0c343..5a0bdcbcb8c0 100644 --- a/airbyte-integrations/connectors/source-kafka/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/source-kafka/src/main/resources/spec.json @@ -9,7 +9,7 @@ "title": "Kafka Source Spec", "type": "object", "required": ["bootstrap_servers", "subscription", "protocol"], - "additionalProperties": false, + "additionalProperties": true, "properties": { "MessageFormat": { "title": "MessageFormat", @@ -21,8 +21,7 @@ "properties": { "deserialization_type": { "type": "string", - "enum": ["JSON"], - "default": "JSON" + "const": "JSON" } } }, @@ -30,9 +29,7 @@ "title": "AVRO", "properties": { "deserialization_type": { - "type": "string", - "enum": ["AVRO"], - "default": "AVRO" + "const": "AVRO" }, "deserialization_strategy": { "type": "string", @@ -77,9 +74,7 @@ "subscription_type": { "description": "Manually assign a list of partitions to this consumer. 
This interface does not allow for incremental assignment and will replace the previous assignment (if there is one).\nIf the given list of topic partitions is empty, it is treated the same as unsubscribe().", "type": "string", - "const": "assign", - "enum": ["assign"], - "default": "assign" + "const": "assign" }, "topic_partitions": { "title": "List of topic:partition Pairs", @@ -95,9 +90,7 @@ "subscription_type": { "description": "The Topic pattern from which the records will be read.", "type": "string", - "const": "subscribe", - "enum": ["subscribe"], - "default": "subscribe" + "const": "subscribe" }, "topic_pattern": { "title": "Topic Pattern", @@ -143,8 +136,7 @@ "properties": { "security_protocol": { "type": "string", - "enum": ["PLAINTEXT"], - "default": "PLAINTEXT" + "const": "PLAINTEXT" } } }, @@ -158,15 +150,13 @@ "properties": { "security_protocol": { "type": "string", - "enum": ["SASL_PLAINTEXT"], - "default": "SASL_PLAINTEXT" + "const": "SASL_PLAINTEXT" }, "sasl_mechanism": { "title": "SASL Mechanism", "description": "The SASL mechanism used for client connections. This may be any mechanism for which a security provider is available.", "type": "string", - "default": "PLAIN", - "enum": ["PLAIN"] + "const": "PLAIN" }, "sasl_jaas_config": { "title": "SASL JAAS Config", @@ -187,8 +177,7 @@ "properties": { "security_protocol": { "type": "string", - "enum": ["SASL_SSL"], - "default": "SASL_SSL" + "const": "SASL_SSL" }, "sasl_mechanism": { "title": "SASL Mechanism", diff --git a/airbyte-integrations/connectors/source-kafka/src/test-integration/resources/dummy_config.json b/airbyte-integrations/connectors/source-kafka/src/test-integration/resources/dummy_config.json new file mode 100644 index 000000000000..520d509e9154 --- /dev/null +++ b/airbyte-integrations/connectors/source-kafka/src/test-integration/resources/dummy_config.json @@ -0,0 +1,10 @@ +{ + "bootstrap_servers": "default", + "subscription": { + "subscription_type": "assign", + "topic_partitions": "default" + }, + "protocol": { + "security_protocol": "PLAINTEXT" + } +} diff --git a/airbyte-integrations/connectors/source-kafka/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-kafka/src/test-integration/resources/expected_spec.json new file mode 100644 index 000000000000..3eae6e008ad7 --- /dev/null +++ b/airbyte-integrations/connectors/source-kafka/src/test-integration/resources/expected_spec.json @@ -0,0 +1,275 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/sources/kafka", + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Kafka Source Spec", + "type": "object", + "required": ["bootstrap_servers", "subscription", "protocol"], + "additionalProperties": true, + "properties": { + "MessageFormat": { + "title": "MessageFormat", + "type": "object", + "description": "The serialization used based on this ", + "oneOf": [ + { + "title": "JSON", + "properties": { + "deserialization_type": { + "type": "string", + "const": "JSON" + } + } + }, + { + "title": "AVRO", + "properties": { + "deserialization_type": { + "const": "AVRO" + }, + "deserialization_strategy": { + "type": "string", + "enum": [ + "TopicNameStrategy", + "RecordNameStrategy", + "TopicRecordNameStrategy" + ], + "default": "TopicNameStrategy" + }, + "schema_registry_url": { + "type": "string", + "examples": ["http://localhost:8081"] + }, + "schema_registry_username": { + "type": "string", + "default": "" + }, + "schema_registry_password": { + "type": "string", + 
"default": "" + } + } + } + ] + }, + "bootstrap_servers": { + "title": "Bootstrap Servers", + "description": "A list of host/port pairs to use for establishing the initial connection to the Kafka cluster. The client will make use of all servers irrespective of which servers are specified here for bootstrapping—this list only impacts the initial hosts used to discover the full set of servers. This list should be in the form host1:port1,host2:port2,.... Since these servers are just used for the initial connection to discover the full cluster membership (which may change dynamically), this list need not contain the full set of servers (you may want more than one, though, in case a server is down).", + "type": "string", + "examples": ["kafka-broker1:9092,kafka-broker2:9092"] + }, + "subscription": { + "title": "Subscription Method", + "type": "object", + "description": "You can choose to manually assign a list of partitions, or subscribe to all topics matching specified pattern to get dynamically assigned partitions.", + "oneOf": [ + { + "title": "Manually assign a list of partitions", + "required": ["subscription_type", "topic_partitions"], + "properties": { + "subscription_type": { + "description": "Manually assign a list of partitions to this consumer. This interface does not allow for incremental assignment and will replace the previous assignment (if there is one).\nIf the given list of topic partitions is empty, it is treated the same as unsubscribe().", + "type": "string", + "const": "assign" + }, + "topic_partitions": { + "title": "List of topic:partition Pairs", + "type": "string", + "examples": ["sample.topic:0, sample.topic:1"] + } + } + }, + { + "title": "Subscribe to all topics matching specified pattern", + "required": ["subscription_type", "topic_pattern"], + "properties": { + "subscription_type": { + "description": "The Topic pattern from which the records will be read.", + "type": "string", + "const": "subscribe" + }, + "topic_pattern": { + "title": "Topic Pattern", + "type": "string", + "examples": ["sample.topic"] + } + } + } + ] + }, + "test_topic": { + "title": "Test Topic", + "description": "The Topic to test in case the Airbyte can consume messages.", + "type": "string", + "examples": ["test.topic"] + }, + "group_id": { + "title": "Group ID", + "description": "The Group ID is how you distinguish different consumer groups.", + "type": "string", + "examples": ["group.id"] + }, + "max_poll_records": { + "title": "Max Poll Records", + "description": "The maximum number of records returned in a single call to poll(). Note, that max_poll_records does not impact the underlying fetching behavior. 
The consumer will cache the records from each fetch request and returns them incrementally from each poll.", + "type": "integer", + "default": 500 + }, + "polling_time": { + "title": "Polling Time", + "description": "Amount of time Kafka connector should try to poll for messages.", + "type": "integer", + "default": 100 + }, + "protocol": { + "title": "Protocol", + "type": "object", + "description": "The Protocol used to communicate with brokers.", + "oneOf": [ + { + "title": "PLAINTEXT", + "required": ["security_protocol"], + "properties": { + "security_protocol": { + "type": "string", + "const": "PLAINTEXT" + } + } + }, + { + "title": "SASL PLAINTEXT", + "required": [ + "security_protocol", + "sasl_mechanism", + "sasl_jaas_config" + ], + "properties": { + "security_protocol": { + "type": "string", + "const": "SASL_PLAINTEXT" + }, + "sasl_mechanism": { + "title": "SASL Mechanism", + "description": "The SASL mechanism used for client connections. This may be any mechanism for which a security provider is available.", + "type": "string", + "const": "PLAIN" + }, + "sasl_jaas_config": { + "title": "SASL JAAS Config", + "description": "The JAAS login context parameters for SASL connections in the format used by JAAS configuration files.", + "type": "string", + "default": "", + "airbyte_secret": true + } + } + }, + { + "title": "SASL SSL", + "required": [ + "security_protocol", + "sasl_mechanism", + "sasl_jaas_config" + ], + "properties": { + "security_protocol": { + "type": "string", + "const": "SASL_SSL" + }, + "sasl_mechanism": { + "title": "SASL Mechanism", + "description": "The SASL mechanism used for client connections. This may be any mechanism for which a security provider is available.", + "type": "string", + "default": "GSSAPI", + "enum": [ + "GSSAPI", + "OAUTHBEARER", + "SCRAM-SHA-256", + "SCRAM-SHA-512", + "PLAIN" + ] + }, + "sasl_jaas_config": { + "title": "SASL JAAS Config", + "description": "The JAAS login context parameters for SASL connections in the format used by JAAS configuration files.", + "type": "string", + "default": "", + "airbyte_secret": true + } + } + } + ] + }, + "client_id": { + "title": "Client ID", + "description": "An ID string to pass to the server when making requests. The purpose of this is to be able to track the source of requests beyond just ip/port by allowing a logical application name to be included in server-side request logging.", + "type": "string", + "examples": ["airbyte-consumer"] + }, + "enable_auto_commit": { + "title": "Enable Auto Commit", + "description": "If true, the consumer's offset will be periodically committed in the background.", + "type": "boolean", + "default": true + }, + "auto_commit_interval_ms": { + "title": "Auto Commit Interval, ms", + "description": "The frequency in milliseconds that the consumer offsets are auto-committed to Kafka if enable.auto.commit is set to true.", + "type": "integer", + "default": 5000 + }, + "client_dns_lookup": { + "title": "Client DNS Lookup", + "description": "Controls how the client uses DNS lookups. If set to use_all_dns_ips, connect to each returned IP address in sequence until a successful connection is established. After a disconnection, the next IP is used. Once all IPs have been used once, the client resolves the IP(s) from the hostname again. If set to resolve_canonical_bootstrap_servers_only, resolve each bootstrap address into a list of canonical names. After the bootstrap phase, this behaves the same as use_all_dns_ips. 
If set to default (deprecated), attempt to connect to the first IP address returned by the lookup, even if the lookup returns multiple IP addresses.", + "type": "string", + "default": "use_all_dns_ips", + "enum": [ + "default", + "use_all_dns_ips", + "resolve_canonical_bootstrap_servers_only" + ] + }, + "retry_backoff_ms": { + "title": "Retry Backoff, ms", + "description": "The amount of time to wait before attempting to retry a failed request to a given topic partition. This avoids repeatedly sending requests in a tight loop under some failure scenarios.", + "type": "integer", + "default": 100 + }, + "request_timeout_ms": { + "title": "Request Timeout, ms", + "description": "The configuration controls the maximum amount of time the client will wait for the response of a request. If the response is not received before the timeout elapses the client will resend the request if necessary or fail the request if retries are exhausted.", + "type": "integer", + "default": 30000 + }, + "receive_buffer_bytes": { + "title": "Receive Buffer, bytes", + "description": "The size of the TCP receive buffer (SO_RCVBUF) to use when reading data. If the value is -1, the OS default will be used.", + "type": "integer", + "default": 32768 + }, + "auto_offset_reset": { + "title": "Auto Offset Reset", + "description": "What to do when there is no initial offset in Kafka or if the current offset does not exist any more on the server - earliest: automatically reset the offset to the earliest offset, latest: automatically reset the offset to the latest offset, none: throw exception to the consumer if no previous offset is found for the consumer's group, anything else: throw exception to the consumer.", + "type": "string", + "default": "latest", + "enum": ["latest", "earliest", "none"] + }, + "repeated_calls": { + "title": "Repeated Calls", + "description": "The number of repeated calls to poll() if no messages were received.", + "type": "integer", + "default": 3 + }, + "max_records_process": { + "title": "Maximum Records", + "description": "The Maximum to be processed per execution", + "type": "integer", + "default": 100000 + } + } + }, + "supportsIncremental": true, + "supportsNormalization": false, + "supportsDBT": false, + "supported_destination_sync_modes": [], + "supported_source_sync_modes": ["append"] +} diff --git a/airbyte-integrations/connectors/source-mongodb-v2/acceptance-test-config.yml b/airbyte-integrations/connectors/source-mongodb-v2/acceptance-test-config.yml new file mode 100644 index 000000000000..6402c3beba5f --- /dev/null +++ b/airbyte-integrations/connectors/source-mongodb-v2/acceptance-test-config.yml @@ -0,0 +1,7 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-mongodb-v2:dev +tests: + spec: + - spec_path: "src/test-integration/resources/expected_spec.json" + config_path: "src/test-integration/resources/dummy_config.json" diff --git a/airbyte-integrations/connectors/source-mongodb-v2/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-mongodb-v2/acceptance-test-docker.sh new file mode 100644 index 000000000000..ba0ab2874b98 --- /dev/null +++ b/airbyte-integrations/connectors/source-mongodb-v2/acceptance-test-docker.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . 
-t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input diff --git a/airbyte-integrations/connectors/source-mongodb-v2/build.gradle b/airbyte-integrations/connectors/source-mongodb-v2/build.gradle index 6b776c6192dc..ab5675cd3211 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/build.gradle +++ b/airbyte-integrations/connectors/source-mongodb-v2/build.gradle @@ -2,6 +2,7 @@ plugins { id 'application' id 'airbyte-docker' id 'airbyte-integration-test-java' + id 'airbyte-source-acceptance-test' } application { diff --git a/airbyte-integrations/connectors/source-mongodb-v2/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-mongodb-v2/integration_tests/acceptance.py new file mode 100644 index 000000000000..1302b2f57e10 --- /dev/null +++ b/airbyte-integrations/connectors/source-mongodb-v2/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/main/resources/spec.json b/airbyte-integrations/connectors/source-mongodb-v2/src/main/resources/spec.json index fc7959b42274..2f535f07687a 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/main/resources/spec.json @@ -20,8 +20,7 @@ "properties": { "instance": { "type": "string", - "enum": ["standalone"], - "default": "standalone" + "const": "standalone" }, "host": { "title": "Host", @@ -54,8 +53,7 @@ "properties": { "instance": { "type": "string", - "enum": ["replica"], - "default": "replica" + "const": "replica" }, "server_addresses": { "title": "Server Addresses", @@ -74,13 +72,12 @@ }, { "title": "MongoDB Atlas", - "additionalProperties": false, + "additionalProperties": true, "required": ["instance", "cluster_url"], "properties": { "instance": { "type": "string", - "enum": ["atlas"], - "default": "atlas" + "const": "atlas" }, "cluster_url": { "title": "Cluster URL", diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/test-integration/resources/dummy_config.json b/airbyte-integrations/connectors/source-mongodb-v2/src/test-integration/resources/dummy_config.json new file mode 100644 index 000000000000..0216ddfd3c03 --- /dev/null +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/test-integration/resources/dummy_config.json @@ -0,0 +1,3 @@ +{ + "database": "default" +} diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-mongodb-v2/src/test-integration/resources/expected_spec.json new file mode 100644 index 000000000000..b39746df263b --- /dev/null +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/test-integration/resources/expected_spec.json @@ -0,0 +1,124 @@ +{ + "documentationUrl": 
"https://docs.airbyte.com/integrations/sources/mongodb-v2", + "changelogUrl": "https://docs.airbyte.com/integrations/sources/mongodb-v2", + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "MongoDb Source Spec", + "type": "object", + "required": ["database"], + "additionalProperties": true, + "properties": { + "instance_type": { + "type": "object", + "title": "MongoDb Instance Type", + "description": "The MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.", + "order": 0, + "oneOf": [ + { + "title": "Standalone MongoDb Instance", + "required": ["instance", "host", "port"], + "properties": { + "instance": { + "type": "string", + "const": "standalone" + }, + "host": { + "title": "Host", + "type": "string", + "description": "The host name of the Mongo database.", + "order": 0 + }, + "port": { + "title": "Port", + "type": "integer", + "description": "The port of the Mongo database.", + "minimum": 0, + "maximum": 65536, + "default": 27017, + "examples": ["27017"], + "order": 1 + }, + "tls": { + "title": "TLS Connection", + "type": "boolean", + "description": "Indicates whether TLS encryption protocol will be used to connect to MongoDB. It is recommended to use TLS connection if possible. For more information see documentation.", + "default": false, + "order": 2 + } + } + }, + { + "title": "Replica Set", + "required": ["instance", "server_addresses"], + "properties": { + "instance": { + "type": "string", + "const": "replica" + }, + "server_addresses": { + "title": "Server Addresses", + "type": "string", + "description": "The members of a replica set. Please specify `host`:`port` of each member separated by comma.", + "examples": ["host1:27017,host2:27017,host3:27017"], + "order": 0 + }, + "replica_set": { + "title": "Replica Set", + "type": "string", + "description": "A replica set in MongoDB is a group of mongod processes that maintain the same data set.", + "order": 1 + } + } + }, + { + "title": "MongoDB Atlas", + "additionalProperties": true, + "required": ["instance", "cluster_url"], + "properties": { + "instance": { + "type": "string", + "const": "atlas" + }, + "cluster_url": { + "title": "Cluster URL", + "type": "string", + "description": "The URL of a cluster to connect to.", + "order": 0 + } + } + } + ] + }, + "database": { + "title": "Database Name", + "type": "string", + "description": "The database you want to replicate.", + "order": 1 + }, + "user": { + "title": "User", + "type": "string", + "description": "The username which is used to access the database.", + "order": 2 + }, + "password": { + "title": "Password", + "type": "string", + "description": "The password associated with this username.", + "airbyte_secret": true, + "order": 3 + }, + "auth_source": { + "title": "Authentication Source", + "type": "string", + "description": "The authentication source where the user information is stored.", + "default": "admin", + "examples": ["admin"], + "order": 4 + } + } + }, + "supportsNormalization": false, + "supportsDBT": false, + "supported_destination_sync_modes": [] +} diff --git a/airbyte-integrations/connectors/source-mssql/acceptance-test-config.yml b/airbyte-integrations/connectors/source-mssql/acceptance-test-config.yml index 5aa3dde87597..2b3f94e1b3a8 100644 --- a/airbyte-integrations/connectors/source-mssql/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-mssql/acceptance-test-config.yml @@ -3,4 +3,7 @@ connector_image: airbyte/source-mssql:dev tests: spec: - - 
spec_path: "src/main/resources/spec.json" + - spec_path: "src/test-integration/resources/expected_spec.json" + config_path: "src/test-integration/resources/dummy_config.json" + backward_compatibility_tests_config: + disable_for_version: "0.4.25" diff --git a/airbyte-integrations/connectors/source-mssql/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-mssql/acceptance-test-docker.sh new file mode 100644 index 000000000000..ba0ab2874b98 --- /dev/null +++ b/airbyte-integrations/connectors/source-mssql/acceptance-test-docker.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input diff --git a/airbyte-integrations/connectors/source-mssql/build.gradle b/airbyte-integrations/connectors/source-mssql/build.gradle index a1bceecbf7a0..55f5f5a313c3 100644 --- a/airbyte-integrations/connectors/source-mssql/build.gradle +++ b/airbyte-integrations/connectors/source-mssql/build.gradle @@ -3,6 +3,7 @@ plugins { id 'airbyte-docker' id 'airbyte-integration-test-java' id 'airbyte-performance-test-java' + id 'airbyte-source-acceptance-test' } application { diff --git a/airbyte-integrations/connectors/source-mssql/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-mssql/integration_tests/acceptance.py new file mode 100644 index 000000000000..1302b2f57e10 --- /dev/null +++ b/airbyte-integrations/connectors/source-mssql/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. 
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-mssql/src/main/resources/spec.json b/airbyte-integrations/connectors/source-mssql/src/main/resources/spec.json index 674f9342317d..35b192d2c4de 100644 --- a/airbyte-integrations/connectors/source-mssql/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/source-mssql/src/main/resources/spec.json @@ -72,9 +72,7 @@ "properties": { "ssl_method": { "type": "string", - "const": "unencrypted", - "enum": ["unencrypted"], - "default": "unencrypted" + "const": "unencrypted" } } }, @@ -85,9 +83,7 @@ "properties": { "ssl_method": { "type": "string", - "const": "encrypted_trust_server_certificate", - "enum": ["encrypted_trust_server_certificate"], - "default": "encrypted_trust_server_certificate" + "const": "encrypted_trust_server_certificate" } } }, @@ -98,9 +94,7 @@ "properties": { "ssl_method": { "type": "string", - "const": "encrypted_verify_certificate", - "enum": ["encrypted_verify_certificate"], - "default": "encrypted_verify_certificate" + "const": "encrypted_verify_certificate" }, "hostNameInCertificate": { "title": "Host Name In Certificate", @@ -127,8 +121,6 @@ "method": { "type": "string", "const": "STANDARD", - "enum": ["STANDARD"], - "default": "STANDARD", "order": 0 } } @@ -141,8 +133,6 @@ "method": { "type": "string", "const": "CDC", - "enum": ["CDC"], - "default": "CDC", "order": 0 }, "data_to_sync": { @@ -160,15 +150,6 @@ "enum": ["Snapshot", "Read Committed"], "description": "Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. If you choose the \"Snapshot\" level, you must enable the snapshot isolation mode on the database.", "order": 2 - }, - "initial_waiting_seconds": { - "type": "integer", - "title": "Initial Waiting Time in Seconds (Advanced)", - "description": "The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. 
Read about initial waiting time.", - "default": 300, - "min": 120, - "max": 1200, - "order": 3 } } } diff --git a/airbyte-integrations/connectors/source-mssql/src/test-integration/resources/dummy_config.json b/airbyte-integrations/connectors/source-mssql/src/test-integration/resources/dummy_config.json new file mode 100644 index 000000000000..560e55333378 --- /dev/null +++ b/airbyte-integrations/connectors/source-mssql/src/test-integration/resources/dummy_config.json @@ -0,0 +1,6 @@ +{ + "host": "default", + "port": 5555, + "database": "default", + "username": "default" +} diff --git a/airbyte-integrations/connectors/source-mssql/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-mssql/src/test-integration/resources/expected_spec.json new file mode 100644 index 000000000000..4a5352b013bd --- /dev/null +++ b/airbyte-integrations/connectors/source-mssql/src/test-integration/resources/expected_spec.json @@ -0,0 +1,277 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/destinations/mssql", + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "MSSQL Source Spec", + "type": "object", + "required": ["host", "port", "database", "username"], + "properties": { + "host": { + "description": "The hostname of the database.", + "title": "Host", + "type": "string", + "order": 0 + }, + "port": { + "description": "The port of the database.", + "title": "Port", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "examples": ["1433"], + "order": 1 + }, + "database": { + "description": "The name of the database.", + "title": "Database", + "type": "string", + "examples": ["master"], + "order": 2 + }, + "schemas": { + "title": "Schemas", + "description": "The list of schemas to sync from. Defaults to user. Case sensitive.", + "type": "array", + "items": { + "type": "string" + }, + "minItems": 0, + "uniqueItems": true, + "default": ["dbo"], + "order": 3 + }, + "username": { + "description": "The username which is used to access the database.", + "title": "Username", + "type": "string", + "order": 4 + }, + "password": { + "description": "The password associated with the username.", + "title": "Password", + "type": "string", + "airbyte_secret": true, + "order": 5 + }, + "jdbc_url_params": { + "title": "JDBC URL Params", + "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).", + "type": "string", + "order": 6 + }, + "ssl_method": { + "title": "SSL Method", + "type": "object", + "description": "The encryption method which is used when communicating with the database.", + "order": 7, + "oneOf": [ + { + "title": "Unencrypted", + "description": "Data transfer will not be encrypted.", + "required": ["ssl_method"], + "properties": { + "ssl_method": { + "type": "string", + "const": "unencrypted" + } + } + }, + { + "title": "Encrypted (trust server certificate)", + "description": "Use the certificate provided by the server without verification. 
(For testing purposes only!)", + "required": ["ssl_method"], + "properties": { + "ssl_method": { + "type": "string", + "const": "encrypted_trust_server_certificate" + } + } + }, + { + "title": "Encrypted (verify certificate)", + "description": "Verify and use the certificate provided by the server.", + "required": ["ssl_method", "trustStoreName", "trustStorePassword"], + "properties": { + "ssl_method": { + "type": "string", + "const": "encrypted_verify_certificate" + }, + "hostNameInCertificate": { + "title": "Host Name In Certificate", + "type": "string", + "description": "Specifies the host name of the server. The value of this property must match the subject property of the certificate.", + "order": 7 + } + } + } + ] + }, + "replication_method": { + "type": "object", + "title": "Replication Method", + "description": "The replication method used for extracting data from the database. STANDARD replication requires no setup on the DB side but will not be able to represent deletions incrementally. CDC uses {TBC} to detect inserts, updates, and deletes. This needs to be configured on the source database itself.", + "default": "STANDARD", + "order": 8, + "oneOf": [ + { + "title": "Standard", + "description": "Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally.", + "required": ["method"], + "properties": { + "method": { + "type": "string", + "const": "STANDARD", + "order": 0 + } + } + }, + { + "title": "Logical Replication (CDC)", + "description": "CDC uses {TBC} to detect inserts, updates, and deletes. This needs to be configured on the source database itself.", + "required": ["method"], + "properties": { + "method": { + "type": "string", + "const": "CDC", + "order": 0 + }, + "data_to_sync": { + "title": "Data to Sync", + "type": "string", + "default": "Existing and New", + "enum": ["Existing and New", "New Changes Only"], + "description": "What data should be synced under the CDC. \"Existing and New\" will read existing data as a snapshot, and sync new changes through CDC. \"New Changes Only\" will skip the initial snapshot, and only sync new changes through CDC.", + "order": 1 + }, + "snapshot_isolation": { + "title": "Initial Snapshot Isolation Level", + "type": "string", + "default": "Snapshot", + "enum": ["Snapshot", "Read Committed"], + "description": "Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. 
If you choose the \"Snapshot\" level, you must enable the snapshot isolation mode on the database.", + "order": 2 + } + } + } + ] + }, + "tunnel_method": { + "type": "object", + "title": "SSH Tunnel Method", + "description": "Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.", + "oneOf": [ + { + "title": "No Tunnel", + "required": ["tunnel_method"], + "properties": { + "tunnel_method": { + "description": "No ssh tunnel needed to connect to database", + "type": "string", + "const": "NO_TUNNEL", + "order": 0 + } + } + }, + { + "title": "SSH Key Authentication", + "required": [ + "tunnel_method", + "tunnel_host", + "tunnel_port", + "tunnel_user", + "ssh_key" + ], + "properties": { + "tunnel_method": { + "description": "Connect through a jump server tunnel host using username and ssh key", + "type": "string", + "const": "SSH_KEY_AUTH", + "order": 0 + }, + "tunnel_host": { + "title": "SSH Tunnel Jump Server Host", + "description": "Hostname of the jump server host that allows inbound ssh tunnel.", + "type": "string", + "order": 1 + }, + "tunnel_port": { + "title": "SSH Connection Port", + "description": "Port on the proxy/jump server that accepts inbound ssh connections.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 22, + "examples": ["22"], + "order": 2 + }, + "tunnel_user": { + "title": "SSH Login Username", + "description": "OS-level username for logging into the jump server host.", + "type": "string", + "order": 3 + }, + "ssh_key": { + "title": "SSH Private Key", + "description": "OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )", + "type": "string", + "airbyte_secret": true, + "multiline": true, + "order": 4 + } + } + }, + { + "title": "Password Authentication", + "required": [ + "tunnel_method", + "tunnel_host", + "tunnel_port", + "tunnel_user", + "tunnel_user_password" + ], + "properties": { + "tunnel_method": { + "description": "Connect through a jump server tunnel host using username and password authentication", + "type": "string", + "const": "SSH_PASSWORD_AUTH", + "order": 0 + }, + "tunnel_host": { + "title": "SSH Tunnel Jump Server Host", + "description": "Hostname of the jump server host that allows inbound ssh tunnel.", + "type": "string", + "order": 1 + }, + "tunnel_port": { + "title": "SSH Connection Port", + "description": "Port on the proxy/jump server that accepts inbound ssh connections.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 22, + "examples": ["22"], + "order": 2 + }, + "tunnel_user": { + "title": "SSH Login Username", + "description": "OS-level username for logging into the jump server host", + "type": "string", + "order": 3 + }, + "tunnel_user_password": { + "title": "Password", + "description": "OS-level password for logging into the jump server host", + "type": "string", + "airbyte_secret": true, + "order": 4 + } + } + } + ] + } + } + }, + "supportsNormalization": false, + "supportsDBT": false, + "supported_destination_sync_modes": [] +} diff --git a/airbyte-integrations/connectors/source-mysql/acceptance-test-config.yml b/airbyte-integrations/connectors/source-mysql/acceptance-test-config.yml index cf854c42e505..4653b50be581 100644 --- a/airbyte-integrations/connectors/source-mysql/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-mysql/acceptance-test-config.yml @@ -3,4 +3,5 @@ connector_image: airbyte/source-mysql:dev tests: spec: - - spec_path: 
"src/main/resources/spec.json" + - spec_path: "src/test-integration/resources/expected_spec.json" + config_path: "src/test-integration/resources/dummy_config.json" diff --git a/airbyte-integrations/connectors/source-mysql/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-mysql/acceptance-test-docker.sh new file mode 100644 index 000000000000..ba0ab2874b98 --- /dev/null +++ b/airbyte-integrations/connectors/source-mysql/acceptance-test-docker.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input diff --git a/airbyte-integrations/connectors/source-mysql/build.gradle b/airbyte-integrations/connectors/source-mysql/build.gradle index 568ccfd0eee0..d738922c4714 100644 --- a/airbyte-integrations/connectors/source-mysql/build.gradle +++ b/airbyte-integrations/connectors/source-mysql/build.gradle @@ -3,6 +3,7 @@ plugins { id 'airbyte-docker' id 'airbyte-integration-test-java' id 'airbyte-performance-test-java' + id 'airbyte-source-acceptance-test' } application { diff --git a/airbyte-integrations/connectors/source-mysql/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-mysql/integration_tests/acceptance.py new file mode 100644 index 000000000000..1302b2f57e10 --- /dev/null +++ b/airbyte-integrations/connectors/source-mysql/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. 
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-mysql/src/main/resources/spec.json b/airbyte-integrations/connectors/source-mysql/src/main/resources/spec.json index c1a6f48b4e87..40373023020a 100644 --- a/airbyte-integrations/connectors/source-mysql/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/source-mysql/src/main/resources/spec.json @@ -68,8 +68,6 @@ "mode": { "type": "string", "const": "preferred", - "enum": ["preferred"], - "default": "preferred", "order": 0 } } @@ -82,8 +80,6 @@ "mode": { "type": "string", "const": "required", - "enum": ["required"], - "default": "required", "order": 0 } } @@ -96,8 +92,6 @@ "mode": { "type": "string", "const": "verify_ca", - "enum": ["verify_ca"], - "default": "verify_ca", "order": 0 }, "ca_certificate": { @@ -141,8 +135,6 @@ "mode": { "type": "string", "const": "verify_identity", - "enum": ["verify_identity"], - "default": "verify_identity", "order": 0 }, "ca_certificate": { @@ -194,8 +186,6 @@ "method": { "type": "string", "const": "STANDARD", - "enum": ["STANDARD"], - "default": "STANDARD", "order": 0 } } @@ -208,8 +198,6 @@ "method": { "type": "string", "const": "CDC", - "enum": ["CDC"], - "default": "CDC", "order": 0 }, "initial_waiting_seconds": { diff --git a/airbyte-integrations/connectors/source-mysql/src/test-integration/resources/dummy_config.json b/airbyte-integrations/connectors/source-mysql/src/test-integration/resources/dummy_config.json new file mode 100644 index 000000000000..e17733f16b23 --- /dev/null +++ b/airbyte-integrations/connectors/source-mysql/src/test-integration/resources/dummy_config.json @@ -0,0 +1,7 @@ +{ + "host": "default", + "port": 5555, + "database": "default", + "username": "default", + "replication_method": { "method": "STANDARD" } +} diff --git a/airbyte-integrations/connectors/source-mysql/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-mysql/src/test-integration/resources/expected_spec.json new file mode 100644 index 000000000000..a1ab35b8c56c --- /dev/null +++ b/airbyte-integrations/connectors/source-mysql/src/test-integration/resources/expected_spec.json @@ -0,0 +1,341 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/sources/mysql", + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "MySql Source Spec", + "type": "object", + "required": ["host", "port", "database", "username", "replication_method"], + "properties": { + "host": { + "description": "The host name of the database.", + "title": "Host", + "type": "string", + "order": 0 + }, + "port": { + "description": "The port to connect to.", + "title": "Port", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 3306, + "examples": ["3306"], + "order": 1 + }, + "database": { + "description": "The database name.", + "title": "Database", + "type": "string", + "order": 2 + }, + "username": { + "description": "The username which is used to access the database.", + "title": "Username", + "type": "string", + "order": 3 + }, + "password": { + "description": "The password associated with the username.", + "title": "Password", + "type": "string", + "airbyte_secret": true, + "order": 4 + }, + "jdbc_url_params": { + "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3). 
For more information read about JDBC URL parameters.", + "title": "JDBC URL Parameters (Advanced)", + "type": "string", + "order": 5 + }, + "ssl": { + "title": "SSL Connection", + "description": "Encrypt data using SSL.", + "type": "boolean", + "default": true, + "order": 6 + }, + "ssl_mode": { + "title": "SSL modes", + "description": "SSL connection modes.
  • preferred - Automatically attempt SSL connection. If the MySQL server does not support SSL, continue with a regular connection.
  • required - Always connect with SSL. If the MySQL server doesn’t support SSL, the connection will not be established. Certificate Authority (CA) and Hostname are not verified.
  • verify-ca - Always connect with SSL. Verifies CA, but allows connection even if Hostname does not match.
  • Verify Identity - Always connect with SSL. Verify both CA and Hostname.
  • Read more in the docs.", + "type": "object", + "order": 7, + "oneOf": [ + { + "title": "preferred", + "description": "Preferred SSL mode.", + "required": ["mode"], + "properties": { + "mode": { + "type": "string", + "const": "preferred", + "order": 0 + } + } + }, + { + "title": "required", + "description": "Require SSL mode.", + "required": ["mode"], + "properties": { + "mode": { + "type": "string", + "const": "required", + "order": 0 + } + } + }, + { + "title": "Verify CA", + "description": "Verify CA SSL mode.", + "required": ["mode", "ca_certificate"], + "properties": { + "mode": { + "type": "string", + "const": "verify_ca", + "order": 0 + }, + "ca_certificate": { + "type": "string", + "title": "CA certificate", + "description": "CA certificate", + "airbyte_secret": true, + "multiline": true, + "order": 1 + }, + "client_certificate": { + "type": "string", + "title": "Client certificate", + "description": "Client certificate (this is not a required field, but if you want to use it, you will need to add the Client key as well)", + "airbyte_secret": true, + "multiline": true, + "order": 2 + }, + "client_key": { + "type": "string", + "title": "Client key", + "description": "Client key (this is not a required field, but if you want to use it, you will need to add the Client certificate as well)", + "airbyte_secret": true, + "multiline": true, + "order": 3 + }, + "client_key_password": { + "type": "string", + "title": "Client key password", + "description": "Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.", + "airbyte_secret": true, + "order": 4 + } + } + }, + { + "title": "Verify Identity", + "description": "Verify-full SSL mode.", + "required": ["mode", "ca_certificate"], + "properties": { + "mode": { + "type": "string", + "const": "verify_identity", + "order": 0 + }, + "ca_certificate": { + "type": "string", + "title": "CA certificate", + "description": "CA certificate", + "airbyte_secret": true, + "multiline": true, + "order": 1 + }, + "client_certificate": { + "type": "string", + "title": "Client certificate", + "description": "Client certificate (this is not a required field, but if you want to use it, you will need to add the Client key as well)", + "airbyte_secret": true, + "multiline": true, + "order": 2 + }, + "client_key": { + "type": "string", + "title": "Client key", + "description": "Client key (this is not a required field, but if you want to use it, you will need to add the Client certificate as well)", + "airbyte_secret": true, + "multiline": true, + "order": 3 + }, + "client_key_password": { + "type": "string", + "title": "Client key password", + "description": "Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.", + "airbyte_secret": true, + "order": 4 + } + } + } + ] + }, + "replication_method": { + "type": "object", + "title": "Replication Method", + "description": "Replication method to use for extracting data from the database.", + "order": 8, + "oneOf": [ + { + "title": "Standard", + "description": "Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally.", + "required": ["method"], + "properties": { + "method": { + "type": "string", + "const": "STANDARD", + "order": 0 + } + } + }, + { + "title": "Logical Replication (CDC)", + "description": "CDC uses the Binlog to detect inserts, updates, and deletes. 
This needs to be configured on the source database itself.", + "required": ["method"], + "properties": { + "method": { + "type": "string", + "const": "CDC", + "order": 0 + }, + "initial_waiting_seconds": { + "type": "integer", + "title": "Initial Waiting Time in Seconds (Advanced)", + "description": "The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.", + "default": 300, + "min": 120, + "max": 1200, + "order": 1 + }, + "server_time_zone": { + "type": "string", + "title": "Configured server timezone for the MySQL source (Advanced)", + "description": "Enter the configured MySQL server timezone. This should only be done if the configured timezone in your MySQL instance does not conform to IANNA standard.", + "order": 2 + } + } + } + ] + }, + "tunnel_method": { + "type": "object", + "title": "SSH Tunnel Method", + "description": "Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.", + "oneOf": [ + { + "title": "No Tunnel", + "required": ["tunnel_method"], + "properties": { + "tunnel_method": { + "description": "No ssh tunnel needed to connect to database", + "type": "string", + "const": "NO_TUNNEL", + "order": 0 + } + } + }, + { + "title": "SSH Key Authentication", + "required": [ + "tunnel_method", + "tunnel_host", + "tunnel_port", + "tunnel_user", + "ssh_key" + ], + "properties": { + "tunnel_method": { + "description": "Connect through a jump server tunnel host using username and ssh key", + "type": "string", + "const": "SSH_KEY_AUTH", + "order": 0 + }, + "tunnel_host": { + "title": "SSH Tunnel Jump Server Host", + "description": "Hostname of the jump server host that allows inbound ssh tunnel.", + "type": "string", + "order": 1 + }, + "tunnel_port": { + "title": "SSH Connection Port", + "description": "Port on the proxy/jump server that accepts inbound ssh connections.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 22, + "examples": ["22"], + "order": 2 + }, + "tunnel_user": { + "title": "SSH Login Username", + "description": "OS-level username for logging into the jump server host.", + "type": "string", + "order": 3 + }, + "ssh_key": { + "title": "SSH Private Key", + "description": "OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )", + "type": "string", + "airbyte_secret": true, + "multiline": true, + "order": 4 + } + } + }, + { + "title": "Password Authentication", + "required": [ + "tunnel_method", + "tunnel_host", + "tunnel_port", + "tunnel_user", + "tunnel_user_password" + ], + "properties": { + "tunnel_method": { + "description": "Connect through a jump server tunnel host using username and password authentication", + "type": "string", + "const": "SSH_PASSWORD_AUTH", + "order": 0 + }, + "tunnel_host": { + "title": "SSH Tunnel Jump Server Host", + "description": "Hostname of the jump server host that allows inbound ssh tunnel.", + "type": "string", + "order": 1 + }, + "tunnel_port": { + "title": "SSH Connection Port", + "description": "Port on the proxy/jump server that accepts inbound ssh connections.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 22, + "examples": ["22"], + "order": 2 + }, + "tunnel_user": { + "title": "SSH Login Username", + "description": "OS-level username for logging into the jump server host", + "type": "string", + "order": 3 + 
}, + "tunnel_user_password": { + "title": "Password", + "description": "OS-level password for logging into the jump server host", + "type": "string", + "airbyte_secret": true, + "order": 4 + } + } + } + ] + } + } + }, + "supportsNormalization": false, + "supportsDBT": false, + "supported_destination_sync_modes": [] +} diff --git a/airbyte-integrations/connectors/source-oracle/acceptance-test-config.yml b/airbyte-integrations/connectors/source-oracle/acceptance-test-config.yml index 37c40cb71f16..fc86a52ddd77 100644 --- a/airbyte-integrations/connectors/source-oracle/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-oracle/acceptance-test-config.yml @@ -3,4 +3,6 @@ connector_image: airbyte/source-oracle:dev tests: spec: - - spec_path: "src/main/resources/spec.json" + - spec_path: "src/test-integration/resources/expected_spec.json" + config_path: "src/test-integration/resources/dummy_config.json" + timeout_seconds: 300 diff --git a/airbyte-integrations/connectors/source-oracle/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-oracle/acceptance-test-docker.sh new file mode 100644 index 000000000000..ba0ab2874b98 --- /dev/null +++ b/airbyte-integrations/connectors/source-oracle/acceptance-test-docker.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input diff --git a/airbyte-integrations/connectors/source-oracle/build.gradle b/airbyte-integrations/connectors/source-oracle/build.gradle index 435b7625e749..32cc9db422df 100644 --- a/airbyte-integrations/connectors/source-oracle/build.gradle +++ b/airbyte-integrations/connectors/source-oracle/build.gradle @@ -2,6 +2,7 @@ plugins { id 'application' id 'airbyte-docker' id 'airbyte-integration-test-java' + id 'airbyte-source-acceptance-test' } application { diff --git a/airbyte-integrations/connectors/source-oracle/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-oracle/integration_tests/acceptance.py new file mode 100644 index 000000000000..1302b2f57e10 --- /dev/null +++ b/airbyte-integrations/connectors/source-oracle/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. 
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-oracle/src/main/resources/spec.json b/airbyte-integrations/connectors/source-oracle/src/main/resources/spec.json index 4f1c7b04e01a..56a6cc0b2824 100644 --- a/airbyte-integrations/connectors/source-oracle/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/source-oracle/src/main/resources/spec.json @@ -35,7 +35,6 @@ "connection_type": { "type": "string", "const": "service_name", - "default": "service_name", "order": 0 }, "service_name": { @@ -53,7 +52,6 @@ "connection_type": { "type": "string", "const": "sid", - "default": "sid", "order": 0 }, "sid": { @@ -108,9 +106,7 @@ "properties": { "encryption_method": { "type": "string", - "const": "unencrypted", - "enum": ["unencrypted"], - "default": "unencrypted" + "const": "unencrypted" } } }, @@ -121,9 +117,7 @@ "properties": { "encryption_method": { "type": "string", - "const": "client_nne", - "enum": ["client_nne"], - "default": "client_nne" + "const": "client_nne" }, "encryption_algorithm": { "type": "string", @@ -141,9 +135,7 @@ "properties": { "encryption_method": { "type": "string", - "const": "encrypted_verify_certificate", - "enum": ["encrypted_verify_certificate"], - "default": "encrypted_verify_certificate" + "const": "encrypted_verify_certificate" }, "ssl_certificate": { "title": "SSL PEM File", diff --git a/airbyte-integrations/connectors/source-oracle/src/test-integration/resources/dummy_config.json b/airbyte-integrations/connectors/source-oracle/src/test-integration/resources/dummy_config.json new file mode 100644 index 000000000000..4bd783a80420 --- /dev/null +++ b/airbyte-integrations/connectors/source-oracle/src/test-integration/resources/dummy_config.json @@ -0,0 +1,5 @@ +{ + "host": "default", + "port": 5555, + "username": "default" +} diff --git a/airbyte-integrations/connectors/source-oracle/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-oracle/src/test-integration/resources/expected_spec.json new file mode 100644 index 000000000000..94d3ab92ca4a --- /dev/null +++ b/airbyte-integrations/connectors/source-oracle/src/test-integration/resources/expected_spec.json @@ -0,0 +1,271 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/sources/oracle", + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Oracle Source Spec", + "type": "object", + "required": ["host", "port", "username"], + "properties": { + "host": { + "title": "Host", + "description": "Hostname of the database.", + "type": "string", + "order": 1 + }, + "port": { + "title": "Port", + "description": "Port of the database.\nOracle Corporations recommends the following port numbers:\n1521 - Default listening port for client connections to the listener. 
\n2484 - Recommended and officially registered listening port for client connections to the listener using TCP/IP with SSL", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 1521, + "order": 2 + }, + "connection_data": { + "title": "Connect by", + "type": "object", + "description": "Connect data that will be used for DB connection", + "order": 3, + "oneOf": [ + { + "title": "Service name", + "description": "Use service name", + "required": ["service_name"], + "properties": { + "connection_type": { + "type": "string", + "const": "service_name", + "order": 0 + }, + "service_name": { + "title": "Service name", + "type": "string", + "order": 1 + } + } + }, + { + "title": "System ID (SID)", + "description": "Use SID (Oracle System Identifier)", + "required": ["sid"], + "properties": { + "connection_type": { + "type": "string", + "const": "sid", + "order": 0 + }, + "sid": { + "title": "System ID (SID)", + "type": "string", + "order": 1 + } + } + } + ] + }, + "username": { + "title": "User", + "description": "The username which is used to access the database.", + "type": "string", + "order": 4 + }, + "password": { + "title": "Password", + "description": "The password associated with the username.", + "type": "string", + "airbyte_secret": true, + "order": 5 + }, + "schemas": { + "title": "Schemas", + "description": "The list of schemas to sync from. Defaults to user. Case sensitive.", + "type": "array", + "items": { + "type": "string" + }, + "minItems": 1, + "uniqueItems": true, + "order": 6 + }, + "jdbc_url_params": { + "title": "JDBC URL Params", + "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).", + "type": "string", + "order": 7 + }, + "encryption": { + "title": "Encryption", + "type": "object", + "description": "The encryption method with is used when communicating with the database.", + "order": 8, + "oneOf": [ + { + "title": "Unencrypted", + "description": "Data transfer will not be encrypted.", + "required": ["encryption_method"], + "properties": { + "encryption_method": { + "type": "string", + "const": "unencrypted" + } + } + }, + { + "title": "Native Network Encryption (NNE)", + "description": "The native network encryption gives you the ability to encrypt database connections, without the configuration overhead of TCP/IP and SSL/TLS and without the need to open and listen on different ports.", + "required": ["encryption_method"], + "properties": { + "encryption_method": { + "type": "string", + "const": "client_nne" + }, + "encryption_algorithm": { + "type": "string", + "description": "This parameter defines what encryption algorithm is used.", + "title": "Encryption Algorithm", + "default": "AES256", + "enum": ["AES256", "RC4_56", "3DES168"] + } + } + }, + { + "title": "TLS Encrypted (verify certificate)", + "description": "Verify and use the certificate provided by the server.", + "required": ["encryption_method", "ssl_certificate"], + "properties": { + "encryption_method": { + "type": "string", + "const": "encrypted_verify_certificate" + }, + "ssl_certificate": { + "title": "SSL PEM File", + "description": "Privacy Enhanced Mail (PEM) files are concatenated certificate containers frequently used in certificate installations.", + "type": "string", + "airbyte_secret": true, + "multiline": true, + "order": 4 + } + } + } + ] + }, + "tunnel_method": { + "type": "object", + "title": "SSH Tunnel Method", + 
"description": "Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.", + "oneOf": [ + { + "title": "No Tunnel", + "required": ["tunnel_method"], + "properties": { + "tunnel_method": { + "description": "No ssh tunnel needed to connect to database", + "type": "string", + "const": "NO_TUNNEL", + "order": 0 + } + } + }, + { + "title": "SSH Key Authentication", + "required": [ + "tunnel_method", + "tunnel_host", + "tunnel_port", + "tunnel_user", + "ssh_key" + ], + "properties": { + "tunnel_method": { + "description": "Connect through a jump server tunnel host using username and ssh key", + "type": "string", + "const": "SSH_KEY_AUTH", + "order": 0 + }, + "tunnel_host": { + "title": "SSH Tunnel Jump Server Host", + "description": "Hostname of the jump server host that allows inbound ssh tunnel.", + "type": "string", + "order": 1 + }, + "tunnel_port": { + "title": "SSH Connection Port", + "description": "Port on the proxy/jump server that accepts inbound ssh connections.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 22, + "examples": ["22"], + "order": 2 + }, + "tunnel_user": { + "title": "SSH Login Username", + "description": "OS-level username for logging into the jump server host.", + "type": "string", + "order": 3 + }, + "ssh_key": { + "title": "SSH Private Key", + "description": "OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )", + "type": "string", + "airbyte_secret": true, + "multiline": true, + "order": 4 + } + } + }, + { + "title": "Password Authentication", + "required": [ + "tunnel_method", + "tunnel_host", + "tunnel_port", + "tunnel_user", + "tunnel_user_password" + ], + "properties": { + "tunnel_method": { + "description": "Connect through a jump server tunnel host using username and password authentication", + "type": "string", + "const": "SSH_PASSWORD_AUTH", + "order": 0 + }, + "tunnel_host": { + "title": "SSH Tunnel Jump Server Host", + "description": "Hostname of the jump server host that allows inbound ssh tunnel.", + "type": "string", + "order": 1 + }, + "tunnel_port": { + "title": "SSH Connection Port", + "description": "Port on the proxy/jump server that accepts inbound ssh connections.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 22, + "examples": ["22"], + "order": 2 + }, + "tunnel_user": { + "title": "SSH Login Username", + "description": "OS-level username for logging into the jump server host", + "type": "string", + "order": 3 + }, + "tunnel_user_password": { + "title": "Password", + "description": "OS-level password for logging into the jump server host", + "type": "string", + "airbyte_secret": true, + "order": 4 + } + } + } + ] + } + } + }, + "supportsNormalization": false, + "supportsDBT": false, + "supported_destination_sync_modes": [] +} diff --git a/airbyte-integrations/connectors/source-postgres/acceptance-test-config.yml b/airbyte-integrations/connectors/source-postgres/acceptance-test-config.yml index beaef784e60c..1236f3899422 100644 --- a/airbyte-integrations/connectors/source-postgres/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-postgres/acceptance-test-config.yml @@ -3,4 +3,5 @@ connector_image: airbyte/source-postgres:dev tests: spec: - - spec_path: "src/main/resources/spec.json" + - spec_path: "src/test-integration/resources/expected_spec.json" + config_path: "src/test-integration/resources/dummy_config.json" diff --git 
a/airbyte-integrations/connectors/source-postgres/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-postgres/acceptance-test-docker.sh new file mode 100644 index 000000000000..ba0ab2874b98 --- /dev/null +++ b/airbyte-integrations/connectors/source-postgres/acceptance-test-docker.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input diff --git a/airbyte-integrations/connectors/source-postgres/build.gradle b/airbyte-integrations/connectors/source-postgres/build.gradle index 535b2209088d..9f720df36647 100644 --- a/airbyte-integrations/connectors/source-postgres/build.gradle +++ b/airbyte-integrations/connectors/source-postgres/build.gradle @@ -3,6 +3,7 @@ plugins { id 'airbyte-docker' id 'airbyte-integration-test-java' id 'airbyte-performance-test-java' + id 'airbyte-source-acceptance-test' } application { diff --git a/airbyte-integrations/connectors/source-postgres/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-postgres/integration_tests/acceptance.py new file mode 100644 index 000000000000..1302b2f57e10 --- /dev/null +++ b/airbyte-integrations/connectors/source-postgres/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. 
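The Postgres spec changes that follow also flip `additionalProperties` from false to true on the ssl_mode oneOf branches. An illustrative before/after of what that relaxation does to validation, using trimmed schemas rather than the full Postgres spec and assuming the `jsonschema` package:

import jsonschema

def ssl_mode_schema(additional_properties: bool) -> dict:
    # Two trimmed ssl_mode branches in the style of the Postgres spec.
    return {
        "type": "object",
        "oneOf": [
            {
                "additionalProperties": additional_properties,
                "required": ["mode"],
                "properties": {"mode": {"type": "string", "const": "allow"}},
            },
            {
                "additionalProperties": additional_properties,
                "required": ["mode", "ca_certificate"],
                "properties": {
                    "mode": {"type": "string", "const": "verify-ca"},
                    "ca_certificate": {"type": "string"},
                },
            },
        ],
    }

# A config holding a key its chosen branch does not declare.
config = {"mode": "allow", "ca_certificate": "ignored"}

# additionalProperties true: the `allow` branch still matches, validation passes.
jsonschema.validate(config, ssl_mode_schema(True))

# additionalProperties false: no branch matches, so the whole oneOf fails.
try:
    jsonschema.validate(config, ssl_mode_schema(False))
except jsonschema.ValidationError as err:
    print("rejected:", err.message)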
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-postgres/src/main/resources/spec.json b/airbyte-integrations/connectors/source-postgres/src/main/resources/spec.json index ca14aa473aff..4dfdfab3fb6e 100644 --- a/airbyte-integrations/connectors/source-postgres/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/source-postgres/src/main/resources/spec.json @@ -74,75 +74,65 @@ "oneOf": [ { "title": "disable", - "additionalProperties": false, + "additionalProperties": true, "description": "Disable SSL.", "required": ["mode"], "properties": { "mode": { "type": "string", "const": "disable", - "enum": ["disable"], - "default": "disable", "order": 0 } } }, { "title": "allow", - "additionalProperties": false, + "additionalProperties": true, "description": "Allow SSL mode.", "required": ["mode"], "properties": { "mode": { "type": "string", "const": "allow", - "enum": ["allow"], - "default": "allow", "order": 0 } } }, { "title": "prefer", - "additionalProperties": false, + "additionalProperties": true, "description": "Prefer SSL mode.", "required": ["mode"], "properties": { "mode": { "type": "string", "const": "prefer", - "enum": ["prefer"], - "default": "prefer", "order": 0 } } }, { "title": "require", - "additionalProperties": false, + "additionalProperties": true, "description": "Require SSL mode.", "required": ["mode"], "properties": { "mode": { "type": "string", "const": "require", - "enum": ["require"], - "default": "require", "order": 0 } } }, { "title": "verify-ca", - "additionalProperties": false, + "additionalProperties": true, "description": "Verify-ca SSL mode.", "required": ["mode", "ca_certificate"], "properties": { "mode": { "type": "string", "const": "verify-ca", - "enum": ["verify-ca"], - "default": "verify-ca", "order": 0 }, "ca_certificate": { @@ -180,15 +170,13 @@ }, { "title": "verify-full", - "additionalProperties": false, + "additionalProperties": true, "description": "Verify-full SSL mode.", "required": ["mode", "ca_certificate"], "properties": { "mode": { "type": "string", "const": "verify-full", - "enum": ["verify-full"], - "default": "verify-full", "order": 0 }, "ca_certificate": { @@ -240,8 +228,6 @@ "method": { "type": "string", "const": "Standard", - "enum": ["Standard"], - "default": "Standard", "order": 0 } } @@ -254,8 +240,6 @@ "method": { "type": "string", "const": "CDC", - "enum": ["CDC"], - "default": "CDC", "order": 0 }, "plugin": { @@ -263,7 +247,7 @@ "title": "Plugin", "description": "A logical decoding plugin installed on the PostgreSQL server. The `pgoutput` plugin is used by default. If the replication table contains a lot of big jsonb values it is recommended to use `wal2json` plugin. 
Read more about selecting replication plugins.", "enum": ["pgoutput", "wal2json"], - "default": "pgoutput", + "const": "pgoutput", "order": 1 }, "replication_slot": { diff --git a/airbyte-integrations/connectors/source-postgres/src/test-integration/resources/dummy_config.json b/airbyte-integrations/connectors/source-postgres/src/test-integration/resources/dummy_config.json new file mode 100644 index 000000000000..560e55333378 --- /dev/null +++ b/airbyte-integrations/connectors/source-postgres/src/test-integration/resources/dummy_config.json @@ -0,0 +1,6 @@ +{ + "host": "default", + "port": 5555, + "database": "default", + "username": "default" +} diff --git a/airbyte-integrations/connectors/source-postgres/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-postgres/src/test-integration/resources/expected_spec.json new file mode 100644 index 000000000000..5875205e32eb --- /dev/null +++ b/airbyte-integrations/connectors/source-postgres/src/test-integration/resources/expected_spec.json @@ -0,0 +1,397 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/sources/postgres", + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Postgres Source Spec", + "type": "object", + "required": ["host", "port", "database", "username"], + "properties": { + "host": { + "title": "Host", + "description": "Hostname of the database.", + "type": "string", + "order": 0 + }, + "port": { + "title": "Port", + "description": "Port of the database.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 5432, + "examples": ["5432"], + "order": 1 + }, + "database": { + "title": "Database Name", + "description": "Name of the database.", + "type": "string", + "order": 2 + }, + "schemas": { + "title": "Schemas", + "description": "The list of schemas (case sensitive) to sync from. Defaults to public.", + "type": "array", + "items": { + "type": "string" + }, + "minItems": 0, + "uniqueItems": true, + "default": ["public"], + "order": 3 + }, + "username": { + "title": "Username", + "description": "Username to access the database.", + "type": "string", + "order": 4 + }, + "password": { + "title": "Password", + "description": "Password associated with the username.", + "type": "string", + "airbyte_secret": true, + "order": 5 + }, + "jdbc_url_params": { + "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (Eg. key1=value1&key2=value2&key3=value3). For more information read about JDBC URL parameters.", + "title": "JDBC URL Parameters (Advanced)", + "type": "string", + "order": 6 + }, + "ssl": { + "title": "Connect using SSL", + "description": "Encrypt data using SSL. When activating SSL, please select one of the connection modes.", + "type": "boolean", + "default": false, + "order": 7 + }, + "ssl_mode": { + "title": "SSL Modes", + "description": "SSL connection modes. 
\n \n Read more in the docs.", + "type": "object", + "order": 7, + "oneOf": [ + { + "title": "disable", + "additionalProperties": true, + "description": "Disable SSL.", + "required": ["mode"], + "properties": { + "mode": { + "type": "string", + "const": "disable", + "order": 0 + } + } + }, + { + "title": "allow", + "additionalProperties": true, + "description": "Allow SSL mode.", + "required": ["mode"], + "properties": { + "mode": { + "type": "string", + "const": "allow", + "order": 0 + } + } + }, + { + "title": "prefer", + "additionalProperties": true, + "description": "Prefer SSL mode.", + "required": ["mode"], + "properties": { + "mode": { + "type": "string", + "const": "prefer", + "order": 0 + } + } + }, + { + "title": "require", + "additionalProperties": true, + "description": "Require SSL mode.", + "required": ["mode"], + "properties": { + "mode": { + "type": "string", + "const": "require", + "order": 0 + } + } + }, + { + "title": "verify-ca", + "additionalProperties": true, + "description": "Verify-ca SSL mode.", + "required": ["mode", "ca_certificate"], + "properties": { + "mode": { + "type": "string", + "const": "verify-ca", + "order": 0 + }, + "ca_certificate": { + "type": "string", + "title": "CA certificate", + "description": "CA certificate", + "airbyte_secret": true, + "multiline": true, + "order": 1 + }, + "client_certificate": { + "type": "string", + "title": "Client Certificate", + "description": "Client certificate", + "airbyte_secret": true, + "multiline": true, + "order": 2 + }, + "client_key": { + "type": "string", + "title": "Client Key", + "description": "Client key", + "airbyte_secret": true, + "multiline": true, + "order": 3 + }, + "client_key_password": { + "type": "string", + "title": "Client key password", + "description": "Password for keystorage. If you do not add it - the password will be generated automatically.", + "airbyte_secret": true, + "order": 4 + } + } + }, + { + "title": "verify-full", + "additionalProperties": true, + "description": "Verify-full SSL mode.", + "required": ["mode", "ca_certificate"], + "properties": { + "mode": { + "type": "string", + "const": "verify-full", + "order": 0 + }, + "ca_certificate": { + "type": "string", + "title": "CA Certificate", + "description": "CA certificate", + "airbyte_secret": true, + "multiline": true, + "order": 1 + }, + "client_certificate": { + "type": "string", + "title": "Client Certificate", + "description": "Client certificate", + "airbyte_secret": true, + "multiline": true, + "order": 2 + }, + "client_key": { + "type": "string", + "title": "Client Key", + "description": "Client key", + "airbyte_secret": true, + "multiline": true, + "order": 3 + }, + "client_key_password": { + "type": "string", + "title": "Client key password", + "description": "Password for keystorage. 
If you do not add it - the password will be generated automatically.", + "airbyte_secret": true, + "order": 4 + } + } + } + ] + }, + "replication_method": { + "type": "object", + "title": "Replication Method", + "description": "Replication method for extracting data from the database.", + "order": 8, + "oneOf": [ + { + "title": "Standard", + "description": "Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally.", + "required": ["method"], + "properties": { + "method": { + "type": "string", + "const": "Standard", + "order": 0 + } + } + }, + { + "title": "Logical Replication (CDC)", + "description": "Logical replication uses the Postgres write-ahead log (WAL) to detect inserts, updates, and deletes. This needs to be configured on the source database itself. Only available on Postgres 10 and above. Read the docs.", + "required": ["method", "replication_slot", "publication"], + "properties": { + "method": { + "type": "string", + "const": "CDC", + "order": 0 + }, + "plugin": { + "type": "string", + "title": "Plugin", + "description": "A logical decoding plugin installed on the PostgreSQL server. The `pgoutput` plugin is used by default. If the replication table contains a lot of big jsonb values it is recommended to use `wal2json` plugin. Read more about selecting replication plugins.", + "enum": ["pgoutput", "wal2json"], + "const": "pgoutput", + "order": 1 + }, + "replication_slot": { + "type": "string", + "title": "Replication Slot", + "description": "A plugin logical replication slot. Read about replication slots.", + "order": 2 + }, + "publication": { + "type": "string", + "title": "Publication", + "description": "A Postgres publication used for consuming changes. Read about publications and replication identities.", + "order": 3 + }, + "initial_waiting_seconds": { + "type": "integer", + "title": "Initial Waiting Time in Seconds (Advanced)", + "description": "The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. 
Read about initial waiting time.", + "default": 300, + "order": 4, + "min": 120, + "max": 1200 + } + } + } + ] + }, + "tunnel_method": { + "type": "object", + "title": "SSH Tunnel Method", + "description": "Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.", + "oneOf": [ + { + "title": "No Tunnel", + "required": ["tunnel_method"], + "properties": { + "tunnel_method": { + "description": "No ssh tunnel needed to connect to database", + "type": "string", + "const": "NO_TUNNEL", + "order": 0 + } + } + }, + { + "title": "SSH Key Authentication", + "required": [ + "tunnel_method", + "tunnel_host", + "tunnel_port", + "tunnel_user", + "ssh_key" + ], + "properties": { + "tunnel_method": { + "description": "Connect through a jump server tunnel host using username and ssh key", + "type": "string", + "const": "SSH_KEY_AUTH", + "order": 0 + }, + "tunnel_host": { + "title": "SSH Tunnel Jump Server Host", + "description": "Hostname of the jump server host that allows inbound ssh tunnel.", + "type": "string", + "order": 1 + }, + "tunnel_port": { + "title": "SSH Connection Port", + "description": "Port on the proxy/jump server that accepts inbound ssh connections.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 22, + "examples": ["22"], + "order": 2 + }, + "tunnel_user": { + "title": "SSH Login Username", + "description": "OS-level username for logging into the jump server host.", + "type": "string", + "order": 3 + }, + "ssh_key": { + "title": "SSH Private Key", + "description": "OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )", + "type": "string", + "airbyte_secret": true, + "multiline": true, + "order": 4 + } + } + }, + { + "title": "Password Authentication", + "required": [ + "tunnel_method", + "tunnel_host", + "tunnel_port", + "tunnel_user", + "tunnel_user_password" + ], + "properties": { + "tunnel_method": { + "description": "Connect through a jump server tunnel host using username and password authentication", + "type": "string", + "const": "SSH_PASSWORD_AUTH", + "order": 0 + }, + "tunnel_host": { + "title": "SSH Tunnel Jump Server Host", + "description": "Hostname of the jump server host that allows inbound ssh tunnel.", + "type": "string", + "order": 1 + }, + "tunnel_port": { + "title": "SSH Connection Port", + "description": "Port on the proxy/jump server that accepts inbound ssh connections.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 22, + "examples": ["22"], + "order": 2 + }, + "tunnel_user": { + "title": "SSH Login Username", + "description": "OS-level username for logging into the jump server host", + "type": "string", + "order": 3 + }, + "tunnel_user_password": { + "title": "Password", + "description": "OS-level password for logging into the jump server host", + "type": "string", + "airbyte_secret": true, + "order": 4 + } + } + } + ] + } + } + }, + "supportsNormalization": false, + "supportsDBT": false, + "supported_destination_sync_modes": [] +} diff --git a/airbyte-integrations/connectors/source-postgres/src/test/resources/expected_spec.json b/airbyte-integrations/connectors/source-postgres/src/test/resources/expected_spec.json index 9e0f63090e8e..52865891ce86 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test/resources/expected_spec.json +++ b/airbyte-integrations/connectors/source-postgres/src/test/resources/expected_spec.json @@ -67,60 +67,52 @@ "oneOf": [ { "title": "allow", - 
"additionalProperties": false, + "additionalProperties": true, "description": "Allow SSL mode.", "required": ["mode"], "properties": { "mode": { "type": "string", "const": "allow", - "enum": ["allow"], - "default": "allow", "order": 0 } } }, { "title": "prefer", - "additionalProperties": false, + "additionalProperties": true, "description": "Prefer SSL mode.", "required": ["mode"], "properties": { "mode": { "type": "string", "const": "prefer", - "enum": ["prefer"], - "default": "prefer", "order": 0 } } }, { "title": "require", - "additionalProperties": false, + "additionalProperties": true, "description": "Require SSL mode.", "required": ["mode"], "properties": { "mode": { "type": "string", "const": "require", - "enum": ["require"], - "default": "require", "order": 0 } } }, { "title": "verify-ca", - "additionalProperties": false, + "additionalProperties": true, "description": "Verify-ca SSL mode.", "required": ["mode", "ca_certificate"], "properties": { "mode": { "type": "string", "const": "verify-ca", - "enum": ["verify-ca"], - "default": "verify-ca", "order": 0 }, "ca_certificate": { @@ -158,15 +150,13 @@ }, { "title": "verify-full", - "additionalProperties": false, + "additionalProperties": true, "description": "Verify-full SSL mode.", "required": ["mode", "ca_certificate"], "properties": { "mode": { "type": "string", "const": "verify-full", - "enum": ["verify-full"], - "default": "verify-full", "order": 0 }, "ca_certificate": { @@ -218,8 +208,6 @@ "method": { "type": "string", "const": "Standard", - "enum": ["Standard"], - "default": "Standard", "order": 0 } } @@ -232,8 +220,6 @@ "method": { "type": "string", "const": "CDC", - "enum": ["CDC"], - "default": "CDC", "order": 0 }, "plugin": { @@ -241,7 +227,7 @@ "title": "Plugin", "description": "A logical decoding plugin installed on the PostgreSQL server. The `pgoutput` plugin is used by default. If the replication table contains a lot of big jsonb values it is recommended to use `wal2json` plugin. 
Read more about selecting replication plugins.", "enum": ["pgoutput", "wal2json"], - "default": "pgoutput", + "const": "pgoutput", "order": 1 }, "replication_slot": { @@ -268,7 +254,124 @@ } } ] + }, + "tunnel_method": { + "type": "object", + "title": "SSH Tunnel Method", + "description": "Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.", + "oneOf": [ + { + "title": "No Tunnel", + "required": ["tunnel_method"], + "properties": { + "tunnel_method": { + "description": "No ssh tunnel needed to connect to database", + "type": "string", + "const": "NO_TUNNEL", + "order": 0 + } + } + }, + { + "title": "SSH Key Authentication", + "required": [ + "tunnel_method", + "tunnel_host", + "tunnel_port", + "tunnel_user", + "ssh_key" + ], + "properties": { + "tunnel_method": { + "description": "Connect through a jump server tunnel host using username and ssh key", + "type": "string", + "const": "SSH_KEY_AUTH", + "order": 0 + }, + "tunnel_host": { + "title": "SSH Tunnel Jump Server Host", + "description": "Hostname of the jump server host that allows inbound ssh tunnel.", + "type": "string", + "order": 1 + }, + "tunnel_port": { + "title": "SSH Connection Port", + "description": "Port on the proxy/jump server that accepts inbound ssh connections.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 22, + "examples": ["22"], + "order": 2 + }, + "tunnel_user": { + "title": "SSH Login Username", + "description": "OS-level username for logging into the jump server host.", + "type": "string", + "order": 3 + }, + "ssh_key": { + "title": "SSH Private Key", + "description": "OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )", + "type": "string", + "airbyte_secret": true, + "multiline": true, + "order": 4 + } + } + }, + { + "title": "Password Authentication", + "required": [ + "tunnel_method", + "tunnel_host", + "tunnel_port", + "tunnel_user", + "tunnel_user_password" + ], + "properties": { + "tunnel_method": { + "description": "Connect through a jump server tunnel host using username and password authentication", + "type": "string", + "const": "SSH_PASSWORD_AUTH", + "order": 0 + }, + "tunnel_host": { + "title": "SSH Tunnel Jump Server Host", + "description": "Hostname of the jump server host that allows inbound ssh tunnel.", + "type": "string", + "order": 1 + }, + "tunnel_port": { + "title": "SSH Connection Port", + "description": "Port on the proxy/jump server that accepts inbound ssh connections.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 22, + "examples": ["22"], + "order": 2 + }, + "tunnel_user": { + "title": "SSH Login Username", + "description": "OS-level username for logging into the jump server host", + "type": "string", + "order": 3 + }, + "tunnel_user_password": { + "title": "Password", + "description": "OS-level password for logging into the jump server host", + "type": "string", + "airbyte_secret": true, + "order": 4 + } + } + } + ] } } - } + }, + "supportsNormalization": false, + "supportsDBT": false, + "supported_destination_sync_modes": [] } diff --git a/airbyte-integrations/connectors/source-redshift/acceptance-test-config.yml b/airbyte-integrations/connectors/source-redshift/acceptance-test-config.yml new file mode 100644 index 000000000000..269041c35e8f --- /dev/null +++ b/airbyte-integrations/connectors/source-redshift/acceptance-test-config.yml @@ -0,0 +1,7 @@ +# See [Source Acceptance 
Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-redshift:dev +tests: + spec: + - spec_path: "src/test-integration/resources/expected_spec.json" + config_path: "secrets/config.json" diff --git a/airbyte-integrations/connectors/source-redshift/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-redshift/acceptance-test-docker.sh new file mode 100644 index 000000000000..ba0ab2874b98 --- /dev/null +++ b/airbyte-integrations/connectors/source-redshift/acceptance-test-docker.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input diff --git a/airbyte-integrations/connectors/source-redshift/build.gradle b/airbyte-integrations/connectors/source-redshift/build.gradle index 5e8a4af94133..02ebd41a9977 100644 --- a/airbyte-integrations/connectors/source-redshift/build.gradle +++ b/airbyte-integrations/connectors/source-redshift/build.gradle @@ -2,6 +2,7 @@ plugins { id 'application' id 'airbyte-docker' id 'airbyte-integration-test-java' + id 'airbyte-source-acceptance-test' } application { diff --git a/airbyte-integrations/connectors/source-redshift/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-redshift/integration_tests/acceptance.py new file mode 100644 index 000000000000..1302b2f57e10 --- /dev/null +++ b/airbyte-integrations/connectors/source-redshift/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. 
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-redshift/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-redshift/src/test-integration/resources/expected_spec.json new file mode 100644 index 000000000000..fcf524915ecc --- /dev/null +++ b/airbyte-integrations/connectors/source-redshift/src/test-integration/resources/expected_spec.json @@ -0,0 +1,68 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/destinations/redshift", + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Redshift Source Spec", + "type": "object", + "required": ["host", "port", "database", "username", "password"], + "properties": { + "host": { + "title": "Host", + "description": "Host Endpoint of the Redshift Cluster (must include the cluster-id, region and end with .redshift.amazonaws.com).", + "type": "string", + "order": 1 + }, + "port": { + "title": "Port", + "description": "Port of the database.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 5439, + "examples": ["5439"], + "order": 2 + }, + "database": { + "title": "Database", + "description": "Name of the database.", + "type": "string", + "examples": ["master"], + "order": 3 + }, + "schemas": { + "title": "Schemas", + "description": "The list of schemas to sync from. Specify one or more explicitly or keep empty to process all schemas. Schema names are case sensitive.", + "type": "array", + "items": { + "type": "string" + }, + "minItems": 0, + "uniqueItems": true, + "examples": ["public"], + "order": 4 + }, + "username": { + "title": "Username", + "description": "Username to use to access the database.", + "type": "string", + "order": 5 + }, + "password": { + "title": "Password", + "description": "Password associated with the username.", + "type": "string", + "airbyte_secret": true, + "order": 6 + }, + "jdbc_url_params": { + "title": "JDBC URL Params", + "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).", + "type": "string", + "order": 7 + } + } + }, + "supportsNormalization": false, + "supportsDBT": false, + "supported_destination_sync_modes": [] +} diff --git a/airbyte-integrations/connectors/source-sftp/acceptance-test-config.yml b/airbyte-integrations/connectors/source-sftp/acceptance-test-config.yml new file mode 100644 index 000000000000..6af33aacf986 --- /dev/null +++ b/airbyte-integrations/connectors/source-sftp/acceptance-test-config.yml @@ -0,0 +1,7 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-sftp:dev +tests: + spec: + - spec_path: "src/test-integration/resources/expected_spec.json" + config_path: "src/test-integration/resources/dummy_config.json" diff --git a/airbyte-integrations/connectors/source-sftp/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-sftp/acceptance-test-docker.sh new file mode 100644 index 000000000000..ba0ab2874b98 --- /dev/null +++ b/airbyte-integrations/connectors/source-sftp/acceptance-test-docker.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . 
-t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input diff --git a/airbyte-integrations/connectors/source-sftp/build.gradle b/airbyte-integrations/connectors/source-sftp/build.gradle index 410e4f1b8dfa..501383230d39 100644 --- a/airbyte-integrations/connectors/source-sftp/build.gradle +++ b/airbyte-integrations/connectors/source-sftp/build.gradle @@ -2,6 +2,7 @@ plugins { id 'application' id 'airbyte-docker' id 'airbyte-integration-test-java' + id 'airbyte-source-acceptance-test' } application { diff --git a/airbyte-integrations/connectors/source-sftp/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-sftp/integration_tests/acceptance.py new file mode 100644 index 000000000000..1302b2f57e10 --- /dev/null +++ b/airbyte-integrations/connectors/source-sftp/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-sftp/src/test-integration/resources/dummy_config.json b/airbyte-integrations/connectors/source-sftp/src/test-integration/resources/dummy_config.json new file mode 100644 index 000000000000..ed5c98b1ed4e --- /dev/null +++ b/airbyte-integrations/connectors/source-sftp/src/test-integration/resources/dummy_config.json @@ -0,0 +1,5 @@ +{ + "host": "default", + "port": 5555, + "user": "default" +} diff --git a/airbyte-integrations/connectors/source-sftp/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-sftp/src/test-integration/resources/expected_spec.json new file mode 100644 index 000000000000..fef8e5b343fb --- /dev/null +++ b/airbyte-integrations/connectors/source-sftp/src/test-integration/resources/expected_spec.json @@ -0,0 +1,109 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/source/sftp", + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "SFTP Source Spec", + "type": "object", + "required": ["user", "host", "port"], + "additionalProperties": true, + "properties": { + "user": { + "title": "User Name", + "description": "The server user", + "type": "string", + "order": 0 + }, + "host": { + "title": "Host Address", + "description": "The server host address", + "type": "string", + "examples": ["www.host.com", "192.0.2.1"], + "order": 1 + }, + "port": { + "title": "Port", + "description": "The server port", + "type": "integer", + "default": 22, + "examples": ["22"], + "order": 2 + }, + "credentials": { + "type": "object", + "title": "Authentication", + "description": "The server authentication method", + "order": 3, + "oneOf": [ + { + "title": "Password Authentication", + "required": ["auth_method", "auth_user_password"], + "properties": { + "auth_method": { + "description": "Connect through password authentication", + "type": "string", + "const": "SSH_PASSWORD_AUTH", + "order": 
0 + }, + "auth_user_password": { + "title": "Password", + "description": "OS-level password for logging into the jump server host", + "type": "string", + "airbyte_secret": true, + "order": 1 + } + } + }, + { + "title": "SSH Key Authentication", + "required": ["auth_method", "auth_ssh_key"], + "properties": { + "auth_method": { + "description": "Connect through ssh key", + "type": "string", + "const": "SSH_KEY_AUTH", + "order": 0 + }, + "auth_ssh_key": { + "title": "SSH Private Key", + "description": "OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )", + "type": "string", + "airbyte_secret": true, + "multiline": true, + "order": 1 + } + } + } + ] + }, + "file_types": { + "title": "File types", + "description": "Coma separated file types. Currently only 'csv' and 'json' types are supported.", + "type": "string", + "default": "csv,json", + "order": 4, + "examples": ["csv,json", "csv"] + }, + "folder_path": { + "title": "Folder Path", + "description": "The directory to search files for sync", + "type": "string", + "default": "", + "examples": ["/logs/2022"], + "order": 5 + }, + "file_pattern": { + "title": "File Pattern", + "description": "The regular expression to specify files for sync in a chosen Folder Path", + "type": "string", + "default": "", + "examples": [ + "log-([0-9]{4})([0-9]{2})([0-9]{2}) - This will filter files which `log-yearmmdd`" + ], + "order": 6 + } + } + }, + "supportsNormalization": false, + "supportsDBT": false, + "supported_destination_sync_modes": [] +} diff --git a/airbyte-integrations/connectors/source-snowflake/acceptance-test-config.yml b/airbyte-integrations/connectors/source-snowflake/acceptance-test-config.yml new file mode 100644 index 000000000000..d359de152b5e --- /dev/null +++ b/airbyte-integrations/connectors/source-snowflake/acceptance-test-config.yml @@ -0,0 +1,7 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-snowflake:dev +tests: + spec: + - spec_path: "src/test-integration/resources/expected_spec.json" + config_path: "secrets/config.json" diff --git a/airbyte-integrations/connectors/source-snowflake/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-snowflake/acceptance-test-docker.sh new file mode 100644 index 000000000000..ba0ab2874b98 --- /dev/null +++ b/airbyte-integrations/connectors/source-snowflake/acceptance-test-docker.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . 
-t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input diff --git a/airbyte-integrations/connectors/source-snowflake/build.gradle b/airbyte-integrations/connectors/source-snowflake/build.gradle index 909275a03402..b17a696d397e 100644 --- a/airbyte-integrations/connectors/source-snowflake/build.gradle +++ b/airbyte-integrations/connectors/source-snowflake/build.gradle @@ -2,6 +2,7 @@ plugins { id 'application' id 'airbyte-docker' id 'airbyte-integration-test-java' + id 'airbyte-source-acceptance-test' } application { diff --git a/airbyte-integrations/connectors/source-snowflake/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-snowflake/integration_tests/acceptance.py new file mode 100644 index 000000000000..1302b2f57e10 --- /dev/null +++ b/airbyte-integrations/connectors/source-snowflake/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-snowflake/src/main/resources/spec.json b/airbyte-integrations/connectors/source-snowflake/src/main/resources/spec.json index 40c912a340ba..796d4572c16f 100644 --- a/airbyte-integrations/connectors/source-snowflake/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/source-snowflake/src/main/resources/spec.json @@ -19,7 +19,6 @@ "auth_type": { "type": "string", "const": "OAuth", - "default": "OAuth", "order": 0 }, "client_id": { @@ -61,7 +60,6 @@ "auth_type": { "type": "string", "const": "username/password", - "default": "username/password", "order": 0 }, "username": { diff --git a/airbyte-integrations/connectors/source-snowflake/src/test-integration/resources/dummy_config.json b/airbyte-integrations/connectors/source-snowflake/src/test-integration/resources/dummy_config.json new file mode 100644 index 000000000000..ed5c98b1ed4e --- /dev/null +++ b/airbyte-integrations/connectors/source-snowflake/src/test-integration/resources/dummy_config.json @@ -0,0 +1,5 @@ +{ + "host": "default", + "port": 5555, + "user": "default" +} diff --git a/airbyte-integrations/connectors/source-snowflake/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-snowflake/src/test-integration/resources/expected_spec.json new file mode 100644 index 000000000000..282a585ffe6f --- /dev/null +++ b/airbyte-integrations/connectors/source-snowflake/src/test-integration/resources/expected_spec.json @@ -0,0 +1,187 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/sources/snowflake", + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Snowflake Source Spec", + "type": "object", + "required": ["host", "role", "warehouse", "database", "schema"], + "properties": { + "credentials": { + "title": "Authorization Method", + "type": "object", + "oneOf": [ + { + "type": "object", 
+ "title": "OAuth2.0", + "order": 0, + "required": ["client_id", "client_secret", "auth_type"], + "properties": { + "auth_type": { + "type": "string", + "const": "OAuth", + "order": 0 + }, + "client_id": { + "type": "string", + "title": "Client ID", + "description": "The Client ID of your Snowflake developer application.", + "airbyte_secret": true, + "order": 1 + }, + "client_secret": { + "type": "string", + "title": "Client Secret", + "description": "The Client Secret of your Snowflake developer application.", + "airbyte_secret": true, + "order": 2 + }, + "access_token": { + "type": "string", + "title": "Access Token", + "description": "Access Token for making authenticated requests.", + "airbyte_secret": true, + "order": 3 + }, + "refresh_token": { + "type": "string", + "title": "Refresh Token", + "description": "Refresh Token for making authenticated requests.", + "airbyte_secret": true, + "order": 4 + } + } + }, + { + "title": "Username and Password", + "type": "object", + "required": ["username", "password", "auth_type"], + "order": 1, + "properties": { + "auth_type": { + "type": "string", + "const": "username/password", + "order": 0 + }, + "username": { + "description": "The username you created to allow Airbyte to access the database.", + "examples": ["AIRBYTE_USER"], + "type": "string", + "title": "Username", + "order": 1 + }, + "password": { + "description": "The password associated with the username.", + "type": "string", + "airbyte_secret": true, + "title": "Password", + "order": 2 + } + } + } + ], + "order": 0 + }, + "host": { + "description": "The host domain of the snowflake instance (must include the account, region, cloud environment, and end with snowflakecomputing.com).", + "examples": ["accountname.us-east-2.aws.snowflakecomputing.com"], + "type": "string", + "title": "Account Name", + "order": 1 + }, + "role": { + "description": "The role you created for Airbyte to access Snowflake.", + "examples": ["AIRBYTE_ROLE"], + "type": "string", + "title": "Role", + "order": 2 + }, + "warehouse": { + "description": "The warehouse you created for Airbyte to access data.", + "examples": ["AIRBYTE_WAREHOUSE"], + "type": "string", + "title": "Warehouse", + "order": 3 + }, + "database": { + "description": "The database you created for Airbyte to access data.", + "examples": ["AIRBYTE_DATABASE"], + "type": "string", + "title": "Database", + "order": 4 + }, + "schema": { + "description": "The source Snowflake schema tables.", + "examples": ["AIRBYTE_SCHEMA"], + "type": "string", + "title": "Schema", + "order": 5 + }, + "jdbc_url_params": { + "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. 
(example: key1=value1&key2=value2&key3=value3).", + "title": "JDBC URL Params", + "type": "string", + "order": 6 + } + } + }, + "supportsNormalization": false, + "supportsDBT": false, + "supported_destination_sync_modes": [], + "advanced_auth": { + "auth_flow_type": "oauth2.0", + "predicate_key": ["credentials", "auth_type"], + "predicate_value": "OAuth", + "oauth_config_specification": { + "oauth_user_input_from_connector_config_specification": { + "type": "object", + "properties": { + "host": { + "type": "string", + "path_in_connector_config": ["host"] + }, + "role": { + "type": "string", + "path_in_connector_config": ["role"] + } + } + }, + "complete_oauth_output_specification": { + "type": "object", + "properties": { + "access_token": { + "type": "string", + "path_in_connector_config": ["credentials", "access_token"] + }, + "refresh_token": { + "type": "string", + "path_in_connector_config": ["credentials", "refresh_token"] + } + } + }, + "complete_oauth_server_input_specification": { + "type": "object", + "properties": { + "client_id": { + "type": "string" + }, + "client_secret": { + "type": "string" + } + } + }, + "complete_oauth_server_output_specification": { + "type": "object", + "properties": { + "client_id": { + "type": "string", + "path_in_connector_config": ["credentials", "client_id"] + }, + "client_secret": { + "type": "string", + "path_in_connector_config": ["credentials", "client_secret"] + } + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-tidb/acceptance-test-config.yml b/airbyte-integrations/connectors/source-tidb/acceptance-test-config.yml new file mode 100644 index 000000000000..193e2c954199 --- /dev/null +++ b/airbyte-integrations/connectors/source-tidb/acceptance-test-config.yml @@ -0,0 +1,7 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-tidb:dev +tests: + spec: + - spec_path: "src/test-integration/resources/expected_spec.json" + config_path: "src/test-integration/resources/dummy_config.json" diff --git a/airbyte-integrations/connectors/source-tidb/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-tidb/acceptance-test-docker.sh new file mode 100644 index 000000000000..ba0ab2874b98 --- /dev/null +++ b/airbyte-integrations/connectors/source-tidb/acceptance-test-docker.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . 
-t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input diff --git a/airbyte-integrations/connectors/source-tidb/build.gradle b/airbyte-integrations/connectors/source-tidb/build.gradle old mode 100755 new mode 100644 index 7676d78d77af..6f1001121a8d --- a/airbyte-integrations/connectors/source-tidb/build.gradle +++ b/airbyte-integrations/connectors/source-tidb/build.gradle @@ -2,6 +2,7 @@ plugins { id 'application' id 'airbyte-docker' id 'airbyte-integration-test-java' + id 'airbyte-source-acceptance-test' } application { @@ -33,4 +34,4 @@ dependencies { implementation files(project(':airbyte-integrations:bases:base-java').airbyteDocker.outputs) integrationTestJavaImplementation files(project(':airbyte-integrations:bases:base-java').airbyteDocker.outputs) -} \ No newline at end of file +} diff --git a/airbyte-integrations/connectors/source-tidb/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-tidb/integration_tests/acceptance.py new file mode 100644 index 000000000000..1302b2f57e10 --- /dev/null +++ b/airbyte-integrations/connectors/source-tidb/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. 
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-tidb/src/test-integration/resources/dummy_config.json b/airbyte-integrations/connectors/source-tidb/src/test-integration/resources/dummy_config.json new file mode 100644 index 000000000000..560e55333378 --- /dev/null +++ b/airbyte-integrations/connectors/source-tidb/src/test-integration/resources/dummy_config.json @@ -0,0 +1,6 @@ +{ + "host": "default", + "port": 5555, + "database": "default", + "username": "default" +} diff --git a/airbyte-integrations/connectors/source-tidb/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-tidb/src/test-integration/resources/expected_spec.json new file mode 100644 index 000000000000..395e18282687 --- /dev/null +++ b/airbyte-integrations/connectors/source-tidb/src/test-integration/resources/expected_spec.json @@ -0,0 +1,176 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/sources/tidb", + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "TiDB Source Spec", + "type": "object", + "required": ["host", "port", "database", "username"], + "properties": { + "host": { + "description": "Hostname of the database.", + "title": "Host", + "type": "string", + "order": 0 + }, + "port": { + "description": "Port of the database.", + "title": "Port", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 4000, + "examples": ["4000"], + "order": 1 + }, + "database": { + "description": "Name of the database.", + "title": "Database", + "type": "string", + "order": 2 + }, + "username": { + "description": "Username to use to access the database.", + "title": "Username", + "type": "string", + "order": 3 + }, + "password": { + "description": "Password associated with the username.", + "title": "Password", + "type": "string", + "airbyte_secret": true, + "order": 4 + }, + "jdbc_url_params": { + "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. 
(example: key1=value1&key2=value2&key3=value3)", + "title": "JDBC URL Params", + "type": "string", + "order": 5 + }, + "ssl": { + "title": "SSL Connection", + "description": "Encrypt data using SSL.", + "type": "boolean", + "default": false, + "order": 6 + }, + "tunnel_method": { + "type": "object", + "title": "SSH Tunnel Method", + "description": "Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.", + "oneOf": [ + { + "title": "No Tunnel", + "required": ["tunnel_method"], + "properties": { + "tunnel_method": { + "description": "No ssh tunnel needed to connect to database", + "type": "string", + "const": "NO_TUNNEL", + "order": 0 + } + } + }, + { + "title": "SSH Key Authentication", + "required": [ + "tunnel_method", + "tunnel_host", + "tunnel_port", + "tunnel_user", + "ssh_key" + ], + "properties": { + "tunnel_method": { + "description": "Connect through a jump server tunnel host using username and ssh key", + "type": "string", + "const": "SSH_KEY_AUTH", + "order": 0 + }, + "tunnel_host": { + "title": "SSH Tunnel Jump Server Host", + "description": "Hostname of the jump server host that allows inbound ssh tunnel.", + "type": "string", + "order": 1 + }, + "tunnel_port": { + "title": "SSH Connection Port", + "description": "Port on the proxy/jump server that accepts inbound ssh connections.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 22, + "examples": ["22"], + "order": 2 + }, + "tunnel_user": { + "title": "SSH Login Username", + "description": "OS-level username for logging into the jump server host.", + "type": "string", + "order": 3 + }, + "ssh_key": { + "title": "SSH Private Key", + "description": "OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )", + "type": "string", + "airbyte_secret": true, + "multiline": true, + "order": 4 + } + } + }, + { + "title": "Password Authentication", + "required": [ + "tunnel_method", + "tunnel_host", + "tunnel_port", + "tunnel_user", + "tunnel_user_password" + ], + "properties": { + "tunnel_method": { + "description": "Connect through a jump server tunnel host using username and password authentication", + "type": "string", + "const": "SSH_PASSWORD_AUTH", + "order": 0 + }, + "tunnel_host": { + "title": "SSH Tunnel Jump Server Host", + "description": "Hostname of the jump server host that allows inbound ssh tunnel.", + "type": "string", + "order": 1 + }, + "tunnel_port": { + "title": "SSH Connection Port", + "description": "Port on the proxy/jump server that accepts inbound ssh connections.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 22, + "examples": ["22"], + "order": 2 + }, + "tunnel_user": { + "title": "SSH Login Username", + "description": "OS-level username for logging into the jump server host", + "type": "string", + "order": 3 + }, + "tunnel_user_password": { + "title": "Password", + "description": "OS-level password for logging into the jump server host", + "type": "string", + "airbyte_secret": true, + "order": 4 + } + } + } + ] + } + } + }, + "supportsNormalization": false, + "supportsDBT": false, + "supported_destination_sync_modes": [] +}
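
Several of the spec hunks above (for example the Postgres replication "method" field and the Snowflake "auth_type" fields) drop redundant "enum"/"default" keys where a single-value "const" is already declared. Under JSON Schema draft-07, "const" imposes the same validation constraint as a one-element "enum", so the trimmed specs reject the same inputs as before. The following sketch is not part of this change set and assumes Python with the third-party jsonschema package installed; it only illustrates that equivalence:

# Quick check that dropping "enum"/"default" next to a "const" does not loosen
# validation under JSON Schema draft-07 ("default" is an annotation only).
from jsonschema import Draft7Validator

const_form = {"type": "string", "const": "Standard"}
enum_form = {"type": "string", "enum": ["Standard"], "default": "Standard"}

for schema in (const_form, enum_form):
    validator = Draft7Validator(schema)
    assert validator.is_valid("Standard")   # the declared value passes
    assert not validator.is_valid("CDC")    # any other value is rejected

print("const-only and enum+default forms validate identically")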
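
The new expected_spec.json fixtures pin each connector's advertised connectionSpecification, and the dummy_config.json fixtures supply the config that each acceptance-test-config.yml points the spec test at. As a local shortcut for catching fixture drift early, the dummy config can be sanity-checked against the corresponding connection specification. This is only a sketch, not part of this change set; it assumes the jsonschema package and is run from the connector directory, using source-tidb as the example:

import json
from pathlib import Path

from jsonschema import validate  # third-party: pip install jsonschema

# Run from airbyte-integrations/connectors/source-tidb/; both files are added above.
resources = Path("src/test-integration/resources")
spec = json.loads((resources / "expected_spec.json").read_text())
config = json.loads((resources / "dummy_config.json").read_text())

# Raises jsonschema.exceptions.ValidationError if the dummy config stops
# satisfying the connection specification (e.g. a newly required property).
validate(instance=config, schema=spec["connectionSpecification"])
print("dummy_config.json satisfies the TiDB connection specification")

The acceptance tests themselves remain driven by the acceptance-test-docker.sh scripts and the airbyte-source-acceptance-test Gradle plugin added in this change set; the snippet above is only a convenience for local verification.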