diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index cd4b5c6bbcf3..f91fe985b871 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 0.35.39-alpha
+current_version = 0.35.42-alpha
 commit = False
 tag = False
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(\-[a-z]+)?
diff --git a/.env b/.env
index 2489e31d39ea..88db98daffe4 100644
--- a/.env
+++ b/.env
@@ -10,7 +10,7 @@

 ### SHARED ###
-VERSION=0.35.39-alpha
+VERSION=0.35.42-alpha

 # When using the airbyte-db via default docker image
 CONFIG_ROOT=/data
diff --git a/airbyte-bootloader/Dockerfile b/airbyte-bootloader/Dockerfile
index b29f4ecae4bb..19cad62d5efd 100644
--- a/airbyte-bootloader/Dockerfile
+++ b/airbyte-bootloader/Dockerfile
@@ -5,6 +5,6 @@ ENV APPLICATION airbyte-bootloader

 WORKDIR /app

-ADD bin/${APPLICATION}-0.35.39-alpha.tar /app
+ADD bin/${APPLICATION}-0.35.42-alpha.tar /app

-ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.39-alpha/bin/${APPLICATION}"]
+ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.42-alpha/bin/${APPLICATION}"]
diff --git a/airbyte-cdk/python/airbyte_cdk/logger.py b/airbyte-cdk/python/airbyte_cdk/logger.py
index 1cfb72175a62..a54c59325fbd 100644
--- a/airbyte-cdk/python/airbyte_cdk/logger.py
+++ b/airbyte-cdk/python/airbyte_cdk/logger.py
@@ -9,6 +9,7 @@
 from typing import List, Tuple

 from airbyte_cdk.models import AirbyteLogMessage, AirbyteMessage
+from deprecated import deprecated

 TRACE_LEVEL_NUM = 5
@@ -102,6 +103,7 @@ def log_by_prefix(msg: str, default_level: str) -> Tuple[int, str]:
     return log_level, rendered_message


+@deprecated(version="0.1.47", reason="Use logging.getLogger('airbyte') instead")
 class AirbyteLogger:

     def log(self, level, message):
         log_record = AirbyteLogMessage(level=level, message=message)
diff --git a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml
index fd7b5772272c..43fdc81818ed 100644
--- a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml
+++ b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml
@@ -37,7 +37,7 @@
 - name: Clickhouse
   destinationDefinitionId: ce0d828e-1dc4-496c-b122-2da42e637e48
   dockerRepository: airbyte/destination-clickhouse
-  dockerImageTag: 0.1.3
+  dockerImageTag: 0.1.4
   documentationUrl: https://docs.airbyte.io/integrations/destinations/clickhouse
 - name: DynamoDB
   destinationDefinitionId: 8ccd8909-4e99-4141-b48d-4984b70b2d89
@@ -108,7 +108,7 @@
 - name: MS SQL Server
   destinationDefinitionId: d4353156-9217-4cad-8dd7-c108fd4f74cf
   dockerRepository: airbyte/destination-mssql
-  dockerImageTag: 0.1.14
+  dockerImageTag: 0.1.15
   documentationUrl: https://docs.airbyte.io/integrations/destinations/mssql
   icon: mssql.svg
 - name: MeiliSearch
@@ -126,19 +126,19 @@
 - name: MySQL
   destinationDefinitionId: ca81ee7c-3163-4246-af40-094cc31e5e42
   dockerRepository: airbyte/destination-mysql
-  dockerImageTag: 0.1.17
+  dockerImageTag: 0.1.18
   documentationUrl: https://docs.airbyte.io/integrations/destinations/mysql
   icon: mysql.svg
 - name: Oracle
   destinationDefinitionId: 3986776d-2319-4de9-8af8-db14c0996e72
   dockerRepository: airbyte/destination-oracle
-  dockerImageTag: 0.1.13
+  dockerImageTag: 0.1.15
   documentationUrl: https://docs.airbyte.io/integrations/destinations/oracle
   icon: oracle.svg
 - name: Postgres
   destinationDefinitionId: 25c5221d-dce2-4163-ade9-739ef790f503
   dockerRepository: airbyte/destination-postgres
-  dockerImageTag: 0.3.14
+  dockerImageTag: 0.3.15
   documentationUrl:
https://docs.airbyte.io/integrations/destinations/postgres icon: postgresql.svg - name: Pulsar @@ -162,7 +162,7 @@ - name: Redshift destinationDefinitionId: f7a7d195-377f-cf5b-70a5-be6b819019dc dockerRepository: airbyte/destination-redshift - dockerImageTag: 0.3.26 + dockerImageTag: 0.3.27 documentationUrl: https://docs.airbyte.io/integrations/destinations/redshift icon: redshift.svg - name: Rockset @@ -185,7 +185,7 @@ - name: Snowflake destinationDefinitionId: 424892c4-daac-4491-b35d-c6688ba547ba dockerRepository: airbyte/destination-snowflake - dockerImageTag: 0.4.16 + dockerImageTag: 0.4.17 documentationUrl: https://docs.airbyte.io/integrations/destinations/snowflake icon: snowflake.svg resourceRequirements: @@ -207,7 +207,7 @@ - name: MariaDB ColumnStore destinationDefinitionId: 294a4790-429b-40ae-9516-49826b9702e1 dockerRepository: airbyte/destination-mariadb-columnstore - dockerImageTag: 0.1.3 + dockerImageTag: 0.1.4 documentationUrl: https://docs.airbyte.io/integrations/destinations/mariadb-columnstore icon: mariadb.svg - name: Streamr @@ -216,3 +216,9 @@ dockerImageTag: 0.0.1 documentationUrl: https://docs.airbyte.io/integrations/destinations/streamr icon: streamr.svg +- name: Scylla + destinationDefinitionId: 3dc6f384-cd6b-4be3-ad16-a41450899bf0 + dockerRepository: airbyte/destination-scylla + dockerImageTag: 0.1.1 + documentationUrl: https://docs.airbyte.io/integrations/destinations/scylla + icon: scylla.svg diff --git a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml index 091d757ed3a3..08325bc09dcc 100644 --- a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml @@ -667,7 +667,7 @@ supported_destination_sync_modes: - "overwrite" - "append" -- dockerImage: "airbyte/destination-clickhouse:0.1.3" +- dockerImage: "airbyte/destination-clickhouse:0.1.4" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/clickhouse" connectionSpecification: @@ -2077,7 +2077,7 @@ supportsDBT: false supported_destination_sync_modes: - "append" -- dockerImage: "airbyte/destination-mssql:0.1.14" +- dockerImage: "airbyte/destination-mssql:0.1.15" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/mssql" connectionSpecification: @@ -2464,7 +2464,7 @@ supported_destination_sync_modes: - "overwrite" - "append" -- dockerImage: "airbyte/destination-mysql:0.1.17" +- dockerImage: "airbyte/destination-mysql:0.1.18" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/mysql" connectionSpecification: @@ -2629,7 +2629,7 @@ supported_destination_sync_modes: - "overwrite" - "append" -- dockerImage: "airbyte/destination-oracle:0.1.13" +- dockerImage: "airbyte/destination-oracle:0.1.15" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/oracle" connectionSpecification: @@ -2853,11 +2853,11 @@ order: 4 supportsIncremental: true supportsNormalization: false - supportsDBT: true + supportsDBT: false supported_destination_sync_modes: - "overwrite" - "append" -- dockerImage: "airbyte/destination-postgres:0.3.14" +- dockerImage: "airbyte/destination-postgres:0.3.15" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/postgres" connectionSpecification: @@ -3272,7 +3272,7 @@ supported_destination_sync_modes: - "overwrite" - "append" -- dockerImage: "airbyte/destination-redshift:0.3.26" +- dockerImage: "airbyte/destination-redshift:0.3.27" 
spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/redshift" connectionSpecification: @@ -3825,7 +3825,7 @@ supported_destination_sync_modes: - "overwrite" - "append" -- dockerImage: "airbyte/destination-snowflake:0.4.16" +- dockerImage: "airbyte/destination-snowflake:0.4.17" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/snowflake" connectionSpecification: @@ -4078,7 +4078,7 @@ - "overwrite" - "append" - "append_dedup" -- dockerImage: "airbyte/destination-mariadb-columnstore:0.1.3" +- dockerImage: "airbyte/destination-mariadb-columnstore:0.1.4" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/mariadb-columnstore" connectionSpecification: @@ -4257,3 +4257,60 @@ supported_destination_sync_modes: - "append" - "append_dedup" +- dockerImage: "airbyte/destination-scylla:0.1.1" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/destinations/scylla" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Scylla Destination Spec" + type: "object" + required: + - "keyspace" + - "username" + - "password" + - "address" + - "port" + additionalProperties: true + properties: + keyspace: + title: "Keyspace" + description: "Default Scylla keyspace to create data in." + type: "string" + order: 0 + username: + title: "Username" + description: "Username to use to access Scylla." + type: "string" + order: 1 + password: + title: "Password" + description: "Password associated with Scylla." + type: "string" + airbyte_secret: true + order: 2 + address: + title: "Address" + description: "Address to connect to." + type: "string" + order: 3 + port: + title: "Port" + description: "Port of Scylla." + type: "integer" + minimum: 0 + maximum: 65536 + default: 9042 + order: 4 + replication: + title: "Replication factor" + type: "integer" + description: "Indicates to how many nodes the data should be replicated\ + \ to." 
+ default: 1 + order: 5 + supportsIncremental: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: + - "overwrite" + - "append" diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index d84ffe4d0468..c9d422cce3af 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -829,7 +829,7 @@ - name: Zendesk Support sourceDefinitionId: 79c1aa37-dae3-42ae-b333-d1c105477715 dockerRepository: airbyte/source-zendesk-support - dockerImageTag: 0.1.12 + dockerImageTag: 0.2.0 documentationUrl: https://docs.airbyte.io/integrations/sources/zendesk-support icon: zendesk.svg sourceType: api diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml index 352c6fe29122..2dfaf12842ec 100644 --- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml @@ -8765,7 +8765,7 @@ path_in_connector_config: - "credentials" - "client_secret" -- dockerImage: "airbyte/source-zendesk-support:0.1.12" +- dockerImage: "airbyte/source-zendesk-support:0.2.0" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/zendesk-support" connectionSpecification: diff --git a/airbyte-container-orchestrator/Dockerfile b/airbyte-container-orchestrator/Dockerfile index 027f0c166a41..d6d3f859aee7 100644 --- a/airbyte-container-orchestrator/Dockerfile +++ b/airbyte-container-orchestrator/Dockerfile @@ -26,12 +26,12 @@ RUN echo "deb [signed-by=/usr/share/keyrings/kubernetes-archive-keyring.gpg] htt RUN apt-get update && apt-get install -y kubectl ENV APPLICATION airbyte-container-orchestrator -ENV AIRBYTE_ENTRYPOINT "/app/${APPLICATION}-0.35.39-alpha/bin/${APPLICATION}" +ENV AIRBYTE_ENTRYPOINT "/app/${APPLICATION}-0.35.42-alpha/bin/${APPLICATION}" WORKDIR /app # Move orchestrator app -ADD bin/${APPLICATION}-0.35.39-alpha.tar /app +ADD bin/${APPLICATION}-0.35.42-alpha.tar /app # wait for upstream dependencies to become available before starting server -ENTRYPOINT ["/bin/bash", "-c", "/app/${APPLICATION}-0.35.39-alpha/bin/${APPLICATION}"] +ENTRYPOINT ["/bin/bash", "-c", "/app/${APPLICATION}-0.35.42-alpha/bin/${APPLICATION}"] diff --git a/airbyte-db/lib/src/main/java/io/airbyte/db/Databases.java b/airbyte-db/lib/src/main/java/io/airbyte/db/Databases.java index e25bd240e2a0..3fb65f85f73b 100644 --- a/airbyte-db/lib/src/main/java/io/airbyte/db/Databases.java +++ b/airbyte-db/lib/src/main/java/io/airbyte/db/Databases.java @@ -4,6 +4,7 @@ package io.airbyte.db; +import com.google.common.collect.Maps; import io.airbyte.commons.lang.Exceptions; import io.airbyte.db.bigquery.BigQueryDatabase; import io.airbyte.db.jdbc.DefaultJdbcDatabase; @@ -15,7 +16,6 @@ import io.airbyte.db.mongodb.MongoDatabase; import java.io.IOException; import java.util.Map; -import java.util.Optional; import java.util.function.Function; import lombok.val; import org.apache.commons.dbcp2.BasicDataSource; @@ -41,7 +41,7 @@ public static Database createPostgresDatabaseWithRetry(final String username, try { val infinity = Integer.MAX_VALUE; database = createPostgresDatabaseWithRetryTimeout(username, password, jdbcConnectionString, isDbReady, infinity); - } catch (IOException e) { + } catch (final IOException e) { // This should theoretically never happen since we set the timeout to 
be a very high number.
     }
   }
@@ -131,9 +131,9 @@ public static Database createDatabase(final String username,
                                         final String jdbcConnectionString,
                                         final String driverClassName,
                                         final SQLDialect dialect,
-                                        final String connectionProperties) {
+                                        final Map<String, String> connectionProperties) {
     final BasicDataSource connectionPool =
-        createBasicDataSource(username, password, jdbcConnectionString, driverClassName, Optional.ofNullable(connectionProperties));
+        createBasicDataSource(username, password, jdbcConnectionString, driverClassName, connectionProperties);
     return new Database(connectionPool, dialect);
   }
@@ -159,7 +159,7 @@ public static JdbcDatabase createJdbcDatabase(final String username,
                                                 final String password,
                                                 final String jdbcConnectionString,
                                                 final String driverClassName,
-                                                final String connectionProperties) {
+                                                final Map<String, String> connectionProperties) {
     return createJdbcDatabase(username, password, jdbcConnectionString, driverClassName, connectionProperties,
         JdbcUtils.getDefaultSourceOperations());
   }
@@ -168,10 +168,10 @@ public static JdbcDatabase createJdbcDatabase(final String username,
                                                 final String password,
                                                 final String jdbcConnectionString,
                                                 final String driverClassName,
-                                                final String connectionProperties,
+                                                final Map<String, String> connectionProperties,
                                                 final JdbcCompatibleSourceOperations<?> sourceOperations) {
     final BasicDataSource connectionPool =
-        createBasicDataSource(username, password, jdbcConnectionString, driverClassName, Optional.ofNullable(connectionProperties));
+        createBasicDataSource(username, password, jdbcConnectionString, driverClassName, connectionProperties);
     return new DefaultJdbcDatabase(connectionPool, sourceOperations);
   }
@@ -181,10 +181,10 @@ public static JdbcDatabase createStreamingJdbcDatabase(final String username,
                                                          final String jdbcConnectionString,
                                                          final String driverClassName,
                                                          final JdbcStreamingQueryConfiguration jdbcStreamingQuery,
-                                                         final String connectionProperties,
+                                                         final Map<String, String> connectionProperties,
                                                          final JdbcCompatibleSourceOperations<?> sourceOperations) {
     final BasicDataSource connectionPool =
-        createBasicDataSource(username, password, jdbcConnectionString, driverClassName, Optional.ofNullable(connectionProperties));
+        createBasicDataSource(username, password, jdbcConnectionString, driverClassName, connectionProperties);
     return new StreamingJdbcDatabase(connectionPool, sourceOperations, jdbcStreamingQuery);
   }
@@ -194,27 +194,7 @@ private static BasicDataSource createBasicDataSource(final String username,
                                                        final String jdbcConnectionString,
                                                        final String driverClassName) {
     return createBasicDataSource(username, password, jdbcConnectionString, driverClassName,
-        Optional.empty());
-  }
-
-  /**
-   * Prefer to use the method that takes in the connection properties as a map.
-   */
-  @Deprecated
-  private static BasicDataSource createBasicDataSource(final String username,
-                                                       final String password,
-                                                       final String jdbcConnectionString,
-                                                       final String driverClassName,
-                                                       final Optional<String> connectionProperties) {
-    final BasicDataSource connectionPool = new BasicDataSource();
-    connectionPool.setDriverClassName(driverClassName);
-    connectionPool.setUsername(username);
-    connectionPool.setPassword(password);
-    connectionPool.setInitialSize(0);
-    connectionPool.setMaxTotal(5);
-    connectionPool.setUrl(jdbcConnectionString);
-    connectionProperties.ifPresent(connectionPool::setConnectionProperties);
-    return connectionPool;
+        Maps.newHashMap());
   }

   public static BasicDataSource createBasicDataSource(final String username,
diff --git a/airbyte-db/lib/src/main/java/io/airbyte/db/jdbc/JdbcUtils.java b/airbyte-db/lib/src/main/java/io/airbyte/db/jdbc/JdbcUtils.java
index a7d681857aae..05caf59336b0 100644
--- a/airbyte-db/lib/src/main/java/io/airbyte/db/jdbc/JdbcUtils.java
+++ b/airbyte-db/lib/src/main/java/io/airbyte/db/jdbc/JdbcUtils.java
@@ -4,6 +4,10 @@

 package io.airbyte.db.jdbc;

+import com.fasterxml.jackson.databind.JsonNode;
+import com.google.common.collect.Maps;
+import java.util.HashMap;
+import java.util.Map;
 import org.jooq.JSONFormat;

 public class JdbcUtils {
@@ -24,4 +28,30 @@ public static String getFullyQualifiedTableName(final String schemaName, final S
     return schemaName != null ? schemaName + "." + tableName : tableName;
   }

+  public static Map<String, String> parseJdbcParameters(final JsonNode config, final String jdbcUrlParamsKey) {
+    if (config.has(jdbcUrlParamsKey)) {
+      return parseJdbcParameters(config.get(jdbcUrlParamsKey).asText());
+    } else {
+      return Maps.newHashMap();
+    }
+  }
+
+  public static Map<String, String> parseJdbcParameters(final String jdbcPropertiesString) {
+    final Map<String, String> parameters = new HashMap<>();
+    if (!jdbcPropertiesString.isBlank()) {
+      final String[] keyValuePairs = jdbcPropertiesString.split("&");
+      for (final String kv : keyValuePairs) {
+        final String[] split = kv.split("=");
+        if (split.length == 2) {
+          parameters.put(split[0], split[1]);
+        } else {
+          throw new IllegalArgumentException(
+              "jdbc_url_params must be formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3). Got "
+                  + jdbcPropertiesString);
+        }
+      }
+    }
+    return parameters;
+  }
+
 }
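The two `parseJdbcParameters` overloads added above define the contract for the new `jdbc_url_params` option: `key=value` pairs joined by `&`, with anything else rejected. A minimal usage sketch of that contract; the parameter strings below are illustrative, not taken from any connector spec:

```java
import io.airbyte.db.jdbc.JdbcUtils;
import java.util.Map;

public class ParseJdbcParametersSketch {

  public static void main(final String[] args) {
    // Well-formed input: each pair is "key=value", pairs joined by '&'.
    final Map<String, String> params = JdbcUtils.parseJdbcParameters("ssl=true&sslmode=none");
    System.out.println(params); // e.g. {ssl=true, sslmode=none}

    // A bare token ("sdf") has no '=' and is rejected.
    try {
      JdbcUtils.parseJdbcParameters("key1=value1&sdf&");
    } catch (final IllegalArgumentException e) {
      System.out.println("rejected: " + e.getMessage());
    }
  }
}
```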
diff --git a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/base/IntegrationRunner.java b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/base/IntegrationRunner.java
index fa007180851c..4d406140c2df 100644
--- a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/base/IntegrationRunner.java
+++ b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/base/IntegrationRunner.java
@@ -135,8 +135,7 @@ public void runInternal(final ITransaction transaction, final IntegrationConfig
         validateConfig(integration.spec().getConnectionSpecification(), config, "READ");
         final ConfiguredAirbyteCatalog catalog = parseConfig(parsed.getCatalogPath(), ConfiguredAirbyteCatalog.class);
         final Optional<JsonNode> stateOptional = parsed.getStatePath().map(IntegrationRunner::parseConfig);
-        final AutoCloseableIterator<AirbyteMessage> messageIterator = source.read(config, catalog, stateOptional.orElse(null));
-        try (messageIterator) {
+        try (final AutoCloseableIterator<AirbyteMessage> messageIterator = source.read(config, catalog, stateOptional.orElse(null))) {
           AirbyteSentry.executeWithTracing("ReadSource", () -> messageIterator.forEachRemaining(outputRecordCollector::accept));
         }
       }
@@ -145,8 +144,9 @@ public void runInternal(final ITransaction transaction, final IntegrationConfig
         final JsonNode config = parseConfig(parsed.getConfigPath());
         validateConfig(integration.spec().getConnectionSpecification(), config, "WRITE");
         final ConfiguredAirbyteCatalog catalog = parseConfig(parsed.getCatalogPath(), ConfiguredAirbyteCatalog.class);
-        final AirbyteMessageConsumer consumer = destination.getConsumer(config, catalog, outputRecordCollector);
-        AirbyteSentry.executeWithTracing("WriteDestination", () -> consumeWriteStream(consumer));
+        try (final AirbyteMessageConsumer consumer = destination.getConsumer(config, catalog, outputRecordCollector)) {
+          AirbyteSentry.executeWithTracing("WriteDestination", () -> consumeWriteStream(consumer));
+        }
       }
       default -> throw new IllegalStateException("Unexpected value: " + parsed.getCommand());
     }
@@ -159,16 +159,14 @@ static void consumeWriteStream(final AirbyteMessageConsumer consumer) throws Exc
     // use a Scanner that only processes new line characters to strictly abide with the
     // https://jsonlines.org/ standard
     final Scanner input = new Scanner(System.in).useDelimiter("[\r\n]+");
-    try (consumer) {
-      consumer.start();
-      while (input.hasNext()) {
-        final String inputString = input.next();
-        final Optional<AirbyteMessage> messageOptional = Jsons.tryDeserialize(inputString, AirbyteMessage.class);
-        if (messageOptional.isPresent()) {
-          consumer.accept(messageOptional.get());
-        } else {
-          LOGGER.error("Received invalid message: " + inputString);
-        }
+    consumer.start();
+    while (input.hasNext()) {
+      final String inputString = input.next();
+      final Optional<AirbyteMessage> messageOptional = Jsons.tryDeserialize(inputString, AirbyteMessage.class);
+      if (messageOptional.isPresent()) {
+        consumer.accept(messageOptional.get());
+      } else {
+        LOGGER.error("Received invalid message: " + inputString);
       }
     }
   }
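The `IntegrationRunner` change above moves the consumer and iterator into try-with-resources blocks, so `close()` runs exactly once even when the body throws (which is also why the explicit `close()` verifications disappear from the tests further below). A self-contained sketch of the pattern, with a hypothetical `Resource` standing in for `AirbyteMessageConsumer` or `AutoCloseableIterator`:

```java
public class TryWithResourcesSketch {

  // Stand-in for AirbyteMessageConsumer / AutoCloseableIterator.
  static class Resource implements AutoCloseable {

    void doWork() {
      throw new RuntimeException("boom");
    }

    @Override
    public void close() {
      System.out.println("closed exactly once");
    }

  }

  public static void main(final String[] args) {
    try (final Resource resource = new Resource()) {
      resource.doWork();
    } catch (final RuntimeException e) {
      // close() has already run by the time the exception propagates here.
      System.out.println("caught: " + e.getMessage());
    }
  }
}
```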
diff --git a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/base/ssh/SshWrappedSource.java b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/base/ssh/SshWrappedSource.java
index 4e25b66d16e0..d66c4b87bfd3 100644
--- a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/base/ssh/SshWrappedSource.java
+++ b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/base/ssh/SshWrappedSource.java
@@ -14,9 +14,12 @@
 import io.airbyte.protocol.models.ConfiguredAirbyteCatalog;
 import io.airbyte.protocol.models.ConnectorSpecification;
 import java.util.List;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 public class SshWrappedSource implements Source {

+  private static final Logger LOGGER = LoggerFactory.getLogger(SshWrappedSource.class);
   private final Source delegate;
   private final List<String> hostKey;
   private final List<String> portKey;
@@ -46,7 +49,15 @@ public AirbyteCatalog discover(final JsonNode config) throws Exception {
   public AutoCloseableIterator<AirbyteMessage> read(final JsonNode config, final ConfiguredAirbyteCatalog catalog, final JsonNode state)
       throws Exception {
     final SshTunnel tunnel = SshTunnel.getInstance(config, hostKey, portKey);
-    return AutoCloseableIterators.appendOnClose(delegate.read(tunnel.getConfigInTunnel(), catalog, state), tunnel::close);
+    final AutoCloseableIterator<AirbyteMessage> delegateRead;
+    try {
+      delegateRead = delegate.read(tunnel.getConfigInTunnel(), catalog, state);
+    } catch (final Exception e) {
+      LOGGER.error("Exception occurred while getting the delegate read iterator, closing SSH tunnel", e);
+      tunnel.close();
+      throw e;
+    }
+    return AutoCloseableIterators.appendOnClose(delegateRead, tunnel::close);
   }

 }
diff --git a/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/base/IntegrationRunnerTest.java b/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/base/IntegrationRunnerTest.java
index 83156e849f9e..c2670ed5b6e9 100644
--- a/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/base/IntegrationRunnerTest.java
+++ b/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/base/IntegrationRunnerTest.java
@@ -241,14 +241,13 @@ void testDestinationConsumerLifecycleSuccess() throws Exception {
         + Jsons.serialize(message2) + "\n"
         + Jsons.serialize(stateMessage)).getBytes()));

-    final AirbyteMessageConsumer airbyteMessageConsumerMock = mock(AirbyteMessageConsumer.class);
-    IntegrationRunner.consumeWriteStream(airbyteMessageConsumerMock);
-
-    final InOrder inOrder = inOrder(airbyteMessageConsumerMock);
-    inOrder.verify(airbyteMessageConsumerMock).accept(message1);
-    inOrder.verify(airbyteMessageConsumerMock).accept(message2);
-    inOrder.verify(airbyteMessageConsumerMock).accept(stateMessage);
-    inOrder.verify(airbyteMessageConsumerMock).close();
+    try (final AirbyteMessageConsumer airbyteMessageConsumerMock = mock(AirbyteMessageConsumer.class)) {
+      IntegrationRunner.consumeWriteStream(airbyteMessageConsumerMock);
+      final InOrder inOrder = inOrder(airbyteMessageConsumerMock);
+      inOrder.verify(airbyteMessageConsumerMock).accept(message1);
+      inOrder.verify(airbyteMessageConsumerMock).accept(message2);
+      inOrder.verify(airbyteMessageConsumerMock).accept(stateMessage);
+    }
   }

   @Test
@@ -267,15 +266,13 @@ void testDestinationConsumerLifecycleFailure() throws Exception {
         .withEmittedAt(EMITTED_AT));
     System.setIn(new ByteArrayInputStream((Jsons.serialize(message1) + "\n" + Jsons.serialize(message2)).getBytes()));

-    final AirbyteMessageConsumer airbyteMessageConsumerMock = mock(AirbyteMessageConsumer.class);
-    doThrow(new IOException("error")).when(airbyteMessageConsumerMock).accept(message1);
-
-    assertThrows(IOException.class, () -> IntegrationRunner.consumeWriteStream(airbyteMessageConsumerMock));
-
-    final InOrder inOrder = inOrder(airbyteMessageConsumerMock);
-
inOrder.verify(airbyteMessageConsumerMock).accept(message1); - inOrder.verify(airbyteMessageConsumerMock).close(); - inOrder.verifyNoMoreInteractions(); + try (final AirbyteMessageConsumer airbyteMessageConsumerMock = mock(AirbyteMessageConsumer.class)) { + doThrow(new IOException("error")).when(airbyteMessageConsumerMock).accept(message1); + assertThrows(IOException.class, () -> IntegrationRunner.consumeWriteStream(airbyteMessageConsumerMock)); + final InOrder inOrder = inOrder(airbyteMessageConsumerMock); + inOrder.verify(airbyteMessageConsumerMock).accept(message1); + inOrder.verifyNoMoreInteractions(); + } } } diff --git a/airbyte-integrations/builds.md b/airbyte-integrations/builds.md index 8bde13be03d5..ec80010bbb49 100644 --- a/airbyte-integrations/builds.md +++ b/airbyte-integrations/builds.md @@ -113,30 +113,31 @@ # Destinations -|name |status | -| :--- | :--- | -| Azure Blob Storage | [![destination-azure-blob-storage](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-azure-blob-storage%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-azure-blob-storage) | -| BigQuery | [![destination-bigquery](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-bigquery%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-bigquery) | -| ClickHouse | [![destination-clickhouse](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-clickhouse%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-clickhouse) | -| Cassandra | [![destination-cassandra](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-cassandra%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-cassandra) | -| Databricks | [![destination-databricks](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-databricks%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-databricks) | -| Dev Null | [![destination-dev-null](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-dev-null%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-dev-null) | -| Elasticsearch | (Temporarily Not Available) | -| End-to-End Testing | [![destination-e2e-test](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-e2e-test%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-e2e-test) | -| Google Cloud Storage (GCS) | [![destination-gcs](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-gcs%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-gcs) | -| Google Firestore | [![destination-firestore](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-firestore%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-firestore) | -| Google PubSub | [![destination-pubsub](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-pubsub%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-pubsub) | -| Kafka | 
[![destination-kafka](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-kafka%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-kafka) | -| Keen (Chargify) | [![destination-keen](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-keen%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-keen) | -| Local CSV | [![destination-csv](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-csv%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-csv) | -| Local JSON | [![destination-local-json](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-local-json%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-local-json) | -| MariaDB ColumnStore | [![destination-mariadb-columnstore](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-mariadb-columnstore%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-mariadb-columnstore) | -| Mongo DB | [![destination-mongodb](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-mongodb%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-mongodb) | -| MQTT | [![destination-mqtt](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-mqtt%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-mqtt) | -| Postgres | [![destination-postgres](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-postgres%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-postgres) | -| Pulsar | [![destination-pulsar](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-pulsar%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-pulsar) | -| Redshift | [![destination-redshift](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-redshift%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-redshift) | -| Rockset | [![destination-rockset](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-rockset%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-rockset) | -| S3 | [![destination-s3](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-s3%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-s3) | -| SFTP-JSON | [![destination-sftp-json](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-sftp-json%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-sftp-json) | -| Snowflake | [![destination-snowflake](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-snowflake%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-snowflake) |\ +| name | status | 
+|:---------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| Azure Blob Storage | [![destination-azure-blob-storage](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-azure-blob-storage%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-azure-blob-storage) | +| BigQuery | [![destination-bigquery](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-bigquery%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-bigquery) | +| ClickHouse | [![destination-clickhouse](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-clickhouse%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-clickhouse) | +| Cassandra | [![destination-cassandra](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-cassandra%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-cassandra) | +| Databricks | [![destination-databricks](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-databricks%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-databricks) | +| Dev Null | [![destination-dev-null](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-dev-null%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-dev-null) | +| Elasticsearch | (Temporarily Not Available) | +| End-to-End Testing | [![destination-e2e-test](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-e2e-test%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-e2e-test) | +| Google Cloud Storage (GCS) | [![destination-gcs](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-gcs%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-gcs) | +| Google Firestore | [![destination-firestore](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-firestore%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-firestore) | +| Google PubSub | [![destination-pubsub](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-pubsub%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-pubsub) | +| Kafka | [![destination-kafka](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-kafka%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-kafka) | +| Keen (Chargify) | [![destination-keen](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-keen%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-keen) | +| Local CSV | 
[![destination-csv](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-csv%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-csv) |
+| Local JSON | [![destination-local-json](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-local-json%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-local-json) |
+| MariaDB ColumnStore | [![destination-mariadb-columnstore](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-mariadb-columnstore%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-mariadb-columnstore) |
+| Mongo DB | [![destination-mongodb](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-mongodb%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-mongodb) |
+| MQTT | [![destination-mqtt](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-mqtt%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-mqtt) |
+| Postgres | [![destination-postgres](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-postgres%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-postgres) |
+| Pulsar | [![destination-pulsar](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-pulsar%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-pulsar) |
+| Redshift | [![destination-redshift](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-redshift%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-redshift) |
+| Rockset | [![destination-rockset](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-rockset%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-rockset) |
+| S3 | [![destination-s3](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-s3%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-s3) |
+| Scylla | [![destination-scylla](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-scylla%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-scylla) |
+| SFTP-JSON | [![destination-sftp-json](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-sftp-json%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-sftp-json) |
+| Snowflake | [![destination-snowflake](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-snowflake%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-snowflake) |\
diff --git a/airbyte-integrations/connectors/destination-clickhouse-strict-encrypt/Dockerfile b/airbyte-integrations/connectors/destination-clickhouse-strict-encrypt/Dockerfile
index f6069d7e1c60..9fc1aa0f33db 100644
--- a/airbyte-integrations/connectors/destination-clickhouse-strict-encrypt/Dockerfile
+++
b/airbyte-integrations/connectors/destination-clickhouse-strict-encrypt/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION destination-clickhouse-strict-encrypt COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.1 +LABEL io.airbyte.version=0.1.4 LABEL io.airbyte.name=airbyte/destination-clickhouse-strict-encrypt diff --git a/airbyte-integrations/connectors/destination-clickhouse-strict-encrypt/src/test/resources/expected_spec.json b/airbyte-integrations/connectors/destination-clickhouse-strict-encrypt/src/test/resources/expected_spec.json index 3d15378c4080..d27dea83a7f4 100644 --- a/airbyte-integrations/connectors/destination-clickhouse-strict-encrypt/src/test/resources/expected_spec.json +++ b/airbyte-integrations/connectors/destination-clickhouse-strict-encrypt/src/test/resources/expected_spec.json @@ -3,12 +3,21 @@ "supportsIncremental": true, "supportsNormalization": true, "supportsDBT": false, - "supported_destination_sync_modes": ["overwrite", "append", "append_dedup"], + "supported_destination_sync_modes": [ + "overwrite", + "append", + "append_dedup" + ], "connectionSpecification": { "$schema": "http://json-schema.org/draft-07/schema#", "title": "ClickHouse Destination Spec", "type": "object", - "required": ["host", "port", "database", "username"], + "required": [ + "host", + "port", + "database", + "username" + ], "additionalProperties": true, "properties": { "host": { @@ -24,27 +33,41 @@ "minimum": 0, "maximum": 65536, "default": 8123, - "examples": ["8123"], + "examples": [ + "8123" + ], "order": 1 }, + "tcp-port": { + "title": "Native Port", + "description": "Native port (not the JDBC) of the database.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 9000, + "examples": [ + "9000" + ], + "order": 2 + }, "database": { "title": "DB Name", "description": "Name of the database.", "type": "string", - "order": 2 + "order": 3 }, "username": { "title": "User", "description": "Username to use to access the database.", "type": "string", - "order": 3 + "order": 4 }, "password": { "title": "Password", "description": "Password associated with the username.", "type": "string", "airbyte_secret": true, - "order": 4 + "order": 5 }, "tunnel_method": { "type": "object", @@ -53,7 +76,9 @@ "oneOf": [ { "title": "No Tunnel", - "required": ["tunnel_method"], + "required": [ + "tunnel_method" + ], "properties": { "tunnel_method": { "description": "No ssh tunnel needed to connect to database", @@ -92,7 +117,9 @@ "minimum": 0, "maximum": 65536, "default": 22, - "examples": ["22"], + "examples": [ + "22" + ], "order": 2 }, "tunnel_user": { @@ -140,7 +167,9 @@ "minimum": 0, "maximum": 65536, "default": 22, - "examples": ["22"], + "examples": [ + "22" + ], "order": 2 }, "tunnel_user": { diff --git a/airbyte-integrations/connectors/destination-clickhouse/Dockerfile b/airbyte-integrations/connectors/destination-clickhouse/Dockerfile index f62452f07821..dd60f557cc1c 100644 --- a/airbyte-integrations/connectors/destination-clickhouse/Dockerfile +++ b/airbyte-integrations/connectors/destination-clickhouse/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION destination-clickhouse COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.3 +LABEL io.airbyte.version=0.1.4 LABEL io.airbyte.name=airbyte/destination-clickhouse diff --git a/airbyte-integrations/connectors/destination-clickhouse/src/main/java/io/airbyte/integrations/destination/clickhouse/ClickhouseDestination.java 
b/airbyte-integrations/connectors/destination-clickhouse/src/main/java/io/airbyte/integrations/destination/clickhouse/ClickhouseDestination.java
index 95c0b767b143..b77459a8ee62 100644
--- a/airbyte-integrations/connectors/destination-clickhouse/src/main/java/io/airbyte/integrations/destination/clickhouse/ClickhouseDestination.java
+++ b/airbyte-integrations/connectors/destination-clickhouse/src/main/java/io/airbyte/integrations/destination/clickhouse/ClickhouseDestination.java
@@ -15,8 +15,9 @@
 import io.airbyte.integrations.destination.jdbc.AbstractJdbcDestination;
 import io.airbyte.protocol.models.AirbyteConnectionStatus;
 import io.airbyte.protocol.models.AirbyteConnectionStatus.Status;
-import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -31,6 +32,10 @@ public class ClickhouseDestination extends AbstractJdbcDestination implements De

   private static final String PASSWORD = "password";

+  static final Map<String, String> SSL_JDBC_PARAMETERS = ImmutableMap.of(
+      "ssl", "true",
+      "sslmode", "none");
+
   public static Destination sshWrappedDestination() {
     return new SshWrappedDestination(new ClickhouseDestination(), HOST_KEY, PORT_KEY);
   }
@@ -41,25 +46,14 @@ public ClickhouseDestination() {
   }

   @Override
   public JsonNode toJdbcConfig(final JsonNode config) {
-    final List<String> additionalParameters = new ArrayList<>();
-
-    final StringBuilder jdbcUrl = new StringBuilder(String.format("jdbc:clickhouse://%s:%s/%s?",
+    final String jdbcUrl = String.format("jdbc:clickhouse://%s:%s/%s?",
         config.get("host").asText(),
         config.get("port").asText(),
-        config.get("database").asText()));
-
-    if (!config.has("ssl") || config.get("ssl").asBoolean()) {
-      additionalParameters.add("ssl=true");
-      additionalParameters.add("sslmode=none");
-    }
-
-    if (!additionalParameters.isEmpty()) {
-      additionalParameters.forEach(x -> jdbcUrl.append(x).append("&"));
-    }
+        config.get("database").asText());

     final ImmutableMap.Builder<Object, Object> configBuilder = ImmutableMap.builder()
         .put("username", config.get("username").asText())
-        .put("jdbc_url", jdbcUrl.toString());
+        .put("jdbc_url", jdbcUrl);

     if (config.has(PASSWORD)) {
       configBuilder.put(PASSWORD, config.get(PASSWORD).asText());
@@ -68,6 +62,10 @@ public JsonNode toJdbcConfig(final JsonNode config) {
     return Jsons.jsonNode(configBuilder.build());
   }

+  private boolean useSsl(final JsonNode config) {
+    return !config.has("ssl") || config.get("ssl").asBoolean();
+  }
+
   @Override
   public AirbyteConnectionStatus check(final JsonNode config) {
     try (final JdbcDatabase database = getDatabase(config)) {
@@ -83,7 +81,17 @@ public AirbyteConnectionStatus check(final JsonNode config) {
     }
   }

-  public static void main(String[] args) throws Exception {
+  @Override
+  protected Map<String, String> getDefaultConnectionProperties(final JsonNode config) {
+    if (useSsl(config)) {
+      return SSL_JDBC_PARAMETERS;
+    } else {
+      // No need for any parameters if the connection doesn't use SSL
+      return new HashMap<>();
+    }
+  }
+
+  public static void main(final String[] args) throws Exception {
     final Destination destination = ClickhouseDestination.sshWrappedDestination();
     LOGGER.info("starting destination: {}", ClickhouseDestination.class);
     new IntegrationRunner(destination).run(args);
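With this refactor the ClickHouse connector no longer appends `ssl=true&sslmode=none` to the JDBC URL; the flags are surfaced through `getDefaultConnectionProperties` and applied to the datasource. A sketch of the resulting behavior; it sits in the connector's package so the protected method is reachable, and the config values are illustrative:

```java
package io.airbyte.integrations.destination.clickhouse;

import com.fasterxml.jackson.databind.JsonNode;
import com.google.common.collect.ImmutableMap;
import io.airbyte.commons.json.Jsons;

public class ClickhouseSslParamsSketch {

  public static void main(final String[] args) {
    final ClickhouseDestination destination = new ClickhouseDestination();

    // "ssl" omitted: SSL defaults to on, so the SSL parameters are returned.
    final JsonNode sslConfig = Jsons.jsonNode(ImmutableMap.of(
        "host", "localhost", "port", "8123", "username", "user", "database", "db"));
    System.out.println(destination.getDefaultConnectionProperties(sslConfig)); // {ssl=true, sslmode=none}

    // "ssl": false: no extra connection properties are added.
    final JsonNode plainConfig = Jsons.jsonNode(ImmutableMap.of(
        "host", "localhost", "port", "8123", "username", "user", "database", "db", "ssl", false));
    System.out.println(destination.getDefaultConnectionProperties(plainConfig)); // {}
  }
}
```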
b/airbyte-integrations/connectors/destination-clickhouse/src/test/java/io/airbyte/integrations/destination/clickhouse/ClickhouseDestinationTest.java index 36fcb20ac7b4..75e5d9663742 100644 --- a/airbyte-integrations/connectors/destination-clickhouse/src/test/java/io/airbyte/integrations/destination/clickhouse/ClickhouseDestinationTest.java +++ b/airbyte-integrations/connectors/destination-clickhouse/src/test/java/io/airbyte/integrations/destination/clickhouse/ClickhouseDestinationTest.java @@ -9,6 +9,7 @@ import com.fasterxml.jackson.databind.JsonNode; import com.google.common.collect.ImmutableMap; import io.airbyte.commons.json.Jsons; +import io.airbyte.commons.map.MoreMaps; import io.airbyte.db.Databases; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.db.jdbc.JdbcUtils; @@ -25,7 +26,9 @@ import io.airbyte.protocol.models.JsonSchemaType; import java.time.Instant; import java.util.Comparator; +import java.util.HashMap; import java.util.List; +import java.util.Map; import java.util.stream.Collectors; import java.util.stream.IntStream; import org.junit.jupiter.api.AfterAll; @@ -44,6 +47,17 @@ public class ClickhouseDestinationTest { private static ConfiguredAirbyteCatalog catalog; private static JsonNode config; + private static final Map CONFIG_WITH_SSL = ImmutableMap.of( + "host", "localhost", + "port", "1337", + "username", "user", + "database", "db"); + + private static final Map CONFIG_NO_SSL = MoreMaps.merge( + CONFIG_WITH_SSL, + ImmutableMap.of( + "ssl", "false")); + @BeforeAll static void init() { db = new ClickHouseContainer("yandex/clickhouse-server"); @@ -76,6 +90,20 @@ static void cleanUp() { db.close(); } + @Test + void testDefaultParamsNoSSL() { + final Map defaultProperties = new ClickhouseDestination().getDefaultConnectionProperties( + Jsons.jsonNode(CONFIG_NO_SSL)); + assertEquals(new HashMap<>(), defaultProperties); + } + + @Test + void testDefaultParamsWithSSL() { + final Map defaultProperties = new ClickhouseDestination().getDefaultConnectionProperties( + Jsons.jsonNode(CONFIG_WITH_SSL)); + assertEquals(ClickhouseDestination.SSL_JDBC_PARAMETERS, defaultProperties); + } + @Test void sanityTest() throws Exception { final Destination dest = new ClickhouseDestination(); diff --git a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/AbstractJdbcDestination.java b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/AbstractJdbcDestination.java index 4a58e8299a8f..d8afb121b152 100644 --- a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/AbstractJdbcDestination.java +++ b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/AbstractJdbcDestination.java @@ -5,6 +5,7 @@ package io.airbyte.integrations.destination.jdbc; import com.fasterxml.jackson.databind.JsonNode; +import io.airbyte.commons.map.MoreMaps; import io.airbyte.db.Databases; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.db.jdbc.JdbcUtils; @@ -17,6 +18,8 @@ import io.airbyte.protocol.models.AirbyteConnectionStatus.Status; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; +import java.util.Map; +import java.util.Objects; import java.util.UUID; import java.util.function.Consumer; import org.slf4j.Logger; @@ -26,14 +29,12 @@ public abstract class AbstractJdbcDestination extends BaseConnector implements D private static final Logger 
diff --git a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/AbstractJdbcDestination.java b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/AbstractJdbcDestination.java
index 4a58e8299a8f..d8afb121b152 100644
--- a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/AbstractJdbcDestination.java
+++ b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/AbstractJdbcDestination.java
@@ -5,6 +5,7 @@
 package io.airbyte.integrations.destination.jdbc;

 import com.fasterxml.jackson.databind.JsonNode;
+import io.airbyte.commons.map.MoreMaps;
 import io.airbyte.db.Databases;
 import io.airbyte.db.jdbc.JdbcDatabase;
 import io.airbyte.db.jdbc.JdbcUtils;
@@ -17,6 +18,8 @@
 import io.airbyte.protocol.models.AirbyteConnectionStatus.Status;
 import io.airbyte.protocol.models.AirbyteMessage;
 import io.airbyte.protocol.models.ConfiguredAirbyteCatalog;
+import java.util.Map;
+import java.util.Objects;
 import java.util.UUID;
 import java.util.function.Consumer;
 import org.slf4j.Logger;
@@ -26,14 +29,12 @@ public abstract class AbstractJdbcDestination extends BaseConnector implements D

   private static final Logger LOGGER = LoggerFactory.getLogger(AbstractJdbcDestination.class);

+  public static final String JDBC_URL_PARAMS_KEY = "jdbc_url_params";
+
   private final String driverClass;
   private final NamingConventionTransformer namingResolver;
   private final SqlOperations sqlOperations;

-  protected String getDriverClass() {
-    return driverClass;
-  }
-
   protected NamingConventionTransformer getNamingResolver() {
     return namingResolver;
   }
@@ -89,9 +90,28 @@ protected JdbcDatabase getDatabase(final JsonNode config) {
         jdbcConfig.get("username").asText(),
         jdbcConfig.has("password") ? jdbcConfig.get("password").asText() : null,
         jdbcConfig.get("jdbc_url").asText(),
-        driverClass);
+        driverClass,
+        getConnectionProperties(config));
+  }
+
+  protected Map<String, String> getConnectionProperties(final JsonNode config) {
+    final Map<String, String> customProperties = JdbcUtils.parseJdbcParameters(config, JDBC_URL_PARAMS_KEY);
+    final Map<String, String> defaultProperties = getDefaultConnectionProperties(config);
+    assertCustomParametersDontOverwriteDefaultParameters(customProperties, defaultProperties);
+    return MoreMaps.merge(customProperties, defaultProperties);
   }

+  private void assertCustomParametersDontOverwriteDefaultParameters(final Map<String, String> customParameters,
+                                                                    final Map<String, String> defaultParameters) {
+    for (final String key : defaultParameters.keySet()) {
+      if (customParameters.containsKey(key) && !Objects.equals(customParameters.get(key), defaultParameters.get(key))) {
+        throw new IllegalArgumentException("Cannot overwrite default JDBC parameter " + key);
+      }
+    }
+  }
+
+  protected abstract Map<String, String> getDefaultConnectionProperties(final JsonNode config);
+
   public abstract JsonNode toJdbcConfig(JsonNode config);

   @Override
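`getConnectionProperties` above merges user-supplied `jdbc_url_params` with connector defaults and rejects any attempt to override a default with a different value (an equal value is tolerated). A standalone sketch of that merge-and-guard logic; this is a simplification for illustration, not the actual `MoreMaps.merge` implementation:

```java
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

public class ConnectionPropertiesMergeSketch {

  static Map<String, String> merge(final Map<String, String> custom, final Map<String, String> defaults) {
    // Reject custom parameters that would change a connector default.
    for (final String key : defaults.keySet()) {
      if (custom.containsKey(key) && !Objects.equals(custom.get(key), defaults.get(key))) {
        throw new IllegalArgumentException("Cannot overwrite default JDBC parameter " + key);
      }
    }
    final Map<String, String> merged = new HashMap<>(custom);
    merged.putAll(defaults); // defaults win, which is harmless because conflicts were rejected above
    return merged;
  }

  public static void main(final String[] args) {
    // Disjoint keys merge cleanly.
    System.out.println(merge(Map.of("key1", "value1"), Map.of("ssl", "true")));
    // Conflicting value for a default key: throws IllegalArgumentException.
    merge(Map.of("ssl", "false"), Map.of("ssl", "true"));
  }
}
```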
diff --git a/airbyte-integrations/connectors/destination-jdbc/src/test/java/io/airbyte/integrations/destination/jdbc/AbstractJdbcDestinationTest.java b/airbyte-integrations/connectors/destination-jdbc/src/test/java/io/airbyte/integrations/destination/jdbc/AbstractJdbcDestinationTest.java
new file mode 100644
index 000000000000..a62128c41fc8
--- /dev/null
+++ b/airbyte-integrations/connectors/destination-jdbc/src/test/java/io/airbyte/integrations/destination/jdbc/AbstractJdbcDestinationTest.java
@@ -0,0 +1,135 @@
+/*
+ * Copyright (c) 2021 Airbyte, Inc., all rights reserved.
+ */
+
+package io.airbyte.integrations.destination.jdbc;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.google.common.collect.ImmutableMap;
+import io.airbyte.commons.json.Jsons;
+import io.airbyte.integrations.destination.StandardNameTransformer;
+import java.util.HashMap;
+import java.util.Map;
+import org.junit.jupiter.api.Test;
+
+public class AbstractJdbcDestinationTest {
+
+  private JsonNode buildConfigNoJdbcParameters() {
+    return Jsons.jsonNode(ImmutableMap.of(
+        "host", "localhost",
+        "port", 1337,
+        "username", "user",
+        "database", "db"));
+  }
+
+  private JsonNode buildConfigWithExtraJdbcParameters(final String extraParam) {
+    return Jsons.jsonNode(ImmutableMap.of(
+        "host", "localhost",
+        "port", 1337,
+        "username", "user",
+        "database", "db",
+        "jdbc_url_params", extraParam));
+  }
+
+  @Test
+  void testNoExtraParamsNoDefault() {
+    final Map<String, String> connectionProperties = new TestJdbcDestination().getConnectionProperties(buildConfigNoJdbcParameters());
+
+    final Map<String, String> expectedProperties = ImmutableMap.of();
+    assertEquals(expectedProperties, connectionProperties);
+  }
+
+  @Test
+  void testNoExtraParamsWithDefault() {
+    final Map<String, String> defaultProperties = ImmutableMap.of("A_PARAMETER", "A_VALUE");
+
+    final Map<String, String> connectionProperties = new TestJdbcDestination(defaultProperties).getConnectionProperties(
+        buildConfigNoJdbcParameters());
+
+    assertEquals(defaultProperties, connectionProperties);
+  }
+
+  @Test
+  void testExtraParamNoDefault() {
+    final String extraParam = "key1=value1&key2=value2&key3=value3";
+    final Map<String, String> connectionProperties = new TestJdbcDestination().getConnectionProperties(
+        buildConfigWithExtraJdbcParameters(extraParam));
+    final Map<String, String> expectedProperties = ImmutableMap.of(
+        "key1", "value1",
+        "key2", "value2",
+        "key3", "value3");
+    assertEquals(expectedProperties, connectionProperties);
+  }
+
+  @Test
+  void testExtraParamWithDefault() {
+    final Map<String, String> defaultProperties = ImmutableMap.of("A_PARAMETER", "A_VALUE");
+    final String extraParam = "key1=value1&key2=value2&key3=value3";
+    final Map<String, String> connectionProperties = new TestJdbcDestination(defaultProperties).getConnectionProperties(
+        buildConfigWithExtraJdbcParameters(extraParam));
+    final Map<String, String> expectedProperties = ImmutableMap.of(
+        "A_PARAMETER", "A_VALUE",
+        "key1", "value1",
+        "key2", "value2",
+        "key3", "value3");
+    assertEquals(expectedProperties, connectionProperties);
+  }
+
+  @Test
+  void testExtraParameterEqualToDefault() {
+    final Map<String, String> defaultProperties = ImmutableMap.of("key1", "value1");
+    final String extraParam = "key1=value1&key2=value2&key3=value3";
+    final Map<String, String> connectionProperties = new TestJdbcDestination(defaultProperties).getConnectionProperties(
+        buildConfigWithExtraJdbcParameters(extraParam));
+    final Map<String, String> expectedProperties = ImmutableMap.of(
+        "key1", "value1",
+        "key2", "value2",
+        "key3", "value3");
+    assertEquals(expectedProperties, connectionProperties);
+  }
+
+  @Test
+  void testExtraParameterDiffersFromDefault() {
+    final Map<String, String> defaultProperties = ImmutableMap.of("key1", "value0");
+    final String extraParam = "key1=value1&key2=value2&key3=value3";
+
+    assertThrows(IllegalArgumentException.class, () -> new TestJdbcDestination(defaultProperties).getConnectionProperties(
+        buildConfigWithExtraJdbcParameters(extraParam)));
+  }
+
+  @Test
+  void testInvalidExtraParam() {
+    final String extraParam = "key1=value1&sdf&";
+    assertThrows(IllegalArgumentException.class,
+        () -> new TestJdbcDestination().getConnectionProperties(buildConfigWithExtraJdbcParameters(extraParam)));
+  }
+
+  static class TestJdbcDestination extends AbstractJdbcDestination {
+
+    private final Map<String, String> defaultProperties;
+
+    public TestJdbcDestination() {
+      this(new HashMap<>());
+    }
+
+    public TestJdbcDestination(final Map<String, String> defaultProperties) {
+      super("", new StandardNameTransformer(), new TestJdbcSqlOperations());
+      this.defaultProperties = defaultProperties;
+    }
+
+    @Override
+    protected Map<String, String> getDefaultConnectionProperties(final JsonNode config) {
+      return defaultProperties;
+    }
+
+    @Override
+    public JsonNode toJdbcConfig(final JsonNode config) {
+      return config;
+    }
+
+  }
+
+}
diff --git a/airbyte-integrations/connectors/destination-mariadb-columnstore/Dockerfile b/airbyte-integrations/connectors/destination-mariadb-columnstore/Dockerfile
index 0919436b17ef..f33e8ba0b8d4 100644
--- a/airbyte-integrations/connectors/destination-mariadb-columnstore/Dockerfile
+++ b/airbyte-integrations/connectors/destination-mariadb-columnstore/Dockerfile
@@ -16,5 +16,5 @@ ENV APPLICATION destination-mariadb-columnstore

 COPY --from=build /airbyte /airbyte

-LABEL io.airbyte.version=0.1.3
+LABEL io.airbyte.version=0.1.4
 LABEL io.airbyte.name=airbyte/destination-mariadb-columnstore
diff --git a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/main/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestination.java b/airbyte-integrations/connectors/destination-mariadb-columnstore/src/main/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestination.java
index 94568fcaf874..f8d6ed49025f 100644
--- a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/main/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestination.java
+++ b/airbyte-integrations/connectors/destination-mariadb-columnstore/src/main/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestination.java
@@ -7,7 +7,6 @@
 import com.fasterxml.jackson.databind.JsonNode;
 import com.google.common.collect.ImmutableMap;
 import io.airbyte.commons.json.Jsons;
-import io.airbyte.db.Databases;
 import io.airbyte.db.jdbc.JdbcDatabase;
 import io.airbyte.integrations.base.Destination;
 import io.airbyte.integrations.base.IntegrationRunner;
@@ -17,6 +16,7 @@
 import io.airbyte.protocol.models.AirbyteConnectionStatus;
 import io.airbyte.protocol.models.AirbyteConnectionStatus.Status;
 import java.util.List;
+import java.util.Map;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -27,6 +27,9 @@ public class MariadbColumnstoreDestination extends AbstractJdbcDestination imple
   public static final List<String> HOST_KEY = List.of("host");
   public static final List<String> PORT_KEY = List.of("port");

+  static final Map<String, String> DEFAULT_JDBC_PARAMETERS = ImmutableMap.of(
+      "allowLoadLocalInfile", "true");
+
   public static Destination sshWrappedDestination() {
     return new SshWrappedDestination(new MariadbColumnstoreDestination(), HOST_KEY, PORT_KEY);
   }
@@ -36,7 +39,7 @@ public MariadbColumnstoreDestination() {
   }

   @Override
-  public AirbyteConnectionStatus check(JsonNode config) {
+  public AirbyteConnectionStatus check(final JsonNode config) {
     try (final JdbcDatabase database = getDatabase(config)) {
       final MariadbColumnstoreSqlOperations mariadbColumnstoreSqlOperations = (MariadbColumnstoreSqlOperations) getSqlOperations();
       final String outputSchema = getNamingResolver().getIdentifier(config.get("database").asText());
@@ -66,27 +69,20 @@ public AirbyteConnectionStatus check(JsonNode config) {
AirbyteConnectionStatus check(JsonNode config) {
   }
 
   @Override
-  protected JdbcDatabase getDatabase(final JsonNode config) {
-    final JsonNode jdbcConfig = toJdbcConfig(config);
-
-    return Databases.createJdbcDatabase(
-        jdbcConfig.get("username").asText(),
-        jdbcConfig.has("password") ? jdbcConfig.get("password").asText() : null,
-        jdbcConfig.get("jdbc_url").asText(),
-        getDriverClass(),
-        "allowLoadLocalInfile=true");
+  protected Map<String, String> getDefaultConnectionProperties(final JsonNode config) {
+    return DEFAULT_JDBC_PARAMETERS;
   }
 
   @Override
   public JsonNode toJdbcConfig(final JsonNode config) {
-    final StringBuilder jdbcUrl = new StringBuilder(String.format("jdbc:mariadb://%s:%s/%s",
+    final String jdbcUrl = String.format("jdbc:mariadb://%s:%s/%s",
         config.get("host").asText(),
         config.get("port").asText(),
-        config.get("database").asText()));
+        config.get("database").asText());
 
     final ImmutableMap.Builder<Object, Object> configBuilder = ImmutableMap.builder()
         .put("username", config.get("username").asText())
-        .put("jdbc_url", jdbcUrl.toString());
+        .put("jdbc_url", jdbcUrl);
 
     if (config.has("password")) {
       configBuilder.put("password", config.get("password").asText());
@@ -95,7 +91,7 @@ public JsonNode toJdbcConfig(final JsonNode config) {
     return Jsons.jsonNode(configBuilder.build());
   }
 
-  public static void main(String[] args) throws Exception {
+  public static void main(final String[] args) throws Exception {
     final Destination destination = MariadbColumnstoreDestination.sshWrappedDestination();
     LOGGER.info("starting destination: {}", MariadbColumnstoreDestination.class);
     new IntegrationRunner(destination).run(args);
diff --git a/airbyte-integrations/connectors/destination-mssql-strict-encrypt/Dockerfile b/airbyte-integrations/connectors/destination-mssql-strict-encrypt/Dockerfile
index 7cc7a7f59db1..7b094ef7293b 100644
--- a/airbyte-integrations/connectors/destination-mssql-strict-encrypt/Dockerfile
+++ b/airbyte-integrations/connectors/destination-mssql-strict-encrypt/Dockerfile
@@ -16,5 +16,5 @@ ENV APPLICATION destination-mssql-strict-encrypt
 COPY --from=build /airbyte /airbyte
 
-LABEL io.airbyte.version=0.1.4
+LABEL io.airbyte.version=0.1.5
 LABEL io.airbyte.name=airbyte/destination-mssql-strict-encrypt
diff --git a/airbyte-integrations/connectors/destination-mssql/Dockerfile b/airbyte-integrations/connectors/destination-mssql/Dockerfile
index a48db180356c..027c0699ecf1 100644
--- a/airbyte-integrations/connectors/destination-mssql/Dockerfile
+++ b/airbyte-integrations/connectors/destination-mssql/Dockerfile
@@ -16,5 +16,5 @@ ENV APPLICATION destination-mssql
 COPY --from=build /airbyte /airbyte
 
-LABEL io.airbyte.version=0.1.14
+LABEL io.airbyte.version=0.1.15
 LABEL io.airbyte.name=airbyte/destination-mssql
diff --git a/airbyte-integrations/connectors/destination-mssql/src/main/java/io/airbyte/integrations/destination/mssql/MSSQLDestination.java b/airbyte-integrations/connectors/destination-mssql/src/main/java/io/airbyte/integrations/destination/mssql/MSSQLDestination.java
index 86aeb9dc3547..ca8a95c7dcf2 100644
--- a/airbyte-integrations/connectors/destination-mssql/src/main/java/io/airbyte/integrations/destination/mssql/MSSQLDestination.java
+++ b/airbyte-integrations/connectors/destination-mssql/src/main/java/io/airbyte/integrations/destination/mssql/MSSQLDestination.java
@@ -12,8 +12,9 @@
 import io.airbyte.integrations.base.ssh.SshWrappedDestination;
 import io.airbyte.integrations.destination.jdbc.AbstractJdbcDestination;
 import java.io.File;
-import java.util.ArrayList;
+import java.util.HashMap;
 import 
java.util.List; +import java.util.Map; import java.util.Optional; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -30,27 +31,44 @@ public MSSQLDestination() { super(DRIVER_CLASS, new MSSQLNameTransformer(), new SqlServerOperations()); } + @Override + protected Map getDefaultConnectionProperties(final JsonNode config) { + final HashMap properties = new HashMap<>(); + if (config.has("ssl_method")) { + switch (config.get("ssl_method").asText()) { + case "unencrypted" -> properties.put("encrypt", "false"); + case "encrypted_trust_server_certificate" -> { + properties.put("encrypt", "true"); + properties.put("trustServerCertificate", "true"); + } + case "encrypted_verify_certificate" -> { + properties.put("encrypt", "true"); + properties.put("trustStore", getTrustStoreLocation()); + final String trustStorePassword = System.getProperty("javax.net.ssl.trustStorePassword"); + if (trustStorePassword != null && !trustStorePassword.isEmpty()) { + properties.put("trustStorePassword", config.get("trustStorePassword").asText()); + } + if (config.has("hostNameInCertificate")) { + properties.put("hostNameInCertificate", config.get("hostNameInCertificate").asText()); + } + } + } + } + + return properties; + } + @Override public JsonNode toJdbcConfig(final JsonNode config) { final String schema = Optional.ofNullable(config.get("schema")).map(JsonNode::asText).orElse("public"); - final List additionalParameters = new ArrayList<>(); - - final StringBuilder jdbcUrl = new StringBuilder(String.format("jdbc:sqlserver://%s:%s;databaseName=%s;", + final String jdbcUrl = String.format("jdbc:sqlserver://%s:%s;databaseName=%s;", config.get("host").asText(), config.get("port").asText(), - config.get("database").asText())); - - if (config.has("ssl_method")) { - readSsl(config, additionalParameters); - } - - if (!additionalParameters.isEmpty()) { - jdbcUrl.append(String.join(";", additionalParameters)); - } + config.get("database").asText()); final ImmutableMap.Builder configBuilder = ImmutableMap.builder() - .put("jdbc_url", jdbcUrl.toString()) + .put("jdbc_url", jdbcUrl) .put("username", config.get("username").asText()) .put("password", config.get("password").asText()) .put("schema", schema); @@ -58,37 +76,16 @@ public JsonNode toJdbcConfig(final JsonNode config) { return Jsons.jsonNode(configBuilder.build()); } - private void readSsl(final JsonNode config, final List additionalParameters) { - switch (config.get("ssl_method").asText()) { - case "unencrypted": - additionalParameters.add("encrypt=false"); - break; - case "encrypted_trust_server_certificate": - additionalParameters.add("encrypt=true"); - additionalParameters.add("trustServerCertificate=true"); - break; - case "encrypted_verify_certificate": - additionalParameters.add("encrypt=true"); - - // trust store location code found at https://stackoverflow.com/a/56570588 - final String trustStoreLocation = Optional.ofNullable(System.getProperty("javax.net.ssl.trustStore")) - .orElseGet(() -> System.getProperty("java.home") + "/lib/security/cacerts"); - final File trustStoreFile = new File(trustStoreLocation); - if (!trustStoreFile.exists()) { - throw new RuntimeException("Unable to locate the Java TrustStore: the system property javax.net.ssl.trustStore is undefined or " - + trustStoreLocation + " does not exist."); - } - final String trustStorePassword = System.getProperty("javax.net.ssl.trustStorePassword"); - - additionalParameters.add("trustStore=" + trustStoreLocation); - if (trustStorePassword != null && !trustStorePassword.isEmpty()) { - 
additionalParameters.add("trustStorePassword=" + config.get("trustStorePassword").asText()); - } - if (config.has("hostNameInCertificate")) { - additionalParameters.add("hostNameInCertificate=" + config.get("hostNameInCertificate").asText()); - } - break; + private String getTrustStoreLocation() { + // trust store location code found at https://stackoverflow.com/a/56570588 + final String trustStoreLocation = Optional.ofNullable(System.getProperty("javax.net.ssl.trustStore")) + .orElseGet(() -> System.getProperty("java.home") + "/lib/security/cacerts"); + final File trustStoreFile = new File(trustStoreLocation); + if (!trustStoreFile.exists()) { + throw new RuntimeException("Unable to locate the Java TrustStore: the system property javax.net.ssl.trustStore is undefined or " + + trustStoreLocation + " does not exist."); } + return trustStoreLocation; } public static Destination sshWrappedDestination() { diff --git a/airbyte-integrations/connectors/destination-mssql/src/test/java/io/airbyte/integrations/destination/mssql/MSSQLDestinationTest.java b/airbyte-integrations/connectors/destination-mssql/src/test/java/io/airbyte/integrations/destination/mssql/MSSQLDestinationTest.java new file mode 100644 index 000000000000..88697030ffa8 --- /dev/null +++ b/airbyte-integrations/connectors/destination-mssql/src/test/java/io/airbyte/integrations/destination/mssql/MSSQLDestinationTest.java @@ -0,0 +1,167 @@ +package io.airbyte.integrations.destination.mssql; + +import static java.lang.System.getProperty; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import com.fasterxml.jackson.databind.JsonNode; +import io.airbyte.commons.json.Jsons; +import io.airbyte.commons.map.MoreMaps; +import java.util.HashMap; +import java.util.Map; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.testcontainers.shaded.com.google.common.collect.ImmutableMap; + +public class MSSQLDestinationTest { + + private Map existingProperties; + + private JsonNode createConfig(final String sslMethod) { + return createConfig(sslMethod, new HashMap<>()); + } + + private JsonNode createConfig(final String sslMethod, final Map additionalConfigs) { + return Jsons.jsonNode(MoreMaps.merge(baseParameters(sslMethod), additionalConfigs)); + } + + private Map baseParameters(final String sslMethod) { + return ImmutableMap.builder() + .put("ssl_method", sslMethod) + .put("host", "localhost") + .put("port", "1773") + .put("database", "db") + .put("username", "username") + .put("password", "verysecure") + .build(); + } + + @BeforeEach + public void setUp() { + existingProperties = new HashMap<>(); + } + + @AfterEach + public void tearDown() { + resetProperties(); + } + + @Test + public void testNoSsl() { + final MSSQLDestination destination = new MSSQLDestination(); + final JsonNode config = Jsons.jsonNode(ImmutableMap.of()); + final Map properties = destination.getDefaultConnectionProperties(config); + assertTrue(properties.isEmpty()); + } + + @Test + public void testUnencrypted() { + final MSSQLDestination destination = new MSSQLDestination(); + final JsonNode config = createConfig("unencrypted"); + final Map properties = destination.getDefaultConnectionProperties(config); + assertEquals(properties.get("encrypt"), "false"); + } + + @Test + public void 
testEncryptedTrustServerCertificate() { + final MSSQLDestination destination = new MSSQLDestination(); + final JsonNode config = createConfig("encrypted_trust_server_certificate"); + final Map properties = destination.getDefaultConnectionProperties(config); + assertEquals(properties.get("encrypt"), "true"); + assertEquals(properties.get("trustServerCertificate"), "true"); + } + + @Test + public void testEncryptedVerifyCertificate() { + final MSSQLDestination destination = new MSSQLDestination(); + final JsonNode config = createConfig("encrypted_verify_certificate"); + + final Map properties = destination.getDefaultConnectionProperties(config); + assertEquals(properties.get("encrypt"), "true"); + + final String trustStoreLocation = getProperty("java.home") + "/lib/security/cacerts"; + assertEquals(properties.get("trustStore"), trustStoreLocation); + assertNull(properties.get("trustStorePassword")); + assertNull(properties.get("hostNameInCertificate")); //TODO: add test with hostname in certificate + } + + @Test + public void testInvalidTrustStoreFile() { + setProperty("javax.net.ssl.trustStore", "/NOT_A_TRUST_STORE"); + final MSSQLDestination destination = new MSSQLDestination(); + final JsonNode config = createConfig("encrypted_verify_certificate"); + + assertThrows(RuntimeException.class, () -> + destination.getDefaultConnectionProperties(config) + ); + } + + @Test + public void testEncryptedVerifyCertificateWithEmptyTrustStorePassword() { + setProperty("javax.net.ssl.trustStorePassword", ""); + final MSSQLDestination destination = new MSSQLDestination(); + final JsonNode config = createConfig("encrypted_verify_certificate", ImmutableMap.of("trustStorePassword", "")); + + final Map properties = destination.getDefaultConnectionProperties(config); + assertEquals(properties.get("encrypt"), "true"); + + final String trustStoreLocation = getProperty("java.home") + "/lib/security/cacerts"; + assertEquals(properties.get("trustStore"), trustStoreLocation); + assertNull(properties.get("trustStorePassword")); + assertNull(properties.get("hostNameInCertificate")); + } + + @Test + public void testEncryptedVerifyCertificateWithNonEmptyTrustStorePassword() { + final String TRUST_STORE_PASSWORD = "TRUSTSTOREPASSWORD"; + setProperty("javax.net.ssl.trustStorePassword", TRUST_STORE_PASSWORD); + final MSSQLDestination destination = new MSSQLDestination(); + final JsonNode config = createConfig("encrypted_verify_certificate", ImmutableMap.of("trustStorePassword", TRUST_STORE_PASSWORD)); + + final Map properties = destination.getDefaultConnectionProperties(config); + assertEquals(properties.get("encrypt"), "true"); + + final String trustStoreLocation = getProperty("java.home") + "/lib/security/cacerts"; + assertEquals(properties.get("trustStore"), trustStoreLocation); + assertEquals(properties.get("trustStorePassword"), TRUST_STORE_PASSWORD); + assertNull(properties.get("hostNameInCertificate")); + } + + @Test + public void testEncryptedVerifyCertificateWithHostNameInCertificate() { + final MSSQLDestination destination = new MSSQLDestination(); + final String HOSTNAME_IN_CERTIFICATE = "HOSTNAME_IN_CERTIFICATE"; + final JsonNode config = createConfig("encrypted_verify_certificate", ImmutableMap.of("hostNameInCertificate", HOSTNAME_IN_CERTIFICATE)); + + final Map properties = destination.getDefaultConnectionProperties(config); + assertEquals(properties.get("encrypt"), "true"); + + final String trustStoreLocation = getProperty("java.home") + "/lib/security/cacerts"; + assertEquals(properties.get("trustStore"), 
trustStoreLocation); + assertNull(properties.get("trustStorePassword")); + + assertEquals(properties.get("hostNameInCertificate"), HOSTNAME_IN_CERTIFICATE); + } + + private void setProperty(final String key, final String value) { + existingProperties.put(key, System.getProperty(key)); + System.setProperty(key, value); + } + + private void resetProperties() { + existingProperties.forEach((k, v) -> resetProperty(k)); + } + + private void resetProperty(final String key) { + final String value = existingProperties.get(key); + if (value != null) { + System.setProperty(key, value); + } else { + System.clearProperty(key); + } + } + +} diff --git a/airbyte-integrations/connectors/destination-mysql-strict-encrypt/Dockerfile b/airbyte-integrations/connectors/destination-mysql-strict-encrypt/Dockerfile index 530eb36c0b61..ad65ba60ef06 100644 --- a/airbyte-integrations/connectors/destination-mysql-strict-encrypt/Dockerfile +++ b/airbyte-integrations/connectors/destination-mysql-strict-encrypt/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION destination-mysql-strict-encrypt COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.3 +LABEL io.airbyte.version=0.1.4 LABEL io.airbyte.name=airbyte/destination-mysql-strict-encrypt diff --git a/airbyte-integrations/connectors/destination-mysql/Dockerfile b/airbyte-integrations/connectors/destination-mysql/Dockerfile index b137bdd41e32..bc324b2bff11 100644 --- a/airbyte-integrations/connectors/destination-mysql/Dockerfile +++ b/airbyte-integrations/connectors/destination-mysql/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION destination-mysql COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.17 +LABEL io.airbyte.version=0.1.18 LABEL io.airbyte.name=airbyte/destination-mysql diff --git a/airbyte-integrations/connectors/destination-mysql/src/main/java/io/airbyte/integrations/destination/mysql/MySQLDestination.java b/airbyte-integrations/connectors/destination-mysql/src/main/java/io/airbyte/integrations/destination/mysql/MySQLDestination.java index ad2b0505172e..7d4720e90c37 100644 --- a/airbyte-integrations/connectors/destination-mysql/src/main/java/io/airbyte/integrations/destination/mysql/MySQLDestination.java +++ b/airbyte-integrations/connectors/destination-mysql/src/main/java/io/airbyte/integrations/destination/mysql/MySQLDestination.java @@ -6,10 +6,8 @@ import com.fasterxml.jackson.databind.JsonNode; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Streams; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.map.MoreMaps; -import io.airbyte.db.Databases; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.integrations.base.Destination; import io.airbyte.integrations.base.IntegrationRunner; @@ -18,13 +16,8 @@ import io.airbyte.integrations.destination.mysql.MySQLSqlOperations.VersionCompatibility; import io.airbyte.protocol.models.AirbyteConnectionStatus; import io.airbyte.protocol.models.AirbyteConnectionStatus.Status; -import java.util.Collection; -import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Objects; -import java.util.stream.Collectors; -import java.util.stream.Stream; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -43,12 +36,20 @@ public class MySQLDestination extends AbstractJdbcDestination implements Destina public static final String DRIVER_CLASS = "com.mysql.cj.jdbc.Driver"; - static final Map SSL_JDBC_PARAMETERS = ImmutableMap.of( - "useSSL", "true", - "requireSSL", "true", - "verifyServerCertificate", "false"); static final 
Map<String, String> DEFAULT_JDBC_PARAMETERS = ImmutableMap.of(
-      "zeroDateTimeBehavior", "convertToNull");
+      // zero dates by default cannot be parsed into java date objects (they will throw an error).
+      // in addition, users don't always have agency in fixing them, e.g. maybe they don't own the
+      // database and can't remove zero date values.
+      // since zero dates are placeholders, we convert them to null by default
+      "zeroDateTimeBehavior", "convertToNull",
+      "allowLoadLocalInfile", "true");
+
+  static final Map<String, String> DEFAULT_SSL_JDBC_PARAMETERS = MoreMaps.merge(ImmutableMap.of(
+      "useSSL", "true",
+      "requireSSL", "true",
+      "verifyServerCertificate", "false"),
+      DEFAULT_JDBC_PARAMETERS);
 
   public static Destination sshWrappedDestination() {
     return new SshWrappedDestination(new MySQLDestination(), List.of(HOST_KEY), List.of(PORT_KEY));
@@ -86,101 +87,37 @@ public MySQLDestination() {
   }
 
   @Override
-  protected JdbcDatabase getDatabase(final JsonNode config) {
-    final JsonNode jdbcConfig = toJdbcConfig(config);
-
-    return Databases.createJdbcDatabase(
-        jdbcConfig.get(USERNAME_KEY).asText(),
-        jdbcConfig.has(PASSWORD_KEY) ? jdbcConfig.get(PASSWORD_KEY).asText() : null,
-        jdbcConfig.get(JDBC_URL_KEY).asText(),
-        getDriverClass(),
-        "allowLoadLocalInfile=true");
+  protected Map<String, String> getDefaultConnectionProperties(final JsonNode config) {
+    if (useSSL(config)) {
+      return DEFAULT_SSL_JDBC_PARAMETERS;
+    } else {
+      return DEFAULT_JDBC_PARAMETERS;
+    }
+  }
+
+  private boolean useSSL(final JsonNode config) {
+    return !config.has(SSL_KEY) || config.get(SSL_KEY).asBoolean();
   }
 
   @Override
   public JsonNode toJdbcConfig(final JsonNode config) {
-    final List<String> additionalParameters = getAdditionalParameters(config);
-
-    final StringBuilder jdbcUrl = new StringBuilder(String.format("jdbc:mysql://%s:%s/%s",
+    final String jdbcUrl = String.format("jdbc:mysql://%s:%s/%s",
         config.get(HOST_KEY).asText(),
         config.get(PORT_KEY).asText(),
-        config.get(DATABASE_KEY).asText()));
-    // zero dates by default cannot be parsed into java date objects (they will throw an error)
-    // in addition, users don't always have agency in fixing them e.g: maybe they don't own the database
-    // and can't
-    // remove zero date values. 
- // since zero dates are placeholders, we convert them to null by default - if (!additionalParameters.isEmpty()) { - jdbcUrl.append("?"); - jdbcUrl.append(String.join("&", additionalParameters)); - } + config.get(DATABASE_KEY).asText()); final ImmutableMap.Builder configBuilder = ImmutableMap.builder() .put(USERNAME_KEY, config.get(USERNAME_KEY).asText()) - .put(JDBC_URL_KEY, jdbcUrl.toString()); + .put(JDBC_URL_KEY, jdbcUrl); if (config.has(PASSWORD_KEY)) { configBuilder.put(PASSWORD_KEY, config.get(PASSWORD_KEY).asText()); } - - return Jsons.jsonNode(configBuilder.build()); - } - - private List getAdditionalParameters(final JsonNode config) { - final Map customParameters = getCustomJdbcParameters(config); - - if (useSSL(config)) { - return convertToJdbcStrings(customParameters, MoreMaps.merge(DEFAULT_JDBC_PARAMETERS, SSL_JDBC_PARAMETERS)); - } else { - return convertToJdbcStrings(customParameters, DEFAULT_JDBC_PARAMETERS); - } - } - - private List convertToJdbcStrings(final Map customParameters, final Map defaultParametersMap) { - assertCustomParametersDontOverwriteDefaultParameters(customParameters, defaultParametersMap); - return Streams.concat(Stream.of(customParameters, defaultParametersMap)) - .map(Map::entrySet) - .flatMap(Collection::stream) - .map(entry -> formatParameter(entry.getKey(), entry.getValue())) - .collect(Collectors.toList()); - } - - private void assertCustomParametersDontOverwriteDefaultParameters(final Map customParameters, - final Map defaultParameters) { - for (final String key : defaultParameters.keySet()) { - if (customParameters.containsKey(key) && !Objects.equals(customParameters.get(key), defaultParameters.get(key))) { - throw new IllegalArgumentException("Cannot overwrite default JDBC parameter " + key); - } - } - } - - private Map getCustomJdbcParameters(final JsonNode config) { - final Map parameters = new HashMap<>(); if (config.has(JDBC_URL_PARAMS_KEY)) { - final String jdbcParams = config.get(JDBC_URL_PARAMS_KEY).asText(); - if (!jdbcParams.isBlank()) { - final String[] keyValuePairs = jdbcParams.split("&"); - for (final String kv : keyValuePairs) { - final String[] split = kv.split("="); - if (split.length == 2) { - parameters.put(split[0], split[1]); - } else { - throw new IllegalArgumentException( - "jdbc_url_params must be formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3). 
Got " - + jdbcParams); - } - } - } + configBuilder.put(JDBC_URL_PARAMS_KEY, config.get(JDBC_URL_PARAMS_KEY)); } - return parameters; - } - - private boolean useSSL(final JsonNode config) { - return !config.has(SSL_KEY) || config.get(SSL_KEY).asBoolean(); - } - static String formatParameter(final String key, final String value) { - return String.format("%s=%s", key, value); + return Jsons.jsonNode(configBuilder.build()); } public static void main(final String[] args) throws Exception { diff --git a/airbyte-integrations/connectors/destination-mysql/src/test-integration/java/io/airbyte/integrations/destination/mysql/MySQLDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-mysql/src/test-integration/java/io/airbyte/integrations/destination/mysql/MySQLDestinationAcceptanceTest.java index a9f491059c55..6016ca02c3be 100644 --- a/airbyte-integrations/connectors/destination-mysql/src/test-integration/java/io/airbyte/integrations/destination/mysql/MySQLDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-mysql/src/test-integration/java/io/airbyte/integrations/destination/mysql/MySQLDestinationAcceptanceTest.java @@ -189,8 +189,7 @@ protected void tearDown(final TestDestinationEnv testEnv) { public void testCustomDbtTransformations() throws Exception { // We need to create view for testing custom dbt transformations executeQuery("GRANT CREATE VIEW ON *.* TO " + db.getUsername() + "@'%';"); - // overrides test with a no-op until https://github.com/dbt-labs/jaffle_shop/pull/8 is merged - // super.testCustomDbtTransformations(); + super.testCustomDbtTransformations(); } @Test diff --git a/airbyte-integrations/connectors/destination-mysql/src/test/java/io/airbyte/integrations/destination/mysql/MySQLDestinationTest.java b/airbyte-integrations/connectors/destination-mysql/src/test/java/io/airbyte/integrations/destination/mysql/MySQLDestinationTest.java index b86977f55c68..6a98e8f3fa66 100644 --- a/airbyte-integrations/connectors/destination-mysql/src/test/java/io/airbyte/integrations/destination/mysql/MySQLDestinationTest.java +++ b/airbyte-integrations/connectors/destination-mysql/src/test/java/io/airbyte/integrations/destination/mysql/MySQLDestinationTest.java @@ -4,130 +4,74 @@ package io.airbyte.integrations.destination.mysql; -import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.mockito.Mockito.spy; import com.fasterxml.jackson.databind.JsonNode; import com.google.common.collect.ImmutableMap; import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.map.MoreMaps; import java.util.Map; -import java.util.Map.Entry; import org.junit.jupiter.api.Test; public class MySQLDestinationTest { - private MySQLDestination getDestination() { - final MySQLDestination result = spy(MySQLDestination.class); - return result; - } + public static final String JDBC_URL = "jdbc:mysql://localhost:1337/db"; private JsonNode buildConfigNoJdbcParameters() { - final JsonNode config = Jsons.jsonNode(ImmutableMap.of( + return Jsons.jsonNode(ImmutableMap.of( "host", "localhost", "port", 1337, "username", "user", "database", "db")); - return config; } private JsonNode buildConfigWithExtraJdbcParameters(final String extraParam) { - final JsonNode config = Jsons.jsonNode(ImmutableMap.of( - "host", "localhost", - "port", 1337, - "username", "user", - "database", "db", - "jdbc_url_params", extraParam)); - return config; - } 
- - private JsonNode buildConfigWithExtraJdbcParametersWithNoSsl(final String extraParam) { - final JsonNode config = Jsons.jsonNode(ImmutableMap.of( + return Jsons.jsonNode(ImmutableMap.of( "host", "localhost", "port", 1337, "username", "user", "database", "db", - "ssl", false, "jdbc_url_params", extraParam)); - return config; } private JsonNode buildConfigNoExtraJdbcParametersWithoutSsl() { - final JsonNode config = Jsons.jsonNode(ImmutableMap.of( + return Jsons.jsonNode(ImmutableMap.of( "host", "localhost", "port", 1337, "username", "user", "database", "db", "ssl", false)); - return config; } @Test void testNoExtraParams() { - final JsonNode jdbcConfig = getDestination().toJdbcConfig(buildConfigNoJdbcParameters()); - final String url = jdbcConfig.get("jdbc_url").asText(); - assertEquals("jdbc:mysql://localhost:1337/db?verifyServerCertificate=false&zeroDateTimeBehavior=convertToNull&requireSSL=true&useSSL=true", url); + final JsonNode config = buildConfigNoJdbcParameters(); + final JsonNode jdbcConfig = new MySQLDestination().toJdbcConfig(config); + assertEquals(JDBC_URL, jdbcConfig.get("jdbc_url").asText()); } @Test void testEmptyExtraParams() { - final JsonNode jdbcConfig = getDestination().toJdbcConfig(buildConfigWithExtraJdbcParameters("")); - final String url = jdbcConfig.get("jdbc_url").asText(); - assertEquals("jdbc:mysql://localhost:1337/db?verifyServerCertificate=false&zeroDateTimeBehavior=convertToNull&requireSSL=true&useSSL=true", url); + final JsonNode jdbcConfig = new MySQLDestination().toJdbcConfig(buildConfigWithExtraJdbcParameters("")); + assertEquals(JDBC_URL, jdbcConfig.get("jdbc_url").asText()); } @Test void testExtraParams() { final String extraParam = "key1=value1&key2=value2&key3=value3"; - final JsonNode jdbcConfig = getDestination().toJdbcConfig(buildConfigWithExtraJdbcParameters(extraParam)); - final String url = jdbcConfig.get("jdbc_url").asText(); - assertEquals( - "jdbc:mysql://localhost:1337/db?key1=value1&key2=value2&key3=value3&verifyServerCertificate=false&zeroDateTimeBehavior=convertToNull&requireSSL=true&useSSL=true", - url); - } - - @Test - void testExtraParamsWithDefaultParameter() { - final Map allDefaultParameters = MoreMaps.merge(MySQLDestination.SSL_JDBC_PARAMETERS, - MySQLDestination.DEFAULT_JDBC_PARAMETERS); - for (final Entry entry : allDefaultParameters.entrySet()) { - final String identicalParameter = MySQLDestination.formatParameter(entry.getKey(), entry.getValue()); - final String overridingParameter = MySQLDestination.formatParameter(entry.getKey(), "DIFFERENT_VALUE"); - - // Do not throw an exception if the values are equal - assertDoesNotThrow(() -> getDestination().toJdbcConfig(buildConfigWithExtraJdbcParameters(identicalParameter)).get("jdbc_url").asText()); - // Throw an exception if the values are different - assertThrows(IllegalArgumentException.class, () -> getDestination().toJdbcConfig(buildConfigWithExtraJdbcParameters(overridingParameter))); - } - } - - @Test - void testExtraParameterNoSsl() { - final String extraParam = "key1=value1&key2=value2&key3=value3"; - final JsonNode jdbcConfig = getDestination().toJdbcConfig(buildConfigWithExtraJdbcParametersWithNoSsl(extraParam)); - final String url = jdbcConfig.get("jdbc_url").asText(); - assertEquals( - "jdbc:mysql://localhost:1337/db?key1=value1&key2=value2&key3=value3&zeroDateTimeBehavior=convertToNull", - url); + final JsonNode jdbcConfig = new MySQLDestination().toJdbcConfig(buildConfigWithExtraJdbcParameters(extraParam)); + assertEquals(JDBC_URL, 
jdbcConfig.get("jdbc_url").asText()); } @Test - void testNoExtraParameterNoSsl() { - final JsonNode jdbcConfig = getDestination().toJdbcConfig(buildConfigNoExtraJdbcParametersWithoutSsl()); - final String url = jdbcConfig.get("jdbc_url").asText(); - assertEquals( - "jdbc:mysql://localhost:1337/db?zeroDateTimeBehavior=convertToNull", - url); + void testDefaultParamsNoSSL() { + final Map defaultProperties = new MySQLDestination().getDefaultConnectionProperties(buildConfigNoExtraJdbcParametersWithoutSsl()); + assertEquals(MySQLDestination.DEFAULT_JDBC_PARAMETERS, defaultProperties); } @Test - void testInvalidExtraParam() { - final String extraParam = "key1=value1&sdf&"; - assertThrows(IllegalArgumentException.class, () -> { - getDestination().toJdbcConfig(buildConfigWithExtraJdbcParameters(extraParam)); - }); + void testDefaultParamsWithSSL() { + final Map defaultProperties = new MySQLDestination().getDefaultConnectionProperties(buildConfigNoJdbcParameters()); + assertEquals(MySQLDestination.DEFAULT_SSL_JDBC_PARAMETERS, defaultProperties); } } diff --git a/airbyte-integrations/connectors/destination-oracle-strict-encrypt/Dockerfile b/airbyte-integrations/connectors/destination-oracle-strict-encrypt/Dockerfile index bfbd7c875274..f7ed56838c5f 100644 --- a/airbyte-integrations/connectors/destination-oracle-strict-encrypt/Dockerfile +++ b/airbyte-integrations/connectors/destination-oracle-strict-encrypt/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION destination-oracle-strict-encrypt COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.3 +LABEL io.airbyte.version=0.1.4 LABEL io.airbyte.name=airbyte/destination-oracle-strict-encrypt diff --git a/airbyte-integrations/connectors/destination-oracle-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/oracle_strict_encrypt/OracleStrictEncryptDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-oracle-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/oracle_strict_encrypt/OracleStrictEncryptDestinationAcceptanceTest.java index 05b2a6d9eb43..04ab33055b98 100644 --- a/airbyte-integrations/connectors/destination-oracle-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/oracle_strict_encrypt/OracleStrictEncryptDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-oracle-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/oracle_strict_encrypt/OracleStrictEncryptDestinationAcceptanceTest.java @@ -15,6 +15,7 @@ import io.airbyte.db.Database; import io.airbyte.db.Databases; import io.airbyte.db.jdbc.JdbcDatabase; +import io.airbyte.db.jdbc.JdbcUtils; import io.airbyte.integrations.destination.ExtendedNameTransformer; import io.airbyte.integrations.destination.oracle.OracleDestination; import io.airbyte.integrations.destination.oracle.OracleNameTransformer; @@ -77,7 +78,7 @@ protected boolean implementsNamespaces() { @Override protected boolean supportsDBT() { - return true; + return false; } @Override @@ -113,8 +114,8 @@ private List retrieveRecordsFromTable(final String tableName, final St throws SQLException { final List result = getDatabase(config) .query(ctx -> ctx.fetch( - String.format("SELECT * FROM %s.%s ORDER BY %s ASC", schemaName, tableName, - OracleDestination.COLUMN_NAME_EMITTED_AT)) + String.format("SELECT * FROM %s.%s ORDER BY %s ASC", schemaName, tableName, + OracleDestination.COLUMN_NAME_EMITTED_AT)) .stream() .collect(Collectors.toList())); return result @@ -176,9 +177,9 @@ public 
void testEncryption() throws SQLException { config.get("port").asText(), config.get("sid").asText()), "oracle.jdbc.driver.OracleDriver", - "oracle.net.encryption_client=REQUIRED;" + + JdbcUtils.parseJdbcParameters("oracle.net.encryption_client=REQUIRED;" + "oracle.net.encryption_types_client=( " - + algorithm + " )"); + + algorithm + " )")); final String network_service_banner = "select network_service_banner from v$session_connect_info where sid in (select distinct sid from v$mystat)"; @@ -202,9 +203,9 @@ public void testCheckProtocol() throws SQLException { clone.get("port").asText(), clone.get("sid").asText()), "oracle.jdbc.driver.OracleDriver", - "oracle.net.encryption_client=REQUIRED;" + + JdbcUtils.parseJdbcParameters("oracle.net.encryption_client=REQUIRED;" + "oracle.net.encryption_types_client=( " - + algorithm + " )"); + + algorithm + " )")); final String network_service_banner = "SELECT sys_context('USERENV', 'NETWORK_PROTOCOL') as network_protocol FROM dual"; final List collect = database.query(network_service_banner).collect(Collectors.toList()); diff --git a/airbyte-integrations/connectors/destination-oracle/Dockerfile b/airbyte-integrations/connectors/destination-oracle/Dockerfile index 408b559fa4fa..4bf980d8c5c6 100644 --- a/airbyte-integrations/connectors/destination-oracle/Dockerfile +++ b/airbyte-integrations/connectors/destination-oracle/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION destination-oracle COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.14 +LABEL io.airbyte.version=0.1.15 LABEL io.airbyte.name=airbyte/destination-oracle diff --git a/airbyte-integrations/connectors/destination-oracle/src/main/java/io/airbyte/integrations/destination/oracle/OracleDestination.java b/airbyte-integrations/connectors/destination-oracle/src/main/java/io/airbyte/integrations/destination/oracle/OracleDestination.java index 6b7ea9eeb4e1..795882a30ad9 100644 --- a/airbyte-integrations/connectors/destination-oracle/src/main/java/io/airbyte/integrations/destination/oracle/OracleDestination.java +++ b/airbyte-integrations/connectors/destination-oracle/src/main/java/io/airbyte/integrations/destination/oracle/OracleDestination.java @@ -14,8 +14,9 @@ import io.airbyte.integrations.destination.jdbc.AbstractJdbcDestination; import java.io.IOException; import java.io.PrintWriter; -import java.util.ArrayList; +import java.util.HashMap; import java.util.List; +import java.util.Map; import java.util.concurrent.TimeUnit; import org.apache.commons.lang3.RandomStringUtils; import org.slf4j.Logger; @@ -36,8 +37,10 @@ public class OracleDestination extends AbstractJdbcDestination implements Destin public static final String COLUMN_NAME_EMITTED_AT = "\"" + JavaBaseConstants.COLUMN_NAME_EMITTED_AT.toUpperCase() + "\""; - private static final String KEY_STORE_FILE_PATH = "clientkeystore.jks"; + protected static final String KEY_STORE_FILE_PATH = "clientkeystore.jks"; private static final String KEY_STORE_PASS = RandomStringUtils.randomAlphanumeric(8); + public static final String ENCRYPTION_KEY = "encryption"; + public static final String ENCRYPTION_METHOD_KEY = "encryption_method"; enum Protocol { TCP, @@ -54,12 +57,36 @@ public static Destination sshWrappedDestination() { } @Override - public JsonNode toJdbcConfig(final JsonNode config) { - final List additionalParameters = new ArrayList<>(); + protected Map getDefaultConnectionProperties(final JsonNode config) { + final HashMap properties = new HashMap<>(); + if (config.has(ENCRYPTION_KEY)) { + final JsonNode encryption = 
config.get(ENCRYPTION_KEY); + final String encryptionMethod = encryption.get(ENCRYPTION_METHOD_KEY).asText(); + switch (encryptionMethod) { + case "unencrypted" -> { + + } + case "client_nne" -> { + final String algorithm = encryption.get("encryption_algorithm").asText(); + properties.put("oracle.net.encryption_client", "REQUIRED"); + properties.put("oracle.net.encryption_types_client", "( " + algorithm + " )"); + } + case "encrypted_verify_certificate" -> { + tryConvertAndImportCertificate(encryption.get("ssl_certificate").asText()); + properties.put("javax.net.ssl.trustStore", KEY_STORE_FILE_PATH); + properties.put("javax.net.ssl.trustStoreType", "JKS"); + properties.put("javax.net.ssl.trustStorePassword", KEY_STORE_PASS); + } + default -> throw new RuntimeException("Failed to obtain connection protocol from config " + encryption.asText()); + } - final Protocol protocol = config.has("encryption") - ? obtainConnectionProtocol(config.get("encryption"), additionalParameters) - : Protocol.TCP; + } + return properties; + } + + @Override + public JsonNode toJdbcConfig(final JsonNode config) { + final Protocol protocol = obtainConnectionProtocol(config); final String connectionString = String.format( "jdbc:oracle:thin:@(DESCRIPTION=(ADDRESS=(PROTOCOL=%s)(HOST=%s)(PORT=%s))(CONNECT_DATA=(SID=%s)))", protocol, @@ -75,36 +102,20 @@ public JsonNode toJdbcConfig(final JsonNode config) { configBuilder.put("password", config.get("password").asText()); } - if (!additionalParameters.isEmpty()) { - final String connectionParams = String.join(";", additionalParameters); - configBuilder.put("connection_properties", connectionParams); - } - return Jsons.jsonNode(configBuilder.build()); } - private Protocol obtainConnectionProtocol(final JsonNode encryption, - final List additionalParameters) { - final String encryptionMethod = encryption.get("encryption_method").asText(); + protected Protocol obtainConnectionProtocol(final JsonNode config) { + if (!config.has(ENCRYPTION_KEY)) { + return Protocol.TCP; + } + final JsonNode encryption = config.get(ENCRYPTION_KEY); + final String encryptionMethod = encryption.get(ENCRYPTION_METHOD_KEY).asText(); switch (encryptionMethod) { - case "unencrypted" -> { - return Protocol.TCP; - } - case "client_nne" -> { - final String algorithm = encryption.get("encryption_algorithm").asText(); - additionalParameters.add("oracle.net.encryption_client=REQUIRED"); - additionalParameters.add("oracle.net.encryption_types_client=( " + algorithm + " )"); + case "unencrypted", "client_nne" -> { return Protocol.TCP; } case "encrypted_verify_certificate" -> { - try { - convertAndImportCertificate(encryption.get("ssl_certificate").asText()); - } catch (final IOException | InterruptedException e) { - throw new RuntimeException("Failed to import certificate into Java Keystore"); - } - additionalParameters.add("javax.net.ssl.trustStore=" + KEY_STORE_FILE_PATH); - additionalParameters.add("javax.net.ssl.trustStoreType=JKS"); - additionalParameters.add("javax.net.ssl.trustStorePassword=" + KEY_STORE_PASS); return Protocol.TCPS; } } @@ -112,6 +123,14 @@ private Protocol obtainConnectionProtocol(final JsonNode encryption, "Failed to obtain connection protocol from config " + encryption.asText()); } + private static void tryConvertAndImportCertificate(final String certificate) { + try { + convertAndImportCertificate(certificate); + } catch (final IOException | InterruptedException e) { + throw new RuntimeException("Failed to import certificate into Java Keystore"); + } + } + private static void 
convertAndImportCertificate(final String certificate) throws IOException, InterruptedException { final Runtime run = Runtime.getRuntime(); diff --git a/airbyte-integrations/connectors/destination-oracle/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-oracle/src/main/resources/spec.json index 3acaa8b981ae..9422b0958de9 100644 --- a/airbyte-integrations/connectors/destination-oracle/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/destination-oracle/src/main/resources/spec.json @@ -2,8 +2,11 @@ "documentationUrl": "https://docs.airbyte.io/integrations/destinations/oracle", "supportsIncremental": true, "supportsNormalization": false, - "supportsDBT": true, - "supported_destination_sync_modes": ["overwrite", "append"], + "supportsDBT": false, + "supported_destination_sync_modes": [ + "overwrite", + "append" + ], "connectionSpecification": { "$schema": "http://json-schema.org/draft-07/schema#", "title": "Oracle Destination Spec", diff --git a/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/NneOracleDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/NneOracleDestinationAcceptanceTest.java index 278b955220de..6554e982a506 100644 --- a/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/NneOracleDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/NneOracleDestinationAcceptanceTest.java @@ -15,6 +15,7 @@ import io.airbyte.db.jdbc.JdbcDatabase; import java.sql.SQLException; import java.util.List; +import java.util.Map; import java.util.stream.Collectors; import org.junit.Test; @@ -37,18 +38,22 @@ public void testEncryption() throws SQLException { config.get("port").asText(), config.get("sid").asText()), "oracle.jdbc.driver.OracleDriver", - "oracle.net.encryption_client=REQUIRED;" + - "oracle.net.encryption_types_client=( " - + algorithm + " )"); + getAdditionalProperties(algorithm)); final String network_service_banner = "select network_service_banner from v$session_connect_info where sid in (select distinct sid from v$mystat)"; - final List collect = database.query(network_service_banner).collect(Collectors.toList()); + final List collect = database.query(network_service_banner).toList(); assertThat(collect.get(2).get("NETWORK_SERVICE_BANNER").asText(), equals("Oracle Advanced Security: " + algorithm + " encryption")); } + private Map getAdditionalProperties(final String algorithm) { + return ImmutableMap. 
+ of("oracle.net.encryption_client", "REQUIRED", + "oracle.net.encryption_types_client", String.format("( %s )", algorithm)); + } + @Test public void testCheckProtocol() throws SQLException { final JsonNode clone = Jsons.clone(getConfig()); @@ -67,9 +72,7 @@ public void testCheckProtocol() throws SQLException { clone.get("port").asText(), clone.get("sid").asText()), "oracle.jdbc.driver.OracleDriver", - "oracle.net.encryption_client=REQUIRED;" + - "oracle.net.encryption_types_client=( " - + algorithm + " )"); + getAdditionalProperties(algorithm)); final String network_service_banner = "SELECT sys_context('USERENV', 'NETWORK_PROTOCOL') as network_protocol FROM dual"; final List collect = database.query(network_service_banner).collect(Collectors.toList()); diff --git a/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/SshOracleDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/SshOracleDestinationAcceptanceTest.java index 1646e2f1dc0e..e2e9054a7f4b 100644 --- a/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/SshOracleDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/SshOracleDestinationAcceptanceTest.java @@ -175,7 +175,7 @@ protected void tearDown(final TestDestinationEnv testEnv) throws Exception { @Override protected boolean supportsDBT() { - return true; + return false; } @Override diff --git a/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/UnencryptedOracleDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/UnencryptedOracleDestinationAcceptanceTest.java index 8e57e31ef7ff..3201356782b2 100644 --- a/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/UnencryptedOracleDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/UnencryptedOracleDestinationAcceptanceTest.java @@ -75,7 +75,7 @@ protected boolean implementsNamespaces() { @Override protected boolean supportsDBT() { - return true; + return false; } @Override @@ -111,8 +111,8 @@ private List retrieveRecordsFromTable(final String tableName, final St throws SQLException { final List result = getDatabase(config) .query(ctx -> ctx.fetch( - String.format("SELECT * FROM %s.%s ORDER BY %s ASC", schemaName, tableName, - OracleDestination.COLUMN_NAME_EMITTED_AT)) + String.format("SELECT * FROM %s.%s ORDER BY %s ASC", schemaName, tableName, + OracleDestination.COLUMN_NAME_EMITTED_AT)) .stream() .collect(Collectors.toList())); return result diff --git a/airbyte-integrations/connectors/destination-oracle/src/test/java/io/airbyte/integrations/destination/oracle/OracleDestinationTest.java b/airbyte-integrations/connectors/destination-oracle/src/test/java/io/airbyte/integrations/destination/oracle/OracleDestinationTest.java new file mode 100644 index 000000000000..be6c72a0e98f --- /dev/null +++ b/airbyte-integrations/connectors/destination-oracle/src/test/java/io/airbyte/integrations/destination/oracle/OracleDestinationTest.java @@ -0,0 +1,110 @@ +package 
io.airbyte.integrations.destination.oracle;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import io.airbyte.commons.json.Jsons;
+import io.airbyte.commons.map.MoreMaps;
+import io.airbyte.integrations.destination.oracle.OracleDestination.Protocol;
+import java.util.HashMap;
+import java.util.Map;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.testcontainers.shaded.com.google.common.collect.ImmutableMap;
+
+public class OracleDestinationTest {
+
+  private OracleDestination destination;
+
+  private JsonNode createConfig() {
+    return createConfig(new HashMap<>());
+  }
+
+  private JsonNode createConfig(final Map<String, Object> additionalConfigs) {
+    return Jsons.jsonNode(MoreMaps.merge(baseParameters(), additionalConfigs));
+  }
+
+  private Map<String, Object> baseParameters() {
+    return ImmutableMap.<String, Object>builder()
+        .put("host", "localhost")
+        .put("port", "1773")
+        .put("database", "db")
+        .put("username", "username")
+        .put("password", "verysecure")
+        .build();
+  }
+
+  @BeforeEach
+  void setUp() {
+    destination = new OracleDestination();
+  }
+
+  @Test
+  void testNoEncryption() {
+    final Map<String, String> properties = destination.getDefaultConnectionProperties(createConfig());
+    assertNull(properties.get(OracleDestination.ENCRYPTION_KEY));
+    assertNull(properties.get("javax.net.ssl.trustStorePassword"));
+
+    final Protocol protocol = destination.obtainConnectionProtocol(createConfig());
+    assertEquals(Protocol.TCP, protocol);
+  }
+
+  @Test
+  void testUnencrypted() {
+    final Map<String, String> encryptionNode = ImmutableMap.of(OracleDestination.ENCRYPTION_METHOD_KEY, "unencrypted");
+    final JsonNode inputConfig = createConfig(ImmutableMap.of(OracleDestination.ENCRYPTION_KEY, encryptionNode));
+    final Map<String, String> properties = destination.getDefaultConnectionProperties(inputConfig);
+    assertNull(properties.get(OracleDestination.ENCRYPTION_KEY));
+    assertNull(properties.get("javax.net.ssl.trustStorePassword"));
+
+    final Protocol protocol = destination.obtainConnectionProtocol(inputConfig);
+    assertEquals(Protocol.TCP, protocol);
+  }
+
+  @Test
+  void testClientNne() {
+    final String algorithm = "AES256";
+    final Map<String, String> encryptionNode = ImmutableMap.of(
+        OracleDestination.ENCRYPTION_METHOD_KEY, "client_nne",
+        "encryption_algorithm", algorithm);
+    final JsonNode inputConfig = createConfig(ImmutableMap.of(OracleDestination.ENCRYPTION_KEY, encryptionNode));
+    final Map<String, String> properties = destination.getDefaultConnectionProperties(inputConfig);
+    assertEquals(properties.get("oracle.net.encryption_client"), "REQUIRED");
+    assertEquals(properties.get("oracle.net.encryption_types_client"), String.format("( %s )", algorithm));
+    assertNull(properties.get("javax.net.ssl.trustStorePassword"));
+
+    final Protocol protocol = destination.obtainConnectionProtocol(inputConfig);
+    assertEquals(Protocol.TCP, protocol);
+  }
+
+  @Test
+  void testEncryptedVerifyCertificate() {
+    final Map<String, String> encryptionNode = ImmutableMap.of(
+        OracleDestination.ENCRYPTION_METHOD_KEY, "encrypted_verify_certificate", "ssl_certificate", "certificate");
+    final JsonNode inputConfig = createConfig(ImmutableMap.of(OracleDestination.ENCRYPTION_KEY, encryptionNode));
+    final Map<String, String> properties = destination.getDefaultConnectionProperties(inputConfig);
+    assertEquals(properties.get("javax.net.ssl.trustStore"), 
OracleDestination.KEY_STORE_FILE_PATH);
+    assertEquals(properties.get("javax.net.ssl.trustStoreType"), "JKS");
+    assertNotNull(properties.get("javax.net.ssl.trustStorePassword"));
+
+    final Protocol protocol = destination.obtainConnectionProtocol(inputConfig);
+    assertEquals(Protocol.TCPS, protocol);
+  }
+
+  @Test
+  void testInvalidEncryptionMethod() {
+    final Map<String, String> encryptionNode = ImmutableMap.of(
+        OracleDestination.ENCRYPTION_METHOD_KEY, "invalid_encryption_method");
+    final JsonNode inputConfig = createConfig(ImmutableMap.of(OracleDestination.ENCRYPTION_KEY, encryptionNode));
+    assertThrows(RuntimeException.class, () ->
+        destination.getDefaultConnectionProperties(inputConfig)
+    );
+    assertThrows(RuntimeException.class, () ->
+        destination.obtainConnectionProtocol(inputConfig)
+    );
+  }
+
+}
diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/Dockerfile b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/Dockerfile
index d7a514ace031..1c32dea0e209 100644
--- a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/Dockerfile
+++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/Dockerfile
@@ -16,5 +16,5 @@ ENV APPLICATION destination-postgres-strict-encrypt
 COPY --from=build /airbyte /airbyte
 
-LABEL io.airbyte.version=0.1.3
+LABEL io.airbyte.version=0.1.4
 LABEL io.airbyte.name=airbyte/destination-postgres-strict-encrypt
diff --git a/airbyte-integrations/connectors/destination-postgres/Dockerfile b/airbyte-integrations/connectors/destination-postgres/Dockerfile
index a58a4cfb3e53..4bc95fceba40 100644
--- a/airbyte-integrations/connectors/destination-postgres/Dockerfile
+++ b/airbyte-integrations/connectors/destination-postgres/Dockerfile
@@ -16,5 +16,5 @@ ENV APPLICATION destination-postgres
 COPY --from=build /airbyte /airbyte
 
-LABEL io.airbyte.version=0.3.14
+LABEL io.airbyte.version=0.3.15
 LABEL io.airbyte.name=airbyte/destination-postgres
diff --git a/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/PostgresDestination.java b/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/PostgresDestination.java
index 9803a677649f..73e4c81be33a 100644
--- a/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/PostgresDestination.java
+++ b/airbyte-integrations/connectors/destination-postgres/src/main/java/io/airbyte/integrations/destination/postgres/PostgresDestination.java
@@ -11,8 +11,9 @@
 import io.airbyte.integrations.base.IntegrationRunner;
 import io.airbyte.integrations.base.ssh.SshWrappedDestination;
 import io.airbyte.integrations.destination.jdbc.AbstractJdbcDestination;
-import java.util.ArrayList;
+import java.util.Collections;
 import java.util.List;
+import java.util.Map;
 import java.util.Optional;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -25,6 +26,10 @@ public class PostgresDestination extends AbstractJdbcDestination implements Dest
   public static final List<String> HOST_KEY = List.of("host");
   public static final List<String> PORT_KEY = List.of("port");
 
+  static final Map<String, String> SSL_JDBC_PARAMETERS = ImmutableMap.of(
+      "ssl", "true",
+      "sslmode", "require");
+
   public static Destination sshWrappedDestination() {
     return new SshWrappedDestination(new PostgresDestination(), HOST_KEY, PORT_KEY);
   }
@@ -33,29 +38,28 @@ public PostgresDestination() {
     super(DRIVER_CLASS, new PostgresSQLNameTransformer(), new PostgresSqlOperations());
   }
 
+  
@Override + protected Map getDefaultConnectionProperties(final JsonNode config) { + if (useSsl(config)) { + return SSL_JDBC_PARAMETERS; + } else { + // No need for any parameters if the connection doesn't use SSL + return Collections.emptyMap(); + } + } + @Override public JsonNode toJdbcConfig(final JsonNode config) { final String schema = Optional.ofNullable(config.get("schema")).map(JsonNode::asText).orElse("public"); - final List additionalParameters = new ArrayList<>(); - - final StringBuilder jdbcUrl = new StringBuilder(String.format("jdbc:postgresql://%s:%s/%s?", + final String jdbcUrl = String.format("jdbc:postgresql://%s:%s/%s?", config.get("host").asText(), config.get("port").asText(), - config.get("database").asText())); - - if (!config.has("ssl") || config.get("ssl").asBoolean()) { - additionalParameters.add("ssl=true"); - additionalParameters.add("sslmode=require"); - } - - if (!additionalParameters.isEmpty()) { - additionalParameters.forEach(x -> jdbcUrl.append(x).append("&")); - } + config.get("database").asText()); final ImmutableMap.Builder configBuilder = ImmutableMap.builder() .put("username", config.get("username").asText()) - .put("jdbc_url", jdbcUrl.toString()) + .put("jdbc_url", jdbcUrl) .put("schema", schema); if (config.has("password")) { @@ -64,6 +68,10 @@ public JsonNode toJdbcConfig(final JsonNode config) { return Jsons.jsonNode(configBuilder.build()); } + private boolean useSsl(final JsonNode config) { + return !config.has("ssl") || config.get("ssl").asBoolean(); + } + public static void main(final String[] args) throws Exception { final Destination destination = PostgresDestination.sshWrappedDestination(); LOGGER.info("starting destination: {}", PostgresDestination.class); diff --git a/airbyte-integrations/connectors/destination-postgres/src/test/java/io/airbyte/integrations/destination/postgres/PostgresDestinationTest.java b/airbyte-integrations/connectors/destination-postgres/src/test/java/io/airbyte/integrations/destination/postgres/PostgresDestinationTest.java index b2d236304efa..f66387241dca 100644 --- a/airbyte-integrations/connectors/destination-postgres/src/test/java/io/airbyte/integrations/destination/postgres/PostgresDestinationTest.java +++ b/airbyte-integrations/connectors/destination-postgres/src/test/java/io/airbyte/integrations/destination/postgres/PostgresDestinationTest.java @@ -9,6 +9,7 @@ import com.fasterxml.jackson.databind.JsonNode; import com.google.common.collect.ImmutableMap; import io.airbyte.commons.json.Jsons; +import io.airbyte.commons.map.MoreMaps; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.db.jdbc.JdbcUtils; import io.airbyte.integrations.base.AirbyteMessageConsumer; @@ -23,7 +24,9 @@ import io.airbyte.protocol.models.JsonSchemaType; import io.airbyte.test.utils.PostgreSQLContainerHelper; import java.time.Instant; +import java.util.HashMap; import java.util.List; +import java.util.Map; import java.util.stream.Collectors; import java.util.stream.IntStream; import org.junit.jupiter.api.AfterAll; @@ -47,6 +50,17 @@ public class PostgresDestinationTest { private JsonNode config; + private static final Map CONFIG_WITH_SSL = ImmutableMap.of( + "host", "localhost", + "port", "1337", + "username", "user", + "database", "db"); + + private static final Map CONFIG_NO_SSL = MoreMaps.merge( + CONFIG_WITH_SSL, + ImmutableMap.of( + "ssl", "false")); + @BeforeAll static void init() { PSQL_DB = new PostgreSQLContainer<>("postgres:13-alpine"); @@ -63,6 +77,20 @@ static void cleanUp() { PSQL_DB.close(); } + @Test + void 
testDefaultParamsNoSSL() { + final Map<String, String> defaultProperties = new PostgresDestination().getDefaultConnectionProperties( + Jsons.jsonNode(CONFIG_NO_SSL)); + assertEquals(new HashMap<>(), defaultProperties); + } + + @Test + void testDefaultParamsWithSSL() { + final Map<String, String> defaultProperties = new PostgresDestination().getDefaultConnectionProperties( + Jsons.jsonNode(CONFIG_WITH_SSL)); + assertEquals(PostgresDestination.SSL_JDBC_PARAMETERS, defaultProperties); + } + // This test is a bit redundant with PostgresIntegrationTest. It makes it easy to run the // destination in the same process as the test, allowing us to put a breakpoint in, which is handy for // debugging (especially since we use postgres as a guinea pig for most features). diff --git a/airbyte-integrations/connectors/destination-redshift/Dockerfile b/airbyte-integrations/connectors/destination-redshift/Dockerfile index d909bb1d72e6..d6d8d7ad3659 100644 --- a/airbyte-integrations/connectors/destination-redshift/Dockerfile +++ b/airbyte-integrations/connectors/destination-redshift/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION destination-redshift COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.3.26 +LABEL io.airbyte.version=0.3.27 LABEL io.airbyte.name=airbyte/destination-redshift diff --git a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftInsertDestination.java b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftInsertDestination.java index 8311463a35e7..576a9246bb96 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftInsertDestination.java +++ b/airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftInsertDestination.java @@ -11,8 +11,7 @@ import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.integrations.base.Destination; import io.airbyte.integrations.destination.jdbc.AbstractJdbcDestination; -import java.util.ArrayList; -import java.util.List; +import java.util.Map; import java.util.Optional; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -23,6 +22,10 @@ public class RedshiftInsertDestination extends AbstractJdbcDestination implement public static final String DRIVER_CLASS = "com.amazon.redshift.jdbc.Driver"; + public static final Map<String, String> SSL_JDBC_PARAMETERS = ImmutableMap.of( + "ssl", "true", + "sslfactory", "com.amazon.redshift.ssl.NonValidatingFactory"); + public RedshiftInsertDestination() { super(DRIVER_CLASS, new RedshiftSQLNameTransformer(), new RedshiftSqlOperations()); } @@ -37,21 +40,19 @@ public JdbcDatabase getDatabase(final JsonNode config) { return getJdbcDatabase(config); } - private static void addSsl(final List<String> additionalProperties) { - additionalProperties.add("ssl=true"); - additionalProperties.add("sslfactory=com.amazon.redshift.ssl.NonValidatingFactory"); + @Override + protected Map<String, String> getDefaultConnectionProperties(final JsonNode config) { + return SSL_JDBC_PARAMETERS; } public static JdbcDatabase getJdbcDatabase(final JsonNode config) { - final List<String> additionalProperties = new ArrayList<>(); final var jdbcConfig = RedshiftInsertDestination.getJdbcConfig(config); - addSsl(additionalProperties); return Databases.createJdbcDatabase( jdbcConfig.get("username").asText(), jdbcConfig.has("password") ?
jdbcConfig.get("password").asText() : null, jdbcConfig.get("jdbc_url").asText(), RedshiftInsertDestination.DRIVER_CLASS, - String.join(";", additionalProperties)); + SSL_JDBC_PARAMETERS); } public static JsonNode getJdbcConfig(final JsonNode redshiftConfig) { diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftCopyDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftCopyDestinationAcceptanceTest.java index a6fdd5f87700..413ef0c261ee 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftCopyDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftCopyDestinationAcceptanceTest.java @@ -21,8 +21,8 @@ import java.util.stream.Collectors; /** - * Integration test testing {@link RedshiftCopyS3Destination}. The default Redshift integration test - * credentials contain S3 credentials - this automatically causes COPY to be selected. + * Integration test testing {@link RedshiftCopyS3Destination}. The default Redshift integration test credentials contain S3 credentials - this + * automatically causes COPY to be selected. */ public class RedshiftCopyDestinationAcceptanceTest extends DestinationAcceptanceTest { @@ -141,7 +141,8 @@ protected Database getDatabase() { baseConfig.get("port").asText(), baseConfig.get("database").asText()), "com.amazon.redshift.jdbc.Driver", null, - "ssl=true;sslfactory=com.amazon.redshift.ssl.NonValidatingFactory"); + RedshiftInsertDestination.SSL_JDBC_PARAMETERS + ); } @Override diff --git a/airbyte-integrations/connectors/destination-snowflake/Dockerfile b/airbyte-integrations/connectors/destination-snowflake/Dockerfile index 5912f54ed3ba..6473177947fb 100644 --- a/airbyte-integrations/connectors/destination-snowflake/Dockerfile +++ b/airbyte-integrations/connectors/destination-snowflake/Dockerfile @@ -18,8 +18,8 @@ COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar RUN tar xf ${APPLICATION}.tar --strip-components=1 -ENV APPLICATION_VERSION 0.4.16 +ENV APPLICATION_VERSION 0.4.17 ENV ENABLE_SENTRY true -LABEL io.airbyte.version=0.4.16 +LABEL io.airbyte.version=0.4.17 LABEL io.airbyte.name=airbyte/destination-snowflake diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInsertDestination.java b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInsertDestination.java index 58758eeb3781..3a346fb6909b 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInsertDestination.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInsertDestination.java @@ -9,6 +9,8 @@ import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.integrations.base.Destination; import io.airbyte.integrations.destination.jdbc.AbstractJdbcDestination; +import java.util.Collections; +import java.util.Map; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -26,6 +28,11 @@ protected JdbcDatabase getDatabase(final JsonNode config) { return 
SnowflakeDatabase.getDatabase(config); } + @Override + protected Map<String, String> getDefaultConnectionProperties(final JsonNode config) { + return Collections.emptyMap(); + } + // this is a no op since we override getDatabase. @Override public JsonNode toJdbcConfig(final JsonNode config) { diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingDestination.java b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingDestination.java index 5a9a2db88b17..33897c854f36 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingDestination.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingDestination.java @@ -14,6 +14,8 @@ import io.airbyte.protocol.models.AirbyteConnectionStatus; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; +import java.util.Collections; +import java.util.Map; import java.util.UUID; import java.util.function.Consumer; import org.slf4j.Logger; @@ -54,7 +56,7 @@ private static void attemptSQLCreateAndDropStages(final String outputSchema, // verify we have permissions to create/drop stage final String outputTableName = namingResolver.getIdentifier("_airbyte_connection_test_" + UUID.randomUUID().toString().replaceAll("-", "")); - final String stageName = namingResolver.getStageName(outputSchema, outputTableName);; + final String stageName = namingResolver.getStageName(outputSchema, outputTableName); sqlOperations.createStageIfNotExists(database, stageName); sqlOperations.dropStageIfExists(database, stageName); } @@ -64,6 +66,11 @@ protected JdbcDatabase getDatabase(final JsonNode config) { return SnowflakeDatabase.getDatabase(config); } + @Override + protected Map<String, String> getDefaultConnectionProperties(final JsonNode config) { + return Collections.emptyMap(); + } + // this is a no op since we override getDatabase.
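+  // (getDefaultConnectionProperties above is empty for the same reason: getDatabase is
+  // overridden, so the shared JDBC defaults are presumably never consulted.)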
@Override public JsonNode toJdbcConfig(final JsonNode config) { diff --git a/airbyte-integrations/connectors/source-cockroachdb/src/main/java/io/airbyte/integrations/source/cockroachdb/CockroachDbSource.java b/airbyte-integrations/connectors/source-cockroachdb/src/main/java/io/airbyte/integrations/source/cockroachdb/CockroachDbSource.java index 7a200b0e3d46..a7e8c9b33247 100644 --- a/airbyte-integrations/connectors/source-cockroachdb/src/main/java/io/airbyte/integrations/source/cockroachdb/CockroachDbSource.java +++ b/airbyte-integrations/connectors/source-cockroachdb/src/main/java/io/airbyte/integrations/source/cockroachdb/CockroachDbSource.java @@ -11,6 +11,7 @@ import io.airbyte.commons.util.AutoCloseableIterator; import io.airbyte.db.Databases; import io.airbyte.db.jdbc.JdbcDatabase; +import io.airbyte.db.jdbc.JdbcUtils; import io.airbyte.integrations.base.IntegrationRunner; import io.airbyte.integrations.base.Source; import io.airbyte.integrations.base.ssh.SshWrappedSource; @@ -106,7 +107,7 @@ public Set getPrivilegesTableForCurrentUser(final JdbcDatabase } @Override - protected boolean isNotInternalSchema(JsonNode jsonNode, Set internalSchemas) { + protected boolean isNotInternalSchema(final JsonNode jsonNode, final Set internalSchemas) { return false; } @@ -115,19 +116,19 @@ public JdbcDatabase createDatabase(final JsonNode config) throws SQLException { final JsonNode jdbcConfig = toDatabaseConfig(config); final JdbcDatabase database = Databases.createJdbcDatabase( - jdbcConfig.get("username").asText(), - jdbcConfig.has("password") ? jdbcConfig.get("password").asText() : null, - jdbcConfig.get("jdbc_url").asText(), - driverClass, - jdbcConfig.has("connection_properties") ? jdbcConfig.get("connection_properties").asText() : null, - sourceOperations); + jdbcConfig.get("username").asText(), + jdbcConfig.has("password") ? jdbcConfig.get("password").asText() : null, + jdbcConfig.get("jdbc_url").asText(), + driverClass, + JdbcUtils.parseJdbcParameters(jdbcConfig, "connection_properties"), + sourceOperations); quoteString = (quoteString == null ? 
database.getMetaData().getIdentifierQuoteString() : quoteString); return new CockroachJdbcDatabase(database, sourceOperations); } - - private CheckedFunction getPrivileges(JdbcDatabase database) { + + private CheckedFunction getPrivileges(final JdbcDatabase database) { return connection -> { final PreparedStatement ps = connection.prepareStatement( "SELECT DISTINCT table_catalog, table_schema, table_name, privilege_type\n" @@ -138,7 +139,7 @@ private CheckedFunction getPrivileg }; } - private JdbcPrivilegeDto getPrivilegeDto(JsonNode jsonNode) { + private JdbcPrivilegeDto getPrivilegeDto(final JsonNode jsonNode) { return JdbcPrivilegeDto.builder() .schemaName(jsonNode.get("table_schema").asText()) .tableName(jsonNode.get("table_name").asText()) diff --git a/airbyte-integrations/connectors/source-cockroachdb/src/main/java/io/airbyte/integrations/source/cockroachdb/CockroachJdbcDatabase.java b/airbyte-integrations/connectors/source-cockroachdb/src/main/java/io/airbyte/integrations/source/cockroachdb/CockroachJdbcDatabase.java index 9df77e4e0af5..0aa9572cb5a2 100644 --- a/airbyte-integrations/connectors/source-cockroachdb/src/main/java/io/airbyte/integrations/source/cockroachdb/CockroachJdbcDatabase.java +++ b/airbyte-integrations/connectors/source-cockroachdb/src/main/java/io/airbyte/integrations/source/cockroachdb/CockroachJdbcDatabase.java @@ -9,10 +9,6 @@ import io.airbyte.commons.functional.CheckedFunction; import io.airbyte.db.JdbcCompatibleSourceOperations; import io.airbyte.db.jdbc.JdbcDatabase; -import io.airbyte.db.jdbc.JdbcStreamingQueryConfiguration; - -import javax.sql.DataSource; - import java.sql.Connection; import java.sql.DatabaseMetaData; import java.sql.PreparedStatement; @@ -22,17 +18,15 @@ import java.util.stream.Stream; /** - * This implementation uses non-streamed queries to CockroachDB. CockroachDB - * does not currently support multiple active pgwire portals on the same session, - * which makes it impossible to replicate tables that have over ~1000 rows - * using StreamingJdbcDatabase. See: https://go.crdb.dev/issue-v/40195/v21.2 - * and in particular, the comment: - * https://github.com/cockroachdb/cockroach/issues/40195?version=v21.2#issuecomment-870570351 - * The same situation as kafka-connect applies to StreamingJdbcDatabase + * This implementation uses non-streamed queries to CockroachDB. CockroachDB does not currently + * support multiple active pgwire portals on the same session, which makes it impossible to + * replicate tables that have over ~1000 rows using StreamingJdbcDatabase. 
See: + * https://go.crdb.dev/issue-v/40195/v21.2 and in particular, the comment: + * https://github.com/cockroachdb/cockroach/issues/40195?version=v21.2#issuecomment-870570351 The + * same situation as kafka-connect applies to StreamingJdbcDatabase */ public class CockroachJdbcDatabase - extends JdbcDatabase -{ + extends JdbcDatabase { private final JdbcDatabase database; diff --git a/airbyte-integrations/connectors/source-cockroachdb/src/test/java/io/airbyte/integrations/source/cockroachdb/CockroachDbJdbcSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-cockroachdb/src/test/java/io/airbyte/integrations/source/cockroachdb/CockroachDbJdbcSourceAcceptanceTest.java index 30b91c735849..81b6f470e12f 100644 --- a/airbyte-integrations/connectors/source-cockroachdb/src/test/java/io/airbyte/integrations/source/cockroachdb/CockroachDbJdbcSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-cockroachdb/src/test/java/io/airbyte/integrations/source/cockroachdb/CockroachDbJdbcSourceAcceptanceTest.java @@ -16,6 +16,7 @@ import io.airbyte.commons.string.Strings; import io.airbyte.commons.util.MoreIterators; import io.airbyte.db.Databases; +import io.airbyte.db.jdbc.JdbcUtils; import io.airbyte.integrations.source.jdbc.AbstractJdbcSource; import io.airbyte.integrations.source.jdbc.test.JdbcSourceAcceptanceTest; import io.airbyte.integrations.source.relationaldb.models.DbState; @@ -85,7 +86,7 @@ public void setup() throws Exception { jdbcConfig.has("password") ? jdbcConfig.get("password").asText() : null, jdbcConfig.get("jdbc_url").asText(), getDriverClass(), - jdbcConfig.has("connection_properties") ? jdbcConfig.get("connection_properties").asText() : null); + JdbcUtils.parseJdbcParameters(jdbcConfig, "connection_properties")); database.execute(connection -> connection.createStatement().execute("CREATE DATABASE " + config.get("database") + ";")); super.setup(); @@ -120,28 +121,28 @@ static void cleanUp() { protected AirbyteCatalog getCatalog(final String defaultNamespace) { return new AirbyteCatalog().withStreams(Lists.newArrayList( CatalogHelpers.createAirbyteStream( - TABLE_NAME, - defaultNamespace, - Field.of(COL_ID, JsonSchemaType.NUMBER), - Field.of(COL_NAME, JsonSchemaType.STRING), - Field.of(COL_UPDATED_AT, JsonSchemaType.STRING)) + TABLE_NAME, + defaultNamespace, + Field.of(COL_ID, JsonSchemaType.NUMBER), + Field.of(COL_NAME, JsonSchemaType.STRING), + Field.of(COL_UPDATED_AT, JsonSchemaType.STRING)) .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)) .withSourceDefinedPrimaryKey(List.of(List.of(COL_ID))), CatalogHelpers.createAirbyteStream( - TABLE_NAME_WITHOUT_PK, - defaultNamespace, - Field.of(COL_ID, JsonSchemaType.NUMBER), - Field.of(COL_NAME, JsonSchemaType.STRING), - Field.of(COL_UPDATED_AT, JsonSchemaType.STRING), - Field.of(COL_ROW_ID, JsonSchemaType.NUMBER)) + TABLE_NAME_WITHOUT_PK, + defaultNamespace, + Field.of(COL_ID, JsonSchemaType.NUMBER), + Field.of(COL_NAME, JsonSchemaType.STRING), + Field.of(COL_UPDATED_AT, JsonSchemaType.STRING), + Field.of(COL_ROW_ID, JsonSchemaType.NUMBER)) .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)) .withSourceDefinedPrimaryKey(List.of(List.of(COL_ROW_ID))), CatalogHelpers.createAirbyteStream( - TABLE_NAME_COMPOSITE_PK, - defaultNamespace, - Field.of(COL_FIRST_NAME, JsonSchemaType.STRING), - Field.of(COL_LAST_NAME, JsonSchemaType.STRING), - Field.of(COL_UPDATED_AT, JsonSchemaType.STRING)) + TABLE_NAME_COMPOSITE_PK, + defaultNamespace, + 
Field.of(COL_FIRST_NAME, JsonSchemaType.STRING), + Field.of(COL_LAST_NAME, JsonSchemaType.STRING), + Field.of(COL_UPDATED_AT, JsonSchemaType.STRING)) .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)) .withSourceDefinedPrimaryKey( List.of(List.of(COL_FIRST_NAME), List.of(COL_LAST_NAME))))); diff --git a/airbyte-integrations/connectors/source-jdbc/src/main/java/io/airbyte/integrations/source/jdbc/AbstractJdbcSource.java b/airbyte-integrations/connectors/source-jdbc/src/main/java/io/airbyte/integrations/source/jdbc/AbstractJdbcSource.java index 60d11c718449..9c38b060785b 100644 --- a/airbyte-integrations/connectors/source-jdbc/src/main/java/io/airbyte/integrations/source/jdbc/AbstractJdbcSource.java +++ b/airbyte-integrations/connectors/source-jdbc/src/main/java/io/airbyte/integrations/source/jdbc/AbstractJdbcSource.java @@ -30,6 +30,7 @@ import io.airbyte.db.SqlDatabase; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.db.jdbc.JdbcStreamingQueryConfiguration; +import io.airbyte.db.jdbc.JdbcUtils; import io.airbyte.integrations.base.Source; import io.airbyte.integrations.source.jdbc.dto.JdbcPrivilegeDto; import io.airbyte.integrations.source.relationaldb.AbstractRelationalDbSource; @@ -55,9 +56,8 @@ import org.slf4j.LoggerFactory; /** - * This class contains helper functions and boilerplate for implementing a source connector for a - * relational DB source which can be accessed via JDBC driver. If you are implementing a connector - * for a relational DB which has a JDBC driver, make an effort to use this class. + * This class contains helper functions and boilerplate for implementing a source connector for a relational DB source which can be accessed via JDBC + * driver. If you are implementing a connector for a relational DB which has a JDBC driver, make an effort to use this class. 
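+ * Connection properties for the driver are parsed from the optional "connection_properties"
+ * config field via JdbcUtils.parseJdbcParameters (see createDatabase below).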
*/ public abstract class AbstractJdbcSource extends AbstractRelationalDbSource implements Source { @@ -114,10 +114,10 @@ protected List>> discoverInternal(final JdbcData final Set internalSchemas = new HashSet<>(getExcludedInternalNameSpaces()); final Set tablesWithSelectGrantPrivilege = getPrivilegesTableForCurrentUser(database, schema); return database.bufferedResultSetQuery( - // retrieve column metadata from the database - conn -> conn.getMetaData().getColumns(getCatalog(database), schema, null, null), - // store essential column metadata to a Json object from the result set about each column - this::getColumnMetadata) + // retrieve column metadata from the database + conn -> conn.getMetaData().getColumns(getCatalog(database), schema, null, null), + // store essential column metadata to a Json object from the result set about each column + this::getColumnMetadata) .stream() .filter(excludeNotAccessibleTables(internalSchemas, tablesWithSelectGrantPrivilege)) // group by schema and table name to handle the case where a table with the same name exists in @@ -139,7 +139,8 @@ protected List>> discoverInternal(final JdbcData f.get(INTERNAL_COLUMN_TYPE_NAME).asText(), f.get(INTERNAL_COLUMN_SIZE).asInt(), jsonType); - return new CommonField(f.get(INTERNAL_COLUMN_NAME).asText(), datatype) {}; + return new CommonField(f.get(INTERNAL_COLUMN_NAME).asText(), datatype) { + }; }) .collect(Collectors.toList())) .build()) @@ -155,14 +156,14 @@ protected Predicate excludeNotAccessibleTables(final Set inter return tablesWithSelectGrantPrivilege.stream() .anyMatch(e -> e.getSchemaName().equals(jsonNode.get(INTERNAL_SCHEMA_NAME).asText())) && tablesWithSelectGrantPrivilege.stream() - .anyMatch(e -> e.getTableName().equals(jsonNode.get(INTERNAL_TABLE_NAME).asText())) + .anyMatch(e -> e.getTableName().equals(jsonNode.get(INTERNAL_TABLE_NAME).asText())) && !internalSchemas.contains(jsonNode.get(INTERNAL_SCHEMA_NAME).asText()); }; } // needs to override isNotInternalSchema for connectors that override // getPrivilegesTableForCurrentUser() - protected boolean isNotInternalSchema(JsonNode jsonNode, Set internalSchemas) { + protected boolean isNotInternalSchema(final JsonNode jsonNode, final Set internalSchemas) { return !internalSchemas.contains(jsonNode.get(INTERNAL_SCHEMA_NAME).asText()); } @@ -185,8 +186,7 @@ private JsonNode getColumnMetadata(final ResultSet resultSet) throws SQLExceptio } /** - * @param field Essential column information returned from - * {@link AbstractJdbcSource#getColumnMetadata}. + * @param field Essential column information returned from {@link AbstractJdbcSource#getColumnMetadata}. */ public Datatype getFieldType(final JsonNode field) { return sourceOperations.getFieldType(field); @@ -293,7 +293,7 @@ public JdbcDatabase createDatabase(final JsonNode config) throws SQLException { jdbcConfig.get("jdbc_url").asText(), driverClass, jdbcStreamingQueryConfiguration, - jdbcConfig.has("connection_properties") ? jdbcConfig.get("connection_properties").asText() : null, + JdbcUtils.parseJdbcParameters(jdbcConfig, "connection_properties"), sourceOperations); quoteString = (quoteString == null ? 
database.getMetaData().getIdentifierQuoteString() : quoteString); diff --git a/airbyte-integrations/connectors/source-jdbc/src/testFixtures/java/io/airbyte/integrations/source/jdbc/test/JdbcSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-jdbc/src/testFixtures/java/io/airbyte/integrations/source/jdbc/test/JdbcSourceAcceptanceTest.java index 02ab8faef80b..8511ad9d0520 100644 --- a/airbyte-integrations/connectors/source-jdbc/src/testFixtures/java/io/airbyte/integrations/source/jdbc/test/JdbcSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-jdbc/src/testFixtures/java/io/airbyte/integrations/source/jdbc/test/JdbcSourceAcceptanceTest.java @@ -24,6 +24,7 @@ import io.airbyte.db.Databases; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.db.jdbc.JdbcSourceOperations; +import io.airbyte.db.jdbc.JdbcUtils; import io.airbyte.integrations.base.Source; import io.airbyte.integrations.source.jdbc.AbstractJdbcSource; import io.airbyte.integrations.source.relationaldb.models.DbState; @@ -196,7 +197,7 @@ public void setup() throws Exception { jdbcConfig.has("password") ? jdbcConfig.get("password").asText() : null, jdbcConfig.get("jdbc_url").asText(), getDriverClass(), - jdbcConfig.has("connection_properties") ? jdbcConfig.get("connection_properties").asText() : null); + JdbcUtils.parseJdbcParameters(jdbcConfig, "connection_properties")); if (supportsSchemas()) { createSchemas(); diff --git a/airbyte-integrations/connectors/source-zendesk-support/Dockerfile b/airbyte-integrations/connectors/source-zendesk-support/Dockerfile index 1caf495ca778..41daded4afea 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/Dockerfile +++ b/airbyte-integrations/connectors/source-zendesk-support/Dockerfile @@ -25,5 +25,5 @@ COPY source_zendesk_support ./source_zendesk_support ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.12 +LABEL io.airbyte.version=0.2.0 LABEL io.airbyte.name=airbyte/source-zendesk-support diff --git a/airbyte-integrations/connectors/source-zendesk-support/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-zendesk-support/integration_tests/abnormal_state.json index 3c26f153bbec..1d4e33a89834 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/integration_tests/abnormal_state.json +++ b/airbyte-integrations/connectors/source-zendesk-support/integration_tests/abnormal_state.json @@ -1,41 +1,41 @@ { "users": { - "updated_at": "2022-07-19T22:21:37Z" + "updated_at": "2222-07-19T22:21:37Z" }, "groups": { - "updated_at": "2022-07-15T22:19:01Z" + "updated_at": "2222-07-15T22:19:01Z" }, "organizations": { - "updated_at": "2022-07-15T19:29:14Z" + "updated_at": "2222-07-15T19:29:14Z" }, "satisfaction_ratings": { - "updated_at": "2022-07-20T10:05:18Z" + "updated_at": "2222-07-20T10:05:18Z" }, "tickets": { - "generated_timestamp": 1816817368 + "updated_at": "2222-07-20T10:05:18Z" }, "group_memberships": { - "updated_at": "2022-04-23T15:34:20Z" + "updated_at": "2222-04-23T15:34:20Z" }, "ticket_fields": { - "updated_at": "2022-12-11T19:34:05Z" + "updated_at": "2222-12-11T19:34:05Z" }, "ticket_forms": { - "updated_at": "2022-12-11T20:34:37Z" + "updated_at": "2222-12-11T20:34:37Z" }, "ticket_metrics": { - "updated_at": "2022-07-19T22:21:26Z" + "updated_at": "2222-07-19T22:21:26Z" }, "ticket_metric_events": { - "time": "2022-07-19T22:21:26Z" + "time": "2222-07-19T22:21:26Z" }, "macros": { - "updated_at": 
"2022-12-11T19:34:06Z" + "updated_at": "2222-12-11T19:34:06Z" }, "ticket_comments": { - "created_at": "2022-07-19T22:21:26Z" + "created_at": "2222-07-19T22:21:26Z" }, "ticket_audits": { - "created_at": "2022-07-19T22:21:26Z" + "created_at": "2222-07-19T22:21:26Z" } } diff --git a/airbyte-integrations/connectors/source-zendesk-support/integration_tests/integration_test.py b/airbyte-integrations/connectors/source-zendesk-support/integration_tests/integration_test.py deleted file mode 100644 index 4a4b7f85f65e..000000000000 --- a/airbyte-integrations/connectors/source-zendesk-support/integration_tests/integration_test.py +++ /dev/null @@ -1,90 +0,0 @@ -# -# Copyright (c) 2021 Airbyte, Inc., all rights reserved. -# - -import json - -import pendulum -import requests_mock -from source_zendesk_support import SourceZendeskSupport -from source_zendesk_support.streams import LAST_END_TIME_KEY, Macros, TicketAudits, TicketMetrics, Tickets, Users - -CONFIG_FILE = "secrets/config.json" - - -class TestIntegrationZendeskSupport: - """This test class provides a set of tests for different Zendesk streams. - The Zendesk API has difference pagination and sorting mechanisms for streams. - Let's try to check them - """ - - @staticmethod - def prepare_stream_args(): - """Generates streams settings from a file""" - with open(CONFIG_FILE, "r") as f: - return SourceZendeskSupport.convert_config2stream_args(json.loads(f.read())) - - def _test_export_stream(self, stream_cls: type): - stream = stream_cls(**self.prepare_stream_args()) - stream.page_size = 1 - record_timestamps = {} - for record in stream.read_records(sync_mode=None): - # save the first 5 records - if len(record_timestamps) > 5: - break - if stream.last_end_time not in record_timestamps.values(): - record_timestamps[record["id"]] = stream.last_end_time - - stream.page_size = 10 - for record_id, timestamp in record_timestamps.items(): - state = {LAST_END_TIME_KEY: timestamp} - for record in stream.read_records(sync_mode=None, stream_state=state): - assert record["id"] != record_id - break - - def test_export_with_unixtime(self): - """Tickets stream has 'generated_timestamp' as cursor_field and it is unixtime format''""" - self._test_export_stream(Tickets) - - def test_export_with_str_datetime(self): - """Other export streams has 'updated_at' as cursor_field and it is datetime string format""" - self._test_export_stream(Users) - - def _test_insertion(self, stream_cls: type, index: int = None): - """try to update some item""" - stream = stream_cls(**self.prepare_stream_args()) - all_records = list(stream.read_records(sync_mode=None)) - state = stream.get_updated_state(current_stream_state=None, latest_record=all_records[-1]) - - incremental_records = list(stream_cls(**self.prepare_stream_args()).read_records(sync_mode=None, stream_state=state)) - assert len(incremental_records) == 0 - - if index is None: - # select a middle index - index = int(len(all_records) / 2) - updated_record_id = all_records[index]["id"] - all_records[index][stream.cursor_field] = stream.datetime2str(pendulum.now().astimezone()) - - with requests_mock.Mocker() as m: - url = stream.url_base + stream.path() - data = { - (stream.response_list_name or stream.name): all_records, - "next_page": None, - } - m.get(url, text=json.dumps(data)) - incremental_records = list(stream_cls(**self.prepare_stream_args()).read_records(sync_mode=None, stream_state=state)) - - assert len(incremental_records) == 1 - assert incremental_records[0]["id"] == updated_record_id - - def 
test_not_sorted_stream(self): - """for streams without sorting but with pagination""" - self._test_insertion(TicketMetrics) - - def test_sorted_page_stream(self): - """for streams with pagination and sorting mechanism""" - self._test_insertion(Macros, 0) - - def test_sorted_cursor_stream(self): - """for stream with cursor pagination and sorting mechanism""" - self._test_insertion(TicketAudits, 0) diff --git a/airbyte-integrations/connectors/source-zendesk-support/setup.py b/airbyte-integrations/connectors/source-zendesk-support/setup.py index 54e946c9e11c..b15858f0bf87 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/setup.py +++ b/airbyte-integrations/connectors/source-zendesk-support/setup.py @@ -5,7 +5,7 @@ from setuptools import find_packages, setup -MAIN_REQUIREMENTS = ["airbyte-cdk~=0.1.36", "pytz"] +MAIN_REQUIREMENTS = ["airbyte-cdk~=0.1.36", "pytz", "requests-futures~=1.0.0", "pendulum~=2.1.2"] TEST_REQUIREMENTS = ["pytest~=6.1", "source-acceptance-test", "requests-mock==1.9.3"] diff --git a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/schemas/ticket_audits.json b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/schemas/ticket_audits.json index d75ab135bca8..3e58f7d7ec7d 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/schemas/ticket_audits.json +++ b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/schemas/ticket_audits.json @@ -144,7 +144,10 @@ "type": ["null", "integer"] }, "value": { - "type": ["null", "string"] + "type": ["null", "string", "array"], + "items": { + "type": ["null", "string"] + } }, "author_id": { "type": ["null", "integer"] diff --git a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/source.py b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/source.py index 06b635801f65..f45d9aa5eab8 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/source.py +++ b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/source.py @@ -9,6 +9,7 @@ from airbyte_cdk.sources import AbstractSource from airbyte_cdk.sources.streams import Stream from airbyte_cdk.sources.streams.http.requests_native_auth import TokenAuthenticator +from source_zendesk_support.streams import SourceZendeskException from .streams import ( Brands, @@ -20,7 +21,6 @@ SatisfactionRatings, Schedules, SlaPolicies, - SourceZendeskException, Tags, TicketAudits, TicketComments, @@ -68,7 +68,7 @@ def check_connection(self, logger, config) -> Tuple[bool, any]: auth = self.get_authenticator(config) settings = None try: - settings = UserSettingsStream(config["subdomain"], authenticator=auth).get_settings() + settings = UserSettingsStream(config["subdomain"], authenticator=auth, start_date=None).get_settings() except requests.exceptions.RequestException as e: return False, e diff --git a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py index f55589d8e6b6..c98d5bd1cfbc 100644 --- a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py +++ b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py @@ -2,21 +2,28 @@ # Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
# - import calendar import time -from abc import ABC, abstractmethod -from collections import defaultdict +from abc import ABC +from collections import deque +from concurrent.futures import Future, ProcessPoolExecutor from datetime import datetime +from functools import partial +from math import ceil +from pickle import PickleError, dumps from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Union -from urllib.parse import parse_qsl, urlparse +from urllib.parse import parse_qsl, urljoin, urlparse +import pendulum import pytz import requests from airbyte_cdk.models import SyncMode from airbyte_cdk.sources.streams.http import HttpStream from airbyte_cdk.sources.streams.http.auth.core import HttpAuthenticator +from airbyte_cdk.sources.streams.http.exceptions import DefaultBackoffException from airbyte_cdk.sources.utils.transform import TransformConfig, TypeTransformer +from requests.auth import AuthBase +from requests_futures.sessions import PICKLE_ERROR, FuturesSession DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ" LAST_END_TIME_KEY = "_last_end_time" @@ -26,34 +33,40 @@ class SourceZendeskException(Exception): """default exception of custom SourceZendesk logic""" -class SourceZendeskSupportStream(HttpStream, ABC): - """ "Basic Zendesk class""" +class SourceZendeskSupportFuturesSession(FuturesSession): + """ + Check the docs at https://github.com/ross/requests-futures. + Used to execute a set of requests asynchronously. + """ - primary_key = "id" + def send_future(self, request: requests.PreparedRequest, **kwargs) -> Future: + """ + Use instead of default `Session.send()` method. + `Session.send()` should not be overridden, as it is used by the `requests-futures` lib. + """ - page_size = 100 - created_at_field = "created_at" - updated_at_field = "updated_at" + if self.session: + func = self.session.send + else: + # avoid calling super so as not to break the pickled method + func = partial(requests.Session.send, self) - transformer = TypeTransformer(TransformConfig.DefaultSchemaNormalization) + if isinstance(self.executor, ProcessPoolExecutor): + # verify function can be pickled + try: + dumps(func) + except (TypeError, PickleError): + raise RuntimeError(PICKLE_ERROR) - def __init__(self, subdomain: str, **kwargs): - super().__init__(**kwargs) + return self.executor.submit(func, request, **kwargs) - # add the custom value for generation of a zendesk domain - self._subdomain = subdomain - @property - def url_base(self) -> str: - return f"https://{self._subdomain}.zendesk.com/api/v2/" +class BaseSourceZendeskSupportStream(HttpStream, ABC): + def __init__(self, subdomain: str, start_date: str, **kwargs): + super().__init__(**kwargs) - @staticmethod - def _parse_next_page_number(response: requests.Response) -> Optional[int]: - """Parses a response and tries to find next page number""" - next_page = response.json().get("next_page") - if next_page: - return dict(parse_qsl(urlparse(next_page).query)).get("page") - return None + self._start_date = start_date + self._subdomain = subdomain def backoff_time(self, response: requests.Response) -> Union[int, float]: """ @@ -64,11 +77,10 @@ def backoff_time(self, response: requests.Response) -> Union[int, float]: """ retry_after = int(response.headers.get("Retry-After", 0)) - if retry_after and retry_after > 0: - self.logger.info(f"The rate limit of requests is exceeded.
Waiting for {retry_after} seconds.") - return int(retry_after) + if retry_after > 0: + return retry_after - # the header X-Rate-Limit returns an amount of requests per minute + # the header X-Rate-Limit returns the amount of requests per minute # we try to wait twice as long rate_limit = float(response.headers.get("X-Rate-Limit", 0)) if rate_limit and rate_limit > 0: @@ -91,75 +103,6 @@ def datetime2str(dt: datetime) -> str: """ return datetime.strftime(dt.replace(tzinfo=pytz.UTC), DATETIME_FORMAT) - -class UserSettingsStream(SourceZendeskSupportStream): - """Stream for checking of a request token and permissions""" - - def path(self, *args, **kwargs) -> str: - return "account/settings.json" - - def next_page_token(self, *args, **kwargs) -> Optional[Mapping[str, Any]]: - # this data without listing - return None - - def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: - """returns data from API""" - settings = response.json().get("settings") - if settings: - yield settings - - def get_settings(self) -> Mapping[str, Any]: - for resp in self.read_records(SyncMode.full_refresh): - return resp - raise SourceZendeskException("not found settings") - - -class IncrementalEntityStream(SourceZendeskSupportStream, ABC): - """Stream for endpoints where an entity name can be used in a path value - https://.zendesk.com/api/v2/.json - """ - - # default sorted field - cursor_field = SourceZendeskSupportStream.updated_at_field - - # for partial cases when JSON root name of responses is not equal a name value - response_list_name: str = None - - def __init__(self, start_date: str, **kwargs): - super().__init__(**kwargs) - # add the custom value for skipping of not relevant records - self._start_date = self.str2datetime(start_date) if isinstance(start_date, str) else start_date - # Flag for marking of completed process - self._finished = False - - @property - def authenticator(self) -> HttpAuthenticator: - """This function was redefined because CDK return NoAuth for some authenticator class. 
- It is bug and I hope it will be fixed in the future - """ - return self._session.auth or super().authenticator - - @property - def is_finished(self): - return self._finished - - def path(self, **kwargs) -> str: - return f"{self.name}.json" - - def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: - """returns a list of records""" - # filter by start date - for record in response.json().get(self.response_list_name or self.name) or []: - if record.get(self.created_at_field) and self.str2datetime(record[self.created_at_field]) < self._start_date: - continue - yield record - - def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: - # try to save maximum value of a cursor field - old_value = str((current_stream_state or {}).get(self.cursor_field, "")) - new_value = str((latest_record or {}).get(self.cursor_field, "")) - return {self.cursor_field: max(new_value, old_value)} - @staticmethod def str2unixtime(str_dt: str) -> Optional[int]: """convert string to unixtime number @@ -171,140 +114,199 @@ def str2unixtime(str_dt: str) -> Optional[int]: dt = datetime.strptime(str_dt, DATETIME_FORMAT) return calendar.timegm(dt.utctimetuple()) + def parse_response(self, response: requests.Response, stream_state: Mapping[str, Any], **kwargs) -> Iterable[Mapping]: + """try to select relevant data only""" -class IncrementalExportStream(IncrementalEntityStream, ABC): - """Use the incremental export API to get items that changed or - were created in Zendesk Support since the last request - See: https://developer.zendesk.com/api-reference/ticketing/ticket-management/incremental_exports/ - - You can make up to 10 requests per minute to these endpoints. - """ - - # maximum of 1,000 - page_size = 1000 + records = response.json().get(self.response_list_name or self.name) or [] + if not self.cursor_field: + yield from records + else: + cursor_date = (stream_state or {}).get(self.cursor_field) + for record in records: + updated = record[self.cursor_field] + if not cursor_date or updated > cursor_date: + yield record - # try to save a stage after every 100 records - # this endpoint provides responses in ascending order. 
- state_checkpoint_interval = 100 - def __init__(self, **kwargs): - super().__init__(**kwargs) +class SourceZendeskSupportStream(BaseSourceZendeskSupportStream): + """Basic Zendesk class""" - # for saving of last page cursor value - # endpoints can have different cursor format but incremental logic uses unixtime format only - self.last_end_time = None + primary_key = "id" - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - if self.is_finished: - return None - return {"start_time": self.last_end_time} + page_size = 100 + cursor_field = "updated_at" - def path(self, *args, **kwargs) -> str: - return f"incremental/{self.name}.json" + response_list_name: str = None + parent: "SourceZendeskSupportStream" = None + future_requests: deque = None - def request_params( - self, stream_state: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None, **kwargs - ) -> MutableMapping[str, Any]: + transformer = TypeTransformer(TransformConfig.DefaultSchemaNormalization) - params = {"per_page": self.page_size} - if not next_page_token: - current_state = stream_state.get(LAST_END_TIME_KEY) - if not current_state: - # try to search all records with generated_timestamp > start_time - current_state = stream_state.get(self.cursor_field) - if current_state and isinstance(current_state, str) and not current_state.isdigit(): - current_state = self.str2unixtime(current_state) - elif not self.last_end_time: - self.last_end_time = current_state - start_time = int(current_state or time.mktime(self._start_date.timetuple())) - # +1 because the API returns all records where generated_timestamp >= start_time - - now = calendar.timegm(datetime.now().utctimetuple()) - if start_time > now - 60: - # start_time must be more than 60 seconds ago - start_time = now - 61 - params["start_time"] = start_time + def __init__(self, authenticator: Union[AuthBase, HttpAuthenticator] = None, **kwargs): + super().__init__(**kwargs) - else: - params.update(next_page_token) - return params + self._session = SourceZendeskSupportFuturesSession() + self._session.auth = authenticator + self.future_requests = deque() def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: - # try to save maximum value of a cursor field + latest_benchmark = latest_record[self.cursor_field] + if current_stream_state.get(self.cursor_field): + return {self.cursor_field: max(latest_benchmark, current_stream_state[self.cursor_field])} + return {self.cursor_field: latest_benchmark} - state = super().get_updated_state(current_stream_state=current_stream_state, latest_record=latest_record) + def get_api_records_count(self, stream_slice: Mapping[str, Any] = None, stream_state: Mapping[str, Any] = None): + """ + Count stream records before generating the future requests + to then correctly generate the pagination parameters. 
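+        The count is read from the stream's corresponding "<path>/count.json" endpoint (see count_url below).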
+ """ - if self.last_end_time: - state[LAST_END_TIME_KEY] = self.last_end_time - current_stream_state.update(state) - return current_stream_state + count_url = urljoin(self.url_base, f"{self.path(stream_state=stream_state, stream_slice=stream_slice)}/count.json") + + start_date = self._start_date + params = {} + if self.cursor_field and stream_state: + start_date = stream_state.get(self.cursor_field) + if start_date: + params["start_time"] = self.str2datetime(start_date) + + response = self._session.request("get", count_url).result() + records_count = response.json().get("count", {}).get("value", 0) + + return records_count + + def generate_future_requests( + self, + sync_mode: SyncMode, + cursor_field: List[str] = None, + stream_slice: Mapping[str, Any] = None, + stream_state: Mapping[str, Any] = None, + ): + records_count = self.get_api_records_count(stream_slice=stream_slice, stream_state=stream_state) + + page_count = ceil(records_count / self.page_size) + for page_number in range(1, page_count + 1): + params = self.request_params(stream_state=stream_state, stream_slice=stream_slice) + params["page"] = page_number + request_headers = self.request_headers(stream_state=stream_state, stream_slice=stream_slice) + + request = self._create_prepared_request( + path=self.path(stream_state=stream_state, stream_slice=stream_slice), + headers=dict(request_headers, **self.authenticator.get_auth_header()), + params=params, + json=self.request_body_json(stream_state=stream_state, stream_slice=stream_slice), + data=self.request_body_data(stream_state=stream_state, stream_slice=stream_slice), + ) - def get_last_end_time(self) -> Optional[Union[str, int]]: - """Updating of last_end_time for comparing with cursor fields""" - if not self.last_end_time: - return self.last_end_time - return self.datetime2str(datetime.fromtimestamp(self.last_end_time)) + request_kwargs = self.request_kwargs(stream_state=stream_state, stream_slice=stream_slice) + self.future_requests.append( + { + "future": self._send_request(request, request_kwargs), + "request": request, + "request_kwargs": request_kwargs, + "retries": 0, + "backoff_time": None, + } + ) - def parse_response( - self, response: requests.Response, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, **kwargs - ) -> Iterable[Mapping]: + def _send(self, request: requests.PreparedRequest, request_kwargs: Mapping[str, Any]) -> requests.Response: + response: requests.Response = self._session.send_future(request, **request_kwargs) + return response - # save previous end time for filtering of a current response - previous_end_time = self.get_last_end_time() + def _send_request(self, request: requests.PreparedRequest, request_kwargs: Mapping[str, Any]) -> requests.Response: + return self._send(request, request_kwargs) - data = response.json() - # save a last end time for the next attempt - self.last_end_time = data["end_time"] - # end_of_stream is true if the current request has returned all the results up to the current time; false otherwise - self._finished = data["end_of_stream"] - for record in super().parse_response(response, stream_state=stream_state, stream_slice=stream_slice, **kwargs): - if previous_end_time and record.get(self.cursor_field) <= previous_end_time: - continue - yield record + def request_params( + self, stream_state: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None, **kwargs + ) -> MutableMapping[str, Any]: + params = {} + stream_state = stream_state or {} + # try to search all records with 
generated_timestamp > start_time + current_state = stream_state.get(self.cursor_field) + if current_state and isinstance(current_state, str) and not current_state.isdigit(): + current_state = self.str2unixtime(current_state) + start_time = current_state or calendar.timegm(pendulum.parse(self._start_date).utctimetuple()) + # +1 because the API returns all records where generated_timestamp >= start_time + + now = calendar.timegm(datetime.now().utctimetuple()) + if start_time > now - 60: + # start_time must be more than 60 seconds ago + start_time = now - 61 + params["start_time"] = start_time + return params -class IncrementalUnsortedStream(IncrementalEntityStream, ABC): - """Stream for loading without sorting + def read_records( + self, + sync_mode: SyncMode, + cursor_field: List[str] = None, + stream_slice: Mapping[str, Any] = None, + stream_state: Mapping[str, Any] = None, + ) -> Iterable[Mapping[str, Any]]: + self.generate_future_requests(sync_mode=sync_mode, cursor_field=cursor_field, stream_slice=stream_slice, stream_state=stream_state) + + while len(self.future_requests) > 0: + item = self.future_requests.popleft() + + response = item["future"].result() + + if self.should_retry(response): + backoff_time = self.backoff_time(response) + if item["retries"] == self.max_retries: + raise DefaultBackoffException(request=item["request"], response=response) + else: + if response.elapsed.total_seconds() < backoff_time: + time.sleep(backoff_time - response.elapsed.total_seconds()) + + self.future_requests.append( + { + "future": self._send_request(item["request"], item["request_kwargs"]), + "request": item["request"], + "request_kwargs": item["request_kwargs"], + "retries": item["retries"] + 1, + "backoff_time": backoff_time, + } + ) + else: + yield from self.parse_response(response, stream_state=stream_state, stream_slice=stream_slice) - Some endpoints don't provide approaches for data filtration - We can load all records fully and select updated data only - """ + @property + def url_base(self) -> str: + return f"https://{self._subdomain}.zendesk.com/api/v2/" - def __init__(self, **kwargs): - super().__init__(**kwargs) - # For saving of a relevant last updated date - self._max_cursor_date = None + @staticmethod + def _parse_next_page_number(response: requests.Response) -> Optional[int]: + """Parses a response and tries to find next page number""" + next_page = response.json().get("next_page") + if next_page: + return dict(parse_qsl(urlparse(next_page).query)).get("page") + return None - def parse_response(self, response: requests.Response, stream_state: Mapping[str, Any], **kwargs) -> Iterable[Mapping]: - """try to select relevant data only""" + def path(self, **kwargs): + return self.name - if not self.cursor_field: - yield from super().parse_response(response, stream_state=stream_state, **kwargs) - else: - send_cnt = 0 - cursor_date = (stream_state or {}).get(self.cursor_field) - for record in super().parse_response(response, stream_state=stream_state, **kwargs): - updated = record[self.cursor_field] - if not self._max_cursor_date or self._max_cursor_date < updated: - self._max_cursor_date = updated - if not cursor_date or updated > cursor_date: - send_cnt += 1 - yield record - if not send_cnt: - self._finished = True + def next_page_token(self, *args, **kwargs): + return None - def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: - return {self.cursor_field: max(self._max_cursor_date or "", (current_stream_state 
or {}).get(self.cursor_field, ""))} - @abstractmethod - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - """can be different for each case""" +class SourceZendeskSupportFullRefreshStream(BaseSourceZendeskSupportStream): + primary_key = "id" + response_list_name: str = None + @property + def url_base(self) -> str: + return f"https://{self._subdomain}.zendesk.com/api/v2/" -class IncrementalUnsortedPageStream(IncrementalUnsortedStream, ABC): - """Stream for loading without sorting but with pagination - This logic can be used for a small data size when this data is loaded fast - """ + def path(self, **kwargs): + return self.name + + @staticmethod + def _parse_next_page_number(response: requests.Response) -> Optional[int]: + """Parses a response and tries to find next page number""" + next_page = response.json().get("next_page") + if next_page: + return dict(parse_qsl(urlparse(next_page).query)).get("page") + return None def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: next_page = self._parse_next_page_number(response) @@ -324,52 +326,57 @@ def request_params(self, next_page_token: Mapping[str, Any] = None, **kwargs) -> return params -class IncrementalUnsortedCursorStream(IncrementalUnsortedStream, ABC): - """Stream for loading without sorting but with cursor based pagination""" +class SourceZendeskSupportCursorPaginationStream(SourceZendeskSupportFullRefreshStream): + next_page_field = "next_page" + prev_start_time = None + + def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: + # try to save maximum value of a cursor field + old_value = str((current_stream_state or {}).get(self.cursor_field, "")) + new_value = str((latest_record or {}).get(self.cursor_field, "")) + return {self.cursor_field: max(new_value, old_value)} def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - has_more = response.json().get("meta", {}).get("has_more") - if not has_more: - self._finished = True - return None - return response.json().get("meta", {}).get("after_cursor") + start_time = dict(parse_qsl(urlparse(response.json().get(self.next_page_field), "").query)).get("start_time") + if start_time != self.prev_start_time: + self.prev_start_time = start_time + return {self.cursor_field: start_time} def request_params(self, next_page_token: Mapping[str, Any] = None, **kwargs) -> MutableMapping[str, Any]: - params = super().request_params(next_page_token=next_page_token, **kwargs) - params["page[size]"] = self.page_size - if next_page_token: - params["page[after]"] = next_page_token + next_page_token = next_page_token or {} + if self.cursor_field: + params = { + "start_time": next_page_token.get(self.cursor_field, calendar.timegm(pendulum.parse(self._start_date).utctimetuple())) + } + else: + params = {"start_time": calendar.timegm(pendulum.parse(self._start_date).utctimetuple())} return params -class FullRefreshStream(IncrementalUnsortedPageStream, ABC): - """ "Stream for endpoints where there are not any created_at or updated_at fields""" - - # reset to default value - cursor_field = SourceZendeskSupportStream.cursor_field +class Users(SourceZendeskSupportStream): + """Users stream: https://developer.zendesk.com/api-reference/ticketing/ticket-management/incremental_exports/""" -class IncrementalSortedCursorStream(IncrementalUnsortedCursorStream, ABC): - """Stream for loading sorting data with cursor based pagination""" +class 
Organizations(SourceZendeskSupportStream): + """Organizations stream: https://developer.zendesk.com/api-reference/ticketing/ticket-management/incremental_exports/""" - def request_params(self, **kwargs) -> MutableMapping[str, Any]: - params = super().request_params(**kwargs) - if params: - params.update({"sort_by": self.cursor_field, "sort_order": "desc"}) - return params +class Tickets(SourceZendeskSupportStream): + """Tickets stream: https://developer.zendesk.com/api-reference/ticketing/ticket-management/incremental_exports/""" -class IncrementalSortedPageStream(IncrementalUnsortedPageStream, ABC): - """Stream for loading sorting data with normal pagination""" + # The API compares the start_time with the ticket's generated_timestamp value, not its updated_at value. + # The generated_timestamp value is updated for all entity updates, including system updates. + # If a system update occurs after an event, the unchanged updated_at time will become earlier + # relative to the updated generated_timestamp time. def request_params(self, **kwargs) -> MutableMapping[str, Any]: + """Adds the field 'comment_count'""" params = super().request_params(**kwargs) - if params: - params.update({"sort_by": self.cursor_field, "sort_order": "desc", "limit": self.page_size}) + params["include"] = "comment_count" return params -class TicketComments(IncrementalSortedCursorStream): +class TicketComments(SourceZendeskSupportStream): """TicketComments stream: https://developer.zendesk.com/api-reference/ticketing/tickets/ticket_comments/ Zendesk doesn't provide an API for loading all comments via one direct endpoint. Thus we first load all updated tickets and then try to load all created/updated @@ -377,157 +384,37 @@ class TicketComments(IncrementalSortedCursorStream): # Tickets can be removed throughout synchronization. The Zendesk API will return a response # with a 404 code if a ticket does not exist. But it shouldn't break loading of other comments. - raise_on_http_errors = False + # raise_on_http_errors = False - response_list_name = "comments" - cursor_field = IncrementalSortedCursorStream.created_at_field + parent = Tickets + cursor_field = "created_at" - def __init__(self, **kwargs): - super().__init__(**kwargs) - # need to save a slice ticket state - # because the function get_updated_state doesn't have a stream_slice as argument - self._ticket_last_end_time = None + response_list_name = "comments" def path(self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, **kwargs) -> str: ticket_id = stream_slice["id"] - return f"tickets/{ticket_id}/comments.json" + return f"tickets/{ticket_id}/comments" def stream_slices( self, sync_mode, cursor_field: List[str] = None, stream_state: Mapping[str, Any] = None ) -> Iterable[Optional[Mapping[str, Any]]]: - """Loads all updated tickets after last stream state""" - stream_state = stream_state or {} - # convert a comment state value to a ticket one - # tickets and comments have different cursor formats.
For example: - # Ticket state {"generated_timestamp": 1627637409} - # Comment state: {"created_at": "2021-07-30T12:30:09Z"} - # At the first try to find a ticket cursor value - ticket_stream_value = stream_state.get(Tickets.cursor_field) - if not ticket_stream_value: - # for backward compatibility because not all relevant states can have some last ticket state - ticket_stream_value = self.str2unixtime(stream_state.get(self.cursor_field)) - - tickets_stream = Tickets(start_date=self._start_date, subdomain=self._subdomain, authenticator=self.authenticator) - ticket_pages = defaultdict(list) - last_end_time = stream_state.get(LAST_END_TIME_KEY, 0) - ticket_count = 0 - for ticket in tickets_stream.read_records( - sync_mode=sync_mode, - cursor_field=cursor_field, - stream_state={Tickets.cursor_field: ticket_stream_value, LAST_END_TIME_KEY: last_end_time}, - ): - if not ticket["comment_count"]: - # skip tickets without comments - continue - ticket_count += 1 - ticket_pages[tickets_stream.last_end_time].append( - { - "id": ticket["id"], - Tickets.cursor_field: ticket[Tickets.cursor_field], - } - ) - - if ticket_pages: - last_times = sorted(ticket_pages.keys()) - # tickets' loading is implemented per page but the stream 'tickets' has - # the addl stream state fields "_last_end_time" and its value is not compatible - # with comments' cursor fields. Thus we need to save it separately and add - # last_end_time info for every slice - last_page = {last_times[-1]: [ticket_pages[last_times[-1]].pop(-1)]} - - new_last_times = [last_end_time] + last_times[:-1] - ticket_pages = {new_last_times[i]: ticket_pages[last_times[i]] for i in range(len(last_times))} - ticket_pages.update(last_page) - - self.logger.info(f"Found {ticket_count} ticket(s) with comments") - for end_time, tickets in sorted(ticket_pages.items(), key=lambda t: t[0]): - self._ticket_last_end_time = end_time - yield from sorted(tickets, key=lambda ticket: ticket[Tickets.cursor_field]) - - def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: - """Adds a last cursor ticket updated time for a comment state""" - new_state = super().get_updated_state(current_stream_state=current_stream_state, latest_record=latest_record) - if self._ticket_last_end_time: - - new_state[LAST_END_TIME_KEY] = self._ticket_last_end_time - return new_state - - def parse_response( - self, response: requests.Response, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, **kwargs - ) -> Iterable[Mapping]: - """Handle response status""" - if response.status_code == 200: - # Ticket ID not included in ticket comments response. - # Manually add ticket_id to ticket_comments dict. - ticket_id = stream_slice["id"] - result = super().parse_response(response, stream_state=stream_state, stream_slice=stream_slice, **kwargs) - enriched_result = map(lambda x: x.update({"ticket_id": ticket_id}) or x, result) - yield from enriched_result - elif response.status_code == 404: - ticket_id = stream_slice["id"] - # skip 404 errors for not found tickets - self.logger.info(f"ticket {ticket_id} not found (404 error). It could have been deleted.") - else: - response.raise_for_status() - - -# NOTE: all Zendesk endpoints can be split into several templates of data loading. 
-# 1) with API built-in incremental approach -# 2) pagination and sorting mechanism -# 3) cursor pagination and sorting mechanism -# 4) without sorting but with pagination -# 5) without sorting but with cursor pagination -# 6) without created_at/updated_at fields - -# endpoints provide a built-in incremental approach - - -class Users(IncrementalExportStream): - """Users stream: https://developer.zendesk.com/api-reference/ticketing/ticket-management/incremental_exports/""" - - -class Organizations(IncrementalExportStream): - """Organizations stream: https://developer.zendesk.com/api-reference/ticketing/ticket-management/incremental_exports/""" - - -class Tickets(IncrementalExportStream): - """Tickets stream: https://developer.zendesk.com/api-reference/ticketing/ticket-management/incremental_exports/""" - - # The API compares the start_time with the ticket's generated_timestamp value, not its updated_at value. - # The generated_timestamp value is updated for all entity updates, including system updates. - # If a system update occurs after a event, the unchanged updated_at time will become earlier relative to the updated generated_timestamp time. - cursor_field = "generated_timestamp" - - def request_params(self, **kwargs) -> MutableMapping[str, Any]: - """Adds the field 'comment_count'""" - params = super().request_params(**kwargs) - params["include"] = "comment_count" - return params - - def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: - """Need to save a cursor values as integer""" - state = super().get_updated_state(current_stream_state=current_stream_state, latest_record=latest_record) - if state and state.get(self.cursor_field): - state[self.cursor_field] = int(state[self.cursor_field]) - return state - - def get_last_end_time(self) -> Optional[Union[str, int]]: - """A response with tickets provides cursor data as unixtime""" - return self.last_end_time - - -# endpoints provide a pagination mechanism but we can't manage a response order + tickets_stream = self.parent(start_date=self._start_date, subdomain=self._subdomain, authenticator=self._session.auth) + for ticket in tickets_stream.read_records(sync_mode=SyncMode.full_refresh, cursor_field=cursor_field, stream_state=stream_state): + if ticket["comment_count"]: + yield {"id": ticket["id"], "child_count": ticket["comment_count"]} -class Groups(IncrementalUnsortedCursorStream): +class Groups(SourceZendeskSupportStream): """Groups stream: https://developer.zendesk.com/api-reference/ticketing/groups/groups/""" -class GroupMemberships(IncrementalUnsortedCursorStream): +class GroupMemberships(SourceZendeskSupportCursorPaginationStream): """GroupMemberships stream: https://developer.zendesk.com/api-reference/ticketing/groups/group_memberships/""" + cursor_field = "updated_at" -class SatisfactionRatings(IncrementalUnsortedCursorStream): + +class SatisfactionRatings(SourceZendeskSupportStream): """SatisfactionRatings stream: https://developer.zendesk.com/api-reference/ticketing/ticket-management/satisfaction_ratings/ The ZenDesk API for this stream provides the filter "start_time" that can be used for incremental logic @@ -541,7 +428,7 @@ def request_params( start_time = self.str2unixtime((stream_state or {}).get(self.cursor_field)) if not start_time: - start_time = int(time.mktime(self._start_date.timetuple())) + start_time = self.str2unixtime(self._start_date) params.update( { "start_time": start_time, @@ -551,42 +438,38 @@ def request_params( return params 
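
Reviewer note on the pagination model above: the cursor-pagination streams take the next `start_time` from the query string of the `next_page` URL in each response, and fall back to the configured start date on the first request. A minimal sketch of just that mechanism, with illustrative helper names that are not part of this diff:

```python
# Minimal sketch of Zendesk's time-based export pagination, reusing the same
# stdlib calls as the streams above. Helper names are illustrative only.
import calendar
from typing import Optional
from urllib.parse import parse_qsl, urlparse

import pendulum


def first_start_time(start_date: str) -> int:
    """The configured ISO start date becomes the unix 'start_time' of request #1."""
    return calendar.timegm(pendulum.parse(start_date).utctimetuple())


def next_start_time(response_json: dict, prev_start_time: Optional[str] = None) -> Optional[str]:
    """Pulls 'start_time' out of the 'next_page' URL. Zendesk repeats the last
    value on the final page, so a repeated value means 'stop paginating'."""
    start_time = dict(parse_qsl(urlparse(response_json.get("next_page", "")).query)).get("start_time")
    return start_time if start_time != prev_start_time else None


assert first_start_time("2021-01-01T00:00:00Z") == 1609459200
page = {"next_page": "https://d3v.zendesk.com/api/v2/incremental/tickets.json?start_time=1609462800"}
assert next_start_time(page) == "1609462800"
assert next_start_time(page, prev_start_time="1609462800") is None
```

This repeated-value check is why `SourceZendeskSupportCursorPaginationStream` compares the parsed value against `prev_start_time` before emitting another page token.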
-class TicketFields(IncrementalUnsortedPageStream):
+class TicketFields(SourceZendeskSupportStream):
     """TicketFields stream: https://developer.zendesk.com/api-reference/ticketing/tickets/ticket_fields/"""

-class TicketForms(IncrementalUnsortedPageStream):
+class TicketForms(SourceZendeskSupportCursorPaginationStream):
     """TicketForms stream: https://developer.zendesk.com/api-reference/ticketing/tickets/ticket_forms/"""

-class TicketMetrics(IncrementalUnsortedCursorStream):
+class TicketMetrics(SourceZendeskSupportStream):
     """TicketMetrics stream: https://developer.zendesk.com/api-reference/ticketing/tickets/ticket_metrics/"""

-    def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]:
-        # Tickets are ordered chronologically by created date, from newest to oldest.
-        # No need to get next page once cursor passed initial state
-        if self.is_finished:
-            return None
-
-        return super().next_page_token(response)

-class TicketMetricEvents(IncrementalExportStream):
+class TicketMetricEvents(SourceZendeskSupportCursorPaginationStream):
     """TicketMetricEvents stream: https://developer.zendesk.com/api-reference/ticketing/tickets/ticket_metric_events/"""

     cursor_field = "time"

+    def path(self, **kwargs):
+        return "incremental/ticket_metric_events"
+

-class Macros(IncrementalSortedCursorStream):
+class Macros(SourceZendeskSupportStream):
     """Macros stream: https://developer.zendesk.com/api-reference/ticketing/business-rules/macros/"""

 # endpoints provide cursor pagination and a sorting mechanism

-class TicketAudits(IncrementalUnsortedStream):
+class TicketAudits(SourceZendeskSupportCursorPaginationStream):
     """TicketAudits stream: https://developer.zendesk.com/api-reference/ticketing/tickets/ticket_audits/"""

+    parent = Tickets
     # can request a maximum of 1,000 results
     page_size = 1000
     # ticket audits don't have the 'updated_by' field
@@ -597,16 +480,13 @@ class TicketAudits(IncrementalUnsortedStream):
     # This endpoint uses a variant of cursor pagination with some differences from cursor pagination used in other endpoints.
     def request_params(self, next_page_token: Mapping[str, Any] = None, **kwargs) -> MutableMapping[str, Any]:
-        params = super().request_params(next_page_token=next_page_token, **kwargs)
-        params.update({"sort_by": self.cursor_field, "sort_order": "desc", "limit": self.page_size})
+        params = {"sort_by": self.cursor_field, "sort_order": "desc", "limit": self.page_size}
         if next_page_token:
             params["cursor"] = next_page_token
         return params

     def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]:
-        if self.is_finished:
-            return None
         return response.json().get("before_cursor")

@@ -614,30 +494,52 @@ def next_page_token(self, response: requests.Response) -> Optional[Mapping[str,
 # thus we can't implement incremental logic for them

-class Tags(FullRefreshStream):
+class Tags(SourceZendeskSupportFullRefreshStream):
     """Tags stream: https://developer.zendesk.com/api-reference/ticketing/ticket-management/tags/"""

     # doesn't have the 'id' field
     primary_key = "name"

-class SlaPolicies(FullRefreshStream):
+class SlaPolicies(SourceZendeskSupportFullRefreshStream):
     """SlaPolicies stream: https://developer.zendesk.com/api-reference/ticketing/business-rules/sla_policies/"""

     def path(self, *args, **kwargs) -> str:
         return "slas/policies.json"

-class Brands(FullRefreshStream):
+class Brands(SourceZendeskSupportFullRefreshStream):
     """Brands stream: https://developer.zendesk.com/api-reference/ticketing/account-configuration/brands/#list-brands"""

-class CustomRoles(FullRefreshStream):
+class CustomRoles(SourceZendeskSupportFullRefreshStream):
     """CustomRoles stream: https://developer.zendesk.com/api-reference/ticketing/account-configuration/custom_roles/#list-custom-roles"""

-class Schedules(FullRefreshStream):
+class Schedules(SourceZendeskSupportFullRefreshStream):
     """Schedules stream: https://developer.zendesk.com/api-reference/ticketing/ticket-management/schedules/#list-schedules"""

     def path(self, *args, **kwargs) -> str:
         return "business_hours/schedules.json"
+
+
+class UserSettingsStream(SourceZendeskSupportFullRefreshStream):
+    """Stream for checking the request token and permissions"""
+
+    def path(self, *args, **kwargs) -> str:
+        return "account/settings.json"
+
+    def next_page_token(self, *args, **kwargs) -> Optional[Mapping[str, Any]]:
+        # this endpoint is not paginated
+        return None
+
+    def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]:
+        """Returns the settings object from the API response"""
+        settings = response.json().get("settings")
+        if settings:
+            yield settings
+
+    def get_settings(self) -> Mapping[str, Any]:
+        for resp in self.read_records(SyncMode.full_refresh):
+            return resp
+        raise SourceZendeskException("settings not found")
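
Reviewer note: the new test_futures.py added below asserts how many paged requests `Tickets` pre-generates. The stream first reads the total record count from the `<path>/count.json` endpoint and then enqueues one future request per page, so the expected deque length is a ceiling division. A sketch of the arithmetic under test (the helper name is ours, for illustration only):

```python
# Sketch of the page-count arithmetic verified by the parametrized tests below,
# e.g. (1000, 100) -> 10, (101, 100) -> 2, (0, 100) -> 0.
import math


def expected_future_requests(records_count: int, page_size: int) -> int:
    """One paged request is enqueued per page of records."""
    return math.ceil(records_count / page_size)


assert expected_future_requests(1000, 100) == 10
assert expected_future_requests(101, 100) == 2
assert expected_future_requests(1, 100) == 1
assert expected_future_requests(0, 100) == 0
```

diff --git a/airbyte-integrations/connectors/source-zendesk-support/unit_tests/test_futures.py b/airbyte-integrations/connectors/source-zendesk-support/unit_tests/test_futures.py
new file mode 100644
index 000000000000..32fbb78ca389
--- /dev/null
+++ b/airbyte-integrations/connectors/source-zendesk-support/unit_tests/test_futures.py
@@ -0,0 +1,120 @@
+#
+# Copyright (c) 2021 Airbyte, Inc., all rights reserved.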
+# + +import json +from datetime import timedelta +from urllib.parse import urljoin + +import pendulum +import pytest +import requests_mock +from airbyte_cdk.models import SyncMode +from airbyte_cdk.sources.streams.http.exceptions import DefaultBackoffException +from source_zendesk_support.source import BasicApiTokenAuthenticator +from source_zendesk_support.streams import Tickets + + +@pytest.fixture(scope="module") +def stream_args(): + return { + "subdomain": "fake-subdomain", + "start_date": "2021-01-27T00:00:00Z", + "authenticator": BasicApiTokenAuthenticator("test@airbyte.io", "api_token"), + } + + +@pytest.mark.parametrize( + "records_count,page_size,expected_futures_deque_len", + [ + (1000, 100, 10), + (1000, 10, 100), + (0, 100, 0), + (1, 100, 1), + (101, 100, 2), + ], +) +def test_proper_number_of_future_requests_generated(stream_args, records_count, page_size, expected_futures_deque_len): + stream = Tickets(**stream_args) + stream.page_size = page_size + + with requests_mock.Mocker() as m: + count_url = urljoin(stream.url_base, f"{stream.path()}/count.json") + m.get(count_url, text=json.dumps({"count": {"value": records_count}})) + + records_url = urljoin(stream.url_base, stream.path()) + m.get(records_url) + + stream.generate_future_requests(sync_mode=SyncMode.full_refresh, cursor_field=stream.cursor_field) + + assert len(stream.future_requests) == expected_futures_deque_len + + +@pytest.mark.parametrize( + "records_count,page_size,expected_futures_deque_len", + [ + (1000, 100, 10), + (1000, 10, 100), + (0, 100, 0), + (1, 100, 1), + (101, 100, 2), + ], +) +def test_parse_future_records(stream_args, records_count, page_size, expected_futures_deque_len): + stream = Tickets(**stream_args) + stream.page_size = page_size + expected_records = [ + {f"key{i}": f"val{i}", stream.cursor_field: (pendulum.parse("2020-01-01") + timedelta(days=i)).isoformat()} + for i in range(records_count) + ] + + with requests_mock.Mocker() as m: + count_url = urljoin(stream.url_base, f"{stream.path()}/count.json") + m.get( + count_url, + text=json.dumps({"count": {"value": records_count}}), + ) + + records_url = urljoin(stream.url_base, stream.path()) + m.get(records_url, text=json.dumps({stream.name: expected_records})) + + stream.generate_future_requests(sync_mode=SyncMode.full_refresh, cursor_field=stream.cursor_field) + if not stream.future_requests and not expected_futures_deque_len: + assert len(stream.future_requests) == 0 and not expected_records + else: + response = stream.future_requests[0]["future"].result() + records = list(stream.parse_response(response, stream_state=None, stream_slice=None)) + assert records == expected_records + + +@pytest.mark.parametrize( + "records_count,page_size,expected_futures_deque_len,should_retry", + [ + (1000, 100, 10, True), + (1000, 10, 100, True), + # (0, 100, 0, True), + # (1, 100, 1, False), + # (101, 100, 2, False), + ], +) +def test_read_records(stream_args, records_count, page_size, expected_futures_deque_len, should_retry): + stream = Tickets(**stream_args) + stream.page_size = page_size + expected_records = [ + {f"key{i}": f"val{i}", stream.cursor_field: (pendulum.parse("2020-01-01") + timedelta(days=i)).isoformat()} + for i in range(page_size) + ] + + with requests_mock.Mocker() as m: + count_url = urljoin(stream.url_base, f"{stream.path()}/count.json") + m.get(count_url, text=json.dumps({"count": {"value": records_count}})) + + records_url = urljoin(stream.url_base, stream.path()) + + m.get(records_url, status_code=429 if should_retry else 200, 
headers={"X-Rate-Limit": "700"}) + + if should_retry and expected_futures_deque_len: + with pytest.raises(DefaultBackoffException): + list(stream.read_records(sync_mode=SyncMode.full_refresh)) + else: + assert list(stream.read_records(sync_mode=SyncMode.full_refresh)) == expected_records diff --git a/airbyte-integrations/connectors/source-zendesk-support/unit_tests/unit_test.py b/airbyte-integrations/connectors/source-zendesk-support/unit_tests/unit_test.py deleted file mode 100644 index 2be64443e865..000000000000 --- a/airbyte-integrations/connectors/source-zendesk-support/unit_tests/unit_test.py +++ /dev/null @@ -1,149 +0,0 @@ -# -# Copyright (c) 2021 Airbyte, Inc., all rights reserved. -# - -import json -from unittest.mock import MagicMock, Mock - -import pytest -import requests -import requests_mock -from airbyte_cdk.models import AirbyteStream, ConfiguredAirbyteCatalog, ConfiguredAirbyteStream, DestinationSyncMode, SyncMode -from requests.exceptions import HTTPError -from source_zendesk_support import SourceZendeskSupport -from source_zendesk_support.streams import Tags, TicketComments - -CONFIG_FILE = "secrets/config.json" - - -@pytest.fixture(scope="module") -def prepare_stream_args(): - """Generates streams settings from a file""" - with open(CONFIG_FILE, "r") as f: - return SourceZendeskSupport.convert_config2stream_args(json.loads(f.read())) - - -@pytest.fixture(scope="module") -def config(): - """Generates fake config""" - return { - "subdomain": "fake_domain", - "start_date": "2020-01-01T00:00:00Z", - "auth_method": {"auth_method": "api_token", "email": "email@email.com", "api_token": "fake_api_token"}, - } - - -@pytest.mark.parametrize( - "header_name,header_value,expected", - [ - # Retry-After > 0 - ("Retry-After", "123", 123), - # Retry-After < 0 - ("Retry-After", "-123", None), - # X-Rate-Limit > 0 - ("X-Rate-Limit", "100", 1.2), - # X-Rate-Limit header < 0 - ("X-Rate-Limit", "-100", None), - # Random header - ("Fake-Header", "-100", None), - ], -) -def test_backoff_cases(prepare_stream_args, header_name, header_value, expected): - """Zendesk sends the header different value for backoff logic""" - - stream = Tags(**prepare_stream_args) - with requests_mock.Mocker() as m: - url = stream.url_base + stream.path() - - m.get(url, headers={header_name: header_value}, status_code=429) - result = stream.backoff_time(requests.get(url)) - if expected: - assert (result - expected) < 0.005 - else: - assert result is None - - -@pytest.mark.parametrize( - "status_code,expected_comment_count,expected_exception", - [ - # success - (200, 1, None), - # not found ticket - (404, 0, None), - # some another code error. 
- (403, 0, HTTPError), - ], -) -def test_comments_not_found_ticket(prepare_stream_args, status_code, expected_comment_count, expected_exception): - """Checks the case when some ticket is removed while sync of comments""" - fake_id = 12345 - stream = TicketComments(**prepare_stream_args) - with requests_mock.Mocker() as comment_mock: - path = f"tickets/{fake_id}/comments.json" - stream.path = Mock(return_value=path) - url = stream.url_base + path - comment_mock.get( - url, - status_code=status_code, - json={ - "comments": [ - { - "id": fake_id, - TicketComments.cursor_field: "2121-07-22T06:55:55Z", - } - ] - }, - ) - comments = stream.read_records( - sync_mode=None, - stream_slice={ - "id": fake_id, - }, - ) - if expected_exception: - with pytest.raises(expected_exception): - next(comments) - else: - assert len(list(comments)) == expected_comment_count - - -@pytest.mark.parametrize( - "input_data,expected_data", - [ - ( - {"id": 123, "custom_fields": [{"id": 3213212, "value": ["fake_3000", "fake_5555"]}]}, - {"id": 123, "custom_fields": [{"id": 3213212, "value": "['fake_3000', 'fake_5555']"}]}, - ), - ( - {"id": 234, "custom_fields": [{"id": 2345234, "value": "fake_123"}]}, - {"id": 234, "custom_fields": [{"id": 2345234, "value": "fake_123"}]}, - ), - ( - {"id": 345, "custom_fields": [{"id": 5432123, "value": 55432.321}]}, - {"id": 345, "custom_fields": [{"id": 5432123, "value": "55432.321"}]}, - ), - ], -) -def test_transform_for_tickets_stream(config, input_data, expected_data): - """Checks Transform in case when records come with invalid fields data types""" - test_catalog = ConfiguredAirbyteCatalog( - streams=[ - ConfiguredAirbyteStream( - stream=AirbyteStream(name="tickets", json_schema={}), - sync_mode=SyncMode.full_refresh, - destination_sync_mode=DestinationSyncMode.overwrite, - ) - ] - ) - - with requests_mock.Mocker() as ticket_mock: - ticket_mock.get( - f"https://{config['subdomain']}.zendesk.com/api/v2/incremental/tickets.json", - status_code=200, - json={"tickets": [input_data], "end_time": "2021-07-22T06:55:55Z", "end_of_stream": True}, - ) - - source = SourceZendeskSupport() - records = source.read(MagicMock(), config, test_catalog, None) - for record in records: - assert record.record.data == expected_data diff --git a/airbyte-metrics/reporter/Dockerfile b/airbyte-metrics/reporter/Dockerfile index fffdb53e0962..b967ca541bf8 100644 --- a/airbyte-metrics/reporter/Dockerfile +++ b/airbyte-metrics/reporter/Dockerfile @@ -5,7 +5,7 @@ ENV APPLICATION airbyte-metrics-reporter WORKDIR /app -ADD bin/${APPLICATION}-0.35.39-alpha.tar /app +ADD bin/${APPLICATION}-0.35.42-alpha.tar /app # wait for upstream dependencies to become available before starting server -ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.39-alpha/bin/${APPLICATION}"] +ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.42-alpha/bin/${APPLICATION}"] diff --git a/airbyte-scheduler/app/Dockerfile b/airbyte-scheduler/app/Dockerfile index 37dc71b6fbaa..a7e2b0db363e 100644 --- a/airbyte-scheduler/app/Dockerfile +++ b/airbyte-scheduler/app/Dockerfile @@ -5,7 +5,7 @@ ENV APPLICATION airbyte-scheduler WORKDIR /app -ADD bin/${APPLICATION}-0.35.39-alpha.tar /app +ADD bin/${APPLICATION}-0.35.42-alpha.tar /app # wait for upstream dependencies to become available before starting server -ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.39-alpha/bin/${APPLICATION}"] +ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.42-alpha/bin/${APPLICATION}"] diff --git a/airbyte-server/Dockerfile b/airbyte-server/Dockerfile index 
7de1970df09c..1ba8e9fb35c5 100644 --- a/airbyte-server/Dockerfile +++ b/airbyte-server/Dockerfile @@ -7,7 +7,7 @@ ENV APPLICATION airbyte-server WORKDIR /app -ADD bin/${APPLICATION}-0.35.39-alpha.tar /app +ADD bin/${APPLICATION}-0.35.42-alpha.tar /app # wait for upstream dependencies to become available before starting server -ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.39-alpha/bin/${APPLICATION}"] +ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.42-alpha/bin/${APPLICATION}"] diff --git a/airbyte-server/src/main/java/io/airbyte/server/handlers/ConnectionsHandler.java b/airbyte-server/src/main/java/io/airbyte/server/handlers/ConnectionsHandler.java index c5eb017c85b3..8bac37471b50 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/handlers/ConnectionsHandler.java +++ b/airbyte-server/src/main/java/io/airbyte/server/handlers/ConnectionsHandler.java @@ -212,6 +212,11 @@ public ConnectionRead updateConnection(final ConnectionUpdate connectionUpdate) new HashSet<>(connectionUpdate.getOperationIds())); configRepository.writeStandardSync(newConnection); + + if (featureFlags.usesNewScheduler()) { + temporalWorkerRunFactory.update(connectionUpdate); + } + return buildConnectionRead(connectionUpdate.getConnectionId()); } diff --git a/airbyte-server/src/main/java/io/airbyte/server/handlers/WebBackendConnectionsHandler.java b/airbyte-server/src/main/java/io/airbyte/server/handlers/WebBackendConnectionsHandler.java index e99bcd781ead..836ebfd37813 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/handlers/WebBackendConnectionsHandler.java +++ b/airbyte-server/src/main/java/io/airbyte/server/handlers/WebBackendConnectionsHandler.java @@ -272,8 +272,6 @@ public WebBackendConnectionRead webBackendUpdateConnection(final WebBackendConne connectionRead = connectionsHandler.updateConnection(connectionUpdate); if (needReset) { - // todo (cgardens) - temporalWorkerRunFactory CANNOT be here. - temporalWorkerRunFactory.update(connectionUpdate); // todo (cgardens) - temporalWorkerRunFactory CANNOT be here. 
temporalWorkerRunFactory.synchronousResetConnection(webBackendConnectionUpdate.getConnectionId()); diff --git a/airbyte-server/src/test/java/io/airbyte/server/handlers/ConnectionsHandlerTest.java b/airbyte-server/src/test/java/io/airbyte/server/handlers/ConnectionsHandlerTest.java index 1d596113c5e9..2ada8688995f 100644 --- a/airbyte-server/src/test/java/io/airbyte/server/handlers/ConnectionsHandlerTest.java +++ b/airbyte-server/src/test/java/io/airbyte/server/handlers/ConnectionsHandlerTest.java @@ -64,6 +64,8 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Nested; import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; class ConnectionsHandlerTest { @@ -293,8 +295,12 @@ void testCreateConnectionWithBadDefinitionIds() throws JsonValidationException, } - @Test - void testUpdateConnection() throws JsonValidationException, ConfigNotFoundException, IOException { + @ParameterizedTest + @ValueSource(booleans = {true, false}) + void testUpdateConnection(boolean useNewScheduler) throws JsonValidationException, ConfigNotFoundException, IOException { + when(featureFlags.usesNewScheduler()) + .thenReturn(useNewScheduler); + final AirbyteCatalog catalog = ConnectionHelpers.generateBasicApiCatalog(); catalog.getStreams().get(0).getStream().setName("azkaban_users"); catalog.getStreams().get(0).getConfig().setAliasName("azkaban_users"); @@ -349,6 +355,10 @@ void testUpdateConnection() throws JsonValidationException, ConfigNotFoundExcept assertEquals(expectedConnectionRead, actualConnectionRead); verify(configRepository).writeStandardSync(updatedStandardSync); + + if (useNewScheduler) { + verify(temporalWorkflowHandler).update(connectionUpdate); + } } @Test diff --git a/airbyte-server/src/test/java/io/airbyte/server/handlers/WebBackendConnectionsHandlerTest.java b/airbyte-server/src/test/java/io/airbyte/server/handlers/WebBackendConnectionsHandlerTest.java index 244780c29b4f..095c16778f99 100644 --- a/airbyte-server/src/test/java/io/airbyte/server/handlers/WebBackendConnectionsHandlerTest.java +++ b/airbyte-server/src/test/java/io/airbyte/server/handlers/WebBackendConnectionsHandlerTest.java @@ -589,6 +589,7 @@ void testUpdateConnectionWithUpdatedSchemaNewScheduler() throws JsonValidationEx final ConnectionIdRequestBody connectionId = new ConnectionIdRequestBody().connectionId(result.getConnectionId()); verify(schedulerHandler, times(0)).resetConnection(connectionId); verify(schedulerHandler, times(0)).syncConnection(connectionId); + verify(connectionsHandler, times(1)).updateConnection(any()); final InOrder orderVerifier = inOrder(temporalWorkerRunFactory); orderVerifier.verify(temporalWorkerRunFactory, times(1)).synchronousResetConnection(connectionId.getConnectionId()); orderVerifier.verify(temporalWorkerRunFactory, times(1)).startNewManualSync(connectionId.getConnectionId()); diff --git a/airbyte-webapp/package-lock.json b/airbyte-webapp/package-lock.json index 2266d5d20f8f..3d4389caa84d 100644 --- a/airbyte-webapp/package-lock.json +++ b/airbyte-webapp/package-lock.json @@ -1,12 +1,12 @@ { "name": "airbyte-webapp", - "version": "0.35.39-alpha", + "version": "0.35.42-alpha", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "airbyte-webapp", - "version": "0.35.39-alpha", + "version": "0.35.42-alpha", "dependencies": { "@fortawesome/fontawesome-svg-core": "^1.2.36", "@fortawesome/free-brands-svg-icons": "^5.15.4", diff --git a/airbyte-webapp/package.json 
b/airbyte-webapp/package.json index 2458acf8800c..fbbeb73a74ea 100644 --- a/airbyte-webapp/package.json +++ b/airbyte-webapp/package.json @@ -1,6 +1,6 @@ { "name": "airbyte-webapp", - "version": "0.35.39-alpha", + "version": "0.35.42-alpha", "private": true, "engines": { "node": ">=16.0.0" diff --git a/airbyte-webapp/src/packages/cloud/locales/en.json b/airbyte-webapp/src/packages/cloud/locales/en.json index e43ace54d58d..07acfabdec53 100644 --- a/airbyte-webapp/src/packages/cloud/locales/en.json +++ b/airbyte-webapp/src/packages/cloud/locales/en.json @@ -19,7 +19,7 @@ "login.backLogin": "Back to Log in", "login.resetPassword": "Reset your password", "login.resetPassword.emailSent": "A password reset email has been sent to you", - "login.activateAccess": "Activate your 30-day free trial", + "login.activateAccess": "Activate your 14-day free trial", "login.activateAccess.subtitle": "Get our Special launch offer of $1,000 worth of free usage. No credit card required.", "login.fullName": "Full name*", "login.fullName.placeholder": "Christopher Smith", diff --git a/airbyte-webapp/src/views/Connector/ServiceForm/components/Controls/Instruction.tsx b/airbyte-webapp/src/views/Connector/ServiceForm/components/Controls/Instruction.tsx index 3f4c57724aaf..8cbc3f58338b 100644 --- a/airbyte-webapp/src/views/Connector/ServiceForm/components/Controls/Instruction.tsx +++ b/airbyte-webapp/src/views/Connector/ServiceForm/components/Controls/Instruction.tsx @@ -120,7 +120,7 @@ const Instruction: React.FC = ({ )} {docType === "internal" && ( - setIsSideViewOpen(true)}> + setIsSideViewOpen(true)}> )} diff --git a/airbyte-workers/Dockerfile b/airbyte-workers/Dockerfile index fbe371eaae3c..1b505ca2cc6e 100644 --- a/airbyte-workers/Dockerfile +++ b/airbyte-workers/Dockerfile @@ -30,7 +30,7 @@ ENV APPLICATION airbyte-workers WORKDIR /app # Move worker app -ADD bin/${APPLICATION}-0.35.39-alpha.tar /app +ADD bin/${APPLICATION}-0.35.42-alpha.tar /app # wait for upstream dependencies to become available before starting server -ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.39-alpha/bin/${APPLICATION}"] +ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.35.42-alpha/bin/${APPLICATION}"] diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/process/DockerProcessFactory.java b/airbyte-workers/src/main/java/io/airbyte/workers/process/DockerProcessFactory.java index 709a12372567..ea4db97a3baa 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/process/DockerProcessFactory.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/process/DockerProcessFactory.java @@ -138,9 +138,6 @@ public Process create(final String jobId, cmd.add(entrypoint); } if (resourceRequirements != null) { - if (!Strings.isNullOrEmpty(resourceRequirements.getCpuRequest())) { - cmd.add(String.format("--cpu-shares=%s", resourceRequirements.getCpuRequest())); - } if (!Strings.isNullOrEmpty(resourceRequirements.getCpuLimit())) { cmd.add(String.format("--cpus=%s", resourceRequirements.getCpuLimit())); } diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityImpl.java index 06bd2dafbfb7..bc3ae71f9f1d 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityImpl.java +++ 
b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityImpl.java @@ -97,12 +97,14 @@ public JobCreationOutput createNewJob(final JobCreationInput input) { private void emitSrcIdDstIdToReleaseStagesMetric(final UUID srcId, final UUID dstId) throws IOException { final var releaseStages = configRepository.getDatabase().query(ctx -> MetricQueries.srcIdAndDestIdToReleaseStages(ctx, srcId, dstId)); - if (releaseStages == null) { + if (releaseStages == null || releaseStages.size() == 0) { return; } for (final ReleaseStage stage : releaseStages) { - DogStatsDMetricSingleton.count(MetricsRegistry.JOB_CREATED_BY_RELEASE_STAGE, 1, MetricTags.getReleaseStage(stage)); + if (stage != null) { + DogStatsDMetricSingleton.count(MetricsRegistry.JOB_CREATED_BY_RELEASE_STAGE, 1, MetricTags.getReleaseStage(stage)); + } } } @@ -213,12 +215,14 @@ public void reportJobStart(final ReportJobStartInput input) { private void emitJobIdToReleaseStagesMetric(final MetricsRegistry metric, final long jobId) throws IOException { final var releaseStages = configRepository.getDatabase().query(ctx -> MetricQueries.jobIdToReleaseStages(ctx, jobId)); - if (releaseStages == null) { + if (releaseStages == null || releaseStages.size() == 0) { return; } for (final ReleaseStage stage : releaseStages) { - DogStatsDMetricSingleton.count(metric, 1, MetricTags.getReleaseStage(stage)); + if (stage != null) { + DogStatsDMetricSingleton.count(metric, 1, MetricTags.getReleaseStage(stage)); + } } } diff --git a/charts/airbyte/Chart.yaml b/charts/airbyte/Chart.yaml index 0a8b795b16b0..36a3f1516fba 100644 --- a/charts/airbyte/Chart.yaml +++ b/charts/airbyte/Chart.yaml @@ -21,7 +21,7 @@ version: 0.3.0 # incremented each time you make changes to the application. Versions are not expected to # follow Semantic Versioning. They should reflect the version the application is using. # It is recommended to use it with quotes. -appVersion: "0.35.39-alpha" +appVersion: "0.35.42-alpha" dependencies: - name: common diff --git a/charts/airbyte/README.md b/charts/airbyte/README.md index 817f5666140d..0d7c1702b743 100644 --- a/charts/airbyte/README.md +++ b/charts/airbyte/README.md @@ -29,7 +29,7 @@ | `webapp.replicaCount` | Number of webapp replicas | `1` | | `webapp.image.repository` | The repository to use for the airbyte webapp image. | `airbyte/webapp` | | `webapp.image.pullPolicy` | the pull policy to use for the airbyte webapp image | `IfNotPresent` | -| `webapp.image.tag` | The airbyte webapp image tag. Defaults to the chart's AppVersion | `0.35.39-alpha` | +| `webapp.image.tag` | The airbyte webapp image tag. Defaults to the chart's AppVersion | `0.35.42-alpha` | | `webapp.podAnnotations` | Add extra annotations to the webapp pod(s) | `{}` | | `webapp.containerSecurityContext` | Security context for the container | `{}` | | `webapp.livenessProbe.enabled` | Enable livenessProbe on the webapp | `true` | @@ -71,7 +71,7 @@ | `scheduler.replicaCount` | Number of scheduler replicas | `1` | | `scheduler.image.repository` | The repository to use for the airbyte scheduler image. | `airbyte/scheduler` | | `scheduler.image.pullPolicy` | the pull policy to use for the airbyte scheduler image | `IfNotPresent` | -| `scheduler.image.tag` | The airbyte scheduler image tag. Defaults to the chart's AppVersion | `0.35.39-alpha` | +| `scheduler.image.tag` | The airbyte scheduler image tag. 
Defaults to the chart's AppVersion | `0.35.42-alpha` | | `scheduler.podAnnotations` | Add extra annotations to the scheduler pod | `{}` | | `scheduler.resources.limits` | The resources limits for the scheduler container | `{}` | | `scheduler.resources.requests` | The requested resources for the scheduler container | `{}` | @@ -118,7 +118,7 @@ | `server.replicaCount` | Number of server replicas | `1` | | `server.image.repository` | The repository to use for the airbyte server image. | `airbyte/server` | | `server.image.pullPolicy` | the pull policy to use for the airbyte server image | `IfNotPresent` | -| `server.image.tag` | The airbyte server image tag. Defaults to the chart's AppVersion | `0.35.39-alpha` | +| `server.image.tag` | The airbyte server image tag. Defaults to the chart's AppVersion | `0.35.42-alpha` | | `server.podAnnotations` | Add extra annotations to the server pod | `{}` | | `server.containerSecurityContext` | Security context for the container | `{}` | | `server.livenessProbe.enabled` | Enable livenessProbe on the server | `true` | @@ -156,7 +156,7 @@ | `worker.replicaCount` | Number of worker replicas | `1` | | `worker.image.repository` | The repository to use for the airbyte worker image. | `airbyte/worker` | | `worker.image.pullPolicy` | the pull policy to use for the airbyte worker image | `IfNotPresent` | -| `worker.image.tag` | The airbyte worker image tag. Defaults to the chart's AppVersion | `0.35.39-alpha` | +| `worker.image.tag` | The airbyte worker image tag. Defaults to the chart's AppVersion | `0.35.42-alpha` | | `worker.podAnnotations` | Add extra annotations to the worker pod(s) | `{}` | | `worker.containerSecurityContext` | Security context for the container | `{}` | | `worker.livenessProbe.enabled` | Enable livenessProbe on the worker | `true` | @@ -188,7 +188,7 @@ | ----------------------------- | -------------------------------------------------------------------- | -------------------- | | `bootloader.image.repository` | The repository to use for the airbyte bootloader image. | `airbyte/bootloader` | | `bootloader.image.pullPolicy` | the pull policy to use for the airbyte bootloader image | `IfNotPresent` | -| `bootloader.image.tag` | The airbyte bootloader image tag. Defaults to the chart's AppVersion | `0.35.39-alpha` | +| `bootloader.image.tag` | The airbyte bootloader image tag. 
Defaults to the chart's AppVersion | `0.35.42-alpha` | ### Temporal parameters diff --git a/charts/airbyte/values.yaml b/charts/airbyte/values.yaml index d72ad479496d..eb7297831cde 100644 --- a/charts/airbyte/values.yaml +++ b/charts/airbyte/values.yaml @@ -43,7 +43,7 @@ webapp: image: repository: airbyte/webapp pullPolicy: IfNotPresent - tag: 0.35.39-alpha + tag: 0.35.42-alpha ## @param webapp.podAnnotations [object] Add extra annotations to the webapp pod(s) ## @@ -209,7 +209,7 @@ scheduler: image: repository: airbyte/scheduler pullPolicy: IfNotPresent - tag: 0.35.39-alpha + tag: 0.35.42-alpha ## @param scheduler.podAnnotations [object] Add extra annotations to the scheduler pod ## @@ -440,7 +440,7 @@ server: image: repository: airbyte/server pullPolicy: IfNotPresent - tag: 0.35.39-alpha + tag: 0.35.42-alpha ## @param server.podAnnotations [object] Add extra annotations to the server pod ## @@ -581,7 +581,7 @@ worker: image: repository: airbyte/worker pullPolicy: IfNotPresent - tag: 0.35.39-alpha + tag: 0.35.42-alpha ## @param worker.podAnnotations [object] Add extra annotations to the worker pod(s) ## @@ -699,7 +699,7 @@ bootloader: image: repository: airbyte/bootloader pullPolicy: IfNotPresent - tag: 0.35.39-alpha + tag: 0.35.42-alpha ## @param bootloader.podAnnotations [object] Add extra annotations to the bootloader pod ## diff --git a/docs/integrations/destinations/clickhouse.md b/docs/integrations/destinations/clickhouse.md index ee9b6c937115..7fd35a2f8908 100644 --- a/docs/integrations/destinations/clickhouse.md +++ b/docs/integrations/destinations/clickhouse.md @@ -77,9 +77,10 @@ Therefore, Airbyte ClickHouse destination will create tables and schemas using t ## Changelog -| Version | Date | Pull Request | Subject | -| :--- | :--- | :--- | :--- | -| 0.1.3 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | Add `-XX:+ExitOnOutOfMemoryError` JVM option | -| 0.1.1 | 2021-12-21 | [\#8982](https://github.com/airbytehq/airbyte/pull/8982) | Set isSchemaRequired to false | -| 0.1.0 | 2021-11-04 | [\#7620](https://github.com/airbytehq/airbyte/pull/7620) | Add ClickHouse destination | +| Version | Date | Pull Request | Subject | +|:--------|:-----------| :--- |:---------------------------------------------| +| 0.1.4 | 2022-02-25 | [10421](https://github.com/airbytehq/airbyte/pull/10421) | Refactor JDBC parameters handling | +| 0.1.3 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | Add `-XX:+ExitOnOutOfMemoryError` JVM option | +| 0.1.1 | 2021-12-21 | [\#8982](https://github.com/airbytehq/airbyte/pull/8982) | Set isSchemaRequired to false | +| 0.1.0 | 2021-11-04 | [\#7620](https://github.com/airbytehq/airbyte/pull/7620) | Add ClickHouse destination | diff --git a/docs/integrations/destinations/mariadb-columnstore.md b/docs/integrations/destinations/mariadb-columnstore.md index af57ac6d9dbd..e2a870033019 100644 --- a/docs/integrations/destinations/mariadb-columnstore.md +++ b/docs/integrations/destinations/mariadb-columnstore.md @@ -74,10 +74,11 @@ Using this feature requires additional configuration, when creating the destinat ## CHANGELOG -| Version | Date | Pull Request | Subject | -|:--------| :--- |:---------------------------------------------------------|:------------------------------------------| +| Version | Date | Pull Request | Subject | +|:--------|:-----------|:---------------------------------------------------------|:---------------------------------------------| +| 0.1.4 | 2022-02-25 | 
[10421](https://github.com/airbytehq/airbyte/pull/10421) | Refactor JDBC parameters handling | | 0.1.3 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | Add `-XX:+ExitOnOutOfMemoryError` JVM option | -| 0.1.2 | 2021-12-30 | [\#8809](https://github.com/airbytehq/airbyte/pull/8809) | Update connector fields title/description | -| 0.1.1 | 2021-12-01 | [\#8371](https://github.com/airbytehq/airbyte/pull/8371) | Fixed incorrect handling "\n" in ssh key. | -| 0.1.0 | 2021-11-15 | [\#7961](https://github.com/airbytehq/airbyte/pull/7961) | Added MariaDB ColumnStore destination. | +| 0.1.2 | 2021-12-30 | [\#8809](https://github.com/airbytehq/airbyte/pull/8809) | Update connector fields title/description | +| 0.1.1 | 2021-12-01 | [\#8371](https://github.com/airbytehq/airbyte/pull/8371) | Fixed incorrect handling "\n" in ssh key. | +| 0.1.0 | 2021-11-15 | [\#7961](https://github.com/airbytehq/airbyte/pull/7961) | Added MariaDB ColumnStore destination. | diff --git a/docs/integrations/destinations/mssql.md b/docs/integrations/destinations/mssql.md index 53d91ff37589..8346c156fb1d 100644 --- a/docs/integrations/destinations/mssql.md +++ b/docs/integrations/destinations/mssql.md @@ -117,26 +117,29 @@ Using this feature requires additional configuration, when creating the source. ## Changelog -| Version | Date | Pull Request | Subject | -|:--------| :--- |:---------------------------------------------------------| :--- | -| 0.1.14 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | Add `-XX:+ExitOnOutOfMemoryError` JVM option | -| 0.1.13 | 2021-12-28 | [\#9158](https://github.com/airbytehq/airbyte/pull/9158) | Update connector fields title/description | -| 0.1.12 | 2021-12-01 | [\#8371](https://github.com/airbytehq/airbyte/pull/8371) | Fixed incorrect handling "\n" in ssh key | +| Version | Date | Pull Request | Subject | +|:--------| :--- |:---------------------------------------------------------|:----------------------------------------------------------------------------------------------------| +| 0.1.15 | 2022-02-25 | [10421](https://github.com/airbytehq/airbyte/pull/10421) | Refactor JDBC parameters handling | +| 0.1.14 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | Add `-XX:+ExitOnOutOfMemoryError` JVM option | +| 0.1.13 | 2021-12-28 | [\#9158](https://github.com/airbytehq/airbyte/pull/9158) | Update connector fields title/description | +| 0.1.12 | 2021-12-01 | [\#8371](https://github.com/airbytehq/airbyte/pull/8371) | Fixed incorrect handling "\n" in ssh key | | 0.1.11 | 2021-11-08 | [#7719](https://github.com/airbytehq/airbyte/pull/7719) | Improve handling of wide rows by buffering records based on their byte size rather than their count | -| 0.1.10 | 2021-10-11 | [\#6877](https://github.com/airbytehq/airbyte/pull/6877) | Add `normalization` capability, add `append+deduplication` sync mode | -| 0.1.9 | 2021-09-29 | [\#5970](https://github.com/airbytehq/airbyte/pull/5970) | Add support & test cases for MSSQL Destination via SSH tunnels | -| 0.1.8 | 2021-08-07 | [\#5272](https://github.com/airbytehq/airbyte/pull/5272) | Add batch method to insert records | -| 0.1.7 | 2021-07-30 | [\#5125](https://github.com/airbytehq/airbyte/pull/5125) | Enable `additionalPropertities` in spec.json | -| 0.1.6 | 2021-06-21 | [\#3555](https://github.com/airbytehq/airbyte/pull/3555) | Partial Success in BufferedStreamConsumer | -| 0.1.5 | 2021-07-20 | [\#4874](https://github.com/airbytehq/airbyte/pull/4874) | declare object types correctly in 
spec | -| 0.1.4 | 2021-06-17 | [\#3744](https://github.com/airbytehq/airbyte/pull/3744) | Fix doc/params in specification file | -| 0.1.3 | 2021-05-28 | [\#3728](https://github.com/airbytehq/airbyte/pull/3973) | Change dockerfile entrypoint | -| 0.1.2 | 2021-05-13 | [\#3367](https://github.com/airbytehq/airbyte/pull/3671) | Fix handle symbols unicode | -| 0.1.1 | 2021-05-11 | [\#3566](https://github.com/airbytehq/airbyte/pull/3195) | MS SQL Server Destination Release! | +| 0.1.10 | 2021-10-11 | [\#6877](https://github.com/airbytehq/airbyte/pull/6877) | Add `normalization` capability, add `append+deduplication` sync mode | +| 0.1.9 | 2021-09-29 | [\#5970](https://github.com/airbytehq/airbyte/pull/5970) | Add support & test cases for MSSQL Destination via SSH tunnels | +| 0.1.8 | 2021-08-07 | [\#5272](https://github.com/airbytehq/airbyte/pull/5272) | Add batch method to insert records | +| 0.1.7 | 2021-07-30 | [\#5125](https://github.com/airbytehq/airbyte/pull/5125) | Enable `additionalPropertities` in spec.json | +| 0.1.6 | 2021-06-21 | [\#3555](https://github.com/airbytehq/airbyte/pull/3555) | Partial Success in BufferedStreamConsumer | +| 0.1.5 | 2021-07-20 | [\#4874](https://github.com/airbytehq/airbyte/pull/4874) | declare object types correctly in spec | +| 0.1.4 | 2021-06-17 | [\#3744](https://github.com/airbytehq/airbyte/pull/3744) | Fix doc/params in specification file | +| 0.1.3 | 2021-05-28 | [\#3728](https://github.com/airbytehq/airbyte/pull/3973) | Change dockerfile entrypoint | +| 0.1.2 | 2021-05-13 | [\#3367](https://github.com/airbytehq/airbyte/pull/3671) | Fix handle symbols unicode | +| 0.1.1 | 2021-05-11 | [\#3566](https://github.com/airbytehq/airbyte/pull/3195) | MS SQL Server Destination Release! | ### Changelog (Strict Encrypt) + | Version | Date | Pull Request | Subject | |:--------| :--- | :--- | :--- | +| 0.1.5 | 2022-02-25 | [10421](https://github.com/airbytehq/airbyte/pull/10421) | Refactor JDBC parameters handling | | 0.1.4 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | Add `-XX:+ExitOnOutOfMemoryError` JVM option | | 0.1.3 | 2021-12-28 | [\#9158](https://github.com/airbytehq/airbyte/pull/9158) | Update connector fields title/description | | 0.1.2 | 2021-12-01 | [8371](https://github.com/airbytehq/airbyte/pull/8371) | Fixed incorrect handling "\n" in ssh key | diff --git a/docs/integrations/destinations/mysql.md b/docs/integrations/destinations/mysql.md index 35486908b507..26ab85bbf7f2 100644 --- a/docs/integrations/destinations/mysql.md +++ b/docs/integrations/destinations/mysql.md @@ -103,31 +103,32 @@ Using this feature requires additional configuration, when creating the destinat ## CHANGELOG -| Version | Date | Pull Request | Subject | -|:--------| :--- | :--- | :--- | -| 0.1.17 | 2022-02-16 | [10362](https://github.com/airbytehq/airbyte/pull/10362) | Add jdbc_url_params support for optional JDBC parameters | -| 0.1.16 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | Add `-XX:+ExitOnOutOfMemoryError` JVM option | -| 0.1.15 | 2021-12-01 | [8371](https://github.com/airbytehq/airbyte/pull/8371) | Fixed incorrect handling "\n" in ssh key | +| Version | Date | Pull Request | Subject | +|:--------| :--- | :--- |:----------------------------------------------------------------------------------------------------| +| 0.1.18 | 2022-02-25 | [10421](https://github.com/airbytehq/airbyte/pull/10421) | Refactor JDBC parameters handling | +| 0.1.17 | 2022-02-16 | [10362](https://github.com/airbytehq/airbyte/pull/10362) | 
Add jdbc_url_params support for optional JDBC parameters | +| 0.1.16 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | Add `-XX:+ExitOnOutOfMemoryError` JVM option | +| 0.1.15 | 2021-12-01 | [8371](https://github.com/airbytehq/airbyte/pull/8371) | Fixed incorrect handling "\n" in ssh key | | 0.1.14 | 2021-11-08 | [#7719](https://github.com/airbytehq/airbyte/pull/7719) | Improve handling of wide rows by buffering records based on their byte size rather than their count | -| 0.1.13 | 2021-09-28 | [\#6506](https://github.com/airbytehq/airbyte/pull/6506) | Added support for MySQL destination via TLS/SSL | -| 0.1.12 | 2021-09-24 | [\#6317](https://github.com/airbytehq/airbyte/pull/6317) | Added option to connect to DB via SSH | -| 0.1.11 | 2021-07-30 | [\#5125](https://github.com/airbytehq/airbyte/pull/5125) | Enable `additionalPropertities` in spec.json | -| 0.1.10 | 2021-07-28 | [\#5026](https://github.com/airbytehq/airbyte/pull/5026) | Add sanitized json fields in raw tables to handle quotes in column names | -| 0.1.7 | 2021-07-09 | [\#4651](https://github.com/airbytehq/airbyte/pull/4651) | Switch normalization flag on so users can use normalization. | -| 0.1.6 | 2021-07-03 | [\#4531](https://github.com/airbytehq/airbyte/pull/4531) | Added normalization for MySQL. | -| 0.1.5 | 2021-07-03 | [\#3973](https://github.com/airbytehq/airbyte/pull/3973) | Added `AIRBYTE_ENTRYPOINT` for kubernetes support. | -| 0.1.4 | 2021-07-03 | [\#3290](https://github.com/airbytehq/airbyte/pull/3290) | Switched to get states from destination instead of source. | -| 0.1.3 | 2021-07-03 | [\#3387](https://github.com/airbytehq/airbyte/pull/3387) | Fixed a bug for message length checking. | -| 0.1.2 | 2021-07-03 | [\#3327](https://github.com/airbytehq/airbyte/pull/3327) | Fixed LSEP unicode characters. | -| 0.1.1 | 2021-07-03 | [\#3289](https://github.com/airbytehq/airbyte/pull/3289) | Added support for outputting messages. | -| 0.1.0 | 2021-05-06 | [\#3242](https://github.com/airbytehq/airbyte/pull/3242) | Added MySQL destination. | - +| 0.1.13 | 2021-09-28 | [\#6506](https://github.com/airbytehq/airbyte/pull/6506) | Added support for MySQL destination via TLS/SSL | +| 0.1.12 | 2021-09-24 | [\#6317](https://github.com/airbytehq/airbyte/pull/6317) | Added option to connect to DB via SSH | +| 0.1.11 | 2021-07-30 | [\#5125](https://github.com/airbytehq/airbyte/pull/5125) | Enable `additionalPropertities` in spec.json | +| 0.1.10 | 2021-07-28 | [\#5026](https://github.com/airbytehq/airbyte/pull/5026) | Add sanitized json fields in raw tables to handle quotes in column names | +| 0.1.7 | 2021-07-09 | [\#4651](https://github.com/airbytehq/airbyte/pull/4651) | Switch normalization flag on so users can use normalization. | +| 0.1.6 | 2021-07-03 | [\#4531](https://github.com/airbytehq/airbyte/pull/4531) | Added normalization for MySQL. | +| 0.1.5 | 2021-07-03 | [\#3973](https://github.com/airbytehq/airbyte/pull/3973) | Added `AIRBYTE_ENTRYPOINT` for kubernetes support. | +| 0.1.4 | 2021-07-03 | [\#3290](https://github.com/airbytehq/airbyte/pull/3290) | Switched to get states from destination instead of source. | +| 0.1.3 | 2021-07-03 | [\#3387](https://github.com/airbytehq/airbyte/pull/3387) | Fixed a bug for message length checking. | +| 0.1.2 | 2021-07-03 | [\#3327](https://github.com/airbytehq/airbyte/pull/3327) | Fixed LSEP unicode characters. | +| 0.1.1 | 2021-07-03 | [\#3289](https://github.com/airbytehq/airbyte/pull/3289) | Added support for outputting messages. 
| +| 0.1.0 | 2021-05-06 | [\#3242](https://github.com/airbytehq/airbyte/pull/3242) | Added MySQL destination. | ## CHANGELOG destination-mysql-strict-encrypt | Version | Date | Pull Request | Subject | |:--------| :--- |:---------------------------------------------------------| :--- | -| 0.1.3 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | Add `-XX:+ExitOnOutOfMemoryError` JVM option | +| 0.1.4 | 2022-02-25 | [10421](https://github.com/airbytehq/airbyte/pull/10421) | Refactor JDBC parameters handling | +| 0.1.3 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | Add `-XX:+ExitOnOutOfMemoryError` JVM option | | 0.1.2 | 2021-12-01 | [\#8371](https://github.com/airbytehq/airbyte/pull/8371) | Fixed incorrect handling "\n" in ssh key | | 0.1.1 | 2021-11-08 | [#7719](https://github.com/airbytehq/airbyte/pull/7719) | Improve handling of wide rows by buffering records based on their byte size rather than their count | | 0.1.0 | 06.10.2021 | [\#6763](https://github.com/airbytehq/airbyte/pull/6763) | Added destination-mysql-strict-encrypt that supports SSL connections only. | diff --git a/docs/integrations/destinations/oracle.md b/docs/integrations/destinations/oracle.md index 2d16d3c393ae..c2d244980c6f 100644 --- a/docs/integrations/destinations/oracle.md +++ b/docs/integrations/destinations/oracle.md @@ -70,66 +70,47 @@ When using an SSH tunnel, you are configuring Airbyte to connect to an intermedi Using this feature requires additional configuration, when creating the source. We will talk through what each piece of configuration means. 1. Configure all fields for the source as you normally would, except `SSH Tunnel Method`. -2. `SSH Tunnel Method` defaults to `No Tunnel` \(meaning a direct connection\). If you want to use - an SSH Tunnel choose `SSH Key Authentication` or `Password Authentication`. - 1. Choose `Key Authentication` if you will be using an RSA private key as your secret for - establishing the SSH Tunnel \(see below for more information on generating this key\). - 2. Choose `Password Authentication` if you will be using a password as your secret for - establishing the SSH Tunnel. -3. `SSH Tunnel Jump Server Host` refers to the intermediate \(bastion\) server that Airbyte will - connect to. This should be a hostname or an IP Address. -4. `SSH Connection Port` is the port on the bastion server with which to make the SSH connection. - The default port for SSH connections is `22`, so unless you have explicitly changed something, go - with the default. -5. `SSH Login Username` is the username that Airbyte should use when connection to the bastion - server. This is NOT the Oracle username. -6. If you are using `Password Authentication`, then `SSH Login Username` should be set to the - password of the User from the previous step. If you are using `SSH Key Authentication` leave this - blank. Again, this is not the Oracle password, but the password for the OS-user that Airbyte is - using to perform commands on the bastion. -7. If you are using `SSH Key Authentication`, then `SSH Private Key` should be set to the RSA - Private Key that you are using to create the SSH connection. This should be the full contents of - the key file starting with `-----BEGIN RSA PRIVATE KEY-----` and ending - with `-----END RSA PRIVATE KEY-----`. +2. `SSH Tunnel Method` defaults to `No Tunnel` \(meaning a direct connection\). If you want to use an SSH Tunnel choose `SSH Key Authentication` or `Password Authentication`. + 1. 
Choose `Key Authentication` if you will be using an RSA private key as your secret for establishing the SSH Tunnel \(see below for more information on generating this key\).
+   2. Choose `Password Authentication` if you will be using a password as your secret for establishing the SSH Tunnel.
+3. `SSH Tunnel Jump Server Host` refers to the intermediate \(bastion\) server that Airbyte will connect to. This should be a hostname or an IP Address.
+4. `SSH Connection Port` is the port on the bastion server with which to make the SSH connection. The default port for SSH connections is `22`, so unless you have explicitly changed something, go with the default.
+5. `SSH Login Username` is the username that Airbyte should use when connecting to the bastion server. This is NOT the Oracle username.
+6. If you are using `Password Authentication`, then the password field should be set to the password of the user from the previous step. If you are using `SSH Key Authentication`, leave this blank. Again, this is not the Oracle password, but the password for the OS user that Airbyte is using to perform commands on the bastion.
+7. If you are using `SSH Key Authentication`, then `SSH Private Key` should be set to the RSA Private Key that you are using to create the SSH connection. This should be the full contents of the key file starting with `-----BEGIN RSA PRIVATE KEY-----` and ending with `-----END RSA PRIVATE KEY-----`.
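
As a concrete illustration, the tunnel settings above map onto a small block of connector configuration. This is a hedged sketch only: the property names below are assumptions based on the UI labels and Airbyte's common SSH tunnel layout, so verify them against the connector's spec.json:

```python
# Illustrative only: the two SSH tunnel configuration shapes described in
# steps 2-7 above. Property names are assumptions based on the UI labels;
# check the connector's spec.json for the authoritative schema.
ssh_key_auth = {
    "tunnel_method": {
        "tunnel_method": "SSH_KEY_AUTH",
        "tunnel_host": "bastion.example.com",  # SSH Tunnel Jump Server Host
        "tunnel_port": 22,                     # SSH Connection Port
        "tunnel_user": "airbyte",              # SSH Login Username (an OS user, not the Oracle user)
        "ssh_key": "-----BEGIN RSA PRIVATE KEY-----\n...\n-----END RSA PRIVATE KEY-----",
    }
}

ssh_password_auth = {
    "tunnel_method": {
        "tunnel_method": "SSH_PASSWORD_AUTH",
        "tunnel_host": "bastion.example.com",
        "tunnel_port": 22,
        "tunnel_user": "airbyte",
        "tunnel_user_password": "<bastion OS user password, not the Oracle password>",
    }
}
```

## Encryption Options

Airbyte has the ability to connect to the Oracle destination with 3 network connectivity options:

-1. `Unencrypted` the connection will be made using the TCP protocol. In this case, all data over the
-   network will be transmitted in unencrypted form.
-2. `Native network encryption` gives you the ability to encrypt database connections, without the
-   configuration overhead of TCP / IP and SSL / TLS and without the need to open and listen on
-   different ports. In this case, the *SQLNET.ENCRYPTION_CLIENT*
-   option will always be set as *REQUIRED* by default: The client or server will only accept
-   encrypted traffic, but the user has the opportunity to choose an `Encryption algorithm` according
-   to the security policies he needs.
-3. `TLS Encrypted` (verify certificate) - if this option is selected, data transfer will be
-   transfered using the TLS protocol, taking into account the handshake procedure and certificate
-   verification. To use this option, insert the content of the certificate issued by the server into
-   the `SSL PEM file` field
+1. `Unencrypted`: the connection will be made using the TCP protocol. In this case, all data over the network will be transmitted in unencrypted form.
+2. `Native network encryption` gives you the ability to encrypt database connections, without the configuration overhead of TCP/IP and SSL/TLS and without the need to open and listen on different ports. In this case, the *SQLNET.ENCRYPTION_CLIENT*
+   option will always be set as *REQUIRED* by default: the client or server will only accept encrypted traffic, but the user has the opportunity to choose an `Encryption algorithm` according to the security policies they need.
+3. `TLS Encrypted` (verify certificate) - if this option is selected, data transfer will be transferred using the TLS protocol, taking into account the handshake procedure and certificate verification.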
## Encryption Options Airbyte can connect to the Oracle destination with three network connectivity options: -1. `Unencrypted` the connection will be made using the TCP protocol. In this case, all data over the - network will be transmitted in unencrypted form. -2. `Native network encryption` gives you the ability to encrypt database connections, without the - configuration overhead of TCP / IP and SSL / TLS and without the need to open and listen on - different ports. In this case, the *SQLNET.ENCRYPTION_CLIENT* - option will always be set as *REQUIRED* by default: The client or server will only accept - encrypted traffic, but the user has the opportunity to choose an `Encryption algorithm` according - to the security policies he needs. -3. `TLS Encrypted` (verify certificate) - if this option is selected, data transfer will be - transfered using the TLS protocol, taking into account the handshake procedure and certificate - verification. To use this option, insert the content of the certificate issued by the server into - the `SSL PEM file` field +1. `Unencrypted`: the connection will be made using the TCP protocol, and all data will be transmitted over the network in unencrypted form. +2. `Native network encryption` gives you the ability to encrypt database connections without the configuration overhead of TCP/IP and SSL/TLS and without the need to open and listen on different ports. In this case, the *SQLNET.ENCRYPTION_CLIENT* + option will always be set to *REQUIRED* by default: the client or server will only accept encrypted traffic, and the user can choose an `Encryption algorithm` according to the required security policies. +3. `TLS Encrypted` (verify certificate): if this option is selected, data will be transferred using the TLS protocol, including the handshake procedure and certificate verification. To use this option, insert the content of the certificate issued by the server into the `SSL PEM file` field.
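For the `TLS Encrypted` option, the server certificate can usually be retrieved with `openssl`. This is an illustrative sketch only: `db.example.com` is a placeholder host, and it assumes the database listener exposes TCPS on the conventional port 2484:

```bash
# Print the certificate chain presented by the Oracle TCPS listener
# (replace db.example.com:2484 with your actual host and TCPS port).
openssl s_client -connect db.example.com:2484 -showcerts </dev/null

# Save just the server certificate as PEM; its contents can then be pasted
# into the `SSL PEM file` field.
openssl s_client -connect db.example.com:2484 </dev/null 2>/dev/null \
  | openssl x509 -outform PEM > server_cert.pem
```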
## Changelog -| Version | Date | Pull Request | Subject | -| :--- | :--- |:---------------------------------------------------------| :--- | -| 0.1.14 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | (unpublished) Add `-XX:+ExitOnOutOfMemoryError` JVM option | -| 0.1.13 | 2021-12-29 | [\#9177](https://github.com/airbytehq/airbyte/pull/9177) | Update connector fields title/description | -| 0.1.12 | 2021-11-08 | [\#7719](https://github.com/airbytehq/airbyte/pull/7719) | Improve handling of wide rows by buffering records based on their byte size rather than their count | -| 0.1.10 | 2021-10-08 | [\#6893](https://github.com/airbytehq/airbyte/pull/6893) | 🎉 Destination Oracle: implemented connection encryption | -| 0.1.9 | 2021-10-06 | [\#6611](https://github.com/airbytehq/airbyte/pull/6611) | 🐛 Destination Oracle: maxStringLength should be 128 | -| 0.1.8 | 2021-09-28 | [\#6370](https://github.com/airbytehq/airbyte/pull/6370) | Add SSH Support for Oracle Destination | -| 0.1.7 | 2021-08-30 | [\#5746](https://github.com/airbytehq/airbyte/pull/5746) | Use default column name for raw tables | -| 0.1.6 | 2021-08-23 | [\#5542](https://github.com/airbytehq/airbyte/pull/5542) | Remove support for Oracle 11g to allow normalization | -| 0.1.5 | 2021-08-10 | [\#5307](https://github.com/airbytehq/airbyte/pull/5307) | 🐛 Destination Oracle: Fix destination check for users without dba role | -| 0.1.4 | 2021-07-30 | [\#5125](https://github.com/airbytehq/airbyte/pull/5125) | Enable `additionalPropertities` in spec.json | -| 0.1.3 | 2021-07-21 | [\#3555](https://github.com/airbytehq/airbyte/pull/3555) | Partial Success in BufferedStreamConsumer | -| 0.1.2 | 2021-07-20 | [\#4874](https://github.com/airbytehq/airbyte/pull/4874) | Require `sid` instead of `database` in connector specification | - +| Version | Date | Pull Request | Subject | +|:--------| :--- |:---------------------------------------------------------|:----------------------------------------------------------------------------------------------------| +| 0.1.15 | 2022-02-25 | [10421](https://github.com/airbytehq/airbyte/pull/10421) | Refactor JDBC parameters handling and remove DBT support | +| 0.1.14 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | (unpublished) Add `-XX:+ExitOnOutOfMemoryError` JVM option | +| 0.1.13 | 2021-12-29 | [\#9177](https://github.com/airbytehq/airbyte/pull/9177) | Update connector fields title/description | +| 0.1.12 | 2021-11-08 | [\#7719](https://github.com/airbytehq/airbyte/pull/7719) | Improve handling of wide rows by buffering records based on their byte size rather than their count | +| 0.1.10 | 2021-10-08 | [\#6893](https://github.com/airbytehq/airbyte/pull/6893) | 🎉 Destination Oracle: implemented connection encryption | +| 0.1.9 | 2021-10-06 | [\#6611](https://github.com/airbytehq/airbyte/pull/6611) | 🐛 Destination Oracle: maxStringLength should be 128 | +| 0.1.8 | 2021-09-28 | [\#6370](https://github.com/airbytehq/airbyte/pull/6370) | Add SSH Support for Oracle Destination | +| 0.1.7 | 2021-08-30 | [\#5746](https://github.com/airbytehq/airbyte/pull/5746) | Use default column name for raw tables | +| 0.1.6 | 2021-08-23 | [\#5542](https://github.com/airbytehq/airbyte/pull/5542) | Remove support for Oracle 11g to allow normalization | +| 0.1.5 | 2021-08-10 | [\#5307](https://github.com/airbytehq/airbyte/pull/5307) | 🐛 Destination Oracle: Fix destination check for users without dba role | +| 0.1.4 | 2021-07-30 | [\#5125](https://github.com/airbytehq/airbyte/pull/5125) | Enable `additionalProperties` in spec.json | +| 0.1.3 | 2021-07-21 | [\#3555](https://github.com/airbytehq/airbyte/pull/3555) | Partial Success in BufferedStreamConsumer | +| 0.1.2 | 2021-07-20 | [\#4874](https://github.com/airbytehq/airbyte/pull/4874) | Require `sid` instead of `database` in connector specification | ### Changelog (Strict Encrypt) -| Version | Date | Pull Request | Subject | -|:--------|:-----------|:--------------------------------------------------------| :--- | -| 0.1.3 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | (unpublished) Add `-XX:+ExitOnOutOfMemoryError` JVM option | -| 0.1.2 | 2021-01-29 | [\#9177](https://github.com/airbytehq/airbyte/pull/9177) | Update connector fields title/description | -| 0.1.1 | 2021-11-08 | [\#7719](https://github.com/airbytehq/airbyte/pull/7719) | Improve handling of wide rows by buffering records based on their byte size rather than their count | + +| Version | Date | Pull Request | Subject | +|:--------|:-----------|:--------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------| +| 0.1.4 | 2022-02-25 | [10421](https://github.com/airbytehq/airbyte/pull/10421) | Refactor JDBC parameters handling and remove DBT support | +| 0.1.3 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | (unpublished) Add `-XX:+ExitOnOutOfMemoryError` JVM option | +| 0.1.2 | 2021-12-29 | [\#9177](https://github.com/airbytehq/airbyte/pull/9177) | Update connector fields title/description | +| 0.1.1 | 2021-11-08 | [\#7719](https://github.com/airbytehq/airbyte/pull/7719) | Improve handling of wide rows by buffering records based on their byte size rather than their count | diff --git a/docs/integrations/destinations/postgres.md b/docs/integrations/destinations/postgres.md index 6ec3b4d743da..3c10867e65d8 100644 --- a/docs/integrations/destinations/postgres.md +++ b/docs/integrations/destinations/postgres.md @@ -82,11 +82,12 @@ Therefore, Airbyte Postgres destination will create tables and schemas using the ## Changelog -| Version | Date | Pull Request | Subject | -|:--------| :--- | :--- | :--- | -| 0.3.14 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | (unpublished) Add `-XX:+ExitOnOutOfMemoryError` JVM option | -| 0.3.13 | 2021-12-01 | [8371](https://github.com/airbytehq/airbyte/pull/8371) | Fixed incorrect handling "\n" in ssh key | +| Version | Date | Pull Request | Subject | +|:--------| :--- | :--- |:----------------------------------------------------------------------------------------------------| +| 0.3.15 | 2022-02-25 | [10421](https://github.com/airbytehq/airbyte/pull/10421) | Refactor JDBC parameters handling | +| 0.3.14 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | (unpublished) Add `-XX:+ExitOnOutOfMemoryError` JVM option | +| 0.3.13 | 2021-12-01 | [8371](https://github.com/airbytehq/airbyte/pull/8371) | Fixed incorrect handling of "\n" in SSH key | | 0.3.12 | 2021-11-08 | [#7719](https://github.com/airbytehq/airbyte/pull/7719) | Improve handling of wide rows by buffering records based on their byte size rather than their count | -| 0.3.11 | 2021-09-07 | [\#5743](https://github.com/airbytehq/airbyte/pull/5743) | Add SSH Tunnel support | -| 0.3.10 | 2021-08-11 |
[\#5336](https://github.com/airbytehq/airbyte/pull/5336) | 🐛 Destination Postgres: fix \u0000\(NULL\) value processing | +| 0.3.11 | 2021-09-07 | [\#5743](https://github.com/airbytehq/airbyte/pull/5743) | Add SSH Tunnel support | +| 0.3.10 | 2021-08-11 | [\#5336](https://github.com/airbytehq/airbyte/pull/5336) | 🐛 Destination Postgres: fix \u0000\(NULL\) value processing | diff --git a/docs/integrations/destinations/redshift.md b/docs/integrations/destinations/redshift.md index 7885ac484ec6..17d5c6fb3ed6 100644 --- a/docs/integrations/destinations/redshift.md +++ b/docs/integrations/destinations/redshift.md @@ -123,13 +123,14 @@ All Redshift connections are encrypted using SSL ## Changelog | Version | Date | Pull Request | Subject | -| :------ | :-------- | :----- | :------ | -| 0.3.25 | 2022-02-14 | [#9920](https://github.com/airbytehq/airbyte/pull/9920) | Updated the size of staging files for S3 staging. Also, added closure of S3 writers to staging files when data has been written to an staging file. | -| 0.3.24 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | Add `-XX:+ExitOnOutOfMemoryError` JVM option | -| 0.3.23 | 2021-12-16 | [\#8855](https://github.com/airbytehq/airbyte/pull/8855) | Add `purgeStagingData` option to enable/disable deleting the staging data | -| 0.3.22 | 2021-12-15 | [#8607](https://github.com/airbytehq/airbyte/pull/8607) | Accept a path for the staging data | -| 0.3.21 | 2021-12-10 | [#8562](https://github.com/airbytehq/airbyte/pull/8562) | Moving classes around for better dependency management | -| 0.3.20 | 2021-11-08 | [#7719](https://github.com/airbytehq/airbyte/pull/7719) | Improve handling of wide rows by buffering records based on their byte size rather than their count | +|:--------| :-------- | :----- | :------ | +| 0.3.27 | 2022-02-25 | [10421](https://github.com/airbytehq/airbyte/pull/10421) | Refactor JDBC parameters handling | +| 0.3.25 | 2022-02-14 | [#9920](https://github.com/airbytehq/airbyte/pull/9920) | Updated the size of staging files for S3 staging. Also, added closure of S3 writers to staging files when data has been written to a staging file.
| +| 0.3.24 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | Add `-XX:+ExitOnOutOfMemoryError` JVM option | +| 0.3.23 | 2021-12-16 | [\#8855](https://github.com/airbytehq/airbyte/pull/8855) | Add `purgeStagingData` option to enable/disable deleting the staging data | +| 0.3.22 | 2021-12-15 | [#8607](https://github.com/airbytehq/airbyte/pull/8607) | Accept a path for the staging data | +| 0.3.21 | 2021-12-10 | [#8562](https://github.com/airbytehq/airbyte/pull/8562) | Moving classes around for better dependency management | +| 0.3.20 | 2021-11-08 | [#7719](https://github.com/airbytehq/airbyte/pull/7719) | Improve handling of wide rows by buffering records based on their byte size rather than their count | | 0.3.19 | 2021-10-21 | [7234](https://github.com/airbytehq/airbyte/pull/7234) | Allow SSL traffic only | | 0.3.17 | 2021-10-12 | [6965](https://github.com/airbytehq/airbyte/pull/6965) | Added SSL Support | | 0.3.16 | 2021-10-11 | [6949](https://github.com/airbytehq/airbyte/pull/6949) | Each stream was split into files of 10,000 records each for copying using S3 or GCS | diff --git a/docs/integrations/destinations/snowflake.md b/docs/integrations/destinations/snowflake.md index aa39e657f2b7..8f0b165cfce7 100644 --- a/docs/integrations/destinations/snowflake.md +++ b/docs/integrations/destinations/snowflake.md @@ -224,6 +224,7 @@ Finally, you need to add read/write permissions to your bucket with that email. | Version | Date | Pull Request | Subject | |:--------|:-----------| :----- | :------ | +| 0.4.17 | 2022-02-25 | [10421](https://github.com/airbytehq/airbyte/pull/10421) | Refactor JDBC parameters handling | | 0.4.14 | 2022-02-17 | [\#10394](https://github.com/airbytehq/airbyte/pull/10394) | Reduce memory footprint. | | 0.4.13 | 2022-02-16 | [\#10212](https://github.com/airbytehq/airbyte/pull/10212) | Execute COPY command in parallel for S3 and GCS staging | | 0.4.12 | 2022-02-15 | [\#10342](https://github.com/airbytehq/airbyte/pull/10342) | Use connection pool, and fix connection leak. | diff --git a/docs/integrations/sources/zendesk-support.md b/docs/integrations/sources/zendesk-support.md index 7fc8ea1afa91..83cdf21b65f5 100644 --- a/docs/integrations/sources/zendesk-support.md +++ b/docs/integrations/sources/zendesk-support.md @@ -97,6 +97,7 @@ We recommend creating a restricted, read-only key specifically for Airbyte acces | Version | Date | Pull Request | Subject | |:---------|:-----------| :----- |:-------------------------------------------------------| +| `0.2.0` | 2022-03-01 | [9456](https://github.com/airbytehq/airbyte/pull/9456) | Update source to use future requests | | `0.1.12` | 2022-01-25 | [9785](https://github.com/airbytehq/airbyte/pull/9785) | Add log message | | `0.1.11` | 2021-12-21 | [8987](https://github.com/airbytehq/airbyte/pull/8987) | Update connector fields title/description | | `0.1.9` | 2021-12-16 | [8616](https://github.com/airbytehq/airbyte/pull/8616) | Adds Brands, CustomRoles and Schedules | diff --git a/docs/operator-guides/upgrading-airbyte.md b/docs/operator-guides/upgrading-airbyte.md index 4755a0ff9825..836646fddd6f 100644 --- a/docs/operator-guides/upgrading-airbyte.md +++ b/docs/operator-guides/upgrading-airbyte.md @@ -101,7 +101,7 @@ If you are upgrading from \(i.e. your current version of Airbyte is\) Airbyte ve Here's an example of what it might look like with the values filled in. It assumes that the downloaded `airbyte_archive.tar.gz` is in `/tmp`. 
```bash - docker run --rm -v /tmp:/config airbyte/migration:0.35.39-alpha --\ + docker run --rm -v /tmp:/config airbyte/migration:0.35.42-alpha --\ --input /config/airbyte_archive.tar.gz\ --output /config/airbyte_archive_migrated.tar.gz ``` diff --git a/kube/overlays/stable-with-resource-limits/.env b/kube/overlays/stable-with-resource-limits/.env index 3b0d850eca61..5646686b364a 100644 --- a/kube/overlays/stable-with-resource-limits/.env +++ b/kube/overlays/stable-with-resource-limits/.env @@ -1,4 +1,4 @@ -AIRBYTE_VERSION=0.35.39-alpha +AIRBYTE_VERSION=0.35.42-alpha # Airbyte Internal Database, see https://docs.airbyte.io/operator-guides/configuring-airbyte-db DATABASE_HOST=airbyte-db-svc diff --git a/kube/overlays/stable-with-resource-limits/kustomization.yaml b/kube/overlays/stable-with-resource-limits/kustomization.yaml index 76ecb37b5a5a..a5b67ebdeec1 100644 --- a/kube/overlays/stable-with-resource-limits/kustomization.yaml +++ b/kube/overlays/stable-with-resource-limits/kustomization.yaml @@ -8,17 +8,17 @@ bases: images: - name: airbyte/db - newTag: 0.35.39-alpha + newTag: 0.35.42-alpha - name: airbyte/bootloader - newTag: 0.35.39-alpha + newTag: 0.35.42-alpha - name: airbyte/scheduler - newTag: 0.35.39-alpha + newTag: 0.35.42-alpha - name: airbyte/server - newTag: 0.35.39-alpha + newTag: 0.35.42-alpha - name: airbyte/webapp - newTag: 0.35.39-alpha + newTag: 0.35.42-alpha - name: airbyte/worker - newTag: 0.35.39-alpha + newTag: 0.35.42-alpha - name: temporalio/auto-setup newTag: 1.7.0 diff --git a/kube/overlays/stable/.env b/kube/overlays/stable/.env index 3b0d850eca61..5646686b364a 100644 --- a/kube/overlays/stable/.env +++ b/kube/overlays/stable/.env @@ -1,4 +1,4 @@ -AIRBYTE_VERSION=0.35.39-alpha +AIRBYTE_VERSION=0.35.42-alpha # Airbyte Internal Database, see https://docs.airbyte.io/operator-guides/configuring-airbyte-db DATABASE_HOST=airbyte-db-svc diff --git a/kube/overlays/stable/kustomization.yaml b/kube/overlays/stable/kustomization.yaml index a334f477707d..bd10ccbc6ea9 100644 --- a/kube/overlays/stable/kustomization.yaml +++ b/kube/overlays/stable/kustomization.yaml @@ -8,17 +8,17 @@ bases: images: - name: airbyte/db - newTag: 0.35.39-alpha + newTag: 0.35.42-alpha - name: airbyte/bootloader - newTag: 0.35.39-alpha + newTag: 0.35.42-alpha - name: airbyte/scheduler - newTag: 0.35.39-alpha + newTag: 0.35.42-alpha - name: airbyte/server - newTag: 0.35.39-alpha + newTag: 0.35.42-alpha - name: airbyte/webapp - newTag: 0.35.39-alpha + newTag: 0.35.42-alpha - name: airbyte/worker - newTag: 0.35.39-alpha + newTag: 0.35.42-alpha - name: temporalio/auto-setup newTag: 1.7.0