diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 34b2b23072f1..79d6dfa74499 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 0.36.3-alpha
+current_version = 0.36.4-alpha
 commit = False
 tag = False
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(\-[a-z]+)?
diff --git a/.env b/.env
index 056bc9558a38..7ac8ef4ad13e 100644
--- a/.env
+++ b/.env
@@ -10,7 +10,7 @@
 
 ### SHARED ###
-VERSION=0.36.3-alpha
+VERSION=0.36.4-alpha
 
 # When using the airbyte-db via default docker image
 CONFIG_ROOT=/data
@@ -40,7 +40,7 @@ DATABASE_PASSWORD=docker
 DATABASE_HOST=db
 DATABASE_PORT=5432
 DATABASE_DB=airbyte
-# translate manually DATABASE_URL=jdbc:postgresql://${DATABASE_HOST}:${DATABASE_PORT/${DATABASE_DB} (do not include the username or password here)
+# translate manually DATABASE_URL=jdbc:postgresql://${DATABASE_HOST}:${DATABASE_PORT}/${DATABASE_DB} (do not include the username or password here)
 DATABASE_URL=jdbc:postgresql://db:5432/airbyte
 JOBS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION=0.29.15.001
diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml
deleted file mode 100644
index 793172b2911a..000000000000
--- a/.github/workflows/documentation.yml
+++ /dev/null
@@ -1,37 +0,0 @@
-name: compile-docusaurus-static-assets
-
-on:
-  push:
-    branches: [master]
-
-  # Allows you to run this workflow manually from the Actions tab
-  workflow_dispatch:
-
-# A workflow run is made up of one or more jobs that can run sequentially or in parallel
-jobs:
-  deploy-docusaurus-to-docs-airbyte-io:
-    runs-on: ubuntu-latest
-    steps:
-      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
-      - name: Check out repo
-        # `uses` taps GH ORG/REPO@version.
-        # "actions" is a default org for some common GH actions
-        uses: actions/checkout@v3
-        with:
-          fetch-depth: 0
-      # Node is required for yarn
-      - name: Set up Yarn
-        uses: actions/setup-node@v2
-        with:
-          node-version: '16.13.0'
-          cache: 'yarn'
-          cache-dependency-path: docusaurus
-      # # Build Docusaurus website
-      # - name: Check for docusaurus changes not committed
-      #   run: ./tools/bin/check_docusaurus_build_changes
-      # # Install and build Docusaurus website
-      # - name: Deploy docs to production (it's weird)
-      #   run: ./tools/bin/deploy_docusaurus
-      #   env:
-      #     GITHUB_TOKEN: ${{ secrets.OCTAVIA_PAT }}
-
diff --git a/.github/workflows/publish-command.yml b/.github/workflows/publish-command.yml
index b7c0aaa17519..e1590d173a25 100644
--- a/.github/workflows/publish-command.yml
+++ b/.github/workflows/publish-command.yml
@@ -228,6 +228,7 @@ jobs:
         run: |
           git add -u
           git commit -m "auto-bump connector version"
+          git pull origin ${{ github.event.inputs.gitref }}
           git push origin ${{ github.event.inputs.gitref }}
       - name: Add Version Bump Success Comment
         if: github.event.inputs.comment-id && github.event.inputs.auto-bump-version == 'true' && success()
diff --git a/airbyte-bootloader/Dockerfile b/airbyte-bootloader/Dockerfile
index e2753b0957d2..68acc05b4ce0 100644
--- a/airbyte-bootloader/Dockerfile
+++ b/airbyte-bootloader/Dockerfile
@@ -1,7 +1,7 @@
 ARG JDK_VERSION=17.0.1
 FROM openjdk:${JDK_VERSION}-slim
 
-ARG VERSION=0.36.3-alpha
+ARG VERSION=0.36.4-alpha
 
 ENV APPLICATION airbyte-bootloader
 ENV VERSION ${VERSION}
diff --git a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml
index bcf75228f113..e9f195a59822 100644
--- a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml
+++ 
b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml @@ -167,7 +167,7 @@ - name: Postgres destinationDefinitionId: 25c5221d-dce2-4163-ade9-739ef790f503 dockerRepository: airbyte/destination-postgres - dockerImageTag: 0.3.18 + dockerImageTag: 0.3.19 documentationUrl: https://docs.airbyte.io/integrations/destinations/postgres icon: postgresql.svg - name: Pulsar diff --git a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml index af9b9c4fb3d5..dfa8a9384627 100644 --- a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml @@ -3047,7 +3047,7 @@ supported_destination_sync_modes: - "overwrite" - "append" -- dockerImage: "airbyte/destination-postgres:0.3.18" +- dockerImage: "airbyte/destination-postgres:0.3.19" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/postgres" connectionSpecification: diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index a021f9fce2f3..d642115f0f67 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -42,7 +42,7 @@ - name: Apify Dataset sourceDefinitionId: 47f17145-fe20-4ef5-a548-e29b048adf84 dockerRepository: airbyte/source-apify-dataset - dockerImageTag: 0.1.9 + dockerImageTag: 0.1.11 documentationUrl: https://docs.airbyte.io/integrations/sources/apify-dataset icon: apify.svg sourceType: api @@ -529,7 +529,7 @@ - name: OpenWeather sourceDefinitionId: d8540a80-6120-485d-b7d6-272bca477d9b dockerRepository: airbyte/source-openweather - dockerImageTag: 0.1.1 + dockerImageTag: 0.1.4 documentationUrl: https://docs.airbyte.io/integrations/sources/openweather sourceType: api - name: Oracle DB @@ -737,7 +737,7 @@ - name: Smartsheets sourceDefinitionId: 374ebc65-6636-4ea0-925c-7d35999a8ffc dockerRepository: airbyte/source-smartsheets - dockerImageTag: 0.1.9 + dockerImageTag: 0.1.10 documentationUrl: https://docs.airbyte.io/integrations/sources/smartsheets icon: smartsheet.svg sourceType: api @@ -751,7 +751,7 @@ - name: Snowflake sourceDefinitionId: e2d65910-8c8b-40a1-ae7d-ee2416b2bfa2 dockerRepository: airbyte/source-snowflake - dockerImageTag: 0.1.10 + dockerImageTag: 0.1.11 documentationUrl: https://docs.airbyte.io/integrations/sources/snowflake icon: snowflake.svg sourceType: database @@ -799,7 +799,7 @@ - name: TikTok Marketing sourceDefinitionId: 4bfac00d-ce15-44ff-95b9-9e3c3e8fbd35 dockerRepository: airbyte/source-tiktok-marketing - dockerImageTag: 0.1.6 + dockerImageTag: 0.1.7 documentationUrl: https://docs.airbyte.io/integrations/sources/tiktok-marketing icon: tiktok.svg sourceType: api diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml index d69af5d6f599..f3e3f72075ba 100644 --- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml @@ -511,7 +511,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-apify-dataset:0.1.9" +- dockerImage: "airbyte/source-apify-dataset:0.1.11" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/apify-dataset" connectionSpecification: @@ -5637,7 +5637,7 @@ 
supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-openweather:0.1.1" +- dockerImage: "airbyte/source-openweather:0.1.4" spec: documentationUrl: "https://docsurl.com" connectionSpecification: @@ -7856,7 +7856,7 @@ oauthFlowOutputParameters: - - "access_token" - - "refresh_token" -- dockerImage: "airbyte/source-smartsheets:0.1.9" +- dockerImage: "airbyte/source-smartsheets:0.1.10" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/smartsheets" connectionSpecification: @@ -7878,6 +7878,15 @@ title: "Sheet ID" description: "The spreadsheet ID. Find in the spreadsheet menu: File > Properties" type: "string" + start_datetime: + title: "Start Datetime" + type: "string" + examples: + - "2000-01-01T13:00:00" + - "2000-01-01T13:00:00-07:00" + description: "ISO 8601, for instance: `YYYY-MM-DDTHH:MM:SS`, `YYYY-MM-DDTHH:MM:SS+HH:MM`" + format: "date-time" + default: "2020-01-01T00:00:00+00:00" supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] @@ -7955,7 +7964,7 @@ - - "client_secret" oauthFlowOutputParameters: - - "refresh_token" -- dockerImage: "airbyte/source-snowflake:0.1.10" +- dockerImage: "airbyte/source-snowflake:0.1.11" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/snowflake" connectionSpecification: @@ -7968,10 +7977,77 @@ - "warehouse" - "database" - "schema" - - "username" - - "password" - additionalProperties: false + additionalProperties: true properties: + credentials: + title: "Authorization Method" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + order: 0 + required: + - "client_id" + - "client_secret" + - "auth_type" + properties: + auth_type: + type: "string" + const: "OAuth" + default: "OAuth" + order: 0 + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your Snowflake developer application." + airbyte_secret: true + order: 1 + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your Snowflake developer application." + airbyte_secret: true + order: 2 + access_token: + type: "string" + title: "Access Token" + description: "Access Token for making authenticated requests." + airbyte_secret: true + order: 3 + refresh_token: + type: "string" + title: "Refresh Token" + description: "Refresh Token for making authenticated requests." + airbyte_secret: true + order: 4 + - title: "Username and Password" + type: "object" + required: + - "username" + - "password" + - "auth_type" + order: 1 + properties: + auth_type: + type: "string" + const: "username/password" + default: "username/password" + order: 0 + username: + description: "The username you created to allow Airbyte to access\ + \ the database." + examples: + - "AIRBYTE_USER" + type: "string" + title: "Username" + order: 1 + password: + description: "The password associated with the username." + type: "string" + airbyte_secret: true + title: "Password" + order: 2 + order: 0 host: description: "The host domain of the snowflake instance (must include the\ \ account, region, cloud environment, and end with snowflakecomputing.com)." @@ -7979,58 +8055,96 @@ - "accountname.us-east-2.aws.snowflakecomputing.com" type: "string" title: "Account Name" - order: 0 + order: 1 role: description: "The role you created for Airbyte to access Snowflake." examples: - "AIRBYTE_ROLE" type: "string" title: "Role" - order: 1 + order: 2 warehouse: description: "The warehouse you created for Airbyte to access data." 
examples: - "AIRBYTE_WAREHOUSE" type: "string" title: "Warehouse" - order: 2 + order: 3 database: description: "The database you created for Airbyte to access data." examples: - "AIRBYTE_DATABASE" type: "string" title: "Database" - order: 3 + order: 4 schema: description: "The source Snowflake schema tables." examples: - "AIRBYTE_SCHEMA" type: "string" title: "Schema" - order: 4 - username: - description: "The username you created to allow Airbyte to access the database." - examples: - - "AIRBYTE_USER" - type: "string" - title: "Username" order: 5 - password: - description: "The password associated with the username." - type: "string" - airbyte_secret: true - title: "Password" - order: 6 jdbc_url_params: description: "Additional properties to pass to the JDBC URL string when\ \ connecting to the database formatted as 'key=value' pairs separated\ \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." title: "JDBC URL Params" type: "string" - order: 7 + order: 6 supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] + advanced_auth: + auth_flow_type: "oauth2.0" + predicate_key: + - "credentials" + - "auth_type" + predicate_value: "OAuth" + oauth_config_specification: + oauth_user_input_from_connector_config_specification: + type: "object" + additionalProperties: false + properties: + host: + type: "string" + path_in_connector_config: + - "host" + complete_oauth_output_specification: + type: "object" + additionalProperties: false + properties: + access_token: + type: "string" + path_in_connector_config: + - "credentials" + - "access_token" + refresh_token: + type: "string" + path_in_connector_config: + - "credentials" + - "refresh_token" + complete_oauth_server_input_specification: + type: "object" + additionalProperties: false + properties: + client_id: + type: "string" + client_secret: + type: "string" + complete_oauth_server_output_specification: + type: "object" + additionalProperties: false + properties: + client_id: + type: "string" + path_in_connector_config: + - "credentials" + - "client_id" + client_secret: + type: "string" + path_in_connector_config: + - "credentials" + - "client_secret" - dockerImage: "airbyte/source-square:0.1.4" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/square" @@ -8531,7 +8645,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-tiktok-marketing:0.1.6" +- dockerImage: "airbyte/source-tiktok-marketing:0.1.7" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/tiktok-marketing" changelogUrl: "https://docs.airbyte.io/integrations/sources/tiktok-marketing" @@ -8539,31 +8653,10 @@ title: "TikTok Marketing Source Spec" type: "object" properties: - start_date: - title: "Start Date" - description: "The Start Date in format: YYYY-MM-DD. Any data before this\ - \ date will not be replicated. If this parameter is not set, all data\ - \ will be replicated." - default: "2016-09-01" - pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" - order: 0 - type: "string" - report_granularity: - title: "Report Granularity" - description: "Which time granularity should be grouped by; for LIFETIME\ - \ there will be no grouping. This option is used for reports' streams\ - \ only." 
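
The Snowflake source spec earlier in this hunk moves `username`/`password` out of the top level and into a `credentials` oneOf alongside a new OAuth option. As a rough illustration, a config matching the new "Username and Password" branch could look like the sketch below, assuming Airbyte's `Jsons` Jackson helper; all field values are placeholders:

    // Illustrative only; values are placeholders, not a real account.
    final JsonNode config = Jsons.jsonNode(Map.of(
        "host", "accountname.us-east-2.aws.snowflakecomputing.com",
        "role", "AIRBYTE_ROLE",
        "warehouse", "AIRBYTE_WAREHOUSE",
        "database", "AIRBYTE_DATABASE",
        "schema", "AIRBYTE_SCHEMA",
        "credentials", Map.of(
            "auth_type", "username/password",
            "username", "AIRBYTE_USER",
            "password", "secret")));

The `advanced_auth` block's `predicate_key`/`predicate_value` pair tells the platform to run the OAuth flow only when `credentials.auth_type` equals `"OAuth"`, so a config shaped like the one above bypasses it.
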
- default: "DAY" - enum: - - "LIFETIME" - - "DAY" - - "HOUR" - order: 1 - type: "string" credentials: - title: "Authorization Method" + title: "Authentication *" default: {} - order: 3 + order: 0 type: "object" oneOf: - title: "OAuth2.0" @@ -8573,8 +8666,6 @@ title: "Auth Type" const: "oauth2.0" order: 0 - enum: - - "oauth2.0" type: "string" app_id: title: "App ID" @@ -8602,8 +8693,6 @@ title: "Auth Type" const: "prod_access_token" order: 0 - enum: - - "prod_access_token" type: "string" app_id: title: "App ID" @@ -8630,8 +8719,6 @@ title: "Auth Type" const: "sandbox_access_token" order: 0 - enum: - - "sandbox_access_token" type: "string" advertiser_id: title: "Advertiser ID" @@ -8646,6 +8733,27 @@ required: - "advertiser_id" - "access_token" + start_date: + title: "Start Date *" + description: "The Start Date in format: YYYY-MM-DD. Any data before this\ + \ date will not be replicated. If this parameter is not set, all data\ + \ will be replicated." + default: "2016-09-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 1 + type: "string" + report_granularity: + title: "Report Granularity *" + description: "Which time granularity should be grouped by; for LIFETIME\ + \ there will be no grouping. This option is used for reports' streams\ + \ only." + default: "DAY" + enum: + - "LIFETIME" + - "DAY" + - "HOUR" + order: 2 + type: "string" supportsIncremental: true supportsNormalization: false supportsDBT: false diff --git a/airbyte-container-orchestrator/Dockerfile b/airbyte-container-orchestrator/Dockerfile index 68ea502c2695..9fb12182b0f0 100644 --- a/airbyte-container-orchestrator/Dockerfile +++ b/airbyte-container-orchestrator/Dockerfile @@ -25,7 +25,7 @@ RUN curl -fsSLo /usr/share/keyrings/kubernetes-archive-keyring.gpg https://packa RUN echo "deb [signed-by=/usr/share/keyrings/kubernetes-archive-keyring.gpg] https://apt.kubernetes.io/ kubernetes-xenial main" | tee /etc/apt/sources.list.d/kubernetes.list RUN apt-get update && apt-get install -y kubectl -ARG VERSION=0.36.3-alpha +ARG VERSION=0.36.4-alpha ENV APPLICATION airbyte-container-orchestrator ENV VERSION=${VERSION} diff --git a/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/AbstractSourceConnectorTest.java b/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/AbstractSourceConnectorTest.java index 02aeaacb3fde..9bc01f2f7208 100644 --- a/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/AbstractSourceConnectorTest.java +++ b/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/AbstractSourceConnectorTest.java @@ -140,6 +140,13 @@ protected StandardCheckConnectionOutput runCheck() throws Exception { .run(new StandardCheckConnectionInput().withConnectionConfiguration(getConfig()), jobRoot); } + protected String runCheckAndGetStatusAsString(JsonNode config) throws Exception { + return new DefaultCheckConnectionWorker( + workerConfigs, + new AirbyteIntegrationLauncher(JOB_ID, JOB_ATTEMPT, getImageName(), processFactory, workerConfigs.getResourceRequirements())) + .run(new StandardCheckConnectionInput().withConnectionConfiguration(config), jobRoot).getStatus().toString(); + } + protected AirbyteCatalog runDiscover() throws Exception { return new DefaultDiscoverCatalogWorker( workerConfigs, diff --git 
a/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksConstants.java b/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksConstants.java index f3d014d63726..4a5c1a4b146a 100644 --- a/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksConstants.java +++ b/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksConstants.java @@ -15,7 +15,6 @@ public class DatabricksConstants { "delta.autoOptimize.optimizeWrite = true", "delta.autoOptimize.autoCompact = true"); - private DatabricksConstants() { - } + private DatabricksConstants() {} } diff --git a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/SqlOperations.java b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/SqlOperations.java index 3d4eea93012b..37212fcff9e1 100644 --- a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/SqlOperations.java +++ b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/SqlOperations.java @@ -21,7 +21,7 @@ public interface SqlOperations { /** * Create a schema with provided name if it does not already exist. * - * @param database Database that the connector is syncing + * @param database Database that the connector is syncing * @param schemaName Name of schema. * @throws Exception exception */ @@ -30,7 +30,7 @@ public interface SqlOperations { /** * Denotes whether the schema exists in destination database * - * @param database Database that the connector is syncing + * @param database Database that the connector is syncing * @param schemaName Name of schema. * @return true if the schema exists in destination database, false if it doesn't */ @@ -41,9 +41,9 @@ default boolean isSchemaExists(final JdbcDatabase database, final String schemaN /** * Create a table with provided name in provided schema if it does not already exist. * - * @param database Database that the connector is syncing + * @param database Database that the connector is syncing * @param schemaName Name of schema - * @param tableName Name of table + * @param tableName Name of table * @throws Exception exception */ void createTableIfNotExists(JdbcDatabase database, String schemaName, String tableName) throws Exception; @@ -51,9 +51,9 @@ default boolean isSchemaExists(final JdbcDatabase database, final String schemaN /** * Query to create a table with provided name in provided schema if it does not already exist. * - * @param database Database that the connector is syncing + * @param database Database that the connector is syncing * @param schemaName Name of schema - * @param tableName Name of table + * @param tableName Name of table * @return query */ String createTableQuery(JdbcDatabase database, String schemaName, String tableName); @@ -62,7 +62,7 @@ default boolean isSchemaExists(final JdbcDatabase database, final String schemaN * Drop the table if it exists. 
* * @param schemaName Name of schema - * @param tableName Name of table + * @param tableName Name of table * @throws Exception exception */ void dropTableIfExists(JdbcDatabase database, String schemaName, String tableName) throws Exception; @@ -70,9 +70,9 @@ default boolean isSchemaExists(final JdbcDatabase database, final String schemaN /** * Query to remove all records from a table. Assumes the table exists. * - * @param database Database that the connector is syncing + * @param database Database that the connector is syncing * @param schemaName Name of schema - * @param tableName Name of table + * @param tableName Name of table * @return Query */ String truncateTableQuery(JdbcDatabase database, String schemaName, String tableName); @@ -80,20 +80,21 @@ default boolean isSchemaExists(final JdbcDatabase database, final String schemaN /** * Insert records into table. Assumes the table exists. * - * @param database Database that the connector is syncing - * @param records Records to insert. + * @param database Database that the connector is syncing + * @param records Records to insert. * @param schemaName Name of schema - * @param tableName Name of table + * @param tableName Name of table * @throws Exception exception */ void insertRecords(JdbcDatabase database, List records, String schemaName, String tableName) throws Exception; /** - * Query to copy all records from source table to destination table. Both tables must be in the specified schema. Assumes both table exist. + * Query to copy all records from source table to destination table. Both tables must be in the + * specified schema. Assumes both table exist. * - * @param database Database that the connector is syncing - * @param schemaName Name of schema - * @param sourceTableName Name of source table + * @param database Database that the connector is syncing + * @param schemaName Name of schema + * @param sourceTableName Name of source table * @param destinationTableName Name of destination table * @return Query */ @@ -103,7 +104,7 @@ default boolean isSchemaExists(final JdbcDatabase database, final String schemaN * Given an arbitrary number of queries, execute a transaction. * * @param database Database that the connector is syncing - * @param queries Queries to execute + * @param queries Queries to execute * @throws Exception exception */ void executeTransaction(JdbcDatabase database, List queries) throws Exception; @@ -120,19 +121,21 @@ default boolean isSchemaExists(final JdbcDatabase database, final String schemaN */ boolean isSchemaRequired(); - /** - * The method is responsible for executing some specific DB Engine logic in onClose method. We can override this method to execute specific logic - * e.g. to handle any necessary migrations in the destination, etc. + * The method is responsible for executing some specific DB Engine logic in onClose method. We can + * override this method to execute specific logic e.g. to handle any necessary migrations in the + * destination, etc. *

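
The reflowed Javadoc here describes `onDestinationCloseOperations`, the hook a destination can override to run engine-specific logic before close; the Redshift VARCHAR-to-SUPER migration referenced in the `@see` tag is the in-tree example. A minimal sketch of an override, where the per-schema work is a placeholder rather than Redshift's actual implementation:

    @Override
    public void onDestinationCloseOperations(final JdbcDatabase database, final Set<String> schemaNames) {
      for (final String schemaName : schemaNames) {
        // e.g. find legacy VARCHAR columns in schemaName and widen them before the sync closes
      }
      LOGGER.info("Ran engine-specific close operations for {} schema(s).", schemaNames.size());
    }
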
- * In next example you can see how migration from VARCHAR to SUPER column is handled for the Redshift destination: + * In next example you can see how migration from VARCHAR to SUPER column is handled for the + * Redshift destination: * * @param database - Database that the connector is interacting with - * @param schemaNames - schemas will be discovered + * @param schemaNames - schemas will be discovered * @see io.airbyte.integrations.destination.redshift.RedshiftSqlOperations#onDestinationCloseOperations */ default void onDestinationCloseOperations(JdbcDatabase database, Set schemaNames) { // do nothing LOGGER.info("No onDestinationCloseOperations required for this destination."); } + } diff --git a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/CopyConsumerFactory.java b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/CopyConsumerFactory.java index 9970402d6787..2fb4d0b3bf3d 100644 --- a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/CopyConsumerFactory.java +++ b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/CopyConsumerFactory.java @@ -35,13 +35,13 @@ public class CopyConsumerFactory { private static final Logger LOGGER = LoggerFactory.getLogger(CopyConsumerFactory.class); public static AirbyteMessageConsumer create(final Consumer outputRecordCollector, - final JdbcDatabase database, - final SqlOperations sqlOperations, - final ExtendedNameTransformer namingResolver, - final T config, - final ConfiguredAirbyteCatalog catalog, - final StreamCopierFactory streamCopierFactory, - final String defaultSchema) { + final JdbcDatabase database, + final SqlOperations sqlOperations, + final ExtendedNameTransformer namingResolver, + final T config, + final ConfiguredAirbyteCatalog catalog, + final StreamCopierFactory streamCopierFactory, + final String defaultSchema) { final Map pairToCopier = createWriteConfigs( namingResolver, config, @@ -65,12 +65,12 @@ public static AirbyteMessageConsumer create(final Consumer o } private static Map createWriteConfigs(final ExtendedNameTransformer namingResolver, - final T config, - final ConfiguredAirbyteCatalog catalog, - final StreamCopierFactory streamCopierFactory, - final String defaultSchema, - final JdbcDatabase database, - final SqlOperations sqlOperations) { + final T config, + final ConfiguredAirbyteCatalog catalog, + final StreamCopierFactory streamCopierFactory, + final String defaultSchema, + final JdbcDatabase database, + final SqlOperations sqlOperations) { final Map pairToCopier = new HashMap<>(); final String stagingFolder = UUID.randomUUID().toString(); for (final var configuredStream : catalog.getStreams()) { @@ -89,8 +89,8 @@ private static OnStartFunction onStartFunction(final Map recordWriterFunction(final Map pairToCopier, - final SqlOperations sqlOperations, - final Map pairToIgnoredRecordCount) { + final SqlOperations sqlOperations, + final Map pairToIgnoredRecordCount) { return (AirbyteStreamNameNamespacePair pair, List records) -> { final var fileName = pairToCopier.get(pair).prepareStagingFile(); for (final AirbyteRecordMessage recordMessage : records) { @@ -117,9 +117,9 @@ private static CheckAndRemoveRecordWriter removeStagingFilePrinter(final Map pairToCopier, - final JdbcDatabase database, - final SqlOperations sqlOperations, - final Map pairToIgnoredRecordCount) { + final 
JdbcDatabase database, + final SqlOperations sqlOperations, + final Map pairToIgnoredRecordCount) { return (hasFailed) -> { pairToIgnoredRecordCount .forEach((pair, count) -> LOGGER.warn("A total of {} record(s) of data from stream {} were invalid and were ignored.", count, pair)); @@ -128,9 +128,9 @@ private static OnCloseFunction onCloseFunction(final Map pairToCopier, - boolean hasFailed, - final JdbcDatabase db, - final SqlOperations sqlOperations) + boolean hasFailed, + final JdbcDatabase db, + final SqlOperations sqlOperations) throws Exception { Exception firstException = null; List streamCopiers = new ArrayList<>(pairToCopier.values()); diff --git a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/StreamCopier.java b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/StreamCopier.java index 93eb78cadafc..d655bea2f147 100644 --- a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/StreamCopier.java +++ b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/StreamCopier.java @@ -77,4 +77,5 @@ public interface StreamCopier { * @return current staging file name */ String getCurrentFile(); + } diff --git a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/staging/StagingOperations.java b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/staging/StagingOperations.java index 5af382004d75..e2a1b799e48c 100644 --- a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/staging/StagingOperations.java +++ b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/staging/StagingOperations.java @@ -11,7 +11,7 @@ import java.util.UUID; import org.joda.time.DateTime; -public interface StagingOperations extends SqlOperations { +public interface StagingOperations extends SqlOperations { String getStageName(String namespace, String streamName); diff --git a/airbyte-integrations/connectors/destination-local-json/src/test-integration/java/io/airbyte/integrations/destination/local_json/LocalJsonDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-local-json/src/test-integration/java/io/airbyte/integrations/destination/local_json/LocalJsonDestinationAcceptanceTest.java index aa17f0a82513..63e7dd55d6c6 100644 --- a/airbyte-integrations/connectors/destination-local-json/src/test-integration/java/io/airbyte/integrations/destination/local_json/LocalJsonDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-local-json/src/test-integration/java/io/airbyte/integrations/destination/local_json/LocalJsonDestinationAcceptanceTest.java @@ -14,7 +14,6 @@ import io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest; import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; - import java.nio.file.Files; import java.nio.file.Path; import java.util.List; diff --git a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariaDbTestDataComparator.java 
b/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariaDbTestDataComparator.java index 45b6f093f5f9..d10d4349dc23 100644 --- a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariaDbTestDataComparator.java +++ b/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariaDbTestDataComparator.java @@ -1,22 +1,26 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + package io.airbyte.integrations.destination.mariadb_columnstore; import io.airbyte.integrations.destination.ExtendedNameTransformer; import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; - import java.util.ArrayList; import java.util.List; public class MariaDbTestDataComparator extends AdvancedTestDataComparator { - private final ExtendedNameTransformer namingResolver = new MariadbColumnstoreNameTransformer(); + private final ExtendedNameTransformer namingResolver = new MariadbColumnstoreNameTransformer(); + + @Override + protected List resolveIdentifier(final String identifier) { + final List result = new ArrayList<>(); + final String resolved = namingResolver.getIdentifier(identifier); + result.add(identifier); + result.add(resolved); - @Override - protected List resolveIdentifier(final String identifier) { - final List result = new ArrayList<>(); - final String resolved = namingResolver.getIdentifier(identifier); - result.add(identifier); - result.add(resolved); + return result; + } - return result; - } } diff --git a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestinationAcceptanceTest.java index 442e684de020..8098ab53ae45 100644 --- a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestinationAcceptanceTest.java @@ -13,13 +13,10 @@ import io.airbyte.integrations.base.JavaBaseConstants; import io.airbyte.integrations.destination.ExtendedNameTransformer; import io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest; +import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import java.sql.SQLException; -import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; - -import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; -import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.testcontainers.containers.MariaDBContainer; diff --git a/airbyte-integrations/connectors/destination-meilisearch/src/test-integration/java/io/airbyte/integrations/destination/meilisearch/MeiliSearchDestinationAcceptanceTest.java 
b/airbyte-integrations/connectors/destination-meilisearch/src/test-integration/java/io/airbyte/integrations/destination/meilisearch/MeiliSearchDestinationAcceptanceTest.java index 66cd9a83e29b..bd94430bebdb 100644 --- a/airbyte-integrations/connectors/destination-meilisearch/src/test-integration/java/io/airbyte/integrations/destination/meilisearch/MeiliSearchDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-meilisearch/src/test-integration/java/io/airbyte/integrations/destination/meilisearch/MeiliSearchDestinationAcceptanceTest.java @@ -14,15 +14,14 @@ import io.airbyte.commons.stream.MoreStreams; import io.airbyte.commons.text.Names; import io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest; +import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; +import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.util.Comparator; import java.util.List; import java.util.stream.Collectors; - -import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; -import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import org.testcontainers.containers.GenericContainer; import org.testcontainers.utility.DockerImageName; diff --git a/airbyte-integrations/connectors/destination-mongodb/src/test-integration/java/io/airbyte/integrations/destination/mongodb/MongodbDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-mongodb/src/test-integration/java/io/airbyte/integrations/destination/mongodb/MongodbDestinationAcceptanceTest.java index ac93c4e54a5d..a04fdd273ec6 100644 --- a/airbyte-integrations/connectors/destination-mongodb/src/test-integration/java/io/airbyte/integrations/destination/mongodb/MongodbDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-mongodb/src/test-integration/java/io/airbyte/integrations/destination/mongodb/MongodbDestinationAcceptanceTest.java @@ -12,11 +12,10 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.db.mongodb.MongoDatabase; import io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest; -import java.util.ArrayList; -import java.util.List; - import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; +import java.util.ArrayList; +import java.util.List; import org.bson.Document; import org.testcontainers.containers.MongoDBContainer; diff --git a/airbyte-integrations/connectors/destination-mqtt/src/test-integration/java/io/airbyte/integrations/destination/mqtt/MqttDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-mqtt/src/test-integration/java/io/airbyte/integrations/destination/mqtt/MqttDestinationAcceptanceTest.java index 8e216112d462..d4f9b381187c 100644 --- a/airbyte-integrations/connectors/destination-mqtt/src/test-integration/java/io/airbyte/integrations/destination/mqtt/MqttDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-mqtt/src/test-integration/java/io/airbyte/integrations/destination/mqtt/MqttDestinationAcceptanceTest.java @@ -13,6 +13,8 @@ import com.hivemq.testcontainer.junit5.HiveMQTestContainerExtension; import io.airbyte.commons.json.Jsons; import io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest; +import 
io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; +import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import java.net.InetAddress; import java.net.NetworkInterface; import java.net.SocketException; @@ -23,9 +25,6 @@ import java.util.List; import java.util.Map; import java.util.UUID; - -import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; -import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import org.eclipse.paho.client.mqttv3.MqttClient; import org.eclipse.paho.client.mqttv3.MqttConnectOptions; import org.eclipse.paho.client.mqttv3.MqttException; diff --git a/airbyte-integrations/connectors/destination-oracle/src/main/java/io/airbyte/integrations/destination/oracle/OracleOperations.java b/airbyte-integrations/connectors/destination-oracle/src/main/java/io/airbyte/integrations/destination/oracle/OracleOperations.java index b32cf07cbb45..a133dfb5285f 100644 --- a/airbyte-integrations/connectors/destination-oracle/src/main/java/io/airbyte/integrations/destination/oracle/OracleOperations.java +++ b/airbyte-integrations/connectors/destination-oracle/src/main/java/io/airbyte/integrations/destination/oracle/OracleOperations.java @@ -94,9 +94,9 @@ public String truncateTableQuery(final JdbcDatabase database, final String schem @Override public void insertRecords(final JdbcDatabase database, - final List records, - final String schemaName, - final String tempTableName) + final List records, + final String schemaName, + final String tempTableName) throws Exception { final String tableName = String.format("%s.%s", schemaName, tempTableName); final String columns = String.format("(%s, %s, %s)", @@ -107,11 +107,11 @@ public void insertRecords(final JdbcDatabase database, // Adapted from SqlUtils.insertRawRecordsInSingleQuery to meet some needs specific to Oracle syntax private static void insertRawRecordsInSingleQuery(final String tableName, - final String columns, - final String recordQueryComponent, - final JdbcDatabase jdbcDatabase, - final List records, - final Supplier uuidSupplier) + final String columns, + final String recordQueryComponent, + final JdbcDatabase jdbcDatabase, + final List records, + final Supplier uuidSupplier) throws SQLException { if (records.isEmpty()) { return; @@ -152,9 +152,9 @@ private static void insertRawRecordsInSingleQuery(final String tableName, @Override public String copyTableQuery(final JdbcDatabase database, - final String schemaName, - final String sourceTableName, - final String destinationTableName) { + final String schemaName, + final String sourceTableName, + final String destinationTableName) { return String.format("INSERT INTO %s.%s SELECT * FROM %s.%s\n", schemaName, destinationTableName, schemaName, sourceTableName); } diff --git a/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/OracleTestDataComparator.java b/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/OracleTestDataComparator.java index 5be791e75410..0ddb650fe2c6 100644 --- a/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/OracleTestDataComparator.java +++ b/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/OracleTestDataComparator.java @@ -1,25 +1,29 @@ +/* + * 
Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + package io.airbyte.integrations.destination.oracle; import io.airbyte.integrations.destination.ExtendedNameTransformer; import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; - import java.util.ArrayList; import java.util.List; public class OracleTestDataComparator extends AdvancedTestDataComparator { - private final ExtendedNameTransformer namingResolver = new OracleNameTransformer(); + private final ExtendedNameTransformer namingResolver = new OracleNameTransformer(); - @Override - protected List resolveIdentifier(final String identifier) { - final List result = new ArrayList<>(); - final String resolved = namingResolver.getIdentifier(identifier); - result.add(identifier); - result.add(resolved); - if (!resolved.startsWith("\"")) { - result.add(resolved.toLowerCase()); - result.add(resolved.toUpperCase()); - } - return result; + @Override + protected List resolveIdentifier(final String identifier) { + final List result = new ArrayList<>(); + final String resolved = namingResolver.getIdentifier(identifier); + result.add(identifier); + result.add(resolved); + if (!resolved.startsWith("\"")) { + result.add(resolved.toLowerCase()); + result.add(resolved.toUpperCase()); } + return result; + } + } diff --git a/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/SshOracleDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/SshOracleDestinationAcceptanceTest.java index ee48ebcbcc74..133a44263c25 100644 --- a/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/SshOracleDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/SshOracleDestinationAcceptanceTest.java @@ -17,13 +17,11 @@ import io.airbyte.integrations.base.ssh.SshTunnel; import io.airbyte.integrations.destination.ExtendedNameTransformer; import io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest; +import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import java.io.IOException; -import java.util.ArrayList; import java.util.List; import java.util.Objects; import java.util.stream.Collectors; - -import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import org.testcontainers.containers.Network; public abstract class SshOracleDestinationAcceptanceTest extends DestinationAcceptanceTest { diff --git a/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/UnencryptedOracleDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/UnencryptedOracleDestinationAcceptanceTest.java index fd404bee7955..1342c57dafd3 100644 --- a/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/UnencryptedOracleDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/UnencryptedOracleDestinationAcceptanceTest.java @@ -17,13 +17,10 @@ import io.airbyte.db.jdbc.JdbcUtils; import 
io.airbyte.integrations.destination.ExtendedNameTransformer; import io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest; +import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import java.sql.SQLException; -import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; - -import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; -import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import org.junit.Test; public class UnencryptedOracleDestinationAcceptanceTest extends DestinationAcceptanceTest { diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/Dockerfile b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/Dockerfile index 1c32dea0e209..0c472d5343f8 100644 --- a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/Dockerfile +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION destination-postgres-strict-encrypt COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.4 +LABEL io.airbyte.version=0.1.5 LABEL io.airbyte.name=airbyte/destination-postgres-strict-encrypt diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test/resources/expected_spec.json b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test/resources/expected_spec.json index 10e1c1251a44..8ba1678fcb55 100644 --- a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test/resources/expected_spec.json +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test/resources/expected_spec.json @@ -54,6 +54,12 @@ "airbyte_secret": true, "order": 5 }, + "jdbc_url_params": { + "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).", + "title": "JDBC URL Params", + "type": "string", + "order": 7 + }, "tunnel_method": { "type": "object", "title": "SSH Tunnel Method", diff --git a/airbyte-integrations/connectors/destination-redshift/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-redshift/src/main/resources/spec.json index 360372f2ca89..243259955ddf 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/destination-redshift/src/main/resources/spec.json @@ -49,22 +49,22 @@ "title": "Default Schema" }, "s3_bucket_name": { - "title": "S3 Bucket Name", + "title": "S3 Bucket Name (Optional)", "type": "string", - "description": "The name of the staging S3 bucket to use if utilising a COPY strategy. COPY is recommended for production workloads for better speed and scalability. See AWS docs for more details.", + "description": "The name of the staging S3 bucket to use if utilising a COPY strategy. COPY is recommended for production workloads for better speed and scalability. See AWS docs for more details.", "examples": ["airbyte.staging"] }, "s3_bucket_path": { - "title": "S3 Bucket Path", + "title": "S3 Bucket Path (Optional)", "type": "string", - "description": "The directory under the S3 bucket where data will be written. If not provided, then defaults to the root directory.", + "description": "The directory under the S3 bucket where data will be written. If not provided, then defaults to the root directory. 
See path's name recommendations for more details.", "examples": ["data_sync/test"] }, "s3_bucket_region": { - "title": "S3 Bucket Region", + "title": "S3 Bucket Region (Optional)", "type": "string", "default": "", - "description": "The region of the S3 staging bucket to use if utilising a copy strategy.", + "description": "The region of the S3 staging bucket to use if utilising a COPY strategy. See AWS docs for details.", "enum": [ "", "us-east-1", @@ -94,14 +94,14 @@ }, "access_key_id": { "type": "string", - "description": "The Access Key Id granting allow one to access the above S3 staging bucket. Airbyte requires Read and Write permissions to the given bucket.", - "title": "S3 Key Id", + "description": "This ID grants access to the above S3 staging bucket. Airbyte requires Read and Write permissions to the given bucket. See AWS docs on how to generate an access key ID and secret access key.", + "title": "S3 Key Id (Optional)", "airbyte_secret": true }, "secret_access_key": { "type": "string", - "description": "The corresponding secret to the above access key id.", - "title": "S3 Access Key", + "description": "The corresponding secret to the above access key id. See AWS docs on how to generate an access key ID and secret access key.", + "title": "S3 Access Key (Optional)", "airbyte_secret": true }, "part_size": { @@ -109,13 +109,13 @@ "minimum": 10, "maximum": 100, "examples": ["10"], - "description": "Optional. Increase this if syncing tables larger than 100GB. Only relevant for COPY. Files are streamed to S3 in parts. This determines the size of each part, in MBs. As S3 has a limit of 10,000 parts per file, part size affects the table size. This is 10MB by default, resulting in a default limit of 100GB tables. Note, a larger part size will result in larger memory requirements. A rule of thumb is to multiply the part size by 10 to get the memory requirement. Modify this with care.", - "title": "Stream Part Size" + "description": "Increase this if syncing tables larger than 100GB. Only relevant for COPY. Files are streamed to S3 in parts. This determines the size of each part, in MBs. As S3 has a limit of 10,000 parts per file, part size affects the table size. This is 10MB by default, resulting in a default limit of 100GB tables. Note: a larger part size will result in larger memory requirements. A rule of thumb is to multiply the part size by 10 to get the memory requirement. Modify this with care. See docs for details.", + "title": "Stream Part Size (Optional)" }, "purge_staging_data": { - "title": "Purge Staging Files and Tables", + "title": "Purge Staging Files and Tables (Optional)", "type": "boolean", - "description": "Whether to delete the staging files from S3 after completing the sync. See the docs for details. Only relevant for COPY. Defaults to true.", + "description": "Whether to delete the staging files from S3 after completing the sync. See docs for details.", "default": true } } diff --git a/airbyte-integrations/connectors/source-apify-dataset/Dockerfile b/airbyte-integrations/connectors/source-apify-dataset/Dockerfile index fbf31c680e2f..3c25c0ce7cbd 100644 --- a/airbyte-integrations/connectors/source-apify-dataset/Dockerfile +++ b/airbyte-integrations/connectors/source-apify-dataset/Dockerfile @@ -12,5 +12,5 @@ RUN pip install . 
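
The Redshift `part_size` description above packs several numbers into one sentence; a small worked example of its rule of thumb, with illustrative figures only:

    // S3 caps a multipart upload at 10,000 parts, so part size bounds table size:
    final int partSizeMb = 10;                        // the default
    final long maxTableSizeMb = partSizeMb * 10_000L; // 100,000 MB, i.e. the ~100GB default limit
    final int approxMemoryMb = partSizeMb * 10;       // rule of thumb: memory ~= part size x 10
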
ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.9 +LABEL io.airbyte.version=0.1.11 LABEL io.airbyte.name=airbyte/source-apify-dataset diff --git a/airbyte-integrations/connectors/source-jdbc/src/main/java/io/airbyte/integrations/source/jdbc/AbstractJdbcSource.java b/airbyte-integrations/connectors/source-jdbc/src/main/java/io/airbyte/integrations/source/jdbc/AbstractJdbcSource.java index f85b4eebc0c5..1d6c237a3659 100644 --- a/airbyte-integrations/connectors/source-jdbc/src/main/java/io/airbyte/integrations/source/jdbc/AbstractJdbcSource.java +++ b/airbyte-integrations/connectors/source-jdbc/src/main/java/io/airbyte/integrations/source/jdbc/AbstractJdbcSource.java @@ -289,7 +289,7 @@ public JdbcDatabase createDatabase(final JsonNode config) throws SQLException { final JsonNode jdbcConfig = toDatabaseConfig(config); final JdbcDatabase database = Databases.createStreamingJdbcDatabase( - jdbcConfig.get("username").asText(), + jdbcConfig.has("username") ? jdbcConfig.get("username").asText() : null, jdbcConfig.has("password") ? jdbcConfig.get("password").asText() : null, jdbcConfig.get("jdbc_url").asText(), driverClass, diff --git a/airbyte-integrations/connectors/source-openweather/Dockerfile b/airbyte-integrations/connectors/source-openweather/Dockerfile index 8b82589d62f2..b344b066bd47 100644 --- a/airbyte-integrations/connectors/source-openweather/Dockerfile +++ b/airbyte-integrations/connectors/source-openweather/Dockerfile @@ -34,5 +34,5 @@ COPY source_openweather ./source_openweather ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.1 +LABEL io.airbyte.version=0.1.4 LABEL io.airbyte.name=airbyte/source-openweather diff --git a/airbyte-integrations/connectors/source-smartsheets/Dockerfile b/airbyte-integrations/connectors/source-smartsheets/Dockerfile index 7907f022cc86..cb26f971e9da 100644 --- a/airbyte-integrations/connectors/source-smartsheets/Dockerfile +++ b/airbyte-integrations/connectors/source-smartsheets/Dockerfile @@ -14,5 +14,5 @@ COPY $CODE_PATH ./$CODE_PATH ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.9 +LABEL io.airbyte.version=0.1.10 LABEL io.airbyte.name=airbyte/source-smartsheets diff --git a/airbyte-integrations/connectors/source-smartsheets/acceptance-test-config.yml b/airbyte-integrations/connectors/source-smartsheets/acceptance-test-config.yml index 54c4a0e8df86..063f068e5caa 100644 --- a/airbyte-integrations/connectors/source-smartsheets/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-smartsheets/acceptance-test-config.yml @@ -12,6 +12,16 @@ tests: basic_read: - config_path: "secrets/config.json" configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] + expect_records: + path: "integration_tests/expected_records.txt" + extra_fields: yes + exact_order: yes + extra_records: no full_refresh: - config_path: "secrets/config.json" configured_catalog_path: "integration_tests/configured_catalog.json" + incremental: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + future_state_path: "integration_tests/abnormal_state.json" diff --git a/airbyte-integrations/connectors/source-smartsheets/integration_tests/abnormal_state.json 
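
The `incremental` acceptance test added to the Smartsheets config above points at an `abnormal_state.json` whose `modifiedAt` cursor sits in the year 2222, so a correct cursor comparison should emit no records. The connector itself is Python; this Java fragment only sketches the comparison the test exercises:

    // With the far-future cursor from abnormal_state.json, no row can pass this check.
    final OffsetDateTime cursor = OffsetDateTime.parse("2222-03-07T11:30:00+00:00");
    final OffsetDateTime rowModifiedAt = OffsetDateTime.parse("2022-03-07T11:30:00+00:00");
    if (rowModifiedAt.isAfter(cursor)) {
      // emit the record
    }
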
b/airbyte-integrations/connectors/source-smartsheets/integration_tests/abnormal_state.json new file mode 100644 index 000000000000..461ef6d45b57 --- /dev/null +++ b/airbyte-integrations/connectors/source-smartsheets/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "aws_s3_sample": { + "modifiedAt": "2222-03-07T11:30:00+00:00" + } +} diff --git a/airbyte-integrations/connectors/source-smartsheets/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-smartsheets/integration_tests/configured_catalog.json index e263f7cae208..f919a67cd985 100644 --- a/airbyte-integrations/connectors/source-smartsheets/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-smartsheets/integration_tests/configured_catalog.json @@ -13,7 +13,8 @@ "gender": { "type": "string" }, "ip_address": { "type": "string" }, "primary_email": { "type": "string" }, - "dob": { "type": "string", "format": "date" } + "dob": { "type": "string", "format": "date" }, + "modifiedAt": { "type": "string", "format": "date-time" } } }, "supported_sync_modes": ["full_refresh"] diff --git a/airbyte-integrations/connectors/source-smartsheets/integration_tests/expected_records.txt b/airbyte-integrations/connectors/source-smartsheets/integration_tests/expected_records.txt new file mode 100644 index 000000000000..7d1e55999bc1 --- /dev/null +++ b/airbyte-integrations/connectors/source-smartsheets/integration_tests/expected_records.txt @@ -0,0 +1,100 @@ +{"stream": "aws_s3_sample", "data": {"id": "1.0", "first_name": "Joni", "last_name": "Watling", "email": "jwatling0@amazonaws.com", "gender": "Genderqueer", "ip_address": "195.50.216.194", "dob": "2020-11-23"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "2.0", "first_name": "Bernardo", "last_name": "Klaaassen", "email": "bklaaassen1@cbc.ca", "gender": "Polygender", "ip_address": "116.208.253.97", "dob": "2020-02-22"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "3.0", "first_name": "Drake", "last_name": "Bednell", "email": "dbednell2@theguardian.com", "gender": "Non-binary", "ip_address": "120.15.24.132", "dob": "2020-08-21"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "4.0", "first_name": "Alfreda", "last_name": "Brumbye", "email": "abrumbye3@howstuffworks.com", "gender": "Genderqueer", "ip_address": "64.22.217.122", "dob": "2020-12-29"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "5.0", "first_name": "Boycey", "last_name": "Brisson", "email": "bbrisson4@bizjournals.com", "gender": "Bigender", "ip_address": "59.220.127.45", "dob": "2020-06-26"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "6.0", "first_name": "Ursuline", "last_name": "Lintott", "email": "ulintott5@ow.ly", "gender": "Genderqueer", "ip_address": "47.253.138.238", "dob": "2020-07-26"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "7.0", "first_name": "Bettine", "last_name": "McKennan", "email": "bmckennan6@census.gov", "gender": "Bigender", "ip_address": "35.42.88.34", "dob": "2020-06-12"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "8.0", "first_name": "Eustace", "last_name": "Aaronsohn", "email": "eaaronsohn7@yale.edu", "gender": "Male", "ip_address": "84.153.189.160", "dob": "2020-12-14"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "9.0", "first_name": "Chrystel", "last_name": "Blum", "email": "cblum8@360.cn", "gender": 
"Bigender", "ip_address": "44.5.17.116", "dob": "2020-09-14"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "10.0", "first_name": "Kathryne", "last_name": "Cuncarr", "email": "kcuncarr9@hhs.gov", "gender": "Female", "ip_address": "50.63.175.212", "dob": "2020-06-24"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "11.0", "first_name": "Filmer", "last_name": "Ginni", "email": "fginnia@ucoz.com", "gender": "Genderfluid", "ip_address": "248.137.123.63", "dob": "2020-12-30"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "12.0", "first_name": "Anthiathia", "last_name": "Sketh", "email": "askethb@1688.com", "gender": "Female", "ip_address": "40.58.34.216", "dob": "2020-05-10"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "13.0", "first_name": "Pamella", "last_name": "Winterson", "email": "pwintersonc@biglobe.ne.jp", "gender": "Female", "ip_address": "173.8.175.104", "dob": "2020-06-28"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "14.0", "first_name": "Zuzana", "last_name": "Esmead", "email": "zesmeadd@bloglovin.com", "gender": "Polygender", "ip_address": "98.192.39.217", "dob": "2020-02-27"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "15.0", "first_name": "Donica", "last_name": "Jozaitis", "email": "djozaitise@amazon.de", "gender": "Female", "ip_address": "160.231.57.131", "dob": "2021-01-04"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "16.0", "first_name": "Pennie", "last_name": "Dunrige", "email": "pdunrigef@gravatar.com", "gender": "Non-binary", "ip_address": "208.255.160.56", "dob": "2020-03-01"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "17.0", "first_name": "Blanca", "last_name": "Allcroft", "email": "ballcroftg@furl.net", "gender": "Agender", "ip_address": "21.129.47.109", "dob": "2021-01-01"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "18.0", "first_name": "Webb", "last_name": "Simkins", "email": "wsimkinsh@qq.com", "gender": "Male", "ip_address": "2.125.148.89", "dob": "2020-06-16"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "19.0", "first_name": "Dorrie", "last_name": "Esser", "email": "desseri@rediff.com", "gender": "Female", "ip_address": "17.148.200.84", "dob": "2020-11-27"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "20.0", "first_name": "Kara", "last_name": "Gley", "email": "kgleyj@php.net", "gender": "Bigender", "ip_address": "117.130.134.124", "dob": "2020-12-02"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "21.0", "first_name": "Felicle", "last_name": "Roscrigg", "email": "froscriggk@java.com", "gender": "Female", "ip_address": "36.67.5.211", "dob": "2020-03-05"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "22.0", "first_name": "Carmine", "last_name": "Backshill", "email": "cbackshilll@addthis.com", "gender": "Polygender", "ip_address": "103.28.140.64", "dob": "2020-12-13"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "23.0", "first_name": "Helge", "last_name": "Kneeshaw", "email": "hkneeshawm@goo.gl", "gender": "Genderfluid", "ip_address": "154.154.89.226", "dob": "2020-07-15"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "24.0", "first_name": "Suzy", "last_name": "Ohm", "email": "sohmn@columbia.edu", "gender": "Bigender", "ip_address": 
"100.54.193.73", "dob": "2020-10-25"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "25.0", "first_name": "Bianka", "last_name": "Melmore", "email": "bmelmoreo@sohu.com", "gender": "Genderqueer", "ip_address": "38.63.204.171", "dob": "2020-11-02"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "26.0", "first_name": "Kim", "last_name": "Joust", "email": "kjoustp@sbwire.com", "gender": "Male", "ip_address": "87.176.59.210", "dob": "2020-12-27"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "27.0", "first_name": "Darrin", "last_name": "Warlawe", "email": "dwarlaweq@shinystat.com", "gender": "Male", "ip_address": "138.16.204.148", "dob": "2020-11-17"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "28.0", "first_name": "Edgard", "last_name": "Byfford", "email": "ebyffordr@spotify.com", "gender": "Polygender", "ip_address": "162.208.75.173", "dob": "2020-07-25"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "29.0", "first_name": "Dee", "last_name": "Bourgeois", "email": "dbourgeoiss@elegantthemes.com", "gender": "Polygender", "ip_address": "20.250.26.143", "dob": "2020-10-20"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "30.0", "first_name": "Fredrika", "last_name": "Ingry", "email": "fingryt@slashdot.org", "gender": "Non-binary", "ip_address": "255.214.102.98", "dob": "2020-04-03"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "31.0", "first_name": "Christie", "last_name": "Krier", "email": "ckrieru@aboutads.info", "gender": "Bigender", "ip_address": "29.122.167.180", "dob": "2020-09-24"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "32.0", "first_name": "Joshuah", "last_name": "Braffington", "email": "jbraffingtonv@foxnews.com", "gender": "Agender", "ip_address": "189.155.6.135", "dob": "2020-09-22"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "33.0", "first_name": "Bailie", "last_name": "Fossey", "email": "bfosseyw@flickr.com", "gender": "Agender", "ip_address": "129.166.4.82", "dob": "2020-05-30"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "34.0", "first_name": "Westley", "last_name": "Kupper", "email": "wkupperx@a8.net", "gender": "Agender", "ip_address": "12.125.54.217", "dob": "2020-04-10"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "35.0", "first_name": "Allie", "last_name": "Moogan", "email": "amoogany@jigsy.com", "gender": "Male", "ip_address": "158.225.146.105", "dob": "2020-12-24"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "36.0", "first_name": "Obadias", "last_name": "Stammers", "email": "ostammersz@shinystat.com", "gender": "Polygender", "ip_address": "210.226.250.161", "dob": "2021-01-01"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "37.0", "first_name": "Philippine", "last_name": "Barhems", "email": "pbarhems10@ted.com", "gender": "Male", "ip_address": "169.205.179.145", "dob": "2021-01-14"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "38.0", "first_name": "Theo", "last_name": "Messitt", "email": "tmessitt11@deviantart.com", "gender": "Male", "ip_address": "103.212.77.16", "dob": "2020-09-20"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "39.0", "first_name": "Roch", "last_name": "Cuphus", "email": "rcuphus12@pinterest.com", "gender": "Agender", 
"ip_address": "43.96.220.113", "dob": "2020-12-20"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "40.0", "first_name": "Sinclair", "last_name": "Chittey", "email": "schittey13@tamu.edu", "gender": "Genderfluid", "ip_address": "128.194.26.163", "dob": "2020-10-10"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "41.0", "first_name": "Eleonore", "last_name": "Guerrieri", "email": "eguerrieri14@typepad.com", "gender": "Genderfluid", "ip_address": "79.210.103.73", "dob": "2020-07-17"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "42.0", "first_name": "Elana", "last_name": "Secret", "email": "esecret15@mysql.com", "gender": "Polygender", "ip_address": "102.139.145.231", "dob": "2021-01-15"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "43.0", "first_name": "Dennie", "last_name": "Prati", "email": "dprati16@nytimes.com", "gender": "Genderqueer", "ip_address": "51.119.24.56", "dob": "2020-10-06"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "44.0", "first_name": "Roderick", "last_name": "Dand", "email": "rdand17@gmpg.org", "gender": "Genderqueer", "ip_address": "188.187.179.115", "dob": "2020-11-26"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "45.0", "first_name": "Lonnie", "last_name": "Grigolashvill", "email": "lgrigolashvill18@hhs.gov", "gender": "Non-binary", "ip_address": "96.104.221.230", "dob": "2020-05-05"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "46.0", "first_name": "Leslie", "last_name": "Iddins", "email": "liddins19@sbwire.com", "gender": "Genderqueer", "ip_address": "77.228.177.247", "dob": "2020-06-27"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "47.0", "first_name": "Conant", "last_name": "Gaishson", "email": "cgaishson1a@oakley.com", "gender": "Agender", "ip_address": "71.118.171.42", "dob": "2020-12-29"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "48.0", "first_name": "Aileen", "last_name": "Derrell", "email": "aderrell1b@amazonaws.com", "gender": "Genderfluid", "ip_address": "233.79.86.81", "dob": "2020-06-02"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "49.0", "first_name": "Heywood", "last_name": "Poulston", "email": "hpoulston1c@opera.com", "gender": "Genderqueer", "ip_address": "115.6.245.150", "dob": "2020-04-16"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "50.0", "first_name": "Neddie", "last_name": "Rickert", "email": "nrickert1d@omniture.com", "gender": "Polygender", "ip_address": "25.55.171.143", "dob": "2020-11-17"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "51.0", "first_name": "Ronny", "last_name": "Bondley", "email": "rbondley1e@loc.gov", "gender": "Genderqueer", "ip_address": "33.164.53.233", "dob": "2020-05-22"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "52.0", "first_name": "Filippa", "last_name": "McCuis", "email": "fmccuis1f@desdev.cn", "gender": "Bigender", "ip_address": "30.78.184.43", "dob": "2021-01-12"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "53.0", "first_name": "Kipper", "last_name": "Corton", "email": "kcorton1g@t.co", "gender": "Bigender", "ip_address": "177.22.101.164", "dob": "2021-01-23"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "54.0", "first_name": "Clementine", "last_name": "Callen", 
"email": "ccallen1h@storify.com", "gender": "Genderfluid", "ip_address": "122.40.201.54", "dob": "2020-06-03"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "55.0", "first_name": "Silvie", "last_name": "Royse", "email": "sroyse1i@mapquest.com", "gender": "Genderqueer", "ip_address": "38.145.193.0", "dob": "2020-06-27"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "56.0", "first_name": "Noble", "last_name": "Purslow", "email": "npurslow1j@redcross.org", "gender": "Non-binary", "ip_address": "119.89.26.248", "dob": "2020-11-21"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "57.0", "first_name": "Marjy", "last_name": "Gloves", "email": "mgloves1k@drupal.org", "gender": "Genderqueer", "ip_address": "250.108.63.170", "dob": "2020-07-24"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "58.0", "first_name": "Ellwood", "last_name": "Gullam", "email": "egullam1l@google.cn", "gender": "Genderfluid", "ip_address": "128.65.236.88", "dob": "2020-05-07"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "59.0", "first_name": "Adora", "last_name": "Povele", "email": "apovele1m@statcounter.com", "gender": "Genderfluid", "ip_address": "215.67.227.145", "dob": "2020-06-23"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "60.0", "first_name": "Miles", "last_name": "Zapatero", "email": "mzapatero1n@ezinearticles.com", "gender": "Non-binary", "ip_address": "212.252.221.177", "dob": "2020-05-22"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "61.0", "first_name": "Eddie", "last_name": "Menichi", "email": "emenichi1o@about.com", "gender": "Genderqueer", "ip_address": "138.77.252.222", "dob": "2020-02-11"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "62.0", "first_name": "Jakob", "last_name": "Showalter", "email": "jshowalter1p@cargocollective.com", "gender": "Genderfluid", "ip_address": "138.186.250.131", "dob": "2021-01-05"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "63.0", "first_name": "Zebadiah", "last_name": "Geratt", "email": "zgeratt1q@surveymonkey.com", "gender": "Genderfluid", "ip_address": "239.69.201.221", "dob": "2020-05-02"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "64.0", "first_name": "Carleton", "last_name": "Gayther", "email": "cgayther1r@si.edu", "gender": "Genderqueer", "ip_address": "138.237.56.77", "dob": "2020-03-09"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "65.0", "first_name": "Gwendolyn", "last_name": "Cotgrave", "email": "gcotgrave1s@dyndns.org", "gender": "Agender", "ip_address": "103.26.18.169", "dob": "2020-06-26"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "66.0", "first_name": "Nikki", "last_name": "Corry", "email": "ncorry1t@dedecms.com", "gender": "Female", "ip_address": "118.138.87.91", "dob": "2020-08-26"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "67.0", "first_name": "Kat", "last_name": "Figgins", "email": "kfiggins1u@jugem.jp", "gender": "Male", "ip_address": "202.202.94.181", "dob": "2020-06-19"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "68.0", "first_name": "Norean", "last_name": "Trendle", "email": "ntrendle1v@elpais.com", "gender": "Genderqueer", "ip_address": "134.89.22.248", "dob": "2020-08-24"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", 
"data": {"id": "69.0", "first_name": "Foster", "last_name": "Durker", "email": "fdurker1w@engadget.com", "gender": "Non-binary", "ip_address": "189.149.34.80", "dob": "2020-11-02"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "70.0", "first_name": "Rod", "last_name": "Jarnell", "email": "rjarnell1x@sphinn.com", "gender": "Genderfluid", "ip_address": "169.148.199.234", "dob": "2020-08-19"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "71.0", "first_name": "Lancelot", "last_name": "Plaxton", "email": "lplaxton1y@spiegel.de", "gender": "Agender", "ip_address": "81.194.71.38", "dob": "2020-09-07"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "72.0", "first_name": "Rudyard", "last_name": "Olliff", "email": "rolliff1z@bbb.org", "gender": "Agender", "ip_address": "113.39.154.178", "dob": "2021-01-07"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "73.0", "first_name": "Shelley", "last_name": "Lipprose", "email": "slipprose20@engadget.com", "gender": "Polygender", "ip_address": "117.254.24.20", "dob": "2021-01-03"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "74.0", "first_name": "Prudi", "last_name": "Boichat", "email": "pboichat21@cam.ac.uk", "gender": "Agender", "ip_address": "99.169.9.122", "dob": "2020-08-25"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "75.0", "first_name": "Denny", "last_name": "Bollum", "email": "dbollum22@skyrock.com", "gender": "Bigender", "ip_address": "77.112.28.180", "dob": "2020-07-31"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "76.0", "first_name": "Lucila", "last_name": "Charteris", "email": "lcharteris23@linkedin.com", "gender": "Genderfluid", "ip_address": "194.161.40.83", "dob": "2020-05-17"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "77.0", "first_name": "Marrissa", "last_name": "Wurz", "email": "mwurz24@pinterest.com", "gender": "Agender", "ip_address": "72.219.43.46", "dob": "2020-09-04"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "78.0", "first_name": "Teresina", "last_name": "Micklewicz", "email": "tmicklewicz25@goo.ne.jp", "gender": "Genderqueer", "ip_address": "214.116.247.204", "dob": "2020-09-13"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "79.0", "first_name": "Idette", "last_name": "Ilieve", "email": "iilieve26@mozilla.com", "gender": "Bigender", "ip_address": "25.25.28.17", "dob": "2020-09-21"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "80.0", "first_name": "Noemi", "last_name": "Lempenny", "email": "nlempenny27@jugem.jp", "gender": "Bigender", "ip_address": "194.139.183.130", "dob": "2020-09-01"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "81.0", "first_name": "Faye", "last_name": "Ashbee", "email": "fashbee28@google.com", "gender": "Bigender", "ip_address": "191.149.120.198", "dob": "2020-03-12"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "82.0", "first_name": "Olly", "last_name": "Siaspinski", "email": "osiaspinski29@amazonaws.com", "gender": "Polygender", "ip_address": "150.134.136.240", "dob": "2020-07-12"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "83.0", "first_name": "Marji", "last_name": "Dahlen", "email": "mdahlen2a@zdnet.com", "gender": "Bigender", "ip_address": "185.226.214.79", "dob": "2020-11-28"}, "emitted_at": 
1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "84.0", "first_name": "Aubine", "last_name": "Genner", "email": "agenner2b@chronoengine.com", "gender": "Genderfluid", "ip_address": "109.51.123.153", "dob": "2020-03-28"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "85.0", "first_name": "Dix", "last_name": "Civitillo", "email": "dcivitillo2c@bluehost.com", "gender": "Female", "ip_address": "112.89.157.163", "dob": "2020-05-30"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "86.0", "first_name": "Birk", "last_name": "Mussolini", "email": "bmussolini2d@wikimedia.org", "gender": "Agender", "ip_address": "235.49.78.159", "dob": "2020-03-05"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "87.0", "first_name": "Lenci", "last_name": "Wager", "email": "lwager2e@fda.gov", "gender": "Agender", "ip_address": "113.145.228.184", "dob": "2020-03-27"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "88.0", "first_name": "Avrit", "last_name": "Yosifov", "email": "ayosifov2f@umn.edu", "gender": "Male", "ip_address": "112.171.167.81", "dob": "2021-01-18"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "89.0", "first_name": "Honor", "last_name": "McMorran", "email": "hmcmorran2g@bbc.co.uk", "gender": "Genderqueer", "ip_address": "11.179.26.90", "dob": "2020-04-07"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "90.0", "first_name": "Lilah", "last_name": "Carnall", "email": "lcarnall2h@barnesandnoble.com", "gender": "Polygender", "ip_address": "51.194.48.153", "dob": "2020-06-13"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "91.0", "first_name": "Daffie", "last_name": "Cheke", "email": "dcheke2i@theatlantic.com", "gender": "Polygender", "ip_address": "158.53.238.38", "dob": "2020-11-12"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "92.0", "first_name": "Ariel", "last_name": "Minor", "email": "aminor2j@blogger.com", "gender": "Polygender", "ip_address": "29.0.88.144", "dob": "2020-07-16"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "93.0", "first_name": "Kenna", "last_name": "Spraggon", "email": "kspraggon2k@google.fr", "gender": "Agender", "ip_address": "139.245.147.77", "dob": "2020-11-15"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "94.0", "first_name": "Evelyn", "last_name": "Oleshunin", "email": "eoleshunin2l@istockphoto.com", "gender": "Genderqueer", "ip_address": "26.117.119.59", "dob": "2020-08-11"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "95.0", "first_name": "Marcel", "last_name": "Kuhnt", "email": "mkuhnt2m@google.com.au", "gender": "Genderfluid", "ip_address": "84.158.205.130", "dob": "2020-08-22"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "96.0", "first_name": "Wendye", "last_name": "Wigelsworth", "email": "wwigelsworth2n@webs.com", "gender": "Polygender", "ip_address": "241.71.79.173", "dob": "2020-02-26"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "97.0", "first_name": "Nonie", "last_name": "Cadany", "email": "ncadany2o@cdbaby.com", "gender": "Female", "ip_address": "87.132.223.229", "dob": "2020-05-30"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "98.0", "first_name": "Arthur", "last_name": "Norsister", "email": "anorsister2p@csmonitor.com", "gender": "Male", "ip_address": 
"21.50.95.6", "dob": "2020-05-13"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "99.0", "first_name": "Auria", "last_name": "Haryngton", "email": "aharyngton2q@mapquest.com", "gender": "Non-binary", "ip_address": "246.28.159.95", "dob": "2020-06-28"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "100.0", "first_name": "Phelia", "last_name": "Simmig", "email": "psimmig2r@example.com", "gender": "Agender", "ip_address": "205.35.103.161", "dob": "2020-04-05"}, "emitted_at": 1649842201000} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/sheet.py b/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/sheet.py new file mode 100644 index 000000000000..570849225937 --- /dev/null +++ b/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/sheet.py @@ -0,0 +1,92 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. +# + +import logging +from functools import cached_property +from typing import Any, Dict, Iterable, Mapping, Optional, Tuple + +import smartsheet + + +class SmartSheetAPIWrapper: + def __init__(self, config: Mapping[str, Any]): + self._spreadsheet_id = config["spreadsheet_id"] + self._access_token = config["access_token"] + api_client = smartsheet.Smartsheet(self._access_token) + api_client.errors_as_exceptions(True) + # each call to `Sheets` makes a new instance, so we save it here to make no more new objects + self._get_sheet = api_client.Sheets.get_sheet + self._data = None + + def _fetch_sheet(self, from_dt: Optional[str] = None) -> None: + kwargs = {"rows_modified_since": from_dt} + if not from_dt: + kwargs["page_size"] = 1 + self._data = self._get_sheet(self._spreadsheet_id, **kwargs) + + @staticmethod + def _column_to_property(column_type: str) -> Dict[str, any]: + type_mapping = { + "TEXT_NUMBER": {"type": "string"}, + "DATE": {"type": "string", "format": "date"}, + "DATETIME": {"type": "string", "format": "date-time"}, + } + return type_mapping.get(column_type, {"type": "string"}) + + def _construct_record(self, row: smartsheet.models.Row) -> Dict[str, str]: + values_column_map = {cell.column_id: str(cell.value or "") for cell in row.cells} + record = {column.title: values_column_map[column.id] for column in self.data.columns} + record["modifiedAt"] = row.modified_at.isoformat() + return record + + @property + def data(self) -> smartsheet.models.Row: + if not self._data: + self._fetch_sheet() + return self._data + + @property + def name(self) -> str: + return self.data.name + + @property + def row_count(self) -> int: + return len(self.data.rows) + + @cached_property + def primary_key(self) -> str: + for column in self.data.columns: + if column.primary: + return column.title + + @cached_property + def json_schema(self) -> Dict[str, Any]: + column_info = {column.title: self._column_to_property(column.type.value) for column in self.data.columns} + column_info["modifiedAt"] = {"type": "string", "format": "date-time"} # add cursor field explicitly + json_schema = { + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": column_info, + } + return json_schema + + def read_records(self, from_dt: str) -> Iterable[Dict[str, str]]: + self._fetch_sheet(from_dt) + for row in self.data.rows: + yield self._construct_record(row) + + def check_connection(self, logger: logging.Logger) -> Tuple[bool, Optional[str]]: + try: + _ = self.data + except smartsheet.exceptions.ApiError as e: + err = 
e.error.result + code = 404 if err.code == 1006 else err.code + reason = f"{err.name}: {code} - {err.message} | Check your spreadsheet ID." + logger.error(reason) + return False, reason + except Exception as e: + reason = str(e) + logger.error(reason) + return False, reason + return True, None diff --git a/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/source.py b/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/source.py index 542cdb042103..3bce5c71f7e4 100644 --- a/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/source.py +++ b/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/source.py @@ -2,120 +2,21 @@ # Copyright (c) 2021 Airbyte, Inc., all rights reserved. # +import logging +from typing import Any, List, Mapping, Tuple -import json -from datetime import datetime -from typing import Dict, Generator, List +from airbyte_cdk.sources import AbstractSource +from airbyte_cdk.sources.streams import Stream -import smartsheet -from airbyte_cdk import AirbyteLogger -from airbyte_cdk.models import ( - AirbyteCatalog, - AirbyteConnectionStatus, - AirbyteMessage, - AirbyteRecordMessage, - AirbyteStream, - ConfiguredAirbyteCatalog, - Status, - Type, -) -from airbyte_cdk.sources import Source +from .sheet import SmartSheetAPIWrapper +from .streams import SmartsheetStream -def get_prop(col_type: str) -> Dict[str, any]: - props = { - "TEXT_NUMBER": {"type": "string"}, - "DATE": {"type": "string", "format": "date"}, - "DATETIME": {"type": "string", "format": "date-time"}, - } - return props.get(col_type, {"type": "string"}) +class SourceSmartsheets(AbstractSource): + def check_connection(self, logger: logging.Logger, config: Mapping[str, Any]) -> Tuple[bool, Any]: + sheet = SmartSheetAPIWrapper(config) + return sheet.check_connection(logger) - -def construct_record(sheet_columns: List[Dict], row_cells: List[Dict]) -> Dict: - # convert all data to string as it is only expected format in schema - values_column_map = {cell["columnId"]: str(cell.get("value", "")) for cell in row_cells} - return {column["title"]: values_column_map[column["id"]] for column in sheet_columns} - - -def get_json_schema(sheet_columns: List[Dict]) -> Dict: - column_info = {column["title"]: get_prop(column["type"]) for column in sheet_columns} - json_schema = { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": column_info, - } - return json_schema - - -class SourceSmartsheets(Source): - def check(self, logger: AirbyteLogger, config: json) -> AirbyteConnectionStatus: - try: - access_token = config["access_token"] - spreadsheet_id = config["spreadsheet_id"] - - smartsheet_client = smartsheet.Smartsheet(access_token) - smartsheet_client.errors_as_exceptions(True) - smartsheet_client.Sheets.get_sheet(spreadsheet_id) - - return AirbyteConnectionStatus(status=Status.SUCCEEDED) - except Exception as e: - if isinstance(e, smartsheet.exceptions.ApiError): - err = e.error.result - code = 404 if err.code == 1006 else err.code - reason = f"{err.name}: {code} - {err.message} | Check your spreadsheet ID."
- else: - reason = str(e) - logger.error(reason) - return AirbyteConnectionStatus(status=Status.FAILED) - - def discover(self, logger: AirbyteLogger, config: json) -> AirbyteCatalog: - access_token = config["access_token"] - spreadsheet_id = config["spreadsheet_id"] - streams = [] - - smartsheet_client = smartsheet.Smartsheet(access_token) - try: - sheet = smartsheet_client.Sheets.get_sheet(spreadsheet_id) - sheet = json.loads(str(sheet)) # make it subscriptable - sheet_json_schema = get_json_schema(sheet["columns"]) - logger.info(f"Running discovery on sheet: {sheet['name']} with {spreadsheet_id}") - - stream = AirbyteStream(name=sheet["name"], json_schema=sheet_json_schema) - stream.supported_sync_modes = ["full_refresh"] - streams.append(stream) - - except Exception as e: - raise Exception(f"Could not run discovery: {str(e)}") - - return AirbyteCatalog(streams=streams) - - def read( - self, logger: AirbyteLogger, config: json, catalog: ConfiguredAirbyteCatalog, state: Dict[str, any] - ) -> Generator[AirbyteMessage, None, None]: - - access_token = config["access_token"] - spreadsheet_id = config["spreadsheet_id"] - smartsheet_client = smartsheet.Smartsheet(access_token) - - for configured_stream in catalog.streams: - stream = configured_stream.stream - try: - sheet = smartsheet_client.Sheets.get_sheet(spreadsheet_id) - sheet = json.loads(str(sheet)) # make it subscriptable - logger.info(f"Starting syncing spreadsheet {sheet['name']}") - logger.info(f"Row count: {sheet['totalRowCount']}") - - for row in sheet["rows"]: - try: - record = construct_record(sheet["columns"], row["cells"]) - yield AirbyteMessage( - type=Type.RECORD, - record=AirbyteRecordMessage(stream=stream.name, data=record, emitted_at=int(datetime.now().timestamp()) * 1000), - ) - except Exception as e: - logger.error(f"Unable to encode row into an AirbyteMessage with the following error: {e}") - - except Exception as e: - logger.error(f"Could not read smartsheet: {stream.name}") - raise e - logger.info(f"Finished syncing spreadsheet with ID: {spreadsheet_id}") + def streams(self, config: Mapping[str, Any]) -> List["Stream"]: + sheet = SmartSheetAPIWrapper(config) + return [SmartsheetStream(sheet, config)] diff --git a/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/spec.json b/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/spec.json index 57876a9a81a1..93c5d422ea23 100644 --- a/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/spec.json +++ b/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/spec.json @@ -17,6 +17,14 @@ "title": "Sheet ID", "description": "The spreadsheet ID. Find in the spreadsheet menu: File > Properties", "type": "string" + }, + "start_datetime": { + "title": "Start Datetime", + "type": "string", + "examples": ["2000-01-01T13:00:00", "2000-01-01T13:00:00-07:00"], + "description": "ISO 8601, for instance: `YYYY-MM-DDTHH:MM:SS`, `YYYY-MM-DDTHH:MM:SS+HH:MM`", + "format": "date-time", + "default": "2020-01-01T00:00:00+00:00" } } }, diff --git a/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/streams.py b/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/streams.py new file mode 100644 index 000000000000..5a5893923760 --- /dev/null +++ b/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/streams.py @@ -0,0 +1,58 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+# + +import datetime +from typing import Any, Dict, Iterable, List, Mapping + +from airbyte_cdk.models import SyncMode +from airbyte_cdk.sources.streams import Stream +from source_smartsheets.sheet import SmartSheetAPIWrapper + + +class SmartsheetStream(Stream): + cursor_field = "modifiedAt" + + def __init__(self, smartsheet: SmartSheetAPIWrapper, config: Mapping[str, Any]): + self.smartsheet = smartsheet + self._state = {} + self._config = config + self._start_datetime = self._config.get("start_datetime") or "2020-01-01T00:00:00+00:00" + + @property + def primary_key(self) -> str: + return self.smartsheet.primary_key + + def get_json_schema(self) -> Dict[str, Any]: + return self.smartsheet.json_schema + + @property + def name(self) -> str: + return self.smartsheet.name + + @property + def state(self) -> Mapping[str, Any]: + if not self._state: + self._state = {self.cursor_field: self._start_datetime} + return self._state + + @state.setter + def state(self, value: Mapping[str, Any]): + self._state = value + + def read_records( + self, + sync_mode: SyncMode, + cursor_field: List[str] = None, + stream_slice: Mapping[str, Any] = None, + stream_state: Mapping[str, Any] = None, + ) -> Iterable[Mapping[str, Any]]: + def iso_dt(src): + return datetime.datetime.fromisoformat(src) + + for record in self.smartsheet.read_records(self.state[self.cursor_field]): + current_cursor_value = iso_dt(self.state[self.cursor_field]) + latest_cursor_value = iso_dt(record[self.cursor_field]) + new_cursor_value = max(latest_cursor_value, current_cursor_value) + self.state = {self.cursor_field: new_cursor_value.isoformat("T", "seconds")} + yield record diff --git a/airbyte-integrations/connectors/source-smartsheets/unit_tests/conftest.py b/airbyte-integrations/connectors/source-smartsheets/unit_tests/conftest.py new file mode 100644 index 000000000000..e168f2fe831c --- /dev/null +++ b/airbyte-integrations/connectors/source-smartsheets/unit_tests/conftest.py @@ -0,0 +1,34 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+# + +import json +from pathlib import Path +from unittest.mock import Mock + +import pytest +from smartsheet.models import Sheet + +HERE = Path(__file__).parent.absolute() + + +@pytest.fixture +def response_mock(): + with open(HERE / "response.json") as json_file: + return json.loads(json_file.read()) + + +@pytest.fixture +def config(): + return {"spreadsheet_id": "id", "access_token": "token"} + + +@pytest.fixture +def get_sheet_mocker(mocker, response_mock): + def _mocker(api_wrapper, data=None): + sheet_obj = Sheet(props=response_mock, base_obj=api_wrapper) + get_sheet_mock = Mock(return_value=sheet_obj) + mocker.patch.object(api_wrapper, "_get_sheet", data or get_sheet_mock) + return get_sheet_mock, sheet_obj + + return _mocker diff --git a/airbyte-integrations/connectors/source-smartsheets/unit_tests/response.json b/airbyte-integrations/connectors/source-smartsheets/unit_tests/response.json new file mode 100644 index 000000000000..99e8122ceef2 --- /dev/null +++ b/airbyte-integrations/connectors/source-smartsheets/unit_tests/response.json @@ -0,0 +1,251 @@ +{ + "accessLevel": "OWNER", + "columns": [ + { + "id": 1101932201830276, + "index": 0, + "primary": true, + "title": "id", + "type": "TEXT_NUMBER", + "validation": false, + "version": 0, + "width": 51 + }, + { + "id": 5605531829200772, + "index": 1, + "title": "first_name", + "type": "TEXT_NUMBER", + "validation": false, + "version": 0, + "width": 275 + }, + { + "id": 3353732015515524, + "index": 2, + "title": "last_name", + "type": "TEXT_NUMBER", + "validation": false, + "version": 0, + "width": 224 + }, + { + "id": 7857331642886020, + "index": 3, + "title": "email", + "type": "TEXT_NUMBER", + "validation": false, + "version": 0, + "width": 231 + }, + { + "id": 2227832108672900, + "index": 4, + "options": [ + "Agender", + "Bigender", + "Female", + "Genderfluid", + "Genderqueer", + "Male", + "Non-binary", + "Polygender" + ], + "title": "gender", + "type": "PICKLIST", + "validation": false, + "version": 0, + "width": 193 + }, + { + "id": 6731431736043396, + "index": 5, + "title": "ip_address", + "type": "TEXT_NUMBER", + "validation": false, + "version": 0, + "width": 206 + }, + { + "id": 4479631922358148, + "index": 6, + "title": "dob", + "type": "DATE", + "validation": false, + "version": 0, + "width": 201 + } + ], + "createdAt": "2021-08-27T11:36:41+00:00", + "dependenciesEnabled": false, + "effectiveAttachmentOptions": [ + "GOOGLE_DRIVE", + "DROPBOX", + "ONEDRIVE", + "LINK", + "EVERNOTE", + "BOX_COM", + "FILE", + "EGNYTE" + ], + "ganttEnabled": false, + "hasSummaryFields": false, + "id": 679252988323716, + "modifiedAt": "2022-04-13T06:50:10+00:00", + "name": "aws_s3_sample", + "permalink": "https://app.smartsheet.com/sheets/v7vHw7qHJChcvfHQ8j3xJpG8H82Fh39Rc9PRGvQ1", + "resourceManagementEnabled": false, + "rows": [ + { + "cells": [ + { "columnId": 1101932201830276, "displayValue": "1", "value": 1.0 }, + { + "columnId": 5605531829200772, + "displayValue": "Joni", + "value": "Joni" + }, + { + "columnId": 3353732015515524, + "displayValue": "Watling", + "value": "Watling" + }, + { + "columnId": 7857331642886020, + "displayValue": "jwatling0@amazonaws.com", + "value": "jwatling0@amazonaws.com" + }, + { + "columnId": 2227832108672900, + "displayValue": "Genderqueer", + "value": "Genderqueer" + }, + { + "columnId": 6731431736043396, + "displayValue": "195.50.216.194", + "value": "195.50.216.194" + }, + { "columnId": 4479631922358148, "value": "2020-11-23" } + ], + "createdAt": "2021-08-27T11:36:41+00:00", + "expanded": true, + "id": 
3201922565072772, + "modifiedAt": "2021-08-27T11:36:41+00:00", + "rowNumber": 1 + }, + { + "cells": [ + { "columnId": 1101932201830276, "displayValue": "2", "value": 2.0 }, + { + "columnId": 5605531829200772, + "displayValue": "Bernardo", + "value": "Bernardo" + }, + { + "columnId": 3353732015515524, + "displayValue": "Klaaassen", + "value": "Klaaassen" + }, + { + "columnId": 7857331642886020, + "displayValue": "bklaaassen1@cbc.ca", + "value": "bklaaassen1@cbc.ca" + }, + { + "columnId": 2227832108672900, + "displayValue": "Polygender", + "value": "Polygender" + }, + { + "columnId": 6731431736043396, + "displayValue": "116.208.253.97", + "value": "116.208.253.97" + }, + { "columnId": 4479631922358148, "value": "2020-02-22" } + ], + "createdAt": "2021-08-27T11:36:41+00:00", + "expanded": true, + "id": 7705522192443268, + "modifiedAt": "2021-08-27T11:36:41+00:00", + "rowNumber": 2, + "siblingId": 3201922565072772 + }, + { + "cells": [ + { "columnId": 1101932201830276, "displayValue": "3", "value": 3.0 }, + { + "columnId": 5605531829200772, + "displayValue": "Drake", + "value": "Drake" + }, + { + "columnId": 3353732015515524, + "displayValue": "Bednell", + "value": "Bednell" + }, + { + "columnId": 7857331642886020, + "displayValue": "dbednell2@theguardian.com", + "value": "dbednell2@theguardian.com" + }, + { + "columnId": 2227832108672900, + "displayValue": "Non-binary", + "value": "Non-binary" + }, + { + "columnId": 6731431736043396, + "displayValue": "120.15.24.132", + "value": "120.15.24.132" + }, + { "columnId": 4479631922358148, "value": "2020-08-21" } + ], + "createdAt": "2021-08-27T11:36:41+00:00", + "expanded": true, + "id": 2076022658230148, + "modifiedAt": "2021-08-27T11:36:41+00:00", + "rowNumber": 3, + "siblingId": 7705522192443268 + }, + { + "cells": [ + { "columnId": 1101932201830276, "displayValue": "4", "value": 4.0 }, + { + "columnId": 5605531829200772, + "displayValue": "Alfreda", + "value": "Alfreda" + }, + { + "columnId": 3353732015515524, + "displayValue": "Brumbye", + "value": "Brumbye" + }, + { + "columnId": 7857331642886020, + "displayValue": "abrumbye3@howstuffworks.com", + "value": "abrumbye3@howstuffworks.com" + }, + { + "columnId": 2227832108672900, + "displayValue": "Genderqueer", + "value": "Genderqueer" + }, + { + "columnId": 6731431736043396, + "displayValue": "64.22.217.122", + "value": "64.22.217.122" + }, + { "columnId": 4479631922358148, "value": "2020-12-29" } + ], + "createdAt": "2021-08-27T11:36:41+00:00", + "expanded": true, + "id": 6579622285600644, + "modifiedAt": "2021-08-27T11:36:41+00:00", + "rowNumber": 4, + "siblingId": 2076022658230148 + } + ], + "totalRowCount": 100, + "userPermissions": { "summaryPermissions": "ADMIN" }, + "userSettings": { "criticalPathEnabled": false, "displaySummaryTasks": true }, + "version": 9 +} diff --git a/airbyte-integrations/connectors/source-smartsheets/unit_tests/test_sheets.py b/airbyte-integrations/connectors/source-smartsheets/unit_tests/test_sheets.py new file mode 100644 index 000000000000..662c7b24882b --- /dev/null +++ b/airbyte-integrations/connectors/source-smartsheets/unit_tests/test_sheets.py @@ -0,0 +1,119 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+# + +import logging +from itertools import permutations +from unittest.mock import ANY, Mock + +import pytest +from smartsheet.exceptions import ApiError +from source_smartsheets.sheet import SmartSheetAPIWrapper + + +def test_fetch_sheet(config, get_sheet_mocker): + spreadsheet_id = config["spreadsheet_id"] + sheet = SmartSheetAPIWrapper(config) + mock, resp = get_sheet_mocker(sheet) + + sheet._fetch_sheet() + mock.assert_called_once_with(spreadsheet_id, rows_modified_since=None, page_size=1) + assert sheet.data == resp + + sheet._fetch_sheet(from_dt="2022-03-04T00:00:00Z") + mock.assert_called_with(spreadsheet_id, rows_modified_since="2022-03-04T00:00:00Z") + assert sheet.data == resp + + +def test_properties(config, get_sheet_mocker): + sheet = SmartSheetAPIWrapper(config) + _, resp = get_sheet_mocker(sheet) + assert sheet.data == resp + assert sheet.name == "aws_s3_sample" + assert sheet.row_count == 4 + assert sheet.primary_key == "id" + + +@pytest.mark.parametrize( + ("column_type", "expected_schema"), + ( + ("TEXT_NUMBER", {"type": "string"}), + ("DATE", {"type": "string", "format": "date"}), + ("DATETIME", {"type": "string", "format": "date-time"}), + ("DURATION", {"type": "string"}), + ), +) +def test_column_types(config, column_type, expected_schema): + sheet = SmartSheetAPIWrapper(config) + assert sheet._column_to_property(column_type) == expected_schema + + +def test_json_schema(config, get_sheet_mocker): + sheet = SmartSheetAPIWrapper(config) + _ = get_sheet_mocker(sheet) + json_schema = sheet.json_schema + assert json_schema["$schema"] == "http://json-schema.org/draft-07/schema#" + assert json_schema["type"] == "object" + assert "properties" in json_schema + assert "modifiedAt" in json_schema["properties"] + + +def _make_api_error(code, message, name): + result_mock = Mock(code=code, message=message) + result_mock.name = name + return ApiError(error=Mock(result=result_mock)) + + +@pytest.mark.parametrize( + ("side_effect", "expected_error"), + ( + (Exception("Internal Server Error"), "Internal Server Error"), + ( + _make_api_error(code=1006, message="Resource not found", name="Not Found"), + "Not Found: 404 - Resource not found | Check your spreadsheet ID.", + ), + ( + _make_api_error(code=4003, message="Too many requests", name="Limit reached"), + "Limit reached: 4003 - Too many requests | Check your spreadsheet ID.", + ), + ), +) +def test_check_connection_fail(mocker, config, side_effect, expected_error): + sheet = SmartSheetAPIWrapper(config) + with mocker.patch.object(sheet, "_get_sheet", side_effect=side_effect): + status, error = sheet.check_connection(logger=logging.getLogger()) + assert error == expected_error + assert status is False + + +def test_check_connection_success(mocker, config): + sheet = SmartSheetAPIWrapper(config) + with mocker.patch.object(sheet, "_get_sheet"): + status, error = sheet.check_connection(logger=logging.getLogger()) + assert error is None + assert status is True + + +_columns = [ + Mock(id="1101932201830276", title="id", type="TEXT_NUMBER"), + Mock(id="5605531829200772", title="first_name", type="TEXT_NUMBER"), + Mock(id="3353732015515524", title="last_name", type="TEXT_NUMBER"), +] + + +_cells = [ + Mock(column_id="1101932201830276", value="11"), + Mock(column_id="5605531829200772", value="Leonardo"), + Mock(column_id="3353732015515524", value="Dicaprio"), +] + + +@pytest.mark.parametrize(("row", "columns"), (*((perm, _columns) for perm in permutations(_cells)), ([], _columns), ([], []))) +def 
test_different_cell_order_produces_same_result(get_sheet_mocker, config, row, columns): + sheet = SmartSheetAPIWrapper(config) + sheet_mock = Mock(rows=[Mock(cells=row)] if row else [], columns=columns) + get_sheet_mocker(sheet, data=Mock(return_value=sheet_mock)) + + records = sheet.read_records(from_dt="2020-01-01T00:00:00Z") + expected_records = [] if not row else [{"id": "11", "first_name": "Leonardo", "last_name": "Dicaprio", "modifiedAt": ANY}] + assert list(records) == expected_records diff --git a/airbyte-integrations/connectors/source-smartsheets/unit_tests/test_source.py b/airbyte-integrations/connectors/source-smartsheets/unit_tests/test_source.py index 09bfcec10c48..4b71c4d0cedd 100644 --- a/airbyte-integrations/connectors/source-smartsheets/unit_tests/test_source.py +++ b/airbyte-integrations/connectors/source-smartsheets/unit_tests/test_source.py @@ -2,45 +2,23 @@ # Copyright (c) 2021 Airbyte, Inc., all rights reserved. # -import json -from itertools import permutations +import logging from unittest.mock import Mock import pytest from source_smartsheets.source import SourceSmartsheets +from source_smartsheets.streams import SmartsheetStream -@pytest.fixture -def config(): - return {"access_token": "token", "spreadsheet_id": "id"} - - -@pytest.fixture(name="catalog") -def configured_catalog(): - stream_mock = Mock() - stream_mock.name = "test" # cannot be used in __init__ - return Mock(streams=[Mock(stream=stream_mock)]) - - -_columns = [ - {"id": "1101932201830276", "title": "id", "type": "TEXT_NUMBER"}, - {"id": "5605531829200772", "title": "first_name", "type": "TEXT_NUMBER"}, - {"id": "3353732015515524", "title": "last_name", "type": "TEXT_NUMBER"}, -] - - -_cells = [ - {"columnId": "1101932201830276", "value": "11"}, - {"columnId": "5605531829200772", "value": "Leonardo"}, - {"columnId": "3353732015515524", "value": "Dicaprio"}, -] +@pytest.mark.parametrize("connection_status", ((True, None), (False, "Internal Server Error"))) +def test_check_connection(mocker, config, connection_status): + mocker.patch("source_smartsheets.source.SmartSheetAPIWrapper.check_connection", Mock(return_value=connection_status)) + source = SourceSmartsheets() + assert source.check_connection(logger=logging.getLogger(), config=config) == connection_status -@pytest.mark.parametrize(("row", "columns"), (*((perm, _columns) for perm in permutations(_cells)), ([], _columns), ([], []))) -def test_different_cell_order_produces_one_result(mocker, config, catalog, row, columns): - sheet = json.dumps({"name": "test", "totalRowCount": 3, "columns": columns, "rows": [{"cells": row}] if row else []}) - mocker.patch("smartsheet.Smartsheet", Mock(return_value=Mock(Sheets=Mock(get_sheet=Mock(return_value=sheet))))) +def test_streams(config): source = SourceSmartsheets() - records = [message.record.data for message in source.read(logger=Mock(), config=config, catalog=catalog, state={})] - expected_records = [] if not row else [{"id": "11", "first_name": "Leonardo", "last_name": "Dicaprio"}] - assert list(records) == expected_records + streams_iter = iter(source.streams(config)) + assert type(next(streams_iter)) == SmartsheetStream + assert next(streams_iter, None) is None diff --git a/airbyte-integrations/connectors/source-smartsheets/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-smartsheets/unit_tests/test_streams.py new file mode 100644 index 000000000000..eec95654047d --- /dev/null +++ b/airbyte-integrations/connectors/source-smartsheets/unit_tests/test_streams.py @@ -0,0 +1,21 @@ +# +# 
Copyright (c) 2021 Airbyte, Inc., all rights reserved. +# + +import datetime +from unittest.mock import Mock + +from airbyte_cdk.models import SyncMode +from source_smartsheets.streams import SmartsheetStream + + +def test_state_saved_after_each_record(config, get_sheet_mocker): + today_dt = datetime.datetime.now(datetime.timezone.utc) + before_yesterday = (today_dt - datetime.timedelta(days=2)).isoformat(timespec="seconds") + today = today_dt.isoformat(timespec="seconds") + record = {"id": "1", "name": "Georgio", "last_name": "Armani", "modifiedAt": today} + stream = SmartsheetStream(Mock(read_records=Mock(return_value=[record])), config) + stream.state = {stream.cursor_field: before_yesterday} + for _ in stream.read_records(SyncMode.incremental): + assert _ == record + assert stream.state == {stream.cursor_field: today} diff --git a/airbyte-integrations/connectors/source-snowflake/Dockerfile b/airbyte-integrations/connectors/source-snowflake/Dockerfile index 2756403f5960..f2d1461977b5 100644 --- a/airbyte-integrations/connectors/source-snowflake/Dockerfile +++ b/airbyte-integrations/connectors/source-snowflake/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION source-snowflake COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.10 +LABEL io.airbyte.version=0.1.11 LABEL io.airbyte.name=airbyte/source-snowflake diff --git a/airbyte-integrations/connectors/source-snowflake/README.md b/airbyte-integrations/connectors/source-snowflake/README.md index 124f7a6c5ccf..759a0a649388 100644 --- a/airbyte-integrations/connectors/source-snowflake/README.md +++ b/airbyte-integrations/connectors/source-snowflake/README.md @@ -13,10 +13,28 @@ "warehouse": "AIRBYTE_WAREHOUSE", "database": "AIRBYTE_DATABASE", "schema": "AIRBYTE_SCHEMA", - "username": "AIRBYTE_USER", - "password": "SOMEPASSWORD" + "credentials": { + "auth_type": "username/password", + "username": "AIRBYTE_USER", + "password": "SOMEPASSWORD" + } +} +``` +3. Create a file at `secrets/config_auth.json` with the following format: +``` +{ + "host": "ACCOUNT.REGION.PROVIDER.snowflakecomputing.com", + "role": "AIRBYTE_ROLE", + "warehouse": "AIRBYTE_WAREHOUSE", + "database": "AIRBYTE_DATABASE", + "schema": "AIRBYTE_SCHEMA", + "credentials": { + "auth_type": "OAuth", + "client_id": "client_id", + "client_secret": "client_secret", + "refresh_token": "refresh_token" + } } ``` - ## For Airbyte employees Put the contents of the `Snowflake Insert Test Creds` secret on Lastpass into `secrets/config.json` to be able to run integration tests locally.
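Aside: the non-credential fields in the sample configs above feed the new `SnowflakeDataSourceUtils.buildJDBCUrl` helper introduced later in this diff. A minimal sketch of that mapping follows; it is not part of the changeset, the `ConfigUrlSketch` class name is made up for illustration, and `Jsons` is Airbyte's existing JSON utility.
```
// Illustrative sketch only -- not part of this PR. Shows how the sample
// secrets/config.json above is turned into a JDBC URL by the new
// SnowflakeDataSourceUtils.buildJDBCUrl (added further down in this diff).
import com.fasterxml.jackson.databind.JsonNode;
import io.airbyte.commons.json.Jsons;
import io.airbyte.integrations.source.snowflake.SnowflakeDataSourceUtils;

public class ConfigUrlSketch {

  public static void main(final String[] args) {
    // sample values from the README; credentials are not used by buildJDBCUrl
    final JsonNode config = Jsons.deserialize("""
        {
          "host": "ACCOUNT.REGION.PROVIDER.snowflakecomputing.com",
          "role": "AIRBYTE_ROLE",
          "warehouse": "AIRBYTE_WAREHOUSE",
          "database": "AIRBYTE_DATABASE",
          "schema": "AIRBYTE_SCHEMA"
        }
        """);
    // Expected shape:
    // jdbc:snowflake://ACCOUNT.REGION.PROVIDER.snowflakecomputing.com/?role=AIRBYTE_ROLE
    //   &warehouse=AIRBYTE_WAREHOUSE&database=AIRBYTE_DATABASE&schema=AIRBYTE_SCHEMA
    //   &JDBC_QUERY_RESULT_FORMAT=JSON&CLIENT_SESSION_KEEP_ALIVE=true
    System.out.println(SnowflakeDataSourceUtils.buildJDBCUrl(config));
  }
}
```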
diff --git a/airbyte-integrations/connectors/source-snowflake/build.gradle b/airbyte-integrations/connectors/source-snowflake/build.gradle index 84f73f77e172..c641b62056d6 100644 --- a/airbyte-integrations/connectors/source-snowflake/build.gradle +++ b/airbyte-integrations/connectors/source-snowflake/build.gradle @@ -17,6 +17,7 @@ dependencies { implementation project(':airbyte-protocol:models') implementation files(project(':airbyte-integrations:bases:base-java').airbyteDocker.outputs) implementation group: 'net.snowflake', name: 'snowflake-jdbc', version: '3.13.9' + implementation 'com.zaxxer:HikariCP:5.0.1' testImplementation testFixtures(project(':airbyte-integrations:connectors:source-jdbc')) testImplementation project(':airbyte-test-utils') diff --git a/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeDataSourceUtils.java b/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeDataSourceUtils.java new file mode 100644 index 000000000000..9d0351943116 --- /dev/null +++ b/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeDataSourceUtils.java @@ -0,0 +1,197 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.source.snowflake; + +import static java.util.stream.Collectors.joining; + +import com.fasterxml.jackson.databind.JsonNode; +import com.zaxxer.hikari.HikariConfig; +import com.zaxxer.hikari.HikariDataSource; +import io.airbyte.commons.json.Jsons; +import java.io.IOException; +import java.net.URI; +import java.net.URLEncoder; +import java.net.http.HttpClient; +import java.net.http.HttpRequest; +import java.net.http.HttpRequest.BodyPublisher; +import java.net.http.HttpRequest.BodyPublishers; +import java.net.http.HttpResponse; +import java.nio.charset.StandardCharsets; +import java.time.Duration; +import java.util.Base64; +import java.util.HashMap; +import java.util.Map; +import java.util.Properties; +import java.util.concurrent.TimeUnit; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class SnowflakeDataSourceUtils { + + public static final String OAUTH_METHOD = "OAuth"; + public static final String USERNAME_PASSWORD_METHOD = "username/password"; + public static final String UNRECOGNIZED = "Unrecognized"; + + private static final Logger LOGGER = LoggerFactory.getLogger(SnowflakeDataSourceUtils.class); + private static final int PAUSE_BETWEEN_TOKEN_REFRESH_MIN = 7; // snowflake access token's TTL is 10min and can't be modified + private static final String REFRESH_TOKEN_URL = "https://%s/oauth/token-request"; + private static final HttpClient httpClient = HttpClient.newBuilder() + .version(HttpClient.Version.HTTP_2) + .connectTimeout(Duration.ofSeconds(10)) + .build(); + + /** + * Snowflake OAuth access tokens expire in 10 minutes. When a sync runs longer than that, the + * 'token' property must be refreshed after the connection pool has started. + * HikariDataSource supports updating data source properties at runtime. + * + * @param config source config JSON + * @return datasource + */ + public static HikariDataSource createDataSource(final JsonNode config) { + HikariDataSource dataSource = new HikariDataSource(); + dataSource.setJdbcUrl(buildJDBCUrl(config)); + + if (config.has("credentials")) { + JsonNode credentials = config.get("credentials"); + final String authType = credentials.has("auth_type") ?
credentials.get("auth_type").asText() : UNRECOGNIZED; + switch (authType) { + case OAUTH_METHOD -> { + LOGGER.info("Authorization mode is OAuth"); + dataSource.setDataSourceProperties(buildAuthProperties(config)); + // thread to keep the refresh token up to date + SnowflakeSource.SCHEDULED_EXECUTOR_SERVICE.scheduleAtFixedRate( + getAccessTokenTask(dataSource), + PAUSE_BETWEEN_TOKEN_REFRESH_MIN, PAUSE_BETWEEN_TOKEN_REFRESH_MIN, TimeUnit.MINUTES); + } + case USERNAME_PASSWORD_METHOD -> { + LOGGER.info("Authorization mode is 'Username and password'"); + populateUsernamePasswordConfig(dataSource, config.get("credentials")); + } + default -> throw new IllegalArgumentException("Unrecognized auth type: " + authType); + } + } else { + LOGGER.info("Authorization mode is deprecated 'Username and password'. Please update your source configuration"); + populateUsernamePasswordConfig(dataSource, config); + } + + return dataSource; + } + + /** + * Method to make request for a new access token using refresh token and client credentials. + * + * @return access token + */ + public static String getAccessTokenUsingRefreshToken(final String hostName, + final String clientId, + final String clientSecret, + final String refreshToken) + throws IOException { + final var refreshTokenUri = String.format(REFRESH_TOKEN_URL, hostName); + final Map requestBody = new HashMap<>(); + requestBody.put("grant_type", "refresh_token"); + requestBody.put("refresh_token", refreshToken); + + try { + final BodyPublisher bodyPublisher = BodyPublishers.ofString(requestBody.keySet().stream() + .map(key -> key + "=" + URLEncoder.encode(requestBody.get(key), StandardCharsets.UTF_8)) + .collect(joining("&"))); + + final byte[] authorization = Base64.getEncoder() + .encode((clientId + ":" + clientSecret).getBytes(StandardCharsets.UTF_8)); + + final HttpRequest request = HttpRequest.newBuilder() + .POST(bodyPublisher) + .uri(URI.create(refreshTokenUri)) + .header("Content-Type", "application/x-www-form-urlencoded") + .header("Accept", "application/json") + .header("Authorization", "Basic " + new String(authorization, StandardCharsets.UTF_8)) + .build(); + + final HttpResponse response = httpClient.send(request, + HttpResponse.BodyHandlers.ofString()); + + final JsonNode jsonResponse = Jsons.deserialize(response.body()); + if (jsonResponse.has("access_token")) { + return jsonResponse.get("access_token").asText(); + } else { + LOGGER.error("Failed to obtain accessToken using refresh token. 
" + jsonResponse); + throw new RuntimeException( + "Failed to obtain accessToken using refresh token."); + } + } catch (final InterruptedException e) { + throw new IOException("Failed to refreshToken", e); + } + } + + public static String buildJDBCUrl(JsonNode config) { + final StringBuilder jdbcUrl = new StringBuilder(String.format("jdbc:snowflake://%s/?", + config.get("host").asText())); + + // Add required properties + jdbcUrl.append(String.format( + "role=%s&warehouse=%s&database=%s&schema=%s&JDBC_QUERY_RESULT_FORMAT=%s&CLIENT_SESSION_KEEP_ALIVE=%s", + config.get("role").asText(), + config.get("warehouse").asText(), + config.get("database").asText(), + config.get("schema").asText(), + // Needed for JDK17 - see + // https://stackoverflow.com/questions/67409650/snowflake-jdbc-driver-internal-error-fail-to-retrieve-row-count-for-first-arrow + "JSON", + true)); + + // https://docs.snowflake.com/en/user-guide/jdbc-configure.html#jdbc-driver-connection-string + if (config.has("jdbc_url_params")) { + jdbcUrl.append("&").append(config.get("jdbc_url_params").asText()); + } + return jdbcUrl.toString(); + } + + private static Runnable getAccessTokenTask(final HikariDataSource dataSource) { + return () -> { + LOGGER.info("Refresh token process started"); + var props = dataSource.getDataSourceProperties(); + try { + var token = getAccessTokenUsingRefreshToken(props.getProperty("host"), + props.getProperty("client_id"), props.getProperty("client_secret"), + props.getProperty("refresh_token")); + props.setProperty("token", token); + dataSource.setDataSourceProperties(props); + LOGGER.info("New access token has been obtained"); + } catch (IOException e) { + LOGGER.error("Failed to obtain a fresh accessToken:" + e); + } + }; + } + + public static Properties buildAuthProperties(JsonNode config) { + Properties properties = new Properties(); + try { + var credentials = config.get("credentials"); + properties.setProperty("client_id", credentials.get("client_id").asText()); + properties.setProperty("client_secret", credentials.get("client_secret").asText()); + properties.setProperty("refresh_token", credentials.get("refresh_token").asText()); + properties.setProperty("host", config.get("host").asText()); + properties.put("authenticator", "oauth"); + properties.put("account", config.get("host").asText()); + + String accessToken = getAccessTokenUsingRefreshToken( + config.get("host").asText(), credentials.get("client_id").asText(), + credentials.get("client_secret").asText(), credentials.get("refresh_token").asText()); + + properties.put("token", accessToken); + } catch (IOException e) { + LOGGER.error("Request access token was failed with error" + e.getMessage()); + } + return properties; + } + + private static void populateUsernamePasswordConfig(HikariConfig hikariConfig, JsonNode config) { + hikariConfig.setUsername(config.get("username").asText()); + hikariConfig.setPassword(config.get("password").asText()); + } + +} diff --git a/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeSource.java b/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeSource.java index b404e4fc3b3b..33fe4f434671 100644 --- a/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeSource.java +++ b/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeSource.java @@ -4,14 +4,25 @@ package 
diff --git a/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeSource.java b/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeSource.java index b404e4fc3b3b..33fe4f434671 100644 --- a/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeSource.java +++ b/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeSource.java @@ -4,14 +4,25 @@ package io.airbyte.integrations.source.snowflake; +import static io.airbyte.integrations.source.snowflake.SnowflakeDataSourceUtils.OAUTH_METHOD; +import static io.airbyte.integrations.source.snowflake.SnowflakeDataSourceUtils.UNRECOGNIZED; +import static io.airbyte.integrations.source.snowflake.SnowflakeDataSourceUtils.USERNAME_PASSWORD_METHOD; + import com.fasterxml.jackson.databind.JsonNode; import com.google.common.collect.ImmutableMap; import io.airbyte.commons.json.Jsons; +import io.airbyte.db.jdbc.JdbcDatabase; +import io.airbyte.db.jdbc.StreamingJdbcDatabase; import io.airbyte.integrations.base.IntegrationRunner; import io.airbyte.integrations.base.Source; import io.airbyte.integrations.source.jdbc.AbstractJdbcSource; +import java.io.IOException; import java.sql.JDBCType; +import java.sql.SQLException; import java.util.Set; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import javax.sql.DataSource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -19,54 +30,79 @@ public class SnowflakeSource extends AbstractJdbcSource<JDBCType> implements Source { private static final Logger LOGGER = LoggerFactory.getLogger(SnowflakeSource.class); public static final String DRIVER_CLASS = "net.snowflake.client.jdbc.SnowflakeDriver"; + public static final ScheduledExecutorService SCHEDULED_EXECUTOR_SERVICE = Executors.newScheduledThreadPool(1); public SnowflakeSource() { - super(DRIVER_CLASS, new SnowflakeJdbcStreamingQueryConfiguration(), new SnowflakeSourceOperations()); + super(DRIVER_CLASS, new SnowflakeJdbcStreamingQueryConfiguration(), + new SnowflakeSourceOperations()); } public static void main(final String[] args) throws Exception { final Source source = new SnowflakeSource(); LOGGER.info("starting source: {}", SnowflakeSource.class); new IntegrationRunner(source).run(args); + SCHEDULED_EXECUTOR_SERVICE.shutdownNow(); LOGGER.info("completed source: {}", SnowflakeSource.class); } + @Override + public JdbcDatabase createDatabase(JsonNode config) throws SQLException { + final DataSource dataSource = SnowflakeDataSourceUtils.createDataSource(config); + var database = new StreamingJdbcDatabase(dataSource, new SnowflakeSourceOperations(), + new SnowflakeJdbcStreamingQueryConfiguration()); + quoteString = database.getMetaData().getIdentifierQuoteString(); + return database; + } + @Override public JsonNode toDatabaseConfig(final JsonNode config) { + final String jdbcUrl = SnowflakeDataSourceUtils.buildJDBCUrl(config); - final StringBuilder jdbcUrl = new StringBuilder(String.format("jdbc:snowflake://%s/?", - config.get("host").asText())); + if (config.has("credentials")) { + JsonNode credentials = config.get("credentials"); + final String authType = + credentials.has("auth_type") ? credentials.get("auth_type").asText() : UNRECOGNIZED; + return switch (authType) { + case OAUTH_METHOD -> buildOAuthConfig(config, jdbcUrl); + case USERNAME_PASSWORD_METHOD -> buildUsernamePasswordConfig(config.get("credentials"), + jdbcUrl); + default -> throw new IllegalArgumentException("Unrecognized auth type: " + authType); + }; + } else { + return buildUsernamePasswordConfig(config, jdbcUrl); + } + } - // Add required properties - jdbcUrl.append(String.format("role=%s&warehouse=%s&database=%s&schema=%s&JDBC_QUERY_RESULT_FORMAT=%s&CLIENT_SESSION_KEEP_ALIVE=%s", - config.get("role").asText(), - config.get("warehouse").asText(), - config.get("database").asText(), - config.get("schema").asText(), - // Needed for JDK17 - see - // https://stackoverflow.com/questions/67409650/snowflake-jdbc-driver-internal-error-fail-to-retrieve-row-count-for-first-arrow - "JSON", - true)); + @Override + public Set<String> getExcludedInternalNameSpaces() { + return Set.of( + "INFORMATION_SCHEMA"); + } - // https://docs.snowflake.com/en/user-guide/jdbc-configure.html#jdbc-driver-connection-string - if (config.has("jdbc_url_params")) { - jdbcUrl.append("&").append(config.get("jdbc_url_params").asText()); + private JsonNode buildOAuthConfig(JsonNode config, String jdbcUrl) { + final String accessToken; + var credentials = config.get("credentials"); + try { + accessToken = SnowflakeDataSourceUtils.getAccessTokenUsingRefreshToken( + config.get("host").asText(), credentials.get("client_id").asText(), + credentials.get("client_secret").asText(), credentials.get("refresh_token").asText()); + } catch (IOException e) { + throw new RuntimeException(e); } + final ImmutableMap.Builder<Object, Object> configBuilder = ImmutableMap.builder() + .put("connection_properties", + String.join(";", "authenticator=oauth", "token=" + accessToken)) + .put("jdbc_url", jdbcUrl); + return Jsons.jsonNode(configBuilder.build()); + } - LOGGER.info(jdbcUrl.toString()); - + private JsonNode buildUsernamePasswordConfig(JsonNode config, String jdbcUrl) { final ImmutableMap.Builder<Object, Object> configBuilder = ImmutableMap.builder() .put("username", config.get("username").asText()) .put("password", config.get("password").asText()) - .put("jdbc_url", jdbcUrl.toString()); - + .put("jdbc_url", jdbcUrl); + LOGGER.info(jdbcUrl); return Jsons.jsonNode(configBuilder.build()); } - @Override - public Set<String> getExcludedInternalNameSpaces() { - return Set.of( - "INFORMATION_SCHEMA"); - } - }
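Both buildOAuthConfig above and the scheduled refresh task in SnowflakeDataSourceUtils go through getAccessTokenUsingRefreshToken. As a quick standalone illustration of that exchange — a minimal sketch in which the host and OAuth client values are placeholders, not real credentials:

import io.airbyte.integrations.source.snowflake.SnowflakeDataSourceUtils;
import java.io.IOException;

public class TokenRefreshExample {

  public static void main(String[] args) {
    try {
      // POSTs grant_type=refresh_token to https://<host>/oauth/token-request,
      // authenticating with Basic base64(client_id:client_secret), and returns
      // the short-lived access token from the JSON response.
      final String accessToken = SnowflakeDataSourceUtils.getAccessTokenUsingRefreshToken(
          "accountname.us-east-2.aws.snowflakecomputing.com", // placeholder host
          "example-client-id", // placeholder OAuth client id
          "example-client-secret", // placeholder OAuth client secret
          "example-refresh-token"); // placeholder refresh token
      System.out.println("access token: " + accessToken);
    } catch (IOException e) {
      // Raised when the HTTP call fails or the response carries no access_token.
      e.printStackTrace();
    }
  }

}

In the connector itself this call is not made just once: the data source reschedules it on SCHEDULED_EXECUTOR_SERVICE every PAUSE_BETWEEN_TOKEN_REFRESH_MIN minutes, since the returned token expires after roughly ten minutes.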
diff --git a/airbyte-integrations/connectors/source-snowflake/src/main/resources/spec.json b/airbyte-integrations/connectors/source-snowflake/src/main/resources/spec.json index 95b989811537..689926366c68 100644 --- a/airbyte-integrations/connectors/source-snowflake/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/source-snowflake/src/main/resources/spec.json @@ -4,71 +4,183 @@ "$schema": "http://json-schema.org/draft-07/schema#", "title": "Snowflake Source Spec", "type": "object", - "required": [ "host", "role", "warehouse", "database", "schema", "username", "password" ], - "additionalProperties": false, + "required": ["host", "role", "warehouse", "database", "schema"], + "additionalProperties": true, "properties": { + "credentials": { + "title": "Authorization Method", + "type": "object", + "oneOf": [ + { + "type": "object", + "title": "OAuth2.0", + "order": 0, + "required": ["client_id", "client_secret", "auth_type"], + "properties": { + "auth_type": { + "type": "string", + "const": "OAuth", + "default": "OAuth", + "order": 0 + }, + "client_id": { + "type": "string", +
"title": "Client ID", + "description": "The Client ID of your Snowflake developer application.", + "airbyte_secret": true, + "order": 1 + }, + "client_secret": { + "type": "string", + "title": "Client Secret", + "description": "The Client Secret of your Snowflake developer application.", + "airbyte_secret": true, + "order": 2 + }, + "access_token": { + "type": "string", + "title": "Access Token", + "description": "Access Token for making authenticated requests.", + "airbyte_secret": true, + "order": 3 + }, + "refresh_token": { + "type": "string", + "title": "Refresh Token", + "description": "Refresh Token for making authenticated requests.", + "airbyte_secret": true, + "order": 4 + } + } + }, + { + "title": "Username and Password", + "type": "object", + "required": ["username", "password", "auth_type"], + "order": 1, + "properties": { + "auth_type": { + "type": "string", + "const": "username/password", + "default": "username/password", + "order": 0 + }, + "username": { + "description": "The username you created to allow Airbyte to access the database.", + "examples": ["AIRBYTE_USER"], + "type": "string", + "title": "Username", + "order": 1 + }, + "password": { + "description": "The password associated with the username.", + "type": "string", + "airbyte_secret": true, + "title": "Password", + "order": 2 + } + } + } + ], + "order": 0 + }, "host": { "description": "The host domain of the snowflake instance (must include the account, region, cloud environment, and end with snowflakecomputing.com).", "examples": ["accountname.us-east-2.aws.snowflakecomputing.com"], "type": "string", "title": "Account Name", - "order": 0 + "order": 1 }, "role": { "description": "The role you created for Airbyte to access Snowflake.", "examples": ["AIRBYTE_ROLE"], "type": "string", "title": "Role", - "order": 1 + "order": 2 }, "warehouse": { "description": "The warehouse you created for Airbyte to access data.", "examples": ["AIRBYTE_WAREHOUSE"], "type": "string", "title": "Warehouse", - "order": 2 + "order": 3 }, "database": { "description": "The database you created for Airbyte to access data.", "examples": ["AIRBYTE_DATABASE"], "type": "string", "title": "Database", - "order": 3 + "order": 4 }, "schema": { "description": "The source Snowflake schema tables.", "examples": ["AIRBYTE_SCHEMA"], "type": "string", "title": "Schema", - "order": 4 - }, - "username": { - "description": "The username you created to allow Airbyte to access the database.", - "examples": ["AIRBYTE_USER"], - "type": "string", - "title": "Username", "order": 5 }, - "password": { - "description": "The password associated with the username.", - "type": "string", - "airbyte_secret": true, - "title": "Password", - "order": 6 - }, "jdbc_url_params": { "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. 
(example: key1=value1&key2=value2&key3=value3).", "title": "JDBC URL Params", "type": "string", - "order": 7 + "order": 6 + } + } + }, + "advanced_auth": { + "auth_flow_type": "oauth2.0", + "predicate_key": ["credentials", "auth_type"], + "predicate_value": "OAuth", + "oauth_config_specification": { + "oauth_user_input_from_connector_config_specification": { + "type": "object", + "additionalProperties": false, + "properties": { + "host": { + "type": "string", + "path_in_connector_config": ["host"] + } + } + }, + "complete_oauth_output_specification": { + "type": "object", + "additionalProperties": false, + "properties": { + "access_token": { + "type": "string", + "path_in_connector_config": ["credentials", "access_token"] + }, + "refresh_token": { + "type": "string", + "path_in_connector_config": ["credentials", "refresh_token"] + } + } + }, + "complete_oauth_server_input_specification": { + "type": "object", + "additionalProperties": false, + "properties": { + "client_id": { + "type": "string" + }, + "client_secret": { + "type": "string" + } + } + }, + "complete_oauth_server_output_specification": { + "type": "object", + "additionalProperties": false, + "properties": { + "client_id": { + "type": "string", + "path_in_connector_config": ["credentials", "client_id"] + }, + "client_secret": { + "type": "string", + "path_in_connector_config": ["credentials", "client_secret"] + } + } } } } diff --git a/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeJdbcSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeJdbcSourceAcceptanceTest.java index 011c4aad414b..9c81721ebc70 100644 --- a/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeJdbcSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeJdbcSourceAcceptanceTest.java @@ -4,19 +4,25 @@ package io.airbyte.integrations.io.airbyte.integration_tests.sources; +import static org.junit.jupiter.api.Assertions.assertEquals; + import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.collect.ImmutableSet; import io.airbyte.commons.io.IOs; import io.airbyte.commons.json.Jsons; import io.airbyte.integrations.source.jdbc.AbstractJdbcSource; import io.airbyte.integrations.source.jdbc.test.JdbcSourceAcceptanceTest; import io.airbyte.integrations.source.snowflake.SnowflakeSource; +import io.airbyte.protocol.models.AirbyteConnectionStatus; +import io.airbyte.protocol.models.AirbyteConnectionStatus.Status; import java.math.BigDecimal; import java.nio.file.Path; import java.sql.JDBCType; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; class SnowflakeJdbcSourceAcceptanceTest extends JdbcSourceAcceptanceTest { @@ -26,10 +32,6 @@ class SnowflakeJdbcSourceAcceptanceTest extends JdbcSourceAcceptanceTest { static void init() { snConfig = Jsons .deserialize(IOs.readFile(Path.of("secrets/config.json"))); - } - - @BeforeEach - public void setup() throws Exception { // due to case sensitiveness in SnowflakeDB SCHEMA_NAME = "JDBC_INTEGRATION_TEST1"; SCHEMA_NAME2 = 
"JDBC_INTEGRATION_TEST2"; @@ -49,7 +51,10 @@ public void setup() throws Exception { ID_VALUE_3 = new BigDecimal(3); ID_VALUE_4 = new BigDecimal(4); ID_VALUE_5 = new BigDecimal(5); + } + @BeforeEach + public void setup() throws Exception { super.setup(); } @@ -79,4 +84,11 @@ public AbstractJdbcSource getJdbcSource() { return new SnowflakeSource(); } + @Test + void testCheckFailure() throws Exception { + ((ObjectNode) config.get("credentials")).put("password", "fake"); + final AirbyteConnectionStatus actual = source.check(config); + assertEquals(Status.FAILED, actual.getStatus()); + } + } diff --git a/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceAcceptanceTest.java index 9f7047ea306c..9b676083f03e 100644 --- a/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceAcceptanceTest.java @@ -4,7 +4,10 @@ package io.airbyte.integrations.io.airbyte.integration_tests.sources; +import static org.junit.jupiter.api.Assertions.assertEquals; + import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.collect.Lists; import io.airbyte.commons.io.IOs; import io.airbyte.commons.json.Jsons; @@ -26,6 +29,7 @@ import java.util.HashMap; import java.util.Map; import org.apache.commons.lang3.RandomStringUtils; +import org.junit.jupiter.api.Test; public class SnowflakeSourceAcceptanceTest extends SourceAcceptanceTest { @@ -35,8 +39,8 @@ public class SnowflakeSourceAcceptanceTest extends SourceAcceptanceTest { private static final String STREAM_NAME2 = "ID_AND_NAME2"; // config which refers to the schema that the test is being run in. - private JsonNode config; - private JdbcDatabase database; + protected JsonNode config; + protected JdbcDatabase database; @Override protected String getImageName() { @@ -90,17 +94,7 @@ protected JsonNode getState() { // for each test we create a new schema in the database. run the test in there and then remove it. 
@Override protected void setupEnvironment(final TestDestinationEnv environment) throws Exception { - config = Jsons.clone(getStaticConfig()); - database = Databases.createJdbcDatabase( - config.get("username").asText(), - config.get("password").asText(), - String.format("jdbc:snowflake://%s/", - config.get("host").asText()), - SnowflakeSource.DRIVER_CLASS, - Map.of("role", config.get("role").asText(), - "warehouse", config.get("warehouse").asText(), - "database", config.get("database").asText())); - + database = setupDataBase(); final String createSchemaQuery = String.format("CREATE SCHEMA IF NOT EXISTS %s", SCHEMA_NAME); final String createTableQuery1 = String .format("CREATE OR REPLACE TABLE %s.%s (ID INTEGER, NAME VARCHAR(200))", SCHEMA_NAME, @@ -130,4 +124,30 @@ protected void tearDown(final TestDestinationEnv testEnv) throws Exception { database.close(); } + protected JdbcDatabase setupDataBase() { + config = Jsons.clone(getStaticConfig()); + return Databases.createJdbcDatabase( + config.get("credentials").get("username").asText(), + config.get("credentials").get("password").asText(), + String.format("jdbc:snowflake://%s/", + config.get("host").asText()), + SnowflakeSource.DRIVER_CLASS, + Map.of("role", config.get("role").asText(), + "warehouse", config.get("warehouse").asText(), + "database", config.get("database").asText())); + } + + @Test + public void testBackwardCompatibilityAfterAddingOAuth() throws Exception { + final JsonNode deprecatedStyleConfig = Jsons.clone(config); + final JsonNode password = deprecatedStyleConfig.get("credentials").get("password"); + final JsonNode username = deprecatedStyleConfig.get("credentials").get("username"); + + ((ObjectNode) deprecatedStyleConfig).remove("credentials"); + ((ObjectNode) deprecatedStyleConfig).set("password", password); + ((ObjectNode) deprecatedStyleConfig).set("username", username); + + assertEquals("SUCCEEDED", runCheckAndGetStatusAsString(deprecatedStyleConfig).toUpperCase()); + } + } diff --git a/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceAuthAcceptanceTest.java b/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceAuthAcceptanceTest.java new file mode 100644 index 000000000000..bdcc57e9e08c --- /dev/null +++ b/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceAuthAcceptanceTest.java @@ -0,0 +1,94 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.io.airbyte.integration_tests.sources; + +import com.fasterxml.jackson.databind.JsonNode; +import com.zaxxer.hikari.HikariDataSource; +import io.airbyte.commons.io.IOs; +import io.airbyte.commons.json.Jsons; +import io.airbyte.db.jdbc.JdbcDatabase; +import io.airbyte.db.jdbc.JdbcUtils; +import io.airbyte.db.jdbc.StreamingJdbcDatabase; +import io.airbyte.integrations.source.snowflake.SnowflakeDataSourceUtils; +import io.airbyte.integrations.source.snowflake.SnowflakeJdbcStreamingQueryConfiguration; +import java.io.IOException; +import java.nio.file.Path; +import java.util.Properties; +import javax.sql.DataSource; + +public class SnowflakeSourceAuthAcceptanceTest extends SnowflakeSourceAcceptanceTest { + + @Override + protected JdbcDatabase setupDataBase() { + config = getStaticConfig(); + final DataSource dataSource = createDataSource(getStaticConfig()); + return new StreamingJdbcDatabase(dataSource, + JdbcUtils.getDefaultSourceOperations(), + new SnowflakeJdbcStreamingQueryConfiguration()); + } + + private HikariDataSource createDataSource(final JsonNode config) { + HikariDataSource dataSource = new HikariDataSource(); + Properties properties = new Properties(); + + final StringBuilder jdbcUrl = new StringBuilder( + String.format("jdbc:snowflake://%s/?", config.get("host").asText())); + jdbcUrl.append(String.format( + "role=%s&warehouse=%s&database=%s&schema=%s&JDBC_QUERY_RESULT_FORMAT=%s&CLIENT_SESSION_KEEP_ALIVE=%s", + config.get("role").asText(), + config.get("warehouse").asText(), + config.get("database").asText(), + config.get("schema").asText(), + // Needed for JDK17 - see + // https://stackoverflow.com/questions/67409650/snowflake-jdbc-driver-internal-error-fail-to-retrieve-row-count-for-first-arrow + "JSON", + true)); + if (config.has("jdbc_url_params")) { + jdbcUrl.append("&").append(config.get("jdbc_url_params").asText()); + } + + var credentials = config.get("credentials"); + try { + properties.setProperty("client_id", credentials.get("client_id").asText()); + properties.setProperty("client_secret", credentials.get("client_secret").asText()); + properties.setProperty("refresh_token", credentials.get("refresh_token").asText()); + properties.setProperty("host", config.get("host").asText()); + var accessToken = SnowflakeDataSourceUtils.getAccessTokenUsingRefreshToken( + config.get("host").asText(), credentials.get("client_id").asText(), + credentials.get("client_secret").asText(), credentials.get("refresh_token").asText()); + properties.put("authenticator", "oauth"); + properties.put("token", accessToken); + } catch (IOException e) { + throw new RuntimeException(e); + } + + properties.put("warehouse", config.get("warehouse").asText()); + properties.put("account", config.get("host").asText()); + properties.put("role", config.get("role").asText()); + // allows queries to contain any number of statements + properties.put("MULTI_STATEMENT_COUNT", "0"); + // https://docs.snowflake.com/en/user-guide/jdbc-parameters.html#application + // identify airbyte traffic to snowflake to enable partnership & optimization opportunities + properties.put("dataSource.application", "airbyte"); + // Needed for JDK17 - see + // https://stackoverflow.com/questions/67409650/snowflake-jdbc-driver-internal-error-fail-to-retrieve-row-count-for-first-arrow + properties.put("JDBC_QUERY_RESULT_FORMAT", "JSON"); + + dataSource.setDriverClassName("net.snowflake.client.jdbc.SnowflakeDriver"); + dataSource.setJdbcUrl(jdbcUrl.toString()); + dataSource.setDataSourceProperties(properties); +
return dataSource; + } + + JsonNode getStaticConfig() { + return Jsons + .deserialize(IOs.readFile(Path.of("secrets/config_auth.json"))); + } + + @Override + public void testBackwardCompatibilityAfterAddingOAuth() throws Exception { + // this test case is not valid for OAuth method + } +} diff --git a/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceDatatypeTest.java b/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceDatatypeTest.java index bf739e15246b..df49c9884d3f 100644 --- a/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceDatatypeTest.java +++ b/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceDatatypeTest.java @@ -51,8 +51,8 @@ protected Database setupDatabase() throws Exception { private Database getDatabase() { return Databases.createDatabase( - config.get("username").asText(), - config.get("password").asText(), + config.get("credentials").get("username").asText(), + config.get("credentials").get("password").asText(), String.format("jdbc:snowflake://%s/", config.get("host").asText()), SnowflakeSource.DRIVER_CLASS, diff --git a/airbyte-integrations/connectors/source-snowflake/src/test/java/io/airbyte/integrations/source/snowflake/SnowflakeDataSourceUtilsTest.java b/airbyte-integrations/connectors/source-snowflake/src/test/java/io/airbyte/integrations/source/snowflake/SnowflakeDataSourceUtilsTest.java new file mode 100644 index 000000000000..bf7080d82b0a --- /dev/null +++ b/airbyte-integrations/connectors/source-snowflake/src/test/java/io/airbyte/integrations/source/snowflake/SnowflakeDataSourceUtilsTest.java @@ -0,0 +1,55 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.source.snowflake; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +import io.airbyte.commons.json.Jsons; +import org.junit.jupiter.api.Test; + +class SnowflakeDataSourceUtilsTest { + + private final String config = """ + { + "host": "host", + "role": "role", + "schema": "SOURCE_SCHEMA", + "database": "DATABASE", + "warehouse": "WAREHOUSE", + "credentials": { + "auth_type": "OAuth", + "client_id": "someid", + "access_token": "**********", + "client_secret": "clientSecret", + "refresh_token": "token" + } + } + """; + private final String expectedJdbcUrl = + "jdbc:snowflake://host/?role=role&warehouse=WAREHOUSE&database=DATABASE&schema=SOURCE_SCHEMA&JDBC_QUERY_RESULT_FORMAT=JSON&CLIENT_SESSION_KEEP_ALIVE=true"; + + @Test + void testBuildJDBCUrl() { + JsonNode expectedConfig = Jsons.deserialize(config); + + String jdbcURL = SnowflakeDataSourceUtils.buildJDBCUrl(expectedConfig); + + assertEquals(expectedJdbcUrl, jdbcURL); + } + + @Test + void testBuildJDBCUrlWithParams() { + JsonNode expectedConfig = Jsons.deserialize(config); + String params = "someParameter1¶m2=someParameter2"; + ((ObjectNode) expectedConfig).put("jdbc_url_params", params); + + String jdbcURL = SnowflakeDataSourceUtils.buildJDBCUrl(expectedConfig); + + assertEquals(expectedJdbcUrl + "&" + params, jdbcURL); + } + +} diff --git a/airbyte-integrations/connectors/source-tiktok-marketing/Dockerfile b/airbyte-integrations/connectors/source-tiktok-marketing/Dockerfile index 690dfa479c14..5524acd9717b 100644 --- a/airbyte-integrations/connectors/source-tiktok-marketing/Dockerfile +++ b/airbyte-integrations/connectors/source-tiktok-marketing/Dockerfile @@ -32,5 +32,5 @@ COPY source_tiktok_marketing ./source_tiktok_marketing ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.6 +LABEL io.airbyte.version=0.1.7 LABEL io.airbyte.name=airbyte/source-tiktok-marketing diff --git a/airbyte-integrations/connectors/source-tiktok-marketing/integration_tests/spec.json b/airbyte-integrations/connectors/source-tiktok-marketing/integration_tests/spec.json index bd914e8e5d62..da6cad26a536 100644 --- a/airbyte-integrations/connectors/source-tiktok-marketing/integration_tests/spec.json +++ b/airbyte-integrations/connectors/source-tiktok-marketing/integration_tests/spec.json @@ -5,26 +5,10 @@ "title": "TikTok Marketing Source Spec", "type": "object", "properties": { - "start_date": { - "title": "Start Date", - "description": "The Start Date in format: YYYY-MM-DD. Any data before this date will not be replicated. If this parameter is not set, all data will be replicated.", - "default": "2016-09-01", - "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}$", - "order": 0, - "type": "string" - }, - "report_granularity": { - "title": "Report Granularity", - "description": "Which time granularity should be grouped by; for LIFETIME there will be no grouping. 
diff --git a/airbyte-integrations/connectors/source-tiktok-marketing/Dockerfile b/airbyte-integrations/connectors/source-tiktok-marketing/Dockerfile index 690dfa479c14..5524acd9717b 100644 --- a/airbyte-integrations/connectors/source-tiktok-marketing/Dockerfile +++ b/airbyte-integrations/connectors/source-tiktok-marketing/Dockerfile @@ -32,5 +32,5 @@ COPY source_tiktok_marketing ./source_tiktok_marketing ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.6 +LABEL io.airbyte.version=0.1.7 LABEL io.airbyte.name=airbyte/source-tiktok-marketing diff --git a/airbyte-integrations/connectors/source-tiktok-marketing/integration_tests/spec.json b/airbyte-integrations/connectors/source-tiktok-marketing/integration_tests/spec.json index bd914e8e5d62..da6cad26a536 100644 --- a/airbyte-integrations/connectors/source-tiktok-marketing/integration_tests/spec.json +++ b/airbyte-integrations/connectors/source-tiktok-marketing/integration_tests/spec.json @@ -5,26 +5,10 @@ "title": "TikTok Marketing Source Spec", "type": "object", "properties": { - "start_date": { - "title": "Start Date", - "description": "The Start Date in format: YYYY-MM-DD. Any data before this date will not be replicated. If this parameter is not set, all data will be replicated.", - "default": "2016-09-01", - "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}$", - "order": 0, - "type": "string" - }, - "report_granularity": { - "title": "Report Granularity", - "description": "Which time granularity should be grouped by; for LIFETIME there will be no grouping. This option is used for reports' streams only.", - "default": "DAY", - "enum": ["LIFETIME", "DAY", "HOUR"], - "order": 1, - "type": "string" - }, "credentials": { - "title": "Authorization Method", + "title": "Authentication *", "default": {}, - "order": 3, + "order": 0, "type": "object", "oneOf": [ { @@ -113,6 +97,22 @@ "required": ["advertiser_id", "access_token"] } ] + }, + "start_date": { + "title": "Start Date *", + "description": "The Start Date in format: YYYY-MM-DD. Any data before this date will not be replicated. If this parameter is not set, all data will be replicated.", + "default": "2016-09-01", + "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}$", + "order": 1, + "type": "string" + }, + "report_granularity": { + "title": "Report Granularity *", + "description": "Which time granularity should be grouped by; for LIFETIME there will be no grouping. This option is used for reports' streams only.", + "default": "DAY", + "enum": ["LIFETIME", "DAY", "HOUR"], + "order": 2, + "type": "string" + } } }, diff --git a/airbyte-integrations/connectors/source-tiktok-marketing/source_tiktok_marketing/spec.py b/airbyte-integrations/connectors/source-tiktok-marketing/source_tiktok_marketing/spec.py index 79019570f0ce..f33e829befff 100644 --- a/airbyte-integrations/connectors/source-tiktok-marketing/source_tiktok_marketing/spec.py +++ b/airbyte-integrations/connectors/source-tiktok-marketing/source_tiktok_marketing/spec.py @@ -57,26 +57,26 @@ class SourceTiktokMarketingSpec(BaseModel): class Config: title = "TikTok Marketing Source Spec" + credentials: Union[OauthCredSpec, ProductionEnvSpec, SandboxEnvSpec] = Field( + title="Authentication *", order=0, default={}, type="object" + ) + start_date: str = Field( - title="Start Date", + title="Start Date *", default=DEFAULT_START_DATE, pattern="^[0-9]{4}-[0-9]{2}-[0-9]{2}$", description="The Start Date in format: YYYY-MM-DD. Any data before this date will not be replicated. " "If this parameter is not set, all data will be replicated.", - order=0, + order=1, ) report_granularity: str = Field( - title="Report Granularity", + title="Report Granularity *", description="Which time granularity should be grouped by; for LIFETIME there will be no grouping.
" "This option is used for reports' streams only.", default=ReportGranularity.default().value, enum=[g.value for g in ReportGranularity], - order=1, - ) - - credentials: Union[OauthCredSpec, ProductionEnvSpec, SandboxEnvSpec] = Field( - title="Authorization Method", order=3, default={}, type="object" + order=2, ) @classmethod diff --git a/airbyte-metrics/reporter/Dockerfile b/airbyte-metrics/reporter/Dockerfile index 024bbb22fd34..2e71cbe04746 100644 --- a/airbyte-metrics/reporter/Dockerfile +++ b/airbyte-metrics/reporter/Dockerfile @@ -1,7 +1,7 @@ ARG JDK_VERSION=17.0.1 FROM openjdk:${JDK_VERSION}-slim AS metrics-reporter -ARG VERSION=0.36.3-alpha +ARG VERSION=0.36.4-alpha ENV APPLICATION airbyte-metrics-reporter ENV VERSION ${VERSION} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java index 9fb4056af3f5..06723d5b0ef3 100644 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java +++ b/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java @@ -63,6 +63,7 @@ public OAuthImplementationFactory(final ConfigRepository configRepository, final .put("airbyte/source-shopify", new ShopifyOAuthFlow(configRepository, httpClient)) .put("airbyte/source-tiktok-marketing", new TikTokMarketingOAuthFlow(configRepository, httpClient)) .put("airbyte/destination-snowflake", new DestinationSnowflakeOAuthFlow(configRepository, httpClient)) + .put("airbyte/source-snowflake", new SourceSnowflakeOAuthFlow(configRepository, httpClient)) .build(); } diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/SourceSnowflakeOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/SourceSnowflakeOAuthFlow.java new file mode 100644 index 000000000000..d9c976cf5ea3 --- /dev/null +++ b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/SourceSnowflakeOAuthFlow.java @@ -0,0 +1,144 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.oauth.flows; + +import com.fasterxml.jackson.databind.JsonNode; +import com.google.common.annotations.VisibleForTesting; +import com.google.common.collect.ImmutableMap; +import io.airbyte.commons.json.Jsons; +import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.oauth.BaseOAuth2Flow; +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.http.HttpClient; +import java.net.http.HttpRequest; +import java.net.http.HttpResponse; +import java.nio.charset.StandardCharsets; +import java.util.Base64; +import java.util.HashMap; +import java.util.Map; +import java.util.UUID; +import java.util.function.Supplier; +import org.apache.http.client.utils.URIBuilder; + +public class SourceSnowflakeOAuthFlow extends BaseOAuth2Flow { + + private static final String AUTHORIZE_URL = "https://%s/oauth/authorize"; + private static final String ACCESS_TOKEN_URL = "https://%s/oauth/token-request"; + + public SourceSnowflakeOAuthFlow(ConfigRepository configRepository, HttpClient httpClient) { + super(configRepository, httpClient); + } + + @VisibleForTesting + public SourceSnowflakeOAuthFlow(ConfigRepository configRepository, HttpClient httpClient, final Supplier<String> stateSupplier) { + super(configRepository, httpClient, stateSupplier); + } + + @Override + protected String formatConsentUrl(UUID definitionId, + String clientId, + String redirectUrl, + JsonNode inputOAuthConfiguration) + throws IOException { + try { + return new URIBuilder(String.format(AUTHORIZE_URL, extractUrl(inputOAuthConfiguration))) + .addParameter("client_id", clientId) + .addParameter("redirect_uri", redirectUrl) + .addParameter("response_type", "code") + .addParameter("state", getState()) + .build().toString(); + } catch (final URISyntaxException e) { + throw new IOException("Failed to format Consent URL for OAuth flow", e); + } + } + + @Override + protected String getAccessTokenUrl(JsonNode inputOAuthConfiguration) { + return String.format(ACCESS_TOKEN_URL, extractUrl(inputOAuthConfiguration)); + } + + @Override + protected String extractCodeParameter(Map<String, Object> queryParams) throws IOException { + return super.extractCodeParameter(queryParams); + } + + @Override + protected Map<String, String> getAccessTokenQueryParameters(String clientId, + String clientSecret, + String authCode, + String redirectUrl) { + return ImmutableMap.<String, String>builder() + // required + .put("grant_type", "authorization_code") + .put("code", authCode) + .put("redirect_uri", redirectUrl) + .build(); + } + + @Override + protected Map<String, Object> completeOAuthFlow(final String clientId, + final String clientSecret, + final String authCode, + final String redirectUrl, + final JsonNode inputOAuthConfiguration, + final JsonNode oAuthParamConfig) + throws IOException { + final var accessTokenUrl = getAccessTokenUrl(inputOAuthConfiguration); + final byte[] authorization = Base64.getEncoder() + .encode((clientId + ":" + clientSecret).getBytes(StandardCharsets.UTF_8)); + final HttpRequest request = HttpRequest.newBuilder() + .POST(HttpRequest.BodyPublishers + .ofString(tokenReqContentType.getConverter().apply( + getAccessTokenQueryParameters(clientId, clientSecret, authCode, redirectUrl)))) + .uri(URI.create(accessTokenUrl)) + .header("Content-Type", tokenReqContentType.getContentType()) + .header("Accept", "application/json") + .header("Authorization", "Basic " + new String(authorization, StandardCharsets.UTF_8)) + .build(); + try { + final HttpResponse<String> response = httpClient.send(request, + HttpResponse.BodyHandlers.ofString()); +
+ return extractOAuthOutput(Jsons.deserialize(response.body()), accessTokenUrl); + } catch (final InterruptedException e) { + throw new IOException("Failed to complete OAuth flow", e); + } + } + + @Override + protected Map<String, Object> extractOAuthOutput(JsonNode data, String accessTokenUrl) + throws IOException { + final Map<String, Object> result = new HashMap<>(); + // access_token is valid for only 10 minutes + if (data.has("access_token")) { + result.put("access_token", data.get("access_token").asText()); + } else { + throw new IOException(String.format("Missing 'access_token' in query params from %s", + accessTokenUrl)); + } + + if (data.has("refresh_token")) { + result.put("refresh_token", data.get("refresh_token").asText()); + } else { + throw new IOException(String.format("Missing 'refresh_token' in query params from %s", + accessTokenUrl)); + } + if (data.has("username")) { + result.put("username", data.get("username").asText()); + } else { + throw new IOException(String.format("Missing 'username' in query params from %s", + accessTokenUrl)); + } + return result; + } + + private String extractUrl(JsonNode inputOAuthConfiguration) { + var url = inputOAuthConfiguration.get("host"); + return url == null ? "snowflakecomputing.com" : url.asText(); + } + +}
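The consent URL built by formatConsentUrl above is plain OAuth2 authorization-code wiring. A minimal sketch of the same URIBuilder construction, using the fixture values from the test below (test_client_id, https://airbyte.io, and the constant state are test stand-ins, not production values):

import java.net.URISyntaxException;
import org.apache.http.client.utils.URIBuilder;

public class ConsentUrlExample {

  public static void main(String[] args) throws URISyntaxException {
    // AUTHORIZE_URL is https://<host>/oauth/authorize; the state parameter
    // protects the round trip against CSRF.
    final String consentUrl =
        new URIBuilder("https://account.aws.snowflakecomputing.com/oauth/authorize")
            .addParameter("client_id", "test_client_id")
            .addParameter("redirect_uri", "https://airbyte.io")
            .addParameter("response_type", "code")
            .addParameter("state", "state")
            .build()
            .toString();
    System.out.println(consentUrl); // matches getExpectedConsentUrl() in the test below
  }

}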
diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/facebook/InstagramOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/facebook/InstagramOAuthFlow.java index c330b42980a6..48f72b58144a 100644 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/facebook/InstagramOAuthFlow.java +++ b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/facebook/InstagramOAuthFlow.java @@ -12,7 +12,7 @@ // Instagram Graph API require Facebook API User token public class InstagramOAuthFlow extends FacebookMarketingOAuthFlow { - private static final String SCOPES = "ads_management,instagram_basic,instagram_manage_insights,read_insights"; + private static final String SCOPES = "ads_management,instagram_basic,instagram_manage_insights"; public InstagramOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { super(configRepository, httpClient); diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/SnowflakeOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/SnowflakeOAuthFlowTest.java new file mode 100644 index 000000000000..e982170807f0 --- /dev/null +++ b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/SnowflakeOAuthFlowTest.java @@ -0,0 +1,82 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.oauth.flows; + +import com.fasterxml.jackson.databind.JsonNode; +import com.google.common.collect.ImmutableMap; +import io.airbyte.commons.json.Jsons; +import io.airbyte.oauth.BaseOAuthFlow; +import io.airbyte.oauth.MoreOAuthParameters; +import java.util.Map; +import org.junit.jupiter.api.Test; + +public class SnowflakeOAuthFlowTest extends BaseOAuthFlowTest { + + @Override + protected BaseOAuthFlow getOAuthFlow() { + return new SourceSnowflakeOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState); + } + + @Override + protected String getExpectedConsentUrl() { + return "https://account.aws.snowflakecomputing.com/oauth/authorize?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&response_type=code&state=state"; + } + + @Override + protected Map<String, String> getExpectedOutput() { + return Map.of( + "access_token", "access_token_response", + "refresh_token", "refresh_token_response", + "username", "username"); + } + + @Override + protected JsonNode getCompleteOAuthOutputSpecification() { + return getJsonSchema(Map.of("access_token", Map.of("type", "string"), "refresh_token", Map.of("type", "string"))); + } + + @Override + protected Map<String, String> getExpectedFilteredOutput() { + return Map.of( + "access_token", "access_token_response", + "refresh_token", "refresh_token_response", + "client_id", MoreOAuthParameters.SECRET_MASK); + } + + protected JsonNode getOAuthParamConfig() { + return Jsons.jsonNode(ImmutableMap.builder() + .put("client_id", "test_client_id") + .put("client_secret", "test_client_secret") + .build()); + } + + @Override + protected JsonNode getInputOAuthConfiguration() { + return Jsons.jsonNode(ImmutableMap.builder() + .put("host", "account.aws.snowflakecomputing.com") + .build()); + } + + protected JsonNode getUserInputFromConnectorConfigSpecification() { + return getJsonSchema(Map.of("host", Map.of("type", "string"))); + } + + @Test + @Override + public void testGetSourceConsentUrlEmptyOAuthSpec() {} + + @Test + @Override + public void testGetDestinationConsentUrlEmptyOAuthSpec() {} + + @Test + @Override + public void testDeprecatedCompleteDestinationOAuth() {} + + @Test + @Override + public void testDeprecatedCompleteSourceOAuth() {} + +} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/facebook/InstagramOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/facebook/InstagramOAuthFlowTest.java index f4ed295a2300..31cb39bc935f 100644 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/facebook/InstagramOAuthFlowTest.java +++ b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/facebook/InstagramOAuthFlowTest.java @@ -20,7 +20,7 @@ protected BaseOAuthFlow getOAuthFlow() { @Override protected String getExpectedConsentUrl() { - return "https://www.facebook.com/v12.0/dialog/oauth?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&state=state&scope=ads_management%2Cinstagram_basic%2Cinstagram_manage_insights%2Cread_insights"; + return "https://www.facebook.com/v12.0/dialog/oauth?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&state=state&scope=ads_management%2Cinstagram_basic%2Cinstagram_manage_insights"; } @Override diff --git a/airbyte-scheduler/app/Dockerfile b/airbyte-scheduler/app/Dockerfile index fd8099512cf7..0337dd7a570d 100644 --- a/airbyte-scheduler/app/Dockerfile +++ b/airbyte-scheduler/app/Dockerfile @@ -1,7 +1,7 @@ ARG JDK_VERSION=17.0.1 FROM openjdk:${JDK_VERSION}-slim AS scheduler -ARG VERSION=0.36.3-alpha +ARG VERSION=0.36.4-alpha ENV APPLICATION airbyte-scheduler
ENV VERSION ${VERSION} diff --git a/airbyte-server/Dockerfile b/airbyte-server/Dockerfile index 8504029b1386..684de4acea24 100644 --- a/airbyte-server/Dockerfile +++ b/airbyte-server/Dockerfile @@ -3,7 +3,7 @@ FROM openjdk:${JDK_VERSION}-slim AS server EXPOSE 8000 -ARG VERSION=0.36.3-alpha +ARG VERSION=0.36.4-alpha ENV APPLICATION airbyte-server ENV VERSION ${VERSION} diff --git a/airbyte-webapp/package-lock.json b/airbyte-webapp/package-lock.json index e779bd55ab20..fd32fbc5bdff 100644 --- a/airbyte-webapp/package-lock.json +++ b/airbyte-webapp/package-lock.json @@ -1,12 +1,12 @@ { "name": "airbyte-webapp", - "version": "0.36.3-alpha", + "version": "0.36.4-alpha", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "airbyte-webapp", - "version": "0.36.3-alpha", + "version": "0.36.4-alpha", "dependencies": { "@fortawesome/fontawesome-svg-core": "^6.1.1", "@fortawesome/free-brands-svg-icons": "^6.1.1", diff --git a/airbyte-webapp/package.json b/airbyte-webapp/package.json index af16a467609a..25e460e3dd04 100644 --- a/airbyte-webapp/package.json +++ b/airbyte-webapp/package.json @@ -1,6 +1,6 @@ { "name": "airbyte-webapp", - "version": "0.36.3-alpha", + "version": "0.36.4-alpha", "private": true, "engines": { "node": ">=16.0.0" diff --git a/airbyte-webapp/public/images/bowtie-half.svg b/airbyte-webapp/public/images/bowtie-half.svg new file mode 100644 index 000000000000..48372107abbd --- /dev/null +++ b/airbyte-webapp/public/images/bowtie-half.svg @@ -0,0 +1,31 @@ + [31 lines of SVG markup omitted] diff --git a/airbyte-webapp/public/images/octavia/empty-connections.png b/airbyte-webapp/public/images/octavia/empty-connections.png new file mode 100644 index 000000000000..e40446edb42d Binary files /dev/null and b/airbyte-webapp/public/images/octavia/empty-connections.png differ diff --git a/airbyte-webapp/public/images/octavia/empty-destinations.png b/airbyte-webapp/public/images/octavia/empty-destinations.png new file mode 100644 index 000000000000..c985d0821ade Binary files /dev/null and b/airbyte-webapp/public/images/octavia/empty-destinations.png differ diff --git a/airbyte-webapp/public/images/octavia/empty-sources.png b/airbyte-webapp/public/images/octavia/empty-sources.png new file mode 100644 index 000000000000..460ea9be3887 Binary files /dev/null and b/airbyte-webapp/public/images/octavia/empty-sources.png differ diff --git a/airbyte-webapp/src/components/ConfirmationModal/ConfirmationModal.tsx b/airbyte-webapp/src/components/ConfirmationModal/ConfirmationModal.tsx index 2747f956b9aa..5bc370088960 100644 --- a/airbyte-webapp/src/components/ConfirmationModal/ConfirmationModal.tsx +++ b/airbyte-webapp/src/components/ConfirmationModal/ConfirmationModal.tsx @@ -29,6 +29,7 @@ export interface ConfirmationModalProps { text: string; submitButtonText: string; onSubmit: () => void; + submitButtonDataId?: string; } export const ConfirmationModal: React.FC<ConfirmationModalProps> = ({ @@ -37,6 +38,7 @@ export const ConfirmationModal: React.FC<ConfirmationModalProps> = ({ text, onSubmit, submitButtonText, + submitButtonDataId, }) => ( }> @@ -45,7 +47,7 @@ - diff --git a/airbyte-webapp/src/components/DeleteBlock/DeleteBlock.tsx b/airbyte-webapp/src/components/DeleteBlock/DeleteBlock.tsx index 4769a2fe519f..680cac1f929e 100644 --- a/airbyte-webapp/src/components/DeleteBlock/DeleteBlock.tsx +++ b/airbyte-webapp/src/components/DeleteBlock/DeleteBlock.tsx @@ -1,11 +1,12 @@ -import React, { useState } from "react"; +import React, { useCallback } from "react";
import { FormattedMessage } from "react-intl"; import styled from "styled-components"; import { Button, H5 } from "components"; import ContentCard from "components/ContentCard"; -import DeleteModal from "./components/DeleteModal"; +import { useConfirmationModalService } from "hooks/services/ConfirmationModal"; +import useRouter from "hooks/useRouter"; type IProps = { type: "source" | "destination" | "connection"; @@ -29,7 +30,22 @@ const Text = styled.div` `; const DeleteBlock: React.FC = ({ type, onDelete }) => { - const [isModalOpen, setIsModalOpen] = useState(false); + const { openConfirmationModal, closeConfirmationModal } = useConfirmationModalService(); + const { push } = useRouter(); + + const onDeleteButtonClick = useCallback(() => { + openConfirmationModal({ + text: `tables.${type}DeleteModalText`, + title: `tables.${type}DeleteConfirm`, + submitButtonText: "form.delete", + onSubmit: async () => { + await onDelete(); + closeConfirmationModal(); + push("../.."); + }, + submitButtonDataId: "delete", + }); + }, [closeConfirmationModal, onDelete, openConfirmationModal, push, type]); return ( <> @@ -40,11 +56,10 @@ const DeleteBlock: React.FC = ({ type, onDelete }) => { - - {isModalOpen && setIsModalOpen(false)} onSubmit={onDelete} />} ); }; diff --git a/airbyte-webapp/src/components/DeleteBlock/components/DeleteModal.tsx b/airbyte-webapp/src/components/DeleteBlock/components/DeleteModal.tsx deleted file mode 100644 index a8a4f36859ba..000000000000 --- a/airbyte-webapp/src/components/DeleteBlock/components/DeleteModal.tsx +++ /dev/null @@ -1,52 +0,0 @@ -import React from "react"; -import { FormattedMessage } from "react-intl"; -import { useMutation } from "react-query"; -import styled from "styled-components"; - -import { Button, LoadingButton } from "components"; -import Modal from "components/Modal"; -export type IProps = { - onClose: () => void; - onSubmit: () => Promise; - type: "source" | "destination" | "connection"; -}; - -const Content = styled.div` - width: 585px; - font-size: 14px; - line-height: 28px; - padding: 10px 40px 15px 37px; - white-space: pre-line; -`; - -const ButtonContent = styled.div` - padding-top: 28px; - display: flex; - justify-content: flex-end; -`; - -const ButtonWithMargin = styled(Button)` - margin-right: 12px; -`; - -const DeleteModal: React.FC = ({ onClose, onSubmit, type }) => { - const { isLoading, mutateAsync } = useMutation(() => onSubmit()); - - return ( - }> - - - - - - - mutateAsync()} data-id="delete"> - - - - - - ); -}; - -export default DeleteModal; diff --git a/airbyte-webapp/src/components/EmptyResourceListView/EmptyResourceListView.tsx b/airbyte-webapp/src/components/EmptyResourceListView/EmptyResourceListView.tsx new file mode 100644 index 000000000000..b70f0d551806 --- /dev/null +++ b/airbyte-webapp/src/components/EmptyResourceListView/EmptyResourceListView.tsx @@ -0,0 +1,96 @@ +import { useMemo } from "react"; +import { FormattedMessage } from "react-intl"; +import styled from "styled-components"; + +import { Button, H2 } from "components/base"; + +interface EmptyResourceListViewProps { + resourceType: "connections" | "destinations" | "sources"; + onCreateClick: () => void; + disableCreateButton?: boolean; +} + +const Container = styled.div` + display: flex; + flex-direction: column; + align-items: center; + justify-content: center; + height: 100%; +`; + +export const Heading = styled(H2)` + font-weight: 700; + font-size: 24px; + line-height: 29px; + max-width: 386px; + text-align: center; + strong { + color: ${({ theme }) => 
theme.redColor}; } `; + +const IllustrationContainer = styled(Container)` + position: relative; + width: 592px; + height: 276px; + + pointer-events: none; + user-select: none; +`; + +const OctaviaImg = styled.img` + max-height: 203px; + max-width: 100%; + z-index: 1; +`; + +export const BowtieImg = styled.img` + position: absolute; + + &.empty-list-bowtie--right { + right: 0; + transform: scaleX(-1); + } + + &.empty-list-bowtie--left { + left: 0; + } +`; + +export const EmptyResourceListView: React.FC<EmptyResourceListViewProps> = ({ + resourceType, + onCreateClick, + disableCreateButton, +}) => { + const { headingMessageId, buttonMessageId, singularResourceType } = useMemo(() => { + const singularResourceType = resourceType.substring(0, resourceType.length - 1); + const baseMessageId = resourceType === "connections" ? singularResourceType : resourceType; + + const headingMessageId = `${baseMessageId}.description`; + const buttonMessageId = `${baseMessageId}.new${ + singularResourceType.substring(0, 1).toUpperCase() + singularResourceType.substring(1) + }`; + + return { headingMessageId, buttonMessageId, singularResourceType }; + }, [resourceType]); + + return ( + + + + + + {resourceType !== "destinations" && ( + + )} + {resourceType !== "sources" && ( + + )} + + + + + ); +}; diff --git a/airbyte-webapp/src/components/EmptyResourceListView/index.ts b/airbyte-webapp/src/components/EmptyResourceListView/index.ts new file mode 100644 index 000000000000..585fb92a64ce --- /dev/null +++ b/airbyte-webapp/src/components/EmptyResourceListView/index.ts @@ -0,0 +1 @@ +export * from "./EmptyResourceListView"; diff --git a/airbyte-webapp/src/components/base/Button/Button.tsx b/airbyte-webapp/src/components/base/Button/Button.tsx index fb3d2c16388e..3e304cce814f 100644 --- a/airbyte-webapp/src/components/base/Button/Button.tsx +++ b/airbyte-webapp/src/components/base/Button/Button.tsx @@ -1,9 +1,9 @@ import styled from "styled-components"; import { Theme } from "theme"; -import { IProps } from "./types"; +import { ButtonProps } from "./types"; -type IStyleProps = IProps & { theme: Theme }; +type IStyleProps = ButtonProps & { theme: Theme }; const getBorderColor = (props: IStyleProps) => { if ((props.secondary && props.wasActive) || props.iconOnly) { @@ -96,14 +96,14 @@ const getPadding = (props: IStyleProps) => { return "5px 16px"; }; -const Button = styled.button<IProps>` +const Button = styled.button<ButtonProps>` width: ${(props) => (props.full ? "100%" : "auto")}; display: ${(props) => (props.full ? "block" : "inline-block")}; border: 1px solid ${(props) => getBorderColor(props)}; outline: none; border-radius: 4px; padding: ${(props) => getPadding(props)}; - font-weight: ${(props) => (props.size === "xl" ? 300 : 500)}; + font-weight: ${(props) => (props.size === "xl" ? 600 : 500)}; font-size: ${(props) => getFontSize(props)}px; /* TODO: should try to get rid of line-height altogether */ line-height: ${(props) => (props.size === "xl" ? "initial" : "15px")}; diff --git a/airbyte-webapp/src/components/base/Button/LoadingButton.tsx b/airbyte-webapp/src/components/base/Button/LoadingButton.tsx index db3250d2fe55..2c621077d5c4 100644 --- a/airbyte-webapp/src/components/base/Button/LoadingButton.tsx +++ b/airbyte-webapp/src/components/base/Button/LoadingButton.tsx @@ -4,7 +4,7 @@ import React from "react"; import styled, { keyframes } from "styled-components"; import Button from "./Button"; -import { IProps } from "./types"; +import { ButtonProps } from "./types"; export const SpinAnimation = keyframes` 0% { @@ -15,7 +15,7 @@ } `; -const SymbolSpinner = styled(FontAwesomeIcon)<IProps>` +const SymbolSpinner = styled(FontAwesomeIcon)<ButtonProps>` display: inline-block; font-size: 18px; position: absolute; @@ -25,7 +25,7 @@ margin: -1px 0 -3px -9px; `; -const ButtonView = styled(Button)<IProps>` +const ButtonView = styled(Button)<ButtonProps>` pointer-events: none; background: ${({ theme }) => theme.primaryColor25}; border-color: transparent; @@ -36,7 +36,7 @@ const Invisible = styled.div` color: rgba(255, 255, 255, 0); `; -const LoadingButton: React.FC<IProps> = (props) => { +const LoadingButton: React.FC<ButtonProps> = (props) => { if (props.isLoading) { return ( diff --git a/airbyte-webapp/src/components/base/Button/types.tsx b/airbyte-webapp/src/components/base/Button/types.tsx index 39d9464de28a..63abaa5049eb 100644 --- a/airbyte-webapp/src/components/base/Button/types.tsx +++ b/airbyte-webapp/src/components/base/Button/types.tsx @@ -1,4 +1,4 @@ -export type IProps = { +export interface ButtonProps extends React.ButtonHTMLAttributes<HTMLButtonElement> { full?: boolean; danger?: boolean; secondary?: boolean; @@ -7,4 +7,4 @@ wasActive?: boolean; clickable?: boolean; size?: "m" | "xl"; -} & React.ButtonHTMLAttributes<HTMLButtonElement>; +} diff --git a/airbyte-webapp/src/hooks/services/ConfirmationModal/ConfirmationModalService.tsx b/airbyte-webapp/src/hooks/services/ConfirmationModal/ConfirmationModalService.tsx index 6b4b18ceba19..089effe26b5f 100644 --- a/airbyte-webapp/src/hooks/services/ConfirmationModal/ConfirmationModalService.tsx +++ b/airbyte-webapp/src/hooks/services/ConfirmationModal/ConfirmationModalService.tsx @@ -64,6 +64,7 @@ export const ConfirmationModalService = ({ children }: { children: React.ReactNode text={state.confirmationModal.text} onSubmit={state.confirmationModal.onSubmit} submitButtonText={state.confirmationModal.submitButtonText} + submitButtonDataId={state.confirmationModal.submitButtonDataId} /> ) : null} diff --git a/airbyte-webapp/src/locales/en.json b/airbyte-webapp/src/locales/en.json index 2c1378b5b4ad..7e6de2b13dae 100644 --- a/airbyte-webapp/src/locales/en.json +++ b/airbyte-webapp/src/locales/en.json @@ -232,12 +232,13 @@ "onboarding.syncFailed": "Your sync is failed. Please try again", "onboarding.startAgain": "Your sync was cancelled.
You can start it again", + "sources.description": "Sources are where you want to pull data from.", "sources.searchIncremental": "Search cursor value for incremental", "sources.incrementalDefault": "{value} (default)", "sources.incrementalSourceCursor": "Incremental - source-defined cursor", "sources.full_refresh": "Full refresh", "sources.incremental": "Incremental - based on...", - "sources.newSource": "+ new source", + "sources.newSource": "+ New source", "sources.newSourceTitle": "New Source", "sources.selectSource": "Select a source", "sources.status": "Status", @@ -291,7 +292,8 @@ "sources.lastAttempt": "Last attempt:", "destination.destinationSettings": "Destination Settings", - "destination.newDestination": "+ new destination", + "destinations.newDestination": "+ New destination", + "destinations.description": "Destinations are where you send or push your data to.", "destinations.noDestinations": "Destination list is empty", "destinations.noSources": "No sources yet", "destinations.addSourceReplicateData": "Add sources where to replicate data from.", @@ -309,6 +311,7 @@ "connection.warningUpdateSchema": "WARNING! Updating the schema will delete all the data for this connection in your destination and start syncing from scratch.", "connection.title": "Connection", + "connection.description": "Connections link Sources to Destinations.", "connection.fromTo": "{source} → {destination}", "connection.connectionSettings": "Connection settings", "connection.testsPassed": "All connection tests passed", @@ -321,7 +324,7 @@ "connection.resetData": "Reset your data", "connection.updateSchema": "Refresh source schema", "connection.updateSchemaText": "WARNING! Updating the schema will delete all the data for this connection in your destination and start syncing from scratch. Are you sure you want to do this?", - "connection.newConnection": "+ new connection", + "connection.newConnection": "+ New connection", "connection.newConnectionTitle": "New connection", "connection.noConnections": "Connection list is empty", "connection.disabledConnection": "Disabled connection", diff --git a/airbyte-webapp/src/pages/ConnectionPage/pages/AllConnectionsPage/AllConnectionsPage.tsx b/airbyte-webapp/src/pages/ConnectionPage/pages/AllConnectionsPage/AllConnectionsPage.tsx index 03f365770065..3a402f14589b 100644 --- a/airbyte-webapp/src/pages/ConnectionPage/pages/AllConnectionsPage/AllConnectionsPage.tsx +++ b/airbyte-webapp/src/pages/ConnectionPage/pages/AllConnectionsPage/AllConnectionsPage.tsx @@ -2,8 +2,8 @@ import React, { Suspense } from "react"; import { FormattedMessage } from "react-intl"; import { Button, LoadingPage, MainPageWithScroll, PageTitle } from "components"; +import { EmptyResourceListView } from "components/EmptyResourceListView"; import HeadTitle from "components/HeadTitle"; -import Placeholder, { ResourceTypes } from "components/Placeholder"; import { FeatureItem, useFeatureService } from "hooks/services/Feature"; import { useConnectionList } from "hooks/services/useConnectionHook"; @@ -19,30 +19,34 @@ const AllConnectionsPage: React.FC = () => { const { hasFeature } = useFeatureService(); const allowCreateConnection = hasFeature(FeatureItem.AllowCreateConnection); - const onClick = () => push(`${RoutePaths.ConnectionNew}`); + const onCreateClick = () => push(`${RoutePaths.ConnectionNew}`); return ( - } - pageTitle={ - } - endComponent={ - + }> + {connections.length ?
( + > - ) : ( - - )} - - + + ) : ( + + )} + ); }; diff --git a/airbyte-webapp/src/pages/DestinationPage/pages/AllDestinationsPage/AllDestinationsPage.tsx b/airbyte-webapp/src/pages/DestinationPage/pages/AllDestinationsPage/AllDestinationsPage.tsx index 63a2cd1c6ebc..f7bbd69a2487 100644 --- a/airbyte-webapp/src/pages/DestinationPage/pages/AllDestinationsPage/AllDestinationsPage.tsx +++ b/airbyte-webapp/src/pages/DestinationPage/pages/AllDestinationsPage/AllDestinationsPage.tsx @@ -2,9 +2,9 @@ import React from "react"; import { FormattedMessage } from "react-intl"; import { Button, MainPageWithScroll } from "components"; +import { EmptyResourceListView } from "components/EmptyResourceListView"; import HeadTitle from "components/HeadTitle"; import PageTitle from "components/PageTitle"; -import Placeholder, { ResourceTypes } from "components/Placeholder"; import { useDestinationList } from "hooks/services/useDestinationHook"; import useRouter from "hooks/useRouter"; @@ -18,7 +18,7 @@ const AllDestinationsPage: React.FC = () => { const onCreateDestination = () => push(`${RoutePaths.DestinationNew}`); - return ( + return destinations.length ? ( } pageTitle={ @@ -26,18 +26,16 @@ const AllDestinationsPage: React.FC = () => { title={} endComponent={ } /> } > - {destinations.length ? ( - - ) : ( - - )} + + ) : ( + ); }; diff --git a/airbyte-webapp/src/pages/SourcesPage/pages/AllSourcesPage/AllSourcesPage.tsx b/airbyte-webapp/src/pages/SourcesPage/pages/AllSourcesPage/AllSourcesPage.tsx index 3e47d6f4068c..32d75a296dc0 100644 --- a/airbyte-webapp/src/pages/SourcesPage/pages/AllSourcesPage/AllSourcesPage.tsx +++ b/airbyte-webapp/src/pages/SourcesPage/pages/AllSourcesPage/AllSourcesPage.tsx @@ -2,9 +2,9 @@ import React from "react"; import { FormattedMessage } from "react-intl"; import { Button, MainPageWithScroll } from "components"; +import { EmptyResourceListView } from "components/EmptyResourceListView"; import HeadTitle from "components/HeadTitle"; import PageTitle from "components/PageTitle"; -import Placeholder, { ResourceTypes } from "components/Placeholder"; import { useSourceList } from "hooks/services/useSourceHook"; import useRouter from "hooks/useRouter"; @@ -17,7 +17,7 @@ const AllSourcesPage: React.FC = () => { const { sources } = useSourceList(); const onCreateSource = () => push(`${RoutePaths.SourceNew}`); - return ( + return sources.length ? ( } pageTitle={ @@ -31,8 +31,10 @@ const AllSourcesPage: React.FC = () => { /> } > - {sources.length ? 
: } + + ) : ( + ); }; diff --git a/airbyte-webapp/src/views/Connector/ServiceForm/components/Sections/auth/GoogleAuthButton.tsx b/airbyte-webapp/src/views/Connector/ServiceForm/components/Sections/auth/GoogleAuthButton.tsx index b23bd04d92a3..75efe97b826d 100644 --- a/airbyte-webapp/src/views/Connector/ServiceForm/components/Sections/auth/GoogleAuthButton.tsx +++ b/airbyte-webapp/src/views/Connector/ServiceForm/components/Sections/auth/GoogleAuthButton.tsx @@ -1,9 +1,9 @@ import React from "react"; import styled from "styled-components"; -import { IProps } from "components/base/Button/types"; +import { ButtonProps } from "components/base/Button/types"; -const StyledButton = styled.button<IProps>` +const StyledButton = styled.button<ButtonProps>` align-items: center; background: #4285f4; border: 0 solid #4285f4; diff --git a/airbyte-workers/Dockerfile b/airbyte-workers/Dockerfile index 3300fb0b083c..a6c42cec075e 100644 --- a/airbyte-workers/Dockerfile +++ b/airbyte-workers/Dockerfile @@ -25,7 +25,7 @@ RUN curl -fsSLo /usr/share/keyrings/kubernetes-archive-keyring.gpg https://packa RUN echo "deb [signed-by=/usr/share/keyrings/kubernetes-archive-keyring.gpg] https://apt.kubernetes.io/ kubernetes-xenial main" | tee /etc/apt/sources.list.d/kubernetes.list RUN apt-get update && apt-get install -y kubectl -ARG VERSION=0.36.3-alpha +ARG VERSION=0.36.4-alpha ENV APPLICATION airbyte-workers ENV VERSION ${VERSION} diff --git a/charts/airbyte/Chart.yaml b/charts/airbyte/Chart.yaml index 81a2129c32a9..66e31a7d1fa9 100644 --- a/charts/airbyte/Chart.yaml +++ b/charts/airbyte/Chart.yaml @@ -21,7 +21,7 @@ version: 0.3.1 # incremented each time you make changes to the application. Versions are not expected to # follow Semantic Versioning. They should reflect the version the application is using. # It is recommended to use it with quotes. -appVersion: "0.36.3-alpha" +appVersion: "0.36.4-alpha" dependencies: - name: common diff --git a/charts/airbyte/README.md b/charts/airbyte/README.md index 2638d0a19dee..41d8fc7fa98e 100644 --- a/charts/airbyte/README.md +++ b/charts/airbyte/README.md @@ -31,7 +31,7 @@ Helm charts for Airbyte. | `webapp.replicaCount` | Number of webapp replicas | `1` | | `webapp.image.repository` | The repository to use for the airbyte webapp image. | `airbyte/webapp` | | `webapp.image.pullPolicy` | the pull policy to use for the airbyte webapp image | `IfNotPresent` | -| `webapp.image.tag` | The airbyte webapp image tag. Defaults to the chart's AppVersion | `0.36.3-alpha` | +| `webapp.image.tag` | The airbyte webapp image tag. Defaults to the chart's AppVersion | `0.36.4-alpha` | | `webapp.podAnnotations` | Add extra annotations to the webapp pod(s) | `{}` | | `webapp.containerSecurityContext` | Security context for the container | `{}` | | `webapp.livenessProbe.enabled` | Enable livenessProbe on the webapp | `true` | @@ -73,7 +73,7 @@ Helm charts for Airbyte. | `scheduler.replicaCount` | Number of scheduler replicas | `1` | | `scheduler.image.repository` | The repository to use for the airbyte scheduler image. | `airbyte/scheduler` | | `scheduler.image.pullPolicy` | the pull policy to use for the airbyte scheduler image | `IfNotPresent` | -| `scheduler.image.tag` | The airbyte scheduler image tag.
Defaults to the chart's AppVersion | `0.36.4-alpha` | | `scheduler.podAnnotations` | Add extra annotations to the scheduler pod | `{}` | | `scheduler.resources.limits` | The resources limits for the scheduler container | `{}` | | `scheduler.resources.requests` | The requested resources for the scheduler container | `{}` | @@ -120,7 +120,7 @@ Helm charts for Airbyte. | `server.replicaCount` | Number of server replicas | `1` | | `server.image.repository` | The repository to use for the airbyte server image. | `airbyte/server` | | `server.image.pullPolicy` | the pull policy to use for the airbyte server image | `IfNotPresent` | -| `server.image.tag` | The airbyte server image tag. Defaults to the chart's AppVersion | `0.36.3-alpha` | +| `server.image.tag` | The airbyte server image tag. Defaults to the chart's AppVersion | `0.36.4-alpha` | | `server.podAnnotations` | Add extra annotations to the server pod | `{}` | | `server.containerSecurityContext` | Security context for the container | `{}` | | `server.livenessProbe.enabled` | Enable livenessProbe on the server | `true` | @@ -158,7 +158,7 @@ Helm charts for Airbyte. | `worker.replicaCount` | Number of worker replicas | `1` | | `worker.image.repository` | The repository to use for the airbyte worker image. | `airbyte/worker` | | `worker.image.pullPolicy` | the pull policy to use for the airbyte worker image | `IfNotPresent` | -| `worker.image.tag` | The airbyte worker image tag. Defaults to the chart's AppVersion | `0.36.3-alpha` | +| `worker.image.tag` | The airbyte worker image tag. Defaults to the chart's AppVersion | `0.36.4-alpha` | | `worker.podAnnotations` | Add extra annotations to the worker pod(s) | `{}` | | `worker.containerSecurityContext` | Security context for the container | `{}` | | `worker.livenessProbe.enabled` | Enable livenessProbe on the worker | `true` | @@ -190,7 +190,7 @@ Helm charts for Airbyte. | ----------------------------- | -------------------------------------------------------------------- | -------------------- | | `bootloader.image.repository` | The repository to use for the airbyte bootloader image. | `airbyte/bootloader` | | `bootloader.image.pullPolicy` | the pull policy to use for the airbyte bootloader image | `IfNotPresent` | -| `bootloader.image.tag` | The airbyte bootloader image tag. Defaults to the chart's AppVersion | `0.36.3-alpha` | +| `bootloader.image.tag` | The airbyte bootloader image tag. 
Defaults to the chart's AppVersion | `0.36.4-alpha` | ### Temporal parameters diff --git a/charts/airbyte/values.yaml b/charts/airbyte/values.yaml index 4a19a4c4b0a2..25a245a34229 100644 --- a/charts/airbyte/values.yaml +++ b/charts/airbyte/values.yaml @@ -43,7 +43,7 @@ webapp: image: repository: airbyte/webapp pullPolicy: IfNotPresent - tag: 0.36.3-alpha + tag: 0.36.4-alpha ## @param webapp.podAnnotations [object] Add extra annotations to the webapp pod(s) ## @@ -209,7 +209,7 @@ scheduler: image: repository: airbyte/scheduler pullPolicy: IfNotPresent - tag: 0.36.3-alpha + tag: 0.36.4-alpha ## @param scheduler.podAnnotations [object] Add extra annotations to the scheduler pod ## @@ -440,7 +440,7 @@ server: image: repository: airbyte/server pullPolicy: IfNotPresent - tag: 0.36.3-alpha + tag: 0.36.4-alpha ## @param server.podAnnotations [object] Add extra annotations to the server pod ## @@ -581,7 +581,7 @@ worker: image: repository: airbyte/worker pullPolicy: IfNotPresent - tag: 0.36.3-alpha + tag: 0.36.4-alpha ## @param worker.podAnnotations [object] Add extra annotations to the worker pod(s) ## @@ -699,7 +699,7 @@ bootloader: image: repository: airbyte/bootloader pullPolicy: IfNotPresent - tag: 0.36.3-alpha + tag: 0.36.4-alpha ## @param bootloader.podAnnotations [object] Add extra annotations to the bootloader pod ## diff --git a/docs/integrations/destinations/redshift.md b/docs/integrations/destinations/redshift.md index 8339f8e7c8ec..6dd424fdaa3d 100644 --- a/docs/integrations/destinations/redshift.md +++ b/docs/integrations/destinations/redshift.md @@ -1,124 +1,139 @@ # Redshift -## Overview +This page guides you through the process of setting up the Redshift destination connector. + +## Prerequisites The Airbyte Redshift destination allows you to sync data to Redshift. This Redshift destination connector has two replication strategies: 1. INSERT: Replicates data via SQL INSERT queries. This is built on top of the destination-jdbc code base and is configured to rely on JDBC 4.2 standard drivers provided by Amazon via Mulesoft [here](https://mvnrepository.com/artifact/com.amazon.redshift/redshift-jdbc42) as described in Redshift documentation [here](https://docs.aws.amazon.com/redshift/latest/mgmt/jdbc20-install.html). **Not recommended for production workloads as this does not scale well**. -2. COPY: Replicates data by first uploading data to an S3 bucket and issuing a COPY command. This is the recommended loading approach described by Redshift [best practices](https://docs.aws.amazon.com/redshift/latest/dg/c_loading-data-best-practices.html). Requires an S3 bucket and credentials. - -Airbyte automatically picks an approach depending on the given configuration - if S3 configuration is present, Airbyte will use the COPY strategy and vice versa. - -We recommend users use INSERT for testing, to avoid any additional setup, and switch to COPY for production workloads. - -### Sync overview - -#### Output schema - -Each stream will be output into its own raw table in Redshift. Each table will contain 3 columns: - -* `_airbyte_ab_id`: a uuid assigned by Airbyte to each event that is processed. The column type in Redshift is `VARCHAR`. -* `_airbyte_emitted_at`: a timestamp representing when the event was pulled from the data source. The column type in Redshift is `TIMESTAMP WITH TIME ZONE`. -* `_airbyte_data`: a json blob representing with the event data. The column type in Redshift is `VARCHAR` but can be be parsed with JSON functions. 
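Since the raw `_airbyte_data` column is stored as `VARCHAR`, it can be unpacked with Redshift's JSON functions such as `json_extract_path_text`. A minimal sketch of querying a raw table: the cluster endpoint and the `_airbyte_raw_users` stream name are hypothetical, and `psycopg2` is assumed as the client (Redshift speaks the Postgres wire protocol).

```python
import os

import psycopg2  # assumed client library; any Postgres-compatible driver works

conn = psycopg2.connect(
    host="example-cluster.abc123.us-east-2.redshift.amazonaws.com",  # hypothetical endpoint
    port=5439,
    dbname="airbyte",
    user="airbyte_user",
    password=os.environ["REDSHIFT_PASSWORD"],
)
with conn.cursor() as cur:
    # _airbyte_data is a VARCHAR JSON blob, so JSON functions can pull fields out of it.
    cur.execute(
        """
        SELECT _airbyte_ab_id,
               json_extract_path_text(_airbyte_data, 'email') AS email
        FROM _airbyte_raw_users
        LIMIT 10;
        """
    )
    for row in cur.fetchall():
        print(row)
conn.close()
```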
- -#### Features -| Feature | Supported?\(Yes/No\) | Notes | -| :--- | :--- | :--- | -| Full Refresh Sync | Yes | | -| Incremental - Append Sync | Yes | | -| Incremental - Deduped History | Yes | | -| Namespaces | Yes | | -| SSL Support | Yes | | +For INSERT strategy: +* **Host** +* **Port** +* **Username** +* **Password** +* **Schema** +* **Database** + * This database needs to exist within the cluster provided. -#### Target Database +2. COPY: Replicates data by first uploading data to an S3 bucket and issuing a COPY command. This is the recommended loading approach described by Redshift [best practices](https://docs.aws.amazon.com/redshift/latest/dg/c_loading-data-best-practices.html). Requires an S3 bucket and credentials. -You will need to choose an existing database or create a new database that will be used to store synced data from Airbyte. +Airbyte automatically picks an approach depending on the given configuration - if S3 configuration is present, Airbyte will use the COPY strategy and vice versa. -## Getting started +For COPY strategy: -### Requirements +* **S3 Bucket Name** + * See [this](https://docs.aws.amazon.com/AmazonS3/latest/userguide/create-bucket-overview.html) to create an S3 bucket. +* **S3 Bucket Region** + * Place the S3 bucket and the Redshift cluster in the same region to save on networking costs. +* **Access Key Id** + * See [this](https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html#access-keys-and-secret-access-keys) on how to generate an access key. + * We recommend creating an Airbyte-specific user. This user will require [read and write permissions](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_examples_s3_rw-bucket.html) to objects in the staging bucket. +* **Secret Access Key** + * Corresponding key to the above key id. +* **Part Size** + * Affects the size limit of an individual Redshift table. Optional. Increase this if syncing tables larger than 100GB. Files are streamed to S3 in parts. This determines the size of each part, in MBs. As S3 has a limit of 10,000 parts per file, part size affects the table size. This is 10MB by default, resulting in a default table limit of 100GB. Note, a larger part size will result in larger memory requirements. A rule of thumb is to multiply the part size by 10 to get the memory requirement. Modify this with care. -1. Active Redshift cluster -2. Allow connections from Airbyte to your Redshift cluster \(if they exist in separate VPCs\) -3. A staging S3 bucket with credentials \(for the COPY strategy\). +Optional parameters: +* **Bucket Path** + * The directory within the S3 bucket to place the staging data. For example, if you set this to `yourFavoriteSubdirectory`, we will place the staging data inside `s3://yourBucket/yourFavoriteSubdirectory`. If not provided, defaults to the root directory. +* **Purge Staging Data** + * Whether to delete the staging files from S3 after completing the sync. Specifically, the connector will create CSV files named `bucketPath/namespace/streamName/syncDate_epochMillis_randomUuid.csv` containing three columns (`ab_id`, `data`, `emitted_at`). Normally these files are deleted after the `COPY` command completes; if you want to keep them for other purposes, set `purge_staging_data` to `false`. -:::info -Even if your Airbyte instance is running on a server in the same VPC as your Redshift cluster, you may need to place them in the **same security group** to allow connections between the two. +## Step 1: Set up Redshift -::: +1. 
[Log in](https://aws.amazon.com/console/) to the AWS Management Console. If you don't have an AWS account already, you’ll need to [create](https://aws.amazon.com/premiumsupport/knowledge-center/create-and-activate-aws-account/) one first. +2. Go to the AWS Redshift service. +3. [Create](https://docs.aws.amazon.com/ses/latest/dg/event-publishing-redshift-cluster.html) and activate an AWS Redshift cluster if you don't have one ready. +4. (Optional) [Allow](https://aws.amazon.com/premiumsupport/knowledge-center/cannot-connect-redshift-cluster/) connections from Airbyte to your Redshift cluster \(if they exist in separate VPCs\) +5. (Optional) [Create](https://docs.aws.amazon.com/AmazonS3/latest/userguide/create-bucket-overview.html) a staging S3 bucket \(for the COPY strategy\). -### Setup guide +## Step 2: Set up the destination connector in Airbyte -#### 1. Make sure your cluster is active and accessible from the machine running Airbyte +**For Airbyte Cloud:** -This is dependent on your networking setup. The easiest way to verify if Airbyte is able to connect to your Redshift cluster is via the check connection tool in the UI. You can check AWS Redshift documentation with a tutorial on how to properly configure your cluster's access [here](https://docs.aws.amazon.com/redshift/latest/gsg/rs-gsg-authorize-cluster-access.html) +1. [Log into your Airbyte Cloud](https://cloud.airbyte.io/workspaces) account. +2. In the left navigation bar, click **Destinations**. In the top-right corner, click **+ new destination**. +3. On the destination setup page, select **Redshift** from the Destination type dropdown and enter a name for this connector. +4. Fill in all the required fields to use the INSERT or COPY strategy. +5. Click `Set up destination`. -#### 2. Fill up connection info +**For Airbyte OSS:** -Next is to provide the necessary information on how to connect to your cluster such as the `host` whcih is part of the connection string or Endpoint accessible [here](https://docs.aws.amazon.com/redshift/latest/gsg/rs-gsg-connect-to-cluster.html#rs-gsg-how-to-get-connection-string) without the `port` and `database` name \(it typically includes the cluster-id, region and end with `.redshift.amazonaws.com`\). +1. Go to the local Airbyte page. +2. In the left navigation bar, click **Destinations**. In the top-right corner, click **+ new destination**. +3. On the destination setup page, select **Redshift** from the Destination type dropdown and enter a name for this connector. +4. Fill in all the required fields to use the INSERT or COPY strategy. +5. Click `Set up destination`. -You should have all the requirements needed to configure Redshift as a destination in the UI. You'll need the following information to configure the destination: -* **Host** -* **Port** -* **Username** -* **Password** -* **Schema** -* **Database** - * This database needs to exist within the cluster provided. +## Supported sync modes -#### 2a. Fill up S3 info \(for COPY strategy\) +The Redshift destination connector supports the following [sync modes](https://docs.airbyte.com/cloud/core-concepts/#connection-sync-mode): +- Full Refresh +- Incremental - Append Sync +- Incremental - Deduped History -Provide the required S3 info. +## Performance considerations -* **S3 Bucket Name** - * See [this](https://docs.aws.amazon.com/AmazonS3/latest/userguide/create-bucket-overview.html) to create an S3 bucket. -* **S3 Bucket Region** - * Place the S3 bucket and the Redshift cluster in the same region to save on networking costs.
-* **Access Key Id** - * See [this](https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html#access-keys-and-secret-access-keys) on how to generate an access key. - * We recommend creating an Airbyte-specific user. This user will require [read and write permissions](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_examples_s3_rw-bucket.html) to objects in the staging bucket. -* **Secret Access Key** - * Corresponding key to the above key id. -* **Part Size** - * Affects the size limit of an individual Redshift table. Optional. Increase this if syncing tables larger than 100GB. Files are streamed to S3 in parts. This determines the size of each part, in MBs. As S3 has a limit of 10,000 parts per file, part size affects the table size. This is 10MB by default, resulting in a default table limit of 100GB. Note, a larger part size will result in larger memory requirements. A rule of thumb is to multiply the part size by 10 to get the memory requirement. Modify this with care. +Synchronization performance depends on the amount of data to be transferred. +Cluster scaling issues can be resolved directly using the cluster settings in the AWS Redshift console -Optional parameters: -* **Bucket Path** - * The directory within the S3 bucket to place the staging data. For example, if you set this to `yourFavoriteSubdirectory`, we will place the staging data inside `s3://yourBucket/yourFavoriteSubdirectory`. If not provided, defaults to the root directory. -* **Purge Staging Data** - * Whether to delete the staging files from S3 after completing the sync. Specifically, the connector will create CSV files named `bucketPath/namespace/streamName/syncDate_epochMillis_randomUuid.csv` containing three columns (`ab_id`, `data`, `emitted_at`). Normally these files are deleted after the `COPY` command completes; if you want to keep them for other purposes, set `purge_staging_data` to `false`. +## Connector-specific features & highlights -## Notes about Redshift Naming Conventions +### Notes about Redshift Naming Conventions From [Redshift Names & Identifiers](https://docs.aws.amazon.com/redshift/latest/dg/r_names.html): -### Standard Identifiers +#### Standard Identifiers * Begin with an ASCII single-byte alphabetic character or underscore character, or a UTF-8 multibyte character two to four bytes long. * Subsequent characters can be ASCII single-byte alphanumeric characters, underscores, or dollar signs, or UTF-8 multibyte characters two to four bytes long. * Be between 1 and 127 bytes in length, not including quotation marks for delimited identifiers. * Contain no quotation marks and no spaces. -### Delimited Identifiers +#### Delimited Identifiers Delimited identifiers \(also known as quoted identifiers\) begin and end with double quotation marks \("\). If you use a delimited identifier, you must use the double quotation marks for every reference to that object. The identifier can contain any standard UTF-8 printable characters other than the double quotation mark itself. Therefore, you can create column or table names that include otherwise illegal characters, such as spaces or the percent symbol. ASCII letters in delimited identifiers are case-insensitive and are folded to lowercase. To use a double quotation mark in a string, you must precede it with another double quotation mark character. 
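The doubling rule above is mechanical, so it is easy to see how a loader could derive a safe delimited identifier from an arbitrary name. A small illustrative sketch (not the connector's actual code):

```python
def delimited_identifier(name: str) -> str:
    """Quote a Redshift identifier, doubling any embedded double quotes."""
    return '"' + name.replace('"', '""') + '"'

# Names with spaces or quotes become legal delimited identifiers.
assert delimited_identifier("my table") == '"my table"'
assert delimited_identifier('odd"name') == '"odd""name"'
```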
Therefore, Airbyte Redshift destination will create tables and schemas using the Unquoted identifiers when possible or fallback to Quoted Identifiers if the names are containing special characters. -## Data Size Limitations +### Data Size Limitations Redshift specifies a maximum limit of 65535 bytes to store the raw JSON record data. Thus, when a row is too big to fit, the Redshift destination fails to load such data and currently ignores that record. See [docs](https://docs.aws.amazon.com/redshift/latest/dg/r_Character_types.html) -## Encryption +### Encryption All Redshift connections are encrypted using SSL +### Output schema + +Each stream will be output into its own raw table in Redshift. Each table will contain 3 columns: + +* `_airbyte_ab_id`: a uuid assigned by Airbyte to each event that is processed. The column type in Redshift is `VARCHAR`. +* `_airbyte_emitted_at`: a timestamp representing when the event was pulled from the data source. The column type in Redshift is `TIMESTAMP WITH TIME ZONE`. +* `_airbyte_data`: a json blob representing the event data. The column type in Redshift is `VARCHAR` but can be parsed with JSON functions. + +## Data type mapping + +| Redshift Type | Airbyte Type | Notes | +| :--- | :--- | :--- | +| `boolean` | `boolean` | | +| `int` | `integer` | | +| `float` | `number` | | +| `varchar` | `string` | | +| `date/varchar` | `date` | | +| `time/varchar` | `time` | | +| `timestamptz/varchar` | `timestamp_with_timezone` | | +| `varchar` | `array` | | +| `varchar` | `object` | | + ## Changelog | Version | Date | Pull Request | Subject | @@ -142,3 +157,4 @@ | 0.3.12 | 2021-07-21 | [3555](https://github.com/airbytehq/airbyte/pull/3555) | Enable partial checkpointing for halfway syncs | | 0.3.11 | 2021-07-20 | [4874](https://github.com/airbytehq/airbyte/pull/4874) | allow `additionalProperties` in connector spec | + diff --git a/docs/integrations/sources/apify-dataset.md b/docs/integrations/sources/apify-dataset.md index dbd4473494ad..1390e4e36339 100644 --- a/docs/integrations/sources/apify-dataset.md +++ b/docs/integrations/sources/apify-dataset.md @@ -43,6 +43,7 @@ The Apify dataset connector uses [Apify Python Client](https://docs.apify.com/ap | Version | Date | Pull Request | Subject | | :--- | :--- | :--- | :--- | +| 0.1.11 | 2022-04-27 | [12397](https://github.com/airbytehq/airbyte/pull/12397) | No changes. Used connector to test publish workflow changes. | | 0.1.9 | 2022-04-05 | [PR\#11712](https://github.com/airbytehq/airbyte/pull/11712) | No changes from 0.1.4. Used connector to test publish workflow changes. | | 0.1.4 | 2021-12-23 | [PR\#8434](https://github.com/airbytehq/airbyte/pull/8434) | Update fields in source-connectors specifications | | 0.1.2 | 2021-11-08 | [PR\#7499](https://github.com/airbytehq/airbyte/pull/7499) | Remove base-python dependencies | diff --git a/docs/integrations/sources/openweather.md b/docs/integrations/sources/openweather.md index dba899a2e39d..8609d916597b 100644 --- a/docs/integrations/sources/openweather.md +++ b/docs/integrations/sources/openweather.md @@ -34,5 +34,6 @@ The free plan allows 60 calls per minute and 1,000,000 calls per month, you won' | Version | Date | Pull Request | Subject | | :--- | :--- | :--- | :--- | +| 0.1.4 | 2022-04-27 | [12397](https://github.com/airbytehq/airbyte/pull/12397) | No changes. Used connector to test publish workflow changes.
| | 0.1.0 | 2021-10-27 | [7434](https://github.com/airbytehq/airbyte/pull/7434) | Initial release | diff --git a/docs/integrations/sources/smartsheets.md b/docs/integrations/sources/smartsheets.md index afdf7682f02d..595e348d673d 100644 --- a/docs/integrations/sources/smartsheets.md +++ b/docs/integrations/sources/smartsheets.md @@ -86,7 +86,8 @@ To setup your new Smartsheets source, Airbyte will need: ## Changelog -| Version | Date | Pull Request | Subject | -|:--------|:-----------|:---------------------------------------------------------|:--------------------------| -| 0.1.9 | 2022-04-12 | [11911](https://github.com/airbytehq/airbyte/pull/11911) | Bugfix: scrambled columns | -| 0.1.8 | 2022-02-04 | [9792](https://github.com/airbytehq/airbyte/pull/9792) | Added oauth support | +| Version | Date | Pull Request | Subject | +|:--------|:-----------|:---------------------------------------------------------|:----------------------------------------------------------| +| 0.1.10 | 2022-04-15 | [12077](https://github.com/airbytehq/airbyte/pull/12077) | Implement incremental read and improve code test coverage | +| 0.1.9 | 2022-04-12 | [11911](https://github.com/airbytehq/airbyte/pull/11911) | Bugfix: scrambled columns | +| 0.1.8 | 2022-02-04 | [9792](https://github.com/airbytehq/airbyte/pull/9792) | Added oauth support | diff --git a/docs/integrations/sources/snowflake.md b/docs/integrations/sources/snowflake.md index 0c9a15e483ba..f500081c8b2a 100644 --- a/docs/integrations/sources/snowflake.md +++ b/docs/integrations/sources/snowflake.md @@ -72,10 +72,38 @@ You can limit this grant down to specific schemas instead of the whole database. Your database user should now be ready for use with Airbyte. +### Authentication +Two authentication methods are supported: login/password and OAuth 2.0. + +### Login and Password +| Field | Description | +|---|---| +| [Host](https://docs.snowflake.com/en/user-guide/admin-account-identifier.html) | The host domain of the snowflake instance (must include the account, region, cloud environment, and end with snowflakecomputing.com). Example: `accountname.us-east-2.aws.snowflakecomputing.com` | +| [Role](https://docs.snowflake.com/en/user-guide/security-access-control-overview.html#roles) | The role you created in Step 1 for Airbyte to access Snowflake. Example: `AIRBYTE_ROLE` | +| [Warehouse](https://docs.snowflake.com/en/user-guide/warehouses-overview.html#overview-of-warehouses) | The warehouse you created in Step 1 for Airbyte to sync data into. Example: `AIRBYTE_WAREHOUSE` | +| [Database](https://docs.snowflake.com/en/sql-reference/ddl-database.html#database-schema-share-ddl) | The database you created in Step 1 for Airbyte to sync data into. Example: `AIRBYTE_DATABASE` | +| [Schema](https://docs.snowflake.com/en/sql-reference/ddl-database.html#database-schema-share-ddl) | The default schema used as the target schema for all statements issued from the connection that do not explicitly specify a schema name. | +| Username | The username you created in Step 2 to allow Airbyte to access the database. Example: `AIRBYTE_USER` | +| Password | The password associated with the username.
| +| [JDBC URL Params](https://docs.snowflake.com/en/user-guide/jdbc-parameters.html) (Optional) | Additional properties to pass to the JDBC URL string when connecting to the database formatted as `key=value` pairs separated by the symbol `&`. Example: `key1=value1&key2=value2&key3=value3` | + + +### OAuth 2.0 +| Field | Description | +|---|---| +| [Host](https://docs.snowflake.com/en/user-guide/admin-account-identifier.html) | The host domain of the snowflake instance (must include the account, region, cloud environment, and end with snowflakecomputing.com). Example: `accountname.us-east-2.aws.snowflakecomputing.com` | +| [Role](https://docs.snowflake.com/en/user-guide/security-access-control-overview.html#roles) | The role you created in Step 1 for Airbyte to access Snowflake. Example: `AIRBYTE_ROLE` | +| [Warehouse](https://docs.snowflake.com/en/user-guide/warehouses-overview.html#overview-of-warehouses) | The warehouse you created in Step 1 for Airbyte to sync data into. Example: `AIRBYTE_WAREHOUSE` | +| [Database](https://docs.snowflake.com/en/sql-reference/ddl-database.html#database-schema-share-ddl) | The database you created in Step 1 for Airbyte to sync data into. Example: `AIRBYTE_DATABASE` | +| [Schema](https://docs.snowflake.com/en/sql-reference/ddl-database.html#database-schema-share-ddl) | The default schema used as the target schema for all statements issued from the connection that do not explicitly specify a schema name. | +| OAuth2 | The login name and password used to obtain the auth token. | +| [JDBC URL Params](https://docs.snowflake.com/en/user-guide/jdbc-parameters.html) (Optional) | Additional properties to pass to the JDBC URL string when connecting to the database formatted as `key=value` pairs separated by the symbol `&`. Example: `key1=value1&key2=value2&key3=value3` | + ## Changelog | Version | Date | Pull Request | Subject | | :--- | :--- | :--- | :--- | +| 0.1.11 | 2022-04-27 | [10953](https://github.com/airbytehq/airbyte/pull/10953) | Implement OAuth flow | | 0.1.9 | 2022-02-21 | [10242](https://github.com/airbytehq/airbyte/pull/10242) | Fixed cursor for old connectors that use non-microsecond format. Now connectors work with both formats | | 0.1.8 | 2022-02-18 | [10242](https://github.com/airbytehq/airbyte/pull/10242) | Updated timestamp transformation with microseconds | | 0.1.7 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | Add `-XX:+ExitOnOutOfMemoryError` JVM option | diff --git a/docs/integrations/sources/tiktok-marketing.md b/docs/integrations/sources/tiktok-marketing.md index 0642ece8c5a4..9ae8455bfcad 100644 --- a/docs/integrations/sources/tiktok-marketing.md +++ b/docs/integrations/sources/tiktok-marketing.md @@ -1,64 +1,527 @@ # TikTok Marketing -## Overview +This page guides you through the process of setting up the TikTok Marketing source connector. -The [TikTok For Business Marketing API](https://ads.tiktok.com/marketing_api/homepage?rid=uvtbok1h19) allows you to directly interact with the TikTok Ads Manager platform for automated ad management and analysis. +## Prerequisites -The TikTok Marketing source supports both Full Refresh and Incremental syncs. You can choose if this connector will copy only the new or updated data, or all rows in the tables and columns you set up for replication, every time a sync is run. +For Production environment: +* Access token +* Secret +* App ID -This Source Connector is based on a [Airbyte CDK](https://docs.airbyte.io/connector-development/cdk-python).
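The Start date and report granularity settings in the prerequisites that follow drive how the report streams are sliced during an incremental sync. An illustrative sketch of what DAY granularity implies, assuming a simple day-by-day slicing (this is not the connector's actual implementation):

```python
from datetime import date, timedelta


def daily_report_windows(start: date, end: date):
    """Yield one stat_time_day value per day between start and end.

    An incremental sync would resume from the last saved cursor value
    instead of the configured start date.
    """
    current = start
    while current <= end:
        yield current.isoformat()
        current += timedelta(days=1)


# Example: three daily report windows.
print(list(daily_report_windows(date(2022, 3, 28), date(2022, 3, 30))))
```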
+For Sandbox environment: +* Access token +* Advertiser ID -### Streams information +* Start date +* Report Granularity (LIFETIME, DAY, HOUR) -| Stream | Environment | Granularities | Key | Incremental | Schema | -|:----------------------------------|--------------|-------------------|-------------|:---------------|-----------------------------------------------------------------------------------------------| -| Advertisers | Prod,Sandbox | LIFETIME,DAY,HOUR | id | No | [Link](https://business-api.tiktok.com/marketing_api/docs?id=1708503202263042) | -| AdGroups | Prod,Sandbox | LIFETIME,DAY,HOUR | adgroup_id | Yes (DAY,HOUR) | [Link](https://business-api.tiktok.com/marketing_api/docs?id=1708503489590273) | -| Ads | Prod,Sandbox | LIFETIME,DAY,HOUR | ad_id | Yes (DAY,HOUR) | [Link](https://business-api.tiktok.com/marketing_api/docs?id=1708572923161602) | -| Campaigns | Prod,Sandbox | LIFETIME,DAY,HOUR | campaign_id | Yes (DAY,HOUR) | [Link](https://business-api.tiktok.com/marketing_api/docs?id=1708582970809346) | -| AdsReports | Prod,Sandbox | LIFETIME,DAY,HOUR | None | Yes (DAY,HOUR) | [BasicReportsLink](https://business-api.tiktok.com/marketing_api/docs?id=1707957200780290) | -| AdvertisersReports | Prod | LIFETIME,DAY,HOUR | None | Yes (DAY,HOUR) | [BasicReportsLink](https://business-api.tiktok.com/marketing_api/docs?id=1707957200780290) | -| AdGroupsReports | Prod,Sandbox | LIFETIME,DAY,HOUR | None | Yes (DAY,HOUR) | [BasicReportsLink](https://business-api.tiktok.com/marketing_api/docs?id=1707957200780290) | -| CampaignsReports | Prod,Sandbox | LIFETIME,DAY,HOUR | None | Yes (DAY,HOUR) | [BasicReportsLink](https://business-api.tiktok.com/marketing_api/docs?id=1707957200780290) | -| AdvertisersAudienceReports | Prod | LIFETIME,DAY,HOUR | None | Yes (DAY,HOUR) | [AudienceReportsLink](https://business-api.tiktok.com/marketing_api/docs?id=1707957217727489) | -| AdGroupAudienceReports | Prod,Sandbox | DAY,HOUR | None | Yes (DAY,HOUR) | [AudienceReportsLink](https://business-api.tiktok.com/marketing_api/docs?id=1707957217727489) | -| AdsAudienceReports | Prod,Sandbox | DAY,HOUR | None | Yes (DAY,HOUR) | [AudienceReportsLink](https://business-api.tiktok.com/marketing_api/docs?id=1707957217727489) | -| CampaignsAudienceReportsByCountry | Prod,Sandbox | DAY,HOUR | None | Yes (DAY,HOUR) | [AudienceReportsLink](https://business-api.tiktok.com/marketing_api/docs?id=1707957217727489) | +## Step 1: Set up TikTok -If there are more endpoints you'd like Airbyte to support, please [create an issue.](https://github.com/airbytehq/airbyte/issues/new/choose) +1. Create a TikTok For Business account: [Link](https://ads.tiktok.com/marketing_api/docs?rid=fgvgaumno25&id=1702715936951297) +2. Create developer application: [Link](https://ads.tiktok.com/marketing_api/docs?rid=fgvgaumno25&id=1702716474845185) +3. For sandbox environment: create a Sandbox Ad Account [Link](https://ads.tiktok.com/marketing_api/docs?rid=fgvgaumno25&id=1701890920013825) -### Features +## Step 2: Set up the source connector in Airbyte -| Feature | Supported? | -| :--- | :--- | -| Full Refresh Sync | Yes | -| Incremental - Append Sync | Yes | -| SSL connection | Yes | -| Namespaces | No | +**For Airbyte Cloud:** -### Performance considerations +1. [Log into your Airbyte Cloud](https://cloud.airbyte.io/workspaces) account. +2. In the left navigation bar, click **Sources**. In the top-right corner, click **+ new source**. +3. 
On the source setup page, select **Tiktok Marketing** from the Source type dropdown and enter a name for this connector. +4. Select `OAuth2.0` Authorization method, then click `Authenticate your account`. +5. Log in and Authorize to the Tiktok account +6. Choose required Start date and report granularity +7. click `Set up source`. -The connector is restricted by [requests limitation](https://ads.tiktok.com/marketing_api/docs?rid=fgvgaumno25&id=1701890997610497). This connector should not run into TikTok Marketing API limitations under normal usage. Please [create an issue](https://github.com/airbytehq/airbyte/issues) if you see any rate limit issues that are not automatically retried successfully. +**For Airbyte OSS:** -## Getting started +1. Go to local Airbyte page. +2. In the left navigation bar, click **Sources**. In the top-right corner, click **+ new source**. +3. On the Set up the source page, enter the name for the connector and select **Tiktok Marketing** from the Source type dropdown. +4. Select `Production Access Token` or `Sandbox Access Token` Authorization method, then copy and paste info from step 1. +5. Choose required Start date and report granularity +6. Click `Set up source`. -### Requirements +## Supported streams and sync modes -* Access Token - This token will not expire. -* Production Environment - * App ID - * Secret -* SandBox Environment - * Advertiser ID - It is generated for sandbox in one copy +| Stream | Environment | Granularities | Key | Incremental | +|:----------------------------------|--------------|-------------------|-------------|:---------------| +| Advertisers | Prod,Sandbox | LIFETIME,DAY,HOUR | id | No | +| AdGroups | Prod,Sandbox | LIFETIME,DAY,HOUR | adgroup_id | Yes (DAY,HOUR) | +| Ads | Prod,Sandbox | LIFETIME,DAY,HOUR | ad_id | Yes (DAY,HOUR) | +| Campaigns | Prod,Sandbox | LIFETIME,DAY,HOUR | campaign_id | Yes (DAY,HOUR) | +| AdsReports | Prod,Sandbox | LIFETIME,DAY,HOUR | None | Yes (DAY,HOUR) | +| AdvertisersReports | Prod | LIFETIME,DAY,HOUR | None | Yes (DAY,HOUR) | +| AdGroupsReports | Prod,Sandbox | LIFETIME,DAY,HOUR | None | Yes (DAY,HOUR) | +| CampaignsReports | Prod,Sandbox | LIFETIME,DAY,HOUR | None | Yes (DAY,HOUR) | +| AdvertisersAudienceReports | Prod | LIFETIME,DAY,HOUR | None | Yes (DAY,HOUR) | +| AdGroupAudienceReports | Prod,Sandbox | DAY,HOUR | None | Yes (DAY,HOUR) | +| AdsAudienceReports | Prod,Sandbox | DAY,HOUR | None | Yes (DAY,HOUR) | +| CampaignsAudienceReportsByCountry | Prod,Sandbox | DAY,HOUR | None | Yes (DAY,HOUR) | -### Setup guide +**[Advertisers](https://ads.tiktok.com/marketing_api/docs?id=1708503202263042) Stream** +``` +{ + "contacter": "Ai***te", + "phonenumber": "+13*****5753", + "license_no": "", + "promotion_center_city": null, + "balance": 10, + "license_url": null, + "timezone": "Etc/GMT+8", + "reason": "", + "telephone": "+14*****6785", + "id": 7002238017842757633, + "language": "en", + "country": "US", + "role": "ROLE_ADVERTISER", + "license_province": null, + "display_timezone": "America/Los_Angeles", + "email": "i***************@**********", + "license_city": null, + "industry": "291905", + "create_time": 1630335591, + "promotion_center_province": null, + "address": "350 29th avenue, San Francisco", + "currency": "USD", + "promotion_area": "0", + "status": "STATUS_ENABLE", + "description": "https://", + "brand": null, + "name": "Airbyte0830", + "company": "Airbyte" +} +``` -Please read [How to get your AppID, Secret and Access 
Token](https://ads.tiktok.com/marketing_api/docs?rid=fgvgaumno25&id=1701890909484033) or [How to create a SandBox Environment](https://ads.tiktok.com/marketing_api/docs?rid=fgvgaumno25&id=1701890920013825) +**[AdGroups](https://ads.tiktok.com/marketing_api/docs?id=1708503489590273) Stream** +``` +{ + "placement_type": "PLACEMENT_TYPE_AUTOMATIC", + "budget": 20, + "budget_mode": "BUDGET_MODE_DAY", + "display_mode": null, + "schedule_infos": null, + "billing_event": "CPC", + "conversion_window": null, + "adgroup_name": "Ad Group20211020010107", + "interest_keywords": [], + "is_comment_disable": 0, + "rf_buy_type": null, + "frequency": null, + "bid_type": "BID_TYPE_NO_BID", + "placement": null, + "bid": 0, + "include_custom_actions": [], + "operation_system": [], + "pixel_id": null, + "dayparting": "111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111", + "app_type": null, + "conversion_id": 0, + "rf_predict_cpr": null, + "deep_bid_type": null, + "scheduled_budget": 0.0, + "adgroup_id": 1714125049901106, + "frequency_schedule": null, + "exclude_custom_actions": [], + "advertiser_id": 7002238017842757633, + "deep_cpabid": 0, + "is_new_structure": true, + "buy_impression": null, + "external_type": "WEBSITE", + "excluded_audience": [], + "deep_external_action": null, + "interest_category_v2": [], + "rf_predict_frequency": null, + "audience": [], + "pacing": "PACING_MODE_SMOOTH", + "brand_safety_partner": null, + "daily_retention_ratio": null, + "optimize_goal": "CLICK", + "enable_search_result": false, + "conversion_bid": 0, + "schedule_end_time": "2021-10-31 09:01:07", + "opt_status": "ENABLE", + "status": "ADGROUP_STATUS_CAMPAIGN_DISABLE", + "app_id": null, + "external_action": null, + "schedule_type": "SCHEDULE_START_END", + "brand_safety": "NO_BRAND_SAFETY", + "campaign_id": 1714125042508817, + "campaign_name": "Website Traffic20211020010104", + "split_test_adgroup_ids": [], + "action_v2": [], + "is_hfss": false, + "keywords": null, + "create_time": "2021-10-20 08:04:05", + "feed_type": null, + "languages": ["en"], + "enable_inventory_filter": false, + "device_price": [], + "location": [6252001], + "schedule_start_time": "2021-10-20 09:01:07", + "skip_learning_phase": 0, + "gender": "GENDER_UNLIMITED", + "creative_material_mode": "CUSTOM", + "app_download_url": null, + "device_models": [], + "automated_targeting": "OFF", + "connection_type": [], + "ios14_quota_type": "UNOCCUPIED", + "modify_time": "2022-03-24 12:06:54", + "category": 0, + "statistic_type": null, + "video_download": "ALLOW_DOWNLOAD", + "age": ["AGE_25_34", "AGE_35_44", "AGE_45_54"], + "buy_reach": null, + "is_share_disable": false +} +``` + +**[Ads](https://ads.tiktok.com/marketing_api/docs?id=1708572923161602) Stream** +``` +{ + "vast_moat": false, + "is_new_structure": true, + "campaign_name": "CampaignVadimTraffic", + "landing_page_urls": null, + "card_id": null, + "adgroup_id": 1728545385226289, + "campaign_id": 1728545382536225, + "status": "AD_STATUS_CAMPAIGN_DISABLE", + "brand_safety_postbid_partner": "UNSET", + "advertiser_id": 7002238017842757633, + "is_aco": false, + "ad_text": "Open-source\ndata integration for modern data teams", + "identity_id": "7080121820963422209", + "display_name": "airbyte", + "open_url": "", + "external_action": 
null, + "playable_url": "", + "create_time": "2022-03-28 12:09:09", + "product_ids": [], + "adgroup_name": "AdGroupVadim", + "fallback_type": "UNSET", + "creative_type": null, + "ad_name": "AdVadim-Optimized Version 3_202203281449_2022-03-28 05:03:44", + "video_id": "v10033g50000c90q1d3c77ub6e96fvo0", + "ad_format": "SINGLE_VIDEO", + "profile_image": "https://p21-ad-sg.ibyteimg.com/large/ad-site-i18n-sg/202203285d0de5c114d0690a462bb6a4", + "open_url_type": "NORMAL", + "click_tracking_url": null, + "page_id": null, + "ad_texts": null, + "landing_page_url": "https://airbyte.com", + "identity_type": "CUSTOMIZED_USER", + "avatar_icon_web_uri": "ad-site-i18n-sg/202203285d0de5c114d0690a462bb6a4", + "app_name": "", + "modify_time": "2022-03-28 21:34:26", + "opt_status": "ENABLE", + "call_to_action_id": "7080120957230238722", + "image_ids": ["v0201/7f371ff6f0764f8b8ef4f37d7b980d50"], + "ad_id": 1728545390695442, + "impression_tracking_url": null, + "is_creative_authorized": false +} +``` + +**[Campaigns](https://ads.tiktok.com/marketing_api/docs?id=1708582970809346) Stream** +``` +{ + "create_time": "2021-10-19 18:18:08", + "campaign_id": 1714073078669329, + "roas_bid": 0.0, + "advertiser_id": 7002238017842757633, + "modify_time": "2022-03-28 12:01:56", + "campaign_type": "REGULAR_CAMPAIGN", + "status": "CAMPAIGN_STATUS_DISABLE", + "objective_type": "TRAFFIC", + "split_test_variable": null, + "opt_status": "DISABLE", + "budget": 50, + "is_new_structure": true, + "deep_bid_type": null, + "campaign_name": "Website Traffic20211019110444", + "budget_mode": "BUDGET_MODE_DAY", + "objective": "LANDING_PAGE" +} +``` + +**AdsReports Stream - [BasicReports](https://ads.tiktok.com/marketing_api/docs?id=1707957200780290)** +``` +{ + "dimensions": { + "ad_id": 1728545390695442, + "stat_time_day": "2022-03-29 00:00:00" + }, + "metrics": { + "real_time_result_rate": 0.93, + "campaign_id": 1728545382536225, + "placement": "Automatic Placement", + "frequency": 1.17, + "cpc": 0.35, + "ctr": 0.93, + "cost_per_result": 0.3509, + "impressions": 6137, + "cost_per_conversion": 0, + "real_time_result": 57, + "adgroup_id": 1728545385226289, + "result_rate": 0.93, + "cost_per_1000_reached": 3.801, + "ad_text": "Open-source\ndata integration for modern data teams", + "spend": 20, + "conversion_rate": 0, + "real_time_cost_per_conversion": 0, + "promotion_type": "Website", + "tt_app_id": 0, + "real_time_cost_per_result": 0.3509, + "conversion": 0, + "secondary_goal_result": null, + "campaign_name": "CampaignVadimTraffic", + "cpm": 3.26, + "result": 57, + "ad_name": "AdVadim-Optimized Version 3_202203281449_2022-03-28 05:03:44", + "secondary_goal_result_rate": null, + "clicks": 57, + "reach": 5262, + "cost_per_secondary_goal_result": null, + "real_time_conversion": 0, + "real_time_conversion_rate": 0, + "mobile_app_id": "0", + "tt_app_name": "0", + "adgroup_name": "AdGroupVadim", + "dpa_target_audience_type": null + } +} +``` + +**AdvertisersReports Stream - [BasicReports](https://ads.tiktok.com/marketing_api/docs?id=1707957200780290)** +``` +{ + "metrics": { + "cpm": 5.43, + "impressions": 3682, + "frequency": 1.17, + "reach": 3156, + "cash_spend": 20, + "ctr": 1.14, + "spend": 20, + "cpc": 0.48, + "cost_per_1000_reached": 6.337, + "clicks": 42, + "voucher_spend": 0 + }, + "dimensions": { + "stat_time_day": "2022-03-30 00:00:00", + "advertiser_id": 7002238017842757633 + } +} + +``` + +**AdGroupsReports Stream - [BasicReports](https://ads.tiktok.com/marketing_api/docs?id=1707957200780290)** +``` +{ + "metrics": { + 
"real_time_conversion": 0, + "real_time_cost_per_conversion": 0, + "cost_per_1000_reached": 3.801, + "mobile_app_id": "0", + "reach": 5262, + "cpm": 3.26, + "conversion": 0, + "promotion_type": "Website", + "clicks": 57, + "real_time_result_rate": 0.93, + "real_time_conversion_rate": 0, + "cost_per_conversion": 0, + "dpa_target_audience_type": null, + "result": 57, + "cpc": 0.35, + "impressions": 6137, + "cost_per_result": 0.3509, + "tt_app_id": 0, + "cost_per_secondary_goal_result": null, + "frequency": 1.17, + "spend": 20, + "secondary_goal_result_rate": null, + "real_time_cost_per_result": 0.3509, + "real_time_result": 57, + "placement": "Automatic Placement", + "result_rate": 0.93, + "tt_app_name": "0", + "campaign_name": "CampaignVadimTraffic", + "secondary_goal_result": null, + "campaign_id": 1728545382536225, + "conversion_rate": 0, + "ctr": 0.93, + "adgroup_name": "AdGroupVadim" + }, + "dimensions": { + "adgroup_id": 1728545385226289, + "stat_time_day": "2022-03-29 00:00:00" + } +} +``` + +**CampaignsReports Stream - [BasicReports](https://ads.tiktok.com/marketing_api/docs?id=1707957200780290)** +``` +{ + "metrics": { + "cpc": 0.43, + "spend": 20, + "clicks": 46, + "cost_per_1000_reached": 4.002, + "impressions": 5870, + "ctr": 0.78, + "frequency": 1.17, + "cpm": 3.41, + "campaign_name": "CampaignVadimTraffic", + "reach": 4997 + }, + "dimensions": { + "campaign_id": 1728545382536225, + "stat_time_day": "2022-03-28 00:00:00" + } +} + +``` + +**AdsAudienceReports Stream - [AudienceReports](https://ads.tiktok.com/marketing_api/docs?id=1707957217727489)** +``` +{ + { + "result": 17, + "clicks": 17, + "real_time_conversion_rate": 0, + "adgroup_id": 1728545385226289, + "cpm": 3.01, + "cost_per_result": 0.4165, + "real_time_cost_per_result": 0.4165, + "mobile_app_id": 0, + "spend": 7.08, + "cpc": 0.42, + "placement": "Automatic Placement", + "real_time_conversion": 0, + "dpa_target_audience_type": null, + "real_time_result_rate": 0.72, + "adgroup_name": "AdGroupVadim", + "tt_app_id": 0, + "ctr": 0.72, + "ad_text": "Open-source\ndata integration for modern data teams", + "result_rate": 0.72, + "ad_name": "AdVadim-Optimized Version 3_202203281449_2022-03-28 05:03:44", + "conversion_rate": 0, + "real_time_result": 17, + "tt_app_name": "0", + "cost_per_conversion": 0, + "real_time_cost_per_conversion": 0, + "conversion": 0, + "impressions": 2350, + "promotion_type": "Website", + "campaign_id": 1728545382536225, + "campaign_name": "CampaignVadimTraffic" + }, + "dimensions": { + "gender": "MALE", + "age": "AGE_25_34", + "ad_id": 1728545390695442, + "stat_time_day": "2022-03-28 00:00:00" + } +} +``` + +**AdvertisersAudienceReports Stream - [AudienceReports](https://ads.tiktok.com/marketing_api/docs?id=1707957217727489)** +``` +{ + "dimensions": { + "stat_time_day": "2022-03-28 00:00:00", + "gender": "FEMALE", + "advertiser_id": 7002238017842757633, + "age": "AGE_35_44" + }, + "metrics": { + "spend": 3.09, + "ctr": 0.93, + "cpc": 0.44, + "clicks": 7, + "cpm": 4.11, + "impressions": 752 + } +} +``` + +**AdGroupAudienceReports Stream - [AudienceReports](https://ads.tiktok.com/marketing_api/docs?id=1707957217727489)** +``` +{ + "dimensions": { + "gender": "MALE", + "age": "AGE_25_34", + "stat_time_day": "2022-03-29 00:00:00", + "adgroup_id": 1728545385226289 + }, + "metrics": { + "cost_per_conversion": 0, + "campaign_id": 1728545382536225, + "campaign_name": "CampaignVadimTraffic", + "clicks": 20, + "dpa_target_audience_type": null, + "mobile_app_id": 0, + "promotion_type": "Website", + 
"conversion_rate": 0, + "cpm": 3.9, + "cost_per_result": 0.3525, + "cpc": 0.35, + "real_time_cost_per_conversion": 0, + "ctr": 1.11, + "spend": 7.05, + "result": 20, + "real_time_result": 20, + "impressions": 1806, + "conversion": 0, + "real_time_result_rate": 1.11, + "real_time_conversion_rate": 0, + "real_time_conversion": 0, + "adgroup_name": "AdGroupVadim", + "tt_app_name": "0", + "placement": "Automatic Placement", + "real_time_cost_per_result": 0.3525, + "result_rate": 1.11, + "tt_app_id": 0 + } +} +``` + +**CampaignsAudienceReportsByCountry Stream - [AudienceReports](https://ads.tiktok.com/marketing_api/docs?id=1707957217727489)** +``` +{ + "metrics": { + "impressions": 5870, + "campaign_name": "CampaignVadimTraffic", + "cpm": 3.41, + "clicks": 46, + "spend": 20, + "ctr": 0.78, + "cpc": 0.43 + }, + "dimensions": { + "stat_time_day": "2022-03-28 00:00:00", + "campaign_id": 1728545382536225, + "country_code": "US" + } +} + +``` + +## Performance considerations + +The connector is restricted by [requests limitation](https://ads.tiktok.com/marketing_api/docs?rid=fgvgaumno25&id=1725359439428610). This connector should not run into TikTok Marketing API limitations under normal usage. Please [create an issue](https://github.com/airbytehq/airbyte/issues) if you see any rate limit issues that are not automatically retried successfully. ## Changelog | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:----------------------------------------------------------------------------------------------| +| 0.1.7 | 2022-04-27 | [12380](https://github.com/airbytehq/airbyte/pull/12380) | fixed spec descriptions and documentation | | 0.1.6 | 2022-04-19 | [11378](https://github.com/airbytehq/airbyte/pull/11378) | updated logic for stream initializations, fixed errors in schemas, updated SAT and unit tests | | 0.1.5 | 2022-02-17 | [10398](https://github.com/airbytehq/airbyte/pull/10398) | Add Audience reports | | 0.1.4 | 2021-12-30 | [7636](https://github.com/airbytehq/airbyte/pull/7636) | Add OAuth support | diff --git a/docs/operator-guides/upgrading-airbyte.md b/docs/operator-guides/upgrading-airbyte.md index e58aa63432b0..585a53655ed3 100644 --- a/docs/operator-guides/upgrading-airbyte.md +++ b/docs/operator-guides/upgrading-airbyte.md @@ -103,7 +103,7 @@ If you are upgrading from (i.e. your current version of Airbyte is) Airbyte vers Here's an example of what it might look like with the values filled in. It assumes that the downloaded `airbyte_archive.tar.gz` is in `/tmp`. 
```bash - docker run --rm -v /tmp:/config airbyte/migration:0.36.3-alpha --\ + docker run --rm -v /tmp:/config airbyte/migration:0.36.4-alpha --\ --input /config/airbyte_archive.tar.gz\ --output /config/airbyte_archive_migrated.tar.gz ``` diff --git a/kube/overlays/stable-with-resource-limits/.env b/kube/overlays/stable-with-resource-limits/.env index 75b1cfdbfa75..2c4fabdad591 100644 --- a/kube/overlays/stable-with-resource-limits/.env +++ b/kube/overlays/stable-with-resource-limits/.env @@ -1,4 +1,4 @@ -AIRBYTE_VERSION=0.36.3-alpha +AIRBYTE_VERSION=0.36.4-alpha # Airbyte Internal Database, see https://docs.airbyte.io/operator-guides/configuring-airbyte-db DATABASE_HOST=airbyte-db-svc diff --git a/kube/overlays/stable-with-resource-limits/kustomization.yaml b/kube/overlays/stable-with-resource-limits/kustomization.yaml index 9de37822186a..4a40316bec6e 100644 --- a/kube/overlays/stable-with-resource-limits/kustomization.yaml +++ b/kube/overlays/stable-with-resource-limits/kustomization.yaml @@ -8,17 +8,17 @@ bases: images: - name: airbyte/db - newTag: 0.36.3-alpha + newTag: 0.36.4-alpha - name: airbyte/bootloader - newTag: 0.36.3-alpha + newTag: 0.36.4-alpha - name: airbyte/scheduler - newTag: 0.36.3-alpha + newTag: 0.36.4-alpha - name: airbyte/server - newTag: 0.36.3-alpha + newTag: 0.36.4-alpha - name: airbyte/webapp - newTag: 0.36.3-alpha + newTag: 0.36.4-alpha - name: airbyte/worker - newTag: 0.36.3-alpha + newTag: 0.36.4-alpha - name: temporalio/auto-setup newTag: 1.7.0 diff --git a/kube/overlays/stable/.env b/kube/overlays/stable/.env index 3b82b057729c..89bb305385fa 100644 --- a/kube/overlays/stable/.env +++ b/kube/overlays/stable/.env @@ -1,4 +1,4 @@ -AIRBYTE_VERSION=0.36.3-alpha +AIRBYTE_VERSION=0.36.4-alpha # Airbyte Internal Database, see https://docs.airbyte.io/operator-guides/configuring-airbyte-db DATABASE_HOST=airbyte-db-svc diff --git a/kube/overlays/stable/kustomization.yaml b/kube/overlays/stable/kustomization.yaml index 277f73114c8f..336bdd4f2f75 100644 --- a/kube/overlays/stable/kustomization.yaml +++ b/kube/overlays/stable/kustomization.yaml @@ -8,17 +8,17 @@ bases: images: - name: airbyte/db - newTag: 0.36.3-alpha + newTag: 0.36.4-alpha - name: airbyte/bootloader - newTag: 0.36.3-alpha + newTag: 0.36.4-alpha - name: airbyte/scheduler - newTag: 0.36.3-alpha + newTag: 0.36.4-alpha - name: airbyte/server - newTag: 0.36.3-alpha + newTag: 0.36.4-alpha - name: airbyte/webapp - newTag: 0.36.3-alpha + newTag: 0.36.4-alpha - name: airbyte/worker - newTag: 0.36.3-alpha + newTag: 0.36.4-alpha - name: temporalio/auto-setup newTag: 1.7.0 diff --git a/octavia-cli/Dockerfile b/octavia-cli/Dockerfile index 0172db4bf5b4..adba617012e5 100644 --- a/octavia-cli/Dockerfile +++ b/octavia-cli/Dockerfile @@ -14,5 +14,5 @@ USER octavia-cli WORKDIR /home/octavia-project ENTRYPOINT ["octavia"] -LABEL io.airbyte.version=0.36.3-alpha +LABEL io.airbyte.version=0.36.4-alpha LABEL io.airbyte.name=airbyte/octavia-cli diff --git a/octavia-cli/README.md b/octavia-cli/README.md index f1da22e32d3c..444e39503b3e 100644 --- a/octavia-cli/README.md +++ b/octavia-cli/README.md @@ -105,7 +105,7 @@ This script: ```bash touch ~/.octavia # Create a file to store env variables that will be mapped the octavia-cli container mkdir my_octavia_project_directory # Create your octavia project directory where YAML configurations will be stored. 
-docker run --name octavia-cli -i --rm -v my_octavia_project_directory:/home/octavia-project --network host --user $(id -u):$(id -g) --env-file ~/.octavia airbyte/octavia-cli:0.36.3-alpha +docker run --name octavia-cli -i --rm -v my_octavia_project_directory:/home/octavia-project --network host --user $(id -u):$(id -g) --env-file ~/.octavia airbyte/octavia-cli:0.36.4-alpha ``` ### Using `docker-compose` diff --git a/octavia-cli/install.sh b/octavia-cli/install.sh index 4d6e1d2d1659..5f3802e7e046 100755 --- a/octavia-cli/install.sh +++ b/octavia-cli/install.sh @@ -3,7 +3,7 @@ # This install scripts currently only works for ZSH and Bash profiles. # It creates an octavia alias in your profile bound to a docker run command and your current user. -VERSION=0.36.3-alpha +VERSION=0.36.4-alpha OCTAVIA_ENV_FILE=${HOME}/.octavia detect_profile() { diff --git a/octavia-cli/setup.py b/octavia-cli/setup.py index 5e528cae231e..8c3418a48b36 100644 --- a/octavia-cli/setup.py +++ b/octavia-cli/setup.py @@ -15,7 +15,7 @@ setup( name="octavia-cli", - version="0.36.3", + version="0.36.4", description="A command line interface to manage Airbyte configurations", long_description=README, author="Airbyte", diff --git a/tools/bin/deploy_docusaurus b/tools/bin/deploy_docusaurus index 2f74f66cac9c..fb4c206b15d8 100755 --- a/tools/bin/deploy_docusaurus +++ b/tools/bin/deploy_docusaurus @@ -17,6 +17,17 @@ else exit 1 fi +# Abort if the origin remote uses HTTP(S); the docs deploy requires an SSH-based remote +if git remote get-url origin | grep --quiet "http"; then + set +o xtrace + echo -e "$red_text""This program requires an ssh-based github repo""$default_text" + echo -e "$red_text""https://docs.github.com/en/authentication/connecting-to-github-with-ssh/adding-a-new-ssh-key-to-your-github-account""$default_text" + echo -e "$red_text""You will need to change your remote to continue. Yell @topher for help""$default_text" + echo -e "$red_text""change your remote command:""$default_text" + echo -e "$red_text""git remote set-url origin git@github.com:airbytehq/airbyte.git""$default_text" + exit 1 +fi + # ------------- Start Main set +o xtrace
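For context, the guard added to `deploy_docusaurus` only inspects the scheme of the origin URL. The same check, sketched in Python on the assumption that `git` is available on `PATH` (an illustration, not part of the repo):

```python
import subprocess


def remote_is_ssh(remote: str = "origin") -> bool:
    """Return True unless the remote URL uses http(s), mirroring the bash guard."""
    url = subprocess.run(
        ["git", "remote", "get-url", remote],
        capture_output=True,
        text=True,
        check=True,
    ).stdout.strip()
    return not url.startswith("http")


if __name__ == "__main__":
    if not remote_is_ssh():
        raise SystemExit(
            "SSH-based remote required: git remote set-url origin git@github.com:airbytehq/airbyte.git"
        )
```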