From 109461b7223089d45dec18eca781651dfde35536 Mon Sep 17 00:00:00 2001 From: Jared Rhizor Date: Mon, 8 Nov 2021 20:29:53 -0800 Subject: [PATCH] Bump Airbyte version from 0.30.35-alpha to 0.30.36-alpha (#7772) Co-authored-by: sherifnada --- .bumpversion.cfg | 2 +- .env | 2 +- .../resources/seed/destination_specs.yaml | 3917 ++++++++--------- .../BufferedStreamConsumer.java | 5 +- airbyte-migration/Dockerfile | 2 +- airbyte-scheduler/app/Dockerfile | 4 +- airbyte-server/Dockerfile | 4 +- airbyte-webapp/package-lock.json | 4 +- airbyte-webapp/package.json | 2 +- airbyte-workers/Dockerfile | 4 +- .../NormalizationRunnerFactory.java | 12 +- charts/airbyte/Chart.yaml | 2 +- charts/airbyte/README.md | 8 +- charts/airbyte/values.yaml | 8 +- docs/operator-guides/upgrading-airbyte.md | 2 +- .../overlays/stable-with-resource-limits/.env | 2 +- .../kustomization.yaml | 10 +- kube/overlays/stable/.env | 2 +- kube/overlays/stable/kustomization.yaml | 10 +- 19 files changed, 1926 insertions(+), 2076 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index e1d1ef2fb72f..22926c23ff9d 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.30.35-alpha +current_version = 0.30.36-alpha commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(\-[a-z]+)? diff --git a/.env b/.env index bb8d76ea3ff1..027883ca01c5 100644 --- a/.env +++ b/.env @@ -1,4 +1,4 @@ -VERSION=0.30.35-alpha +VERSION=0.30.36-alpha # Airbyte Internal Job Database, see https://docs.airbyte.io/operator-guides/configuring-airbyte-db DATABASE_USER=docker diff --git a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml index e9445bdc84c2..f464eab706ff 100644 --- a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml @@ -9,78 +9,75 @@ title: "AzureBlobStorage Destination Spec" type: "object" required: - - "azure_blob_storage_account_name" - - "azure_blob_storage_account_key" - - "format" + - "azure_blob_storage_account_name" + - "azure_blob_storage_account_key" + - "format" additionalProperties: false properties: azure_blob_storage_endpoint_domain_name: title: "Endpoint Domain Name" type: "string" default: "blob.core.windows.net" - description: - "This is Azure Blob Storage endpoint domain name. Leave default\ + description: "This is Azure Blob Storage endpoint domain name. Leave default\ \ value (or leave it empty if run container from command line) to use\ \ Microsoft native from example." examples: - - "blob.core.windows.net" + - "blob.core.windows.net" azure_blob_storage_container_name: title: "Azure blob storage container (Bucket) Name" type: "string" - description: - "The name of the Azure blob storage container. If not exists\ + description: "The name of the Azure blob storage container. If not exists\ \ - will be created automatically. May be empty, then will be created\ \ automatically airbytecontainer+timestamp" examples: - - "airbytetescontainername" + - "airbytetescontainername" azure_blob_storage_account_name: title: "Azure Blob Storage account name" type: "string" description: "The account's name of the Azure Blob Storage." examples: - - "airbyte5storage" + - "airbyte5storage" azure_blob_storage_account_key: description: "The Azure blob storage account key." 
airbyte_secret: true type: "string" examples: - - "Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd==" + - "Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd==" format: title: "Output Format" type: "object" description: "Output data format" oneOf: - - title: "CSV: Comma-Separated Values" - required: - - "format_type" - - "flattening" - properties: - format_type: - type: "string" - const: "CSV" - flattening: - type: "string" - title: "Normalization (Flattening)" - description: - "Whether the input json data should be normalized (flattened)\ - \ in the output CSV. Please refer to docs for details." - default: "No flattening" - enum: - - "No flattening" - - "Root level flattening" - - title: "JSON Lines: newline-delimited JSON" - required: - - "format_type" - properties: - format_type: - type: "string" - const: "JSONL" + - title: "CSV: Comma-Separated Values" + required: + - "format_type" + - "flattening" + properties: + format_type: + type: "string" + const: "CSV" + flattening: + type: "string" + title: "Normalization (Flattening)" + description: "Whether the input json data should be normalized (flattened)\ + \ in the output CSV. Please refer to docs for details." + default: "No flattening" + enum: + - "No flattening" + - "Root level flattening" + - title: "JSON Lines: newline-delimited JSON" + required: + - "format_type" + properties: + format_type: + type: "string" + const: "JSONL" supportsIncremental: true supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" - dockerImage: "airbyte/destination-bigquery:0.5.0" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/bigquery" @@ -89,14 +86,13 @@ title: "BigQuery Destination Spec" type: "object" required: - - "project_id" - - "dataset_id" + - "project_id" + - "dataset_id" additionalProperties: true properties: big_query_client_buffer_size_mb: title: "Google BigQuery client chunk size" - description: - "Google BigQuery client's chunk(buffer) size (MIN=1, MAX =\ + description: "Google BigQuery client's chunk(buffer) size (MIN=1, MAX =\ \ 15) for each table. The default 15MiB value is used if not set explicitly.\ \ It's recommended to decrease value for big data sets migration for less\ \ HEAP memory consumption and avoiding crashes. For more details refer\ @@ -106,60 +102,56 @@ maximum: 15 default: 15 examples: - - "15" + - "15" project_id: type: "string" - description: - "The GCP project ID for the project containing the target BigQuery\ + description: "The GCP project ID for the project containing the target BigQuery\ \ dataset." title: "Project ID" dataset_id: type: "string" - description: - "Default BigQuery Dataset ID tables are replicated to if the\ + description: "Default BigQuery Dataset ID tables are replicated to if the\ \ source does not specify a namespace." title: "Default Dataset ID" dataset_location: type: "string" - description: - "The location of the dataset. Warning: Changes made after creation\ + description: "The location of the dataset. Warning: Changes made after creation\ \ will not be applied." 
title: "Dataset Location" default: "US" enum: - - "US" - - "EU" - - "asia-east1" - - "asia-east2" - - "asia-northeast1" - - "asia-northeast2" - - "asia-northeast3" - - "asia-south1" - - "asia-southeast1" - - "asia-southeast2" - - "australia-southeast1" - - "europe-central1" - - "europe-central2" - - "europe-north1" - - "europe-west1" - - "europe-west2" - - "europe-west3" - - "europe-west4" - - "europe-west5" - - "europe-west6" - - "northamerica-northeast1" - - "southamerica-east1" - - "us-central1" - - "us-east1" - - "us-east4" - - "us-west-1" - - "us-west-2" - - "us-west-3" - - "us-west-4" + - "US" + - "EU" + - "asia-east1" + - "asia-east2" + - "asia-northeast1" + - "asia-northeast2" + - "asia-northeast3" + - "asia-south1" + - "asia-southeast1" + - "asia-southeast2" + - "australia-southeast1" + - "europe-central1" + - "europe-central2" + - "europe-north1" + - "europe-west1" + - "europe-west2" + - "europe-west3" + - "europe-west4" + - "europe-west5" + - "europe-west6" + - "northamerica-northeast1" + - "southamerica-east1" + - "us-central1" + - "us-east1" + - "us-east4" + - "us-west-1" + - "us-west-2" + - "us-west-3" + - "us-west-4" credentials_json: type: "string" - description: - "The contents of the JSON service account key. Check out the\ + description: "The contents of the JSON service account key. Check out the\ \ docs if you need help generating this key. Default credentials will\ \ be used if this field is left empty." @@ -167,107 +159,101 @@ airbyte_secret: true transformation_priority: type: "string" - description: - "When running custom transformations or Basic normalization,\ + description: "When running custom transformations or Basic normalization,\ \ running queries on interactive mode can hit BQ limits, choosing batch\ \ will solve those limitss." title: "Transformation Query Run Type" default: "interactive" enum: - - "interactive" - - "batch" + - "interactive" + - "batch" loading_method: type: "object" title: "Loading Method" - description: - "Loading method used to send select the way data will be uploaded\ + description: "Loading method used to send select the way data will be uploaded\ \ to BigQuery." oneOf: - - title: "Standard Inserts" - additionalProperties: false - description: "Direct uploading using streams." - required: - - "method" - properties: - method: - type: "string" - const: "Standard" - - title: "GCS Staging" - additionalProperties: false - description: - "Writes large batches of records to a file, uploads the file\ - \ to GCS, then uses
COPY INTO table
to upload the file. Recommended\ - \ for large production workloads for better speed and scalability." - required: - - "method" - - "gcs_bucket_name" - - "gcs_bucket_path" - - "credential" - properties: - method: - type: "string" - const: "GCS Staging" - gcs_bucket_name: - title: "GCS Bucket Name" - type: "string" - description: "The name of the GCS bucket." - examples: - - "airbyte_sync" - gcs_bucket_path: - description: "Directory under the GCS bucket where data will be written." - type: "string" - examples: - - "data_sync/test" - keep_files_in_gcs-bucket: - type: "string" - description: - "This upload method is supposed to temporary store records\ - \ in GCS bucket. What do you want to do with data in GCS bucket\ - \ when migration has finished?" - title: "GCS tmp files afterward processing" - default: "Delete all tmp files from GCS" - enum: - - "Delete all tmp files from GCS" - - "Keep all tmp files in GCS" - credential: - title: "Credential" - type: "object" - oneOf: - - title: "HMAC key" - required: - - "credential_type" - - "hmac_key_access_id" - - "hmac_key_secret" - properties: - credential_type: - type: "string" - const: "HMAC_KEY" - hmac_key_access_id: - type: "string" - description: - "HMAC key access ID. When linked to a service account,\ - \ this ID is 61 characters long; when linked to a user account,\ - \ it is 24 characters long." - title: "HMAC Key Access ID" - airbyte_secret: true - examples: - - "1234567890abcdefghij1234" - hmac_key_secret: - type: "string" - description: - "The corresponding secret for the access ID. It\ - \ is a 40-character base-64 encoded string." - title: "HMAC Key Secret" - airbyte_secret: true - examples: - - "1234567890abcdefghij1234567890ABCDEFGHIJ" + - title: "Standard Inserts" + additionalProperties: false + description: "Direct uploading using streams." + required: + - "method" + properties: + method: + type: "string" + const: "Standard" + - title: "GCS Staging" + additionalProperties: false + description: "Writes large batches of records to a file, uploads the file\ + \ to GCS, then uses
COPY INTO table
to upload the file. Recommended\ + \ for large production workloads for better speed and scalability." + required: + - "method" + - "gcs_bucket_name" + - "gcs_bucket_path" + - "credential" + properties: + method: + type: "string" + const: "GCS Staging" + gcs_bucket_name: + title: "GCS Bucket Name" + type: "string" + description: "The name of the GCS bucket." + examples: + - "airbyte_sync" + gcs_bucket_path: + description: "Directory under the GCS bucket where data will be written." + type: "string" + examples: + - "data_sync/test" + keep_files_in_gcs-bucket: + type: "string" + description: "This upload method is supposed to temporary store records\ + \ in GCS bucket. What do you want to do with data in GCS bucket\ + \ when migration has finished?" + title: "GCS tmp files afterward processing" + default: "Delete all tmp files from GCS" + enum: + - "Delete all tmp files from GCS" + - "Keep all tmp files in GCS" + credential: + title: "Credential" + type: "object" + oneOf: + - title: "HMAC key" + required: + - "credential_type" + - "hmac_key_access_id" + - "hmac_key_secret" + properties: + credential_type: + type: "string" + const: "HMAC_KEY" + hmac_key_access_id: + type: "string" + description: "HMAC key access ID. When linked to a service account,\ + \ this ID is 61 characters long; when linked to a user account,\ + \ it is 24 characters long." + title: "HMAC Key Access ID" + airbyte_secret: true + examples: + - "1234567890abcdefghij1234" + hmac_key_secret: + type: "string" + description: "The corresponding secret for the access ID. It\ + \ is a 40-character base-64 encoded string." + title: "HMAC Key Secret" + airbyte_secret: true + examples: + - "1234567890abcdefghij1234567890ABCDEFGHIJ" supportsIncremental: true supportsNormalization: true supportsDBT: true supported_destination_sync_modes: - - "overwrite" - - "append" - - "append_dedup" + - "overwrite" + - "append" + - "append_dedup" - dockerImage: "airbyte/destination-bigquery-denormalized:0.1.8" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/bigquery" @@ -276,63 +262,59 @@ title: "BigQuery Denormalized Typed Struct Destination Spec" type: "object" required: - - "project_id" - - "dataset_id" + - "project_id" + - "dataset_id" additionalProperties: true properties: project_id: type: "string" - description: - "The GCP project ID for the project containing the target BigQuery\ + description: "The GCP project ID for the project containing the target BigQuery\ \ dataset." title: "Project ID" dataset_id: type: "string" - description: - "Default BigQuery Dataset ID tables are replicated to if the\ + description: "Default BigQuery Dataset ID tables are replicated to if the\ \ source does not specify a namespace." title: "Default Dataset ID" dataset_location: type: "string" - description: - "The location of the dataset. Warning: Changes made after creation\ + description: "The location of the dataset. Warning: Changes made after creation\ \ will not be applied." 
title: "Dataset Location" default: "US" enum: - - "US" - - "EU" - - "asia-east1" - - "asia-east2" - - "asia-northeast1" - - "asia-northeast2" - - "asia-northeast3" - - "asia-south1" - - "asia-southeast1" - - "asia-southeast2" - - "australia-southeast1" - - "europe-central1" - - "europe-central2" - - "europe-north1" - - "europe-west1" - - "europe-west2" - - "europe-west3" - - "europe-west4" - - "europe-west5" - - "europe-west6" - - "northamerica-northeast1" - - "southamerica-east1" - - "us-central1" - - "us-east1" - - "us-east4" - - "us-west-1" - - "us-west-2" - - "us-west-3" - - "us-west-4" + - "US" + - "EU" + - "asia-east1" + - "asia-east2" + - "asia-northeast1" + - "asia-northeast2" + - "asia-northeast3" + - "asia-south1" + - "asia-southeast1" + - "asia-southeast2" + - "australia-southeast1" + - "europe-central1" + - "europe-central2" + - "europe-north1" + - "europe-west1" + - "europe-west2" + - "europe-west3" + - "europe-west4" + - "europe-west5" + - "europe-west6" + - "northamerica-northeast1" + - "southamerica-east1" + - "us-central1" + - "us-east1" + - "us-east4" + - "us-west-1" + - "us-west-2" + - "us-west-3" + - "us-west-4" credentials_json: type: "string" - description: - "The contents of the JSON service account key. Check out the\ + description: "The contents of the JSON service account key. Check out the\ \ docs if you need help generating this key. Default credentials will\ \ be used if this field is left empty." @@ -342,8 +324,8 @@ supportsNormalization: false supportsDBT: true supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" - dockerImage: "airbyte/destination-cassandra:0.1.0" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/cassandra" @@ -352,11 +334,11 @@ title: "Cassandra Destination Spec" type: "object" required: - - "keyspace" - - "username" - - "password" - - "address" - - "port" + - "keyspace" + - "username" + - "password" + - "address" + - "port" additionalProperties: true properties: keyspace: @@ -380,7 +362,7 @@ description: "Address to connect to." type: "string" examples: - - "localhost,127.0.0.1" + - "localhost,127.0.0.1" order: 3 port: title: "Port" @@ -399,8 +381,7 @@ replication: title: "Replication factor" type: "integer" - description: - "Indicates to how many nodes the data should be replicated\ + description: "Indicates to how many nodes the data should be replicated\ \ to." 
default: 1 order: 6 @@ -408,8 +389,8 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" - dockerImage: "airbyte/destination-keen:0.2.0" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/keen" @@ -418,26 +399,25 @@ title: "Keen Spec" type: "object" required: - - "project_id" - - "api_key" + - "project_id" + - "api_key" additionalProperties: false properties: project_id: description: "Keen Project ID" type: "string" examples: - - "58b4acc22ba938934e888322e" + - "58b4acc22ba938934e888322e" api_key: title: "API Key" description: "Keen Master API key" type: "string" examples: - - "ABCDEFGHIJKLMNOPRSTUWXYZ" + - "ABCDEFGHIJKLMNOPRSTUWXYZ" airbyte_secret: true infer_timestamp: title: "Infer Timestamp" - description: - "Allow connector to guess keen.timestamp value based on the\ + description: "Allow connector to guess keen.timestamp value based on the\ \ streamed data" type: "boolean" default: true @@ -445,8 +425,8 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" - dockerImage: "airbyte/destination-dynamodb:0.1.0" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/dynamodb" @@ -455,81 +435,79 @@ title: "DynamoDB Destination Spec" type: "object" required: - - "dynamodb_table_name" - - "dynamodb_region" - - "access_key_id" - - "secret_access_key" + - "dynamodb_table_name" + - "dynamodb_region" + - "access_key_id" + - "secret_access_key" additionalProperties: false properties: dynamodb_endpoint: title: "Endpoint" type: "string" default: "" - description: - "This is your DynamoDB endpoint url.(if you are working with\ + description: "This is your DynamoDB endpoint url.(if you are working with\ \ AWS DynamoDB, just leave empty)." examples: - - "http://localhost:9000" + - "http://localhost:9000" dynamodb_table_name: title: "DynamoDB Table Name" type: "string" description: "The name of the DynamoDB table." examples: - - "airbyte_sync" + - "airbyte_sync" dynamodb_region: title: "DynamoDB Region" type: "string" default: "" description: "The region of the DynamoDB." enum: - - "" - - "us-east-1" - - "us-east-2" - - "us-west-1" - - "us-west-2" - - "af-south-1" - - "ap-east-1" - - "ap-south-1" - - "ap-northeast-1" - - "ap-northeast-2" - - "ap-northeast-3" - - "ap-southeast-1" - - "ap-southeast-2" - - "ca-central-1" - - "cn-north-1" - - "cn-northwest-1" - - "eu-central-1" - - "eu-north-1" - - "eu-south-1" - - "eu-west-1" - - "eu-west-2" - - "eu-west-3" - - "sa-east-1" - - "me-south-1" - - "us-gov-east-1" - - "us-gov-west-1" + - "" + - "us-east-1" + - "us-east-2" + - "us-west-1" + - "us-west-2" + - "af-south-1" + - "ap-east-1" + - "ap-south-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-southeast-1" + - "ap-southeast-2" + - "ca-central-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-north-1" + - "eu-south-1" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "sa-east-1" + - "me-south-1" + - "us-gov-east-1" + - "us-gov-west-1" access_key_id: type: "string" - description: - "The access key id to access the DynamoDB. Airbyte requires\ + description: "The access key id to access the DynamoDB. Airbyte requires\ \ Read and Write permissions to the DynamoDB." 
title: "DynamoDB Key Id" airbyte_secret: true examples: - - "A012345678910EXAMPLE" + - "A012345678910EXAMPLE" secret_access_key: type: "string" description: "The corresponding secret to the access key id." title: "DynamoDB Access Key" airbyte_secret: true examples: - - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" + - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" supportsIncremental: true supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" - dockerImage: "airbyte/destination-elasticsearch:0.1.0" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/elasticsearch" @@ -538,7 +516,7 @@ title: "Elasticsearch Connection Configuration" type: "object" required: - - "endpoint" + - "endpoint" additionalProperties: false properties: endpoint: @@ -548,8 +526,7 @@ upsert: type: "boolean" title: "Upsert Records" - description: - "If a primary key identifier is defined in the source, an upsert\ + description: "If a primary key identifier is defined in the source, an upsert\ \ will be performed using the primary key value as the elasticsearch doc\ \ id. Does not support composite primary keys." default: true @@ -558,67 +535,64 @@ type: "object" description: "The type of authentication to be used" oneOf: - - title: "None" - additionalProperties: false - description: "No authentication will be used" - required: - - "method" - properties: - method: - type: "string" - const: "none" - - title: "Api Key/Secret" - additionalProperties: false - description: "Use a api key and secret combination to authenticate" - required: - - "method" - - "apiKeyId" - - "apiKeySecret" - properties: - method: - type: "string" - const: "secret" - apiKeyId: - title: "API Key ID" - description: - "The Key ID to used when accessing an enterprise Elasticsearch\ - \ instance." - type: "string" - apiKeySecret: - title: "API Key Secret" - description: "The secret associated with the API Key ID." - type: "string" - airbyte_secret: true - - title: "Username/Password" - additionalProperties: false - description: "Basic auth header with a username and password" - required: - - "method" - - "username" - - "password" - properties: - method: - type: "string" - const: "basic" - username: - title: "Username" - description: - "Basic auth username to access a secure Elasticsearch\ - \ server" - type: "string" - password: - title: "Password" - description: - "Basic auth password to access a secure Elasticsearch\ - \ server" - type: "string" - airbyte_secret: true + - title: "None" + additionalProperties: false + description: "No authentication will be used" + required: + - "method" + properties: + method: + type: "string" + const: "none" + - title: "Api Key/Secret" + additionalProperties: false + description: "Use a api key and secret combination to authenticate" + required: + - "method" + - "apiKeyId" + - "apiKeySecret" + properties: + method: + type: "string" + const: "secret" + apiKeyId: + title: "API Key ID" + description: "The Key ID to used when accessing an enterprise Elasticsearch\ + \ instance." + type: "string" + apiKeySecret: + title: "API Key Secret" + description: "The secret associated with the API Key ID." 
+ type: "string" + airbyte_secret: true + - title: "Username/Password" + additionalProperties: false + description: "Basic auth header with a username and password" + required: + - "method" + - "username" + - "password" + properties: + method: + type: "string" + const: "basic" + username: + title: "Username" + description: "Basic auth username to access a secure Elasticsearch\ + \ server" + type: "string" + password: + title: "Password" + description: "Basic auth password to access a secure Elasticsearch\ + \ server" + type: "string" + airbyte_secret: true supportsIncremental: true supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" supportsNamespaces: true - dockerImage: "airbyte/destination-gcs:0.1.3" spec: @@ -628,11 +602,11 @@ title: "GCS Destination Spec" type: "object" required: - - "gcs_bucket_name" - - "gcs_bucket_path" - - "gcs_bucket_region" - - "credential" - - "format" + - "gcs_bucket_name" + - "gcs_bucket_path" + - "gcs_bucket_region" + - "credential" + - "format" additionalProperties: false properties: gcs_bucket_name: @@ -640,341 +614,327 @@ type: "string" description: "The name of the GCS bucket." examples: - - "airbyte_sync" + - "airbyte_sync" gcs_bucket_path: description: "Directory under the GCS bucket where data will be written." type: "string" examples: - - "data_sync/test" + - "data_sync/test" gcs_bucket_region: title: "GCS Bucket Region" type: "string" default: "" description: "The region of the GCS bucket." enum: - - "" - - "-- North America --" - - "northamerica-northeast1" - - "us-central1" - - "us-east1" - - "us-east4" - - "us-west1" - - "us-west2" - - "us-west3" - - "us-west4" - - "-- South America --" - - "southamerica-east1" - - "-- Europe --" - - "europe-central2" - - "europe-north1" - - "europe-west1" - - "europe-west2" - - "europe-west3" - - "europe-west4" - - "europe-west6" - - "-- Asia --" - - "asia-east1" - - "asia-east2" - - "asia-northeast1" - - "asia-northeast2" - - "asia-northeast3" - - "asia-south1" - - "asia-south2" - - "asia-southeast1" - - "asia-southeast2" - - "-- Australia --" - - "australia-southeast1" - - "australia-southeast2" - - "-- Multi-regions --" - - "asia" - - "eu" - - "us" - - "-- Dual-regions --" - - "asia1" - - "eur4" - - "nam4" + - "" + - "-- North America --" + - "northamerica-northeast1" + - "us-central1" + - "us-east1" + - "us-east4" + - "us-west1" + - "us-west2" + - "us-west3" + - "us-west4" + - "-- South America --" + - "southamerica-east1" + - "-- Europe --" + - "europe-central2" + - "europe-north1" + - "europe-west1" + - "europe-west2" + - "europe-west3" + - "europe-west4" + - "europe-west6" + - "-- Asia --" + - "asia-east1" + - "asia-east2" + - "asia-northeast1" + - "asia-northeast2" + - "asia-northeast3" + - "asia-south1" + - "asia-south2" + - "asia-southeast1" + - "asia-southeast2" + - "-- Australia --" + - "australia-southeast1" + - "australia-southeast2" + - "-- Multi-regions --" + - "asia" + - "eu" + - "us" + - "-- Dual-regions --" + - "asia1" + - "eur4" + - "nam4" credential: title: "Credential" type: "object" oneOf: - - title: "HMAC key" - required: - - "credential_type" - - "hmac_key_access_id" - - "hmac_key_secret" - properties: - credential_type: - type: "string" - enum: - - "HMAC_KEY" - default: "HMAC_KEY" - hmac_key_access_id: - type: "string" - description: - "HMAC key access ID. When linked to a service account,\ - \ this ID is 61 characters long; when linked to a user account,\ - \ it is 24 characters long." 
- title: "HMAC Key Access ID" - airbyte_secret: true - examples: - - "1234567890abcdefghij1234" - hmac_key_secret: - type: "string" - description: - "The corresponding secret for the access ID. It is a\ - \ 40-character base-64 encoded string." - title: "HMAC Key Secret" - airbyte_secret: true - examples: - - "1234567890abcdefghij1234567890ABCDEFGHIJ" + - title: "HMAC key" + required: + - "credential_type" + - "hmac_key_access_id" + - "hmac_key_secret" + properties: + credential_type: + type: "string" + enum: + - "HMAC_KEY" + default: "HMAC_KEY" + hmac_key_access_id: + type: "string" + description: "HMAC key access ID. When linked to a service account,\ + \ this ID is 61 characters long; when linked to a user account,\ + \ it is 24 characters long." + title: "HMAC Key Access ID" + airbyte_secret: true + examples: + - "1234567890abcdefghij1234" + hmac_key_secret: + type: "string" + description: "The corresponding secret for the access ID. It is a\ + \ 40-character base-64 encoded string." + title: "HMAC Key Secret" + airbyte_secret: true + examples: + - "1234567890abcdefghij1234567890ABCDEFGHIJ" format: title: "Output Format" type: "object" description: "Output data format" oneOf: - - title: "Avro: Apache Avro" - required: - - "format_type" - - "compression_codec" - properties: - format_type: - type: "string" - enum: - - "Avro" - default: "Avro" - compression_codec: - title: "Compression Codec" - description: - "The compression algorithm used to compress data. Default\ - \ to no compression." - type: "object" - oneOf: - - title: "no compression" - required: - - "codec" - properties: - codec: - type: "string" - enum: - - "no compression" - default: "no compression" - - title: "Deflate" - required: - - "codec" - - "compression_level" - properties: - codec: - type: "string" - enum: - - "Deflate" - default: "Deflate" - compression_level: - title: "Deflate level" - description: - "0: no compression & fastest, 9: best compression\ - \ & slowest." - type: "integer" - default: 0 - minimum: 0 - maximum: 9 - - title: "bzip2" - required: - - "codec" - properties: - codec: - type: "string" - enum: - - "bzip2" - default: "bzip2" - - title: "xz" - required: - - "codec" - - "compression_level" - properties: - codec: - type: "string" - enum: - - "xz" - default: "xz" - compression_level: - title: "Compression level" - description: - "See here for details." - type: "integer" - default: 6 - minimum: 0 - maximum: 9 - - title: "zstandard" - required: - - "codec" - - "compression_level" - properties: - codec: - type: "string" - enum: - - "zstandard" - default: "zstandard" - compression_level: - title: "Compression level" - description: - "Negative levels are 'fast' modes akin to lz4 or\ - \ snappy, levels above 9 are generally for archival purposes,\ - \ and levels above 18 use a lot of memory." - type: "integer" - default: 3 - minimum: -5 - maximum: 22 - include_checksum: - title: "Include checksum" - description: "If true, include a checksum with each data block." - type: "boolean" - default: false - - title: "snappy" - required: - - "codec" - properties: - codec: - type: "string" - enum: - - "snappy" - default: "snappy" - part_size_mb: - title: "Block Size (MB) for GCS multipart upload" - description: - "This is the size of a \"Part\" being buffered in memory.\ - \ It limits the memory usage when writing. Larger values will allow\ - \ to upload a bigger files and improve the speed, but consumes9\ - \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB." 
- type: "integer" - default: 5 - examples: - - 5 - - title: "CSV: Comma-Separated Values" - required: - - "format_type" - - "flattening" - properties: - format_type: - type: "string" - enum: - - "CSV" - default: "CSV" - flattening: - type: "string" - title: "Normalization (Flattening)" - description: - "Whether the input json data should be normalized (flattened)\ - \ in the output CSV. Please refer to docs for details." - default: "No flattening" - enum: - - "No flattening" - - "Root level flattening" - part_size_mb: - title: "Block Size (MB) for GCS multipart upload" - description: - "This is the size of a \"Part\" being buffered in memory.\ - \ It limits the memory usage when writing. Larger values will allow\ - \ to upload a bigger files and improve the speed, but consumes9\ - \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB." - type: "integer" - default: 5 - examples: - - 5 - - title: "JSON Lines: newline-delimited JSON" - required: - - "format_type" - properties: - format_type: - type: "string" - enum: - - "JSONL" - default: "JSONL" - part_size_mb: - title: "Block Size (MB) for GCS multipart upload" - description: - "This is the size of a \"Part\" being buffered in memory.\ - \ It limits the memory usage when writing. Larger values will allow\ - \ to upload a bigger files and improve the speed, but consumes9\ - \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB." - type: "integer" - default: 5 - examples: - - 5 - - title: "Parquet: Columnar Storage" - required: - - "format_type" - properties: - format_type: - type: "string" - enum: - - "Parquet" - default: "Parquet" - compression_codec: - title: "Compression Codec" - description: "The compression algorithm used to compress data pages." - type: "string" - enum: - - "UNCOMPRESSED" - - "SNAPPY" - - "GZIP" - - "LZO" - - "BROTLI" - - "LZ4" - - "ZSTD" - default: "UNCOMPRESSED" - block_size_mb: - title: "Block Size (Row Group Size) (MB)" - description: - "This is the size of a row group being buffered in memory.\ - \ It limits the memory usage when writing. Larger values will improve\ - \ the IO when reading, but consume more memory when writing. Default:\ - \ 128 MB." - type: "integer" - default: 128 - examples: - - 128 - max_padding_size_mb: - title: "Max Padding Size (MB)" - description: - "Maximum size allowed as padding to align row groups.\ - \ This is also the minimum size of a row group. Default: 8 MB." - type: "integer" - default: 8 - examples: - - 8 - page_size_kb: - title: "Page Size (KB)" - description: - "The page size is for compression. A block is composed\ - \ of pages. A page is the smallest unit that must be read fully\ - \ to access a single record. If this value is too small, the compression\ - \ will deteriorate. Default: 1024 KB." - type: "integer" - default: 1024 - examples: - - 1024 - dictionary_page_size_kb: - title: "Dictionary Page Size (KB)" - description: - "There is one dictionary page per column per row group\ - \ when dictionary encoding is used. The dictionary page size works\ - \ like the page size but for dictionary. Default: 1024 KB." - type: "integer" - default: 1024 - examples: - - 1024 - dictionary_encoding: - title: "Dictionary Encoding" - description: "Default: true." 
- type: "boolean" - default: true + - title: "Avro: Apache Avro" + required: + - "format_type" + - "compression_codec" + properties: + format_type: + type: "string" + enum: + - "Avro" + default: "Avro" + compression_codec: + title: "Compression Codec" + description: "The compression algorithm used to compress data. Default\ + \ to no compression." + type: "object" + oneOf: + - title: "no compression" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "no compression" + default: "no compression" + - title: "Deflate" + required: + - "codec" + - "compression_level" + properties: + codec: + type: "string" + enum: + - "Deflate" + default: "Deflate" + compression_level: + title: "Deflate level" + description: "0: no compression & fastest, 9: best compression\ + \ & slowest." + type: "integer" + default: 0 + minimum: 0 + maximum: 9 + - title: "bzip2" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "bzip2" + default: "bzip2" + - title: "xz" + required: + - "codec" + - "compression_level" + properties: + codec: + type: "string" + enum: + - "xz" + default: "xz" + compression_level: + title: "Compression level" + description: "See here for details." + type: "integer" + default: 6 + minimum: 0 + maximum: 9 + - title: "zstandard" + required: + - "codec" + - "compression_level" + properties: + codec: + type: "string" + enum: + - "zstandard" + default: "zstandard" + compression_level: + title: "Compression level" + description: "Negative levels are 'fast' modes akin to lz4 or\ + \ snappy, levels above 9 are generally for archival purposes,\ + \ and levels above 18 use a lot of memory." + type: "integer" + default: 3 + minimum: -5 + maximum: 22 + include_checksum: + title: "Include checksum" + description: "If true, include a checksum with each data block." + type: "boolean" + default: false + - title: "snappy" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "snappy" + default: "snappy" + part_size_mb: + title: "Block Size (MB) for GCS multipart upload" + description: "This is the size of a \"Part\" being buffered in memory.\ + \ It limits the memory usage when writing. Larger values will allow\ + \ to upload a bigger files and improve the speed, but consumes9\ + \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB." + type: "integer" + default: 5 + examples: + - 5 + - title: "CSV: Comma-Separated Values" + required: + - "format_type" + - "flattening" + properties: + format_type: + type: "string" + enum: + - "CSV" + default: "CSV" + flattening: + type: "string" + title: "Normalization (Flattening)" + description: "Whether the input json data should be normalized (flattened)\ + \ in the output CSV. Please refer to docs for details." + default: "No flattening" + enum: + - "No flattening" + - "Root level flattening" + part_size_mb: + title: "Block Size (MB) for GCS multipart upload" + description: "This is the size of a \"Part\" being buffered in memory.\ + \ It limits the memory usage when writing. Larger values will allow\ + \ to upload a bigger files and improve the speed, but consumes9\ + \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB." 
+ type: "integer" + default: 5 + examples: + - 5 + - title: "JSON Lines: newline-delimited JSON" + required: + - "format_type" + properties: + format_type: + type: "string" + enum: + - "JSONL" + default: "JSONL" + part_size_mb: + title: "Block Size (MB) for GCS multipart upload" + description: "This is the size of a \"Part\" being buffered in memory.\ + \ It limits the memory usage when writing. Larger values will allow\ + \ to upload a bigger files and improve the speed, but consumes9\ + \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB." + type: "integer" + default: 5 + examples: + - 5 + - title: "Parquet: Columnar Storage" + required: + - "format_type" + properties: + format_type: + type: "string" + enum: + - "Parquet" + default: "Parquet" + compression_codec: + title: "Compression Codec" + description: "The compression algorithm used to compress data pages." + type: "string" + enum: + - "UNCOMPRESSED" + - "SNAPPY" + - "GZIP" + - "LZO" + - "BROTLI" + - "LZ4" + - "ZSTD" + default: "UNCOMPRESSED" + block_size_mb: + title: "Block Size (Row Group Size) (MB)" + description: "This is the size of a row group being buffered in memory.\ + \ It limits the memory usage when writing. Larger values will improve\ + \ the IO when reading, but consume more memory when writing. Default:\ + \ 128 MB." + type: "integer" + default: 128 + examples: + - 128 + max_padding_size_mb: + title: "Max Padding Size (MB)" + description: "Maximum size allowed as padding to align row groups.\ + \ This is also the minimum size of a row group. Default: 8 MB." + type: "integer" + default: 8 + examples: + - 8 + page_size_kb: + title: "Page Size (KB)" + description: "The page size is for compression. A block is composed\ + \ of pages. A page is the smallest unit that must be read fully\ + \ to access a single record. If this value is too small, the compression\ + \ will deteriorate. Default: 1024 KB." + type: "integer" + default: 1024 + examples: + - 1024 + dictionary_page_size_kb: + title: "Dictionary Page Size (KB)" + description: "There is one dictionary page per column per row group\ + \ when dictionary encoding is used. The dictionary page size works\ + \ like the page size but for dictionary. Default: 1024 KB." + type: "integer" + default: 1024 + examples: + - 1024 + dictionary_encoding: + title: "Dictionary Encoding" + description: "Default: true." + type: "boolean" + default: true supportsIncremental: true supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" $schema: "http://json-schema.org/draft-07/schema#" - dockerImage: "airbyte/destination-pubsub:0.1.1" spec: @@ -984,9 +944,9 @@ title: "Google PubSub Destination Spec" type: "object" required: - - "project_id" - - "topic_id" - - "credentials_json" + - "project_id" + - "topic_id" + - "credentials_json" additionalProperties: true properties: project_id: @@ -999,8 +959,7 @@ title: "PubSub Topic ID" credentials_json: type: "string" - description: - "The contents of the JSON service account key. Check out the\ + description: "The contents of the JSON service account key. Check out the\ \ docs if you need help generating this key." 
title: "Credentials JSON" @@ -1009,7 +968,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "append" + - "append" - dockerImage: "airbyte/destination-kafka:0.1.2" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/kafka" @@ -1018,32 +977,31 @@ title: "Kafka Destination Spec" type: "object" required: - - "bootstrap_servers" - - "topic_pattern" - - "protocol" - - "acks" - - "enable_idempotence" - - "compression_type" - - "batch_size" - - "linger_ms" - - "max_in_flight_requests_per_connection" - - "client_dns_lookup" - - "buffer_memory" - - "max_request_size" - - "retries" - - "socket_connection_setup_timeout_ms" - - "socket_connection_setup_timeout_max_ms" - - "max_block_ms" - - "request_timeout_ms" - - "delivery_timeout_ms" - - "send_buffer_bytes" - - "receive_buffer_bytes" + - "bootstrap_servers" + - "topic_pattern" + - "protocol" + - "acks" + - "enable_idempotence" + - "compression_type" + - "batch_size" + - "linger_ms" + - "max_in_flight_requests_per_connection" + - "client_dns_lookup" + - "buffer_memory" + - "max_request_size" + - "retries" + - "socket_connection_setup_timeout_ms" + - "socket_connection_setup_timeout_max_ms" + - "max_block_ms" + - "request_timeout_ms" + - "delivery_timeout_ms" + - "send_buffer_bytes" + - "receive_buffer_bytes" additionalProperties: true properties: bootstrap_servers: title: "Bootstrap servers" - description: - "A list of host/port pairs to use for establishing the initial\ + description: "A list of host/port pairs to use for establishing the initial\ \ connection to the Kafka cluster. The client will make use of all servers\ \ irrespective of which servers are specified here for bootstrapping—this\ \ list only impacts the initial hosts used to discover the full set of\ @@ -1054,24 +1012,23 @@ \ though, in case a server is down)." type: "string" examples: - - "kafka-broker1:9092,kafka-broker2:9092" + - "kafka-broker1:9092,kafka-broker2:9092" topic_pattern: title: "Topic pattern" - description: - "Topic pattern in which the records will be sent. You can use\ + description: "Topic pattern in which the records will be sent. You can use\ \ patterns like '{namespace}' and/or '{stream}' to send the message to\ \ a specific topic based on these values. Notice that the topic name will\ \ be transformed to a standard naming convention." type: "string" examples: - - "sample.topic" - - "{namespace}.{stream}.sample" + - "sample.topic" + - "{namespace}.{stream}.sample" test_topic: title: "Test topic" description: "Topic to test if Airbyte can produce messages." type: "string" examples: - - "test.topic" + - "test.topic" sync_producer: title: "Sync producer" description: "Wait synchronously until the record has been sent to Kafka." @@ -1082,99 +1039,92 @@ type: "object" description: "Protocol used to communicate with brokers." oneOf: - - title: "PLAINTEXT" - required: - - "security_protocol" - properties: - security_protocol: - type: "string" - enum: - - "PLAINTEXT" - default: "PLAINTEXT" - - title: "SASL PLAINTEXT" - required: - - "security_protocol" - - "sasl_mechanism" - - "sasl_jaas_config" - properties: - security_protocol: - type: "string" - enum: - - "SASL_PLAINTEXT" - default: "SASL_PLAINTEXT" - sasl_mechanism: - title: "SASL mechanism" - description: - "SASL mechanism used for client connections. This may\ - \ be any mechanism for which a security provider is available." 
- type: "string" - default: "PLAIN" - enum: - - "PLAIN" - sasl_jaas_config: - title: "SASL JAAS config" - description: - "JAAS login context parameters for SASL connections in\ - \ the format used by JAAS configuration files." - type: "string" - default: "" - airbyte_secret: true - - title: "SASL SSL" - required: - - "security_protocol" - - "sasl_mechanism" - - "sasl_jaas_config" - properties: - security_protocol: - type: "string" - enum: - - "SASL_SSL" - default: "SASL_SSL" - sasl_mechanism: - title: "SASL mechanism" - description: - "SASL mechanism used for client connections. This may\ - \ be any mechanism for which a security provider is available." - type: "string" - default: "GSSAPI" - enum: - - "GSSAPI" - - "OAUTHBEARER" - - "SCRAM-SHA-256" - sasl_jaas_config: - title: "SASL JAAS config" - description: - "JAAS login context parameters for SASL connections in\ - \ the format used by JAAS configuration files." - type: "string" - default: "" - airbyte_secret: true + - title: "PLAINTEXT" + required: + - "security_protocol" + properties: + security_protocol: + type: "string" + enum: + - "PLAINTEXT" + default: "PLAINTEXT" + - title: "SASL PLAINTEXT" + required: + - "security_protocol" + - "sasl_mechanism" + - "sasl_jaas_config" + properties: + security_protocol: + type: "string" + enum: + - "SASL_PLAINTEXT" + default: "SASL_PLAINTEXT" + sasl_mechanism: + title: "SASL mechanism" + description: "SASL mechanism used for client connections. This may\ + \ be any mechanism for which a security provider is available." + type: "string" + default: "PLAIN" + enum: + - "PLAIN" + sasl_jaas_config: + title: "SASL JAAS config" + description: "JAAS login context parameters for SASL connections in\ + \ the format used by JAAS configuration files." + type: "string" + default: "" + airbyte_secret: true + - title: "SASL SSL" + required: + - "security_protocol" + - "sasl_mechanism" + - "sasl_jaas_config" + properties: + security_protocol: + type: "string" + enum: + - "SASL_SSL" + default: "SASL_SSL" + sasl_mechanism: + title: "SASL mechanism" + description: "SASL mechanism used for client connections. This may\ + \ be any mechanism for which a security provider is available." + type: "string" + default: "GSSAPI" + enum: + - "GSSAPI" + - "OAUTHBEARER" + - "SCRAM-SHA-256" + sasl_jaas_config: + title: "SASL JAAS config" + description: "JAAS login context parameters for SASL connections in\ + \ the format used by JAAS configuration files." + type: "string" + default: "" + airbyte_secret: true client_id: title: "Client ID" - description: - "An id string to pass to the server when making requests. The\ + description: "An id string to pass to the server when making requests. The\ \ purpose of this is to be able to track the source of requests beyond\ \ just ip/port by allowing a logical application name to be included in\ \ server-side request logging." type: "string" examples: - - "airbyte-producer" + - "airbyte-producer" acks: title: "ACKs" - description: - "The number of acknowledgments the producer requires the leader\ + description: "The number of acknowledgments the producer requires the leader\ \ to have received before considering a request complete. This controls\ \ the durability of records that are sent." 
type: "string" default: "1" enum: - - "0" - - "1" - - "all" + - "0" + - "1" + - "all" enable_idempotence: title: "Enable idempotence" - description: - "When set to 'true', the producer will ensure that exactly\ + description: "When set to 'true', the producer will ensure that exactly\ \ one copy of each message is written in the stream. If 'false', producer\ \ retries due to broker failures, etc., may write duplicates of the retried\ \ message in the stream." @@ -1186,39 +1136,35 @@ type: "string" default: "none" enum: - - "none" - - "gzip" - - "snappy" - - "lz4" - - "zstd" + - "none" + - "gzip" + - "snappy" + - "lz4" + - "zstd" batch_size: title: "Batch size" - description: - "The producer will attempt to batch records together into fewer\ + description: "The producer will attempt to batch records together into fewer\ \ requests whenever multiple records are being sent to the same partition." type: "integer" examples: - - 16384 + - 16384 linger_ms: title: "Linger ms" - description: - "The producer groups together any records that arrive in between\ + description: "The producer groups together any records that arrive in between\ \ request transmissions into a single batched request." type: "string" examples: - - 0 + - 0 max_in_flight_requests_per_connection: title: "Max in flight requests per connection" - description: - "The maximum number of unacknowledged requests the client will\ + description: "The maximum number of unacknowledged requests the client will\ \ send on a single connection before blocking." type: "integer" examples: - - 5 + - 5 client_dns_lookup: title: "Client DNS lookup" - description: - "Controls how the client uses DNS lookups. If set to use_all_dns_ips,\ + description: "Controls how the client uses DNS lookups. If set to use_all_dns_ips,\ \ connect to each returned IP address in sequence until a successful connection\ \ is established. After a disconnection, the next IP is used. Once all\ \ IPs have been used once, the client resolves the IP(s) from the hostname\ @@ -1230,14 +1176,13 @@ type: "string" default: "use_all_dns_ips" enum: - - "default" - - "use_all_dns_ips" - - "resolve_canonical_bootstrap_servers_only" - - "use_all_dns_ips" + - "default" + - "use_all_dns_ips" + - "resolve_canonical_bootstrap_servers_only" + - "use_all_dns_ips" buffer_memory: title: "Buffer memory" - description: - "The total bytes of memory the producer can use to buffer records\ + description: "The total bytes of memory the producer can use to buffer records\ \ waiting to be sent to the server." type: "string" examples: 33554432 @@ -1246,80 +1191,72 @@ description: "The maximum size of a request in bytes." type: "integer" examples: - - 1048576 + - 1048576 retries: title: "Retries" - description: - "Setting a value greater than zero will cause the client to\ + description: "Setting a value greater than zero will cause the client to\ \ resend any record whose send fails with a potentially transient error." type: "integer" examples: - - 2147483647 + - 2147483647 socket_connection_setup_timeout_ms: title: "Socket connection setup timeout" - description: - "The amount of time the client will wait for the socket connection\ + description: "The amount of time the client will wait for the socket connection\ \ to be established." 
type: "string" examples: - - 10000 + - 10000 socket_connection_setup_timeout_max_ms: title: "Socket connection setup max timeout" - description: - "The maximum amount of time the client will wait for the socket\ + description: "The maximum amount of time the client will wait for the socket\ \ connection to be established. The connection setup timeout will increase\ \ exponentially for each consecutive connection failure up to this maximum." type: "string" examples: - - 30000 + - 30000 max_block_ms: title: "Max block ms" - description: - "The configuration controls how long the KafkaProducer's send(),\ + description: "The configuration controls how long the KafkaProducer's send(),\ \ partitionsFor(), initTransactions(), sendOffsetsToTransaction(), commitTransaction()\ \ and abortTransaction() methods will block." type: "string" examples: - - 60000 + - 60000 request_timeout_ms: title: "Request timeout" - description: - "The configuration controls the maximum amount of time the\ + description: "The configuration controls the maximum amount of time the\ \ client will wait for the response of a request. If the response is not\ \ received before the timeout elapses the client will resend the request\ \ if necessary or fail the request if retries are exhausted." type: "integer" examples: - - 30000 + - 30000 delivery_timeout_ms: title: "Delivery timeout" - description: - "An upper bound on the time to report success or failure after\ + description: "An upper bound on the time to report success or failure after\ \ a call to 'send()' returns." type: "integer" examples: - - 120000 + - 120000 send_buffer_bytes: title: "Send buffer bytes" - description: - "The size of the TCP send buffer (SO_SNDBUF) to use when sending\ + description: "The size of the TCP send buffer (SO_SNDBUF) to use when sending\ \ data. If the value is -1, the OS default will be used." type: "integer" examples: - - 131072 + - 131072 receive_buffer_bytes: title: "Receive buffer bytes" - description: - "The size of the TCP receive buffer (SO_RCVBUF) to use when\ + description: "The size of the TCP receive buffer (SO_RCVBUF) to use when\ \ reading data. If the value is -1, the OS default will be used." type: "integer" examples: - - 32768 + - 32768 supportsIncremental: true supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "append" + - "append" - dockerImage: "airbyte/destination-csv:0.2.8" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/local-csv" @@ -1328,25 +1265,24 @@ title: "CSV Destination Spec" type: "object" required: - - "destination_path" + - "destination_path" additionalProperties: false properties: destination_path: - description: - "Path to the directory where csv files will be written. The\ + description: "Path to the directory where csv files will be written. The\ \ destination uses the local mount \"/local\" and any data files will\ \ be placed inside that local mount. 
For more information check out our\ \ docs" type: "string" examples: - - "/local" + - "/local" supportsIncremental: true supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" - dockerImage: "airbyte/destination-local-json:0.2.8" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/local-json" @@ -1355,24 +1291,23 @@ title: "Local Json Destination Spec" type: "object" required: - - "destination_path" + - "destination_path" additionalProperties: false properties: destination_path: - description: - "Path to the directory where json files will be written. The\ + description: "Path to the directory where json files will be written. The\ \ files will be placed inside that local mount. For more information check\ \ out our docs" type: "string" examples: - - "/json_data" + - "/json_data" supportsIncremental: true supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" - dockerImage: "airbyte/destination-mssql:0.1.11" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/mssql" @@ -1381,11 +1316,11 @@ title: "MS SQL Server Destination Spec" type: "object" required: - - "host" - - "port" - - "username" - - "database" - - "schema" + - "host" + - "port" + - "username" + - "database" + - "schema" additionalProperties: true properties: host: @@ -1401,7 +1336,7 @@ maximum: 65536 default: 1433 examples: - - "1433" + - "1433" order: 1 database: title: "DB Name" @@ -1410,13 +1345,12 @@ order: 2 schema: title: "Default Schema" - description: - "The default schema tables are written to if the source does\ + description: "The default schema tables are written to if the source does\ \ not specify a namespace. The usual value for this field is \"public\"\ ." type: "string" examples: - - "public" + - "public" default: "public" order: 3 username: @@ -1436,169 +1370,159 @@ description: "Encryption method to use when communicating with the database" order: 6 oneOf: - - title: "Unencrypted" - additionalProperties: false - description: "Data transfer will not be encrypted." - required: - - "ssl_method" - type: "object" - properties: - ssl_method: - type: "string" - enum: - - "unencrypted" - default: "unencrypted" - - title: "Encrypted (trust server certificate)" - additionalProperties: false - description: - "Use the cert provided by the server without verification.\ - \ (For testing purposes only!)" - required: - - "ssl_method" - type: "object" - properties: - ssl_method: - type: "string" - enum: - - "encrypted_trust_server_certificate" - default: "encrypted_trust_server_certificate" - - title: "Encrypted (verify certificate)" - additionalProperties: false - description: "Verify and use the cert provided by the server." - required: - - "ssl_method" - - "trustStoreName" - - "trustStorePassword" - type: "object" - properties: - ssl_method: - type: "string" - enum: - - "encrypted_verify_certificate" - default: "encrypted_verify_certificate" - hostNameInCertificate: - title: "Host Name In Certificate" - type: "string" - description: - "Specifies the host name of the server. The value of\ - \ this property must match the subject property of the certificate." - order: 7 + - title: "Unencrypted" + additionalProperties: false + description: "Data transfer will not be encrypted." 
+ required: + - "ssl_method" + type: "object" + properties: + ssl_method: + type: "string" + enum: + - "unencrypted" + default: "unencrypted" + - title: "Encrypted (trust server certificate)" + additionalProperties: false + description: "Use the cert provided by the server without verification.\ + \ (For testing purposes only!)" + required: + - "ssl_method" + type: "object" + properties: + ssl_method: + type: "string" + enum: + - "encrypted_trust_server_certificate" + default: "encrypted_trust_server_certificate" + - title: "Encrypted (verify certificate)" + additionalProperties: false + description: "Verify and use the cert provided by the server." + required: + - "ssl_method" + - "trustStoreName" + - "trustStorePassword" + type: "object" + properties: + ssl_method: + type: "string" + enum: + - "encrypted_verify_certificate" + default: "encrypted_verify_certificate" + hostNameInCertificate: + title: "Host Name In Certificate" + type: "string" + description: "Specifies the host name of the server. The value of\ + \ this property must match the subject property of the certificate." + order: 7 tunnel_method: type: "object" title: "SSH Tunnel Method" - description: - "Whether to initiate an SSH tunnel before connecting to the\ + description: "Whether to initiate an SSH tunnel before connecting to the\ \ database, and if so, which kind of authentication to use." oneOf: - - title: "No Tunnel" - required: - - "tunnel_method" - properties: - tunnel_method: - description: "No ssh tunnel needed to connect to database" - type: "string" - const: "NO_TUNNEL" - order: 0 - - title: "SSH Key Authentication" - required: - - "tunnel_method" - - "tunnel_host" - - "tunnel_port" - - "tunnel_user" - - "ssh_key" - properties: - tunnel_method: - description: - "Connect through a jump server tunnel host using username\ - \ and ssh key" - type: "string" - const: "SSH_KEY_AUTH" - order: 0 - tunnel_host: - title: "SSH Tunnel Jump Server Host" - description: - "Hostname of the jump server host that allows inbound\ - \ ssh tunnel." - type: "string" - order: 1 - tunnel_port: - title: "SSH Connection Port" - description: - "Port on the proxy/jump server that accepts inbound ssh\ - \ connections." - type: "integer" - minimum: 0 - maximum: 65536 - default: 22 - examples: - - "22" - order: 2 - tunnel_user: - title: "SSH Login Username" - description: "OS-level username for logging into the jump server host." - type: "string" - order: 3 - ssh_key: - title: "SSH Private Key" - description: - "OS-level user account ssh key credentials in RSA PEM\ - \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" - type: "string" - airbyte_secret: true - multiline: true - order: 4 - - title: "Password Authentication" - required: - - "tunnel_method" - - "tunnel_host" - - "tunnel_port" - - "tunnel_user" - - "tunnel_user_password" - properties: - tunnel_method: - description: - "Connect through a jump server tunnel host using username\ - \ and password authentication" - type: "string" - const: "SSH_PASSWORD_AUTH" - order: 0 - tunnel_host: - title: "SSH Tunnel Jump Server Host" - description: - "Hostname of the jump server host that allows inbound\ - \ ssh tunnel." - type: "string" - order: 1 - tunnel_port: - title: "SSH Connection Port" - description: - "Port on the proxy/jump server that accepts inbound ssh\ - \ connections." 
- type: "integer" - minimum: 0 - maximum: 65536 - default: 22 - examples: - - "22" - order: 2 - tunnel_user: - title: "SSH Login Username" - description: "OS-level username for logging into the jump server host" - type: "string" - order: 3 - tunnel_user_password: - title: "Password" - description: "OS-level password for logging into the jump server host" - type: "string" - airbyte_secret: true - order: 4 + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." 
+ type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 supportsIncremental: true supportsNormalization: true supportsDBT: true supported_destination_sync_modes: - - "overwrite" - - "append" - - "append_dedup" + - "overwrite" + - "append" + - "append_dedup" - dockerImage: "airbyte/destination-meilisearch:0.2.10" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/meilisearch" @@ -1607,7 +1531,7 @@ title: "MeiliSearch Destination Spec" type: "object" required: - - "host" + - "host" additionalProperties: true properties: host: @@ -1625,8 +1549,8 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" - dockerImage: "airbyte/destination-mongodb:0.1.2" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/mongodb" @@ -1635,94 +1559,91 @@ title: "MongoDB Destination Spec" type: "object" required: - - "database" - - "auth_type" + - "database" + - "auth_type" additionalProperties: true properties: instance_type: - description: - "MongoDb instance to connect to. For MongoDB Atlas and Replica\ + description: "MongoDb instance to connect to. For MongoDB Atlas and Replica\ \ Set TLS connection is used by default." title: "MongoDb instance type" type: "object" order: 0 oneOf: - - title: "Standalone MongoDb Instance" - required: - - "instance" - - "host" - - "port" - properties: - instance: - type: "string" - enum: - - "standalone" - default: "standalone" - host: - title: "Host" - type: "string" - description: "Host of a Mongo database to be replicated." - order: 0 - port: - title: "Port" - type: "integer" - description: "Port of a Mongo database to be replicated." - minimum: 0 - maximum: 65536 - default: 27017 - examples: - - "27017" - order: 1 - tls: - title: "TLS connection" - type: "boolean" - description: - "Indicates whether TLS encryption protocol will be used\ - \ to connect to MongoDB. It is recommended to use TLS connection\ - \ if possible. For more information see documentation." - default: false - order: 2 - - title: "Replica Set" - required: - - "instance" - - "server_addresses" - properties: - instance: - type: "string" - enum: - - "replica" - default: "replica" - server_addresses: - title: "Server addresses" - type: "string" - description: - "The members of a replica set. Please specify `host`:`port`\ - \ of each member seperated by comma." - examples: - - "host1:27017,host2:27017,host3:27017" - order: 0 - replica_set: - title: "Replica Set" - type: "string" - description: "A replica set name." - order: 1 - - title: "MongoDB Atlas" - additionalProperties: false - required: - - "instance" - - "cluster_url" - properties: - instance: - type: "string" - enum: - - "atlas" - default: "atlas" - cluster_url: - title: "Cluster URL" - type: "string" - description: "URL of a cluster to connect to." - order: 0 + - title: "Standalone MongoDb Instance" + required: + - "instance" + - "host" + - "port" + properties: + instance: + type: "string" + enum: + - "standalone" + default: "standalone" + host: + title: "Host" + type: "string" + description: "Host of a Mongo database to be replicated." 
+ order: 0 + port: + title: "Port" + type: "integer" + description: "Port of a Mongo database to be replicated." + minimum: 0 + maximum: 65536 + default: 27017 + examples: + - "27017" + order: 1 + tls: + title: "TLS connection" + type: "boolean" + description: "Indicates whether TLS encryption protocol will be used\ + \ to connect to MongoDB. It is recommended to use TLS connection\ + \ if possible. For more information see documentation." + default: false + order: 2 + - title: "Replica Set" + required: + - "instance" + - "server_addresses" + properties: + instance: + type: "string" + enum: + - "replica" + default: "replica" + server_addresses: + title: "Server addresses" + type: "string" + description: "The members of a replica set. Please specify `host`:`port`\ + \ of each member seperated by comma." + examples: + - "host1:27017,host2:27017,host3:27017" + order: 0 + replica_set: + title: "Replica Set" + type: "string" + description: "A replica set name." + order: 1 + - title: "MongoDB Atlas" + additionalProperties: false + required: + - "instance" + - "cluster_url" + properties: + instance: + type: "string" + enum: + - "atlas" + default: "atlas" + cluster_url: + title: "Cluster URL" + type: "string" + description: "URL of a cluster to connect to." + order: 0 database: title: "DB Name" description: "Name of the database." @@ -1733,45 +1654,45 @@ type: "object" description: "Authorization type." oneOf: - - title: "None" - additionalProperties: false - description: "None." - required: - - "authorization" - type: "object" - properties: - authorization: - type: "string" - const: "none" - - title: "Login/Password" - additionalProperties: false - description: "Login/Password." - required: - - "authorization" - - "username" - - "password" - type: "object" - properties: - authorization: - type: "string" - const: "login/password" - username: - title: "User" - description: "Username to use to access the database." - type: "string" - order: 1 - password: - title: "Password" - description: "Password associated with the username." - type: "string" - airbyte_secret: true - order: 2 + - title: "None" + additionalProperties: false + description: "None." + required: + - "authorization" + type: "object" + properties: + authorization: + type: "string" + const: "none" + - title: "Login/Password" + additionalProperties: false + description: "Login/Password." + required: + - "authorization" + - "username" + - "password" + type: "object" + properties: + authorization: + type: "string" + const: "login/password" + username: + title: "User" + description: "Username to use to access the database." + type: "string" + order: 1 + password: + title: "Password" + description: "Password associated with the username." 
+ type: "string" + airbyte_secret: true + order: 2 supportsIncremental: true supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" - dockerImage: "airbyte/destination-mysql:0.1.14" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/mysql" @@ -1780,10 +1701,10 @@ title: "MySQL Destination Spec" type: "object" required: - - "host" - - "port" - - "username" - - "database" + - "host" + - "port" + - "username" + - "database" additionalProperties: true properties: host: @@ -1799,7 +1720,7 @@ maximum: 65536 default: 3306 examples: - - "3306" + - "3306" order: 1 database: title: "DB Name" @@ -1826,118 +1747,110 @@ tunnel_method: type: "object" title: "SSH Tunnel Method" - description: - "Whether to initiate an SSH tunnel before connecting to the\ + description: "Whether to initiate an SSH tunnel before connecting to the\ \ database, and if so, which kind of authentication to use." oneOf: - - title: "No Tunnel" - required: - - "tunnel_method" - properties: - tunnel_method: - description: "No ssh tunnel needed to connect to database" - type: "string" - const: "NO_TUNNEL" - order: 0 - - title: "SSH Key Authentication" - required: - - "tunnel_method" - - "tunnel_host" - - "tunnel_port" - - "tunnel_user" - - "ssh_key" - properties: - tunnel_method: - description: - "Connect through a jump server tunnel host using username\ - \ and ssh key" - type: "string" - const: "SSH_KEY_AUTH" - order: 0 - tunnel_host: - title: "SSH Tunnel Jump Server Host" - description: - "Hostname of the jump server host that allows inbound\ - \ ssh tunnel." - type: "string" - order: 1 - tunnel_port: - title: "SSH Connection Port" - description: - "Port on the proxy/jump server that accepts inbound ssh\ - \ connections." - type: "integer" - minimum: 0 - maximum: 65536 - default: 22 - examples: - - "22" - order: 2 - tunnel_user: - title: "SSH Login Username" - description: "OS-level username for logging into the jump server host." - type: "string" - order: 3 - ssh_key: - title: "SSH Private Key" - description: - "OS-level user account ssh key credentials in RSA PEM\ - \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" - type: "string" - airbyte_secret: true - multiline: true - order: 4 - - title: "Password Authentication" - required: - - "tunnel_method" - - "tunnel_host" - - "tunnel_port" - - "tunnel_user" - - "tunnel_user_password" - properties: - tunnel_method: - description: - "Connect through a jump server tunnel host using username\ - \ and password authentication" - type: "string" - const: "SSH_PASSWORD_AUTH" - order: 0 - tunnel_host: - title: "SSH Tunnel Jump Server Host" - description: - "Hostname of the jump server host that allows inbound\ - \ ssh tunnel." - type: "string" - order: 1 - tunnel_port: - title: "SSH Connection Port" - description: - "Port on the proxy/jump server that accepts inbound ssh\ - \ connections." 
- type: "integer" - minimum: 0 - maximum: 65536 - default: 22 - examples: - - "22" - order: 2 - tunnel_user: - title: "SSH Login Username" - description: "OS-level username for logging into the jump server host" - type: "string" - order: 3 - tunnel_user_password: - title: "Password" - description: "OS-level password for logging into the jump server host" - type: "string" - airbyte_secret: true - order: 4 + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 supportsIncremental: true supportsNormalization: true supportsDBT: true supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" - dockerImage: "airbyte/destination-oracle:0.1.12" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/oracle" @@ -1946,10 +1859,10 @@ title: "Oracle Destination Spec" type: "object" required: - - "host" - - "port" - - "username" - - "sid" + - "host" + - "port" + - "username" + - "sid" additionalProperties: true properties: host: @@ -1965,7 +1878,7 @@ maximum: 65536 default: 1521 examples: - - "1521" + - "1521" order: 1 sid: title: "SID" @@ -1974,8 +1887,7 @@ order: 2 username: title: "User" - description: - "Username to use to access the database. 
This user must have\ + description: "Username to use to access the database. This user must have\ \ CREATE USER privileges in the database." type: "string" order: 3 @@ -1987,15 +1899,14 @@ order: 4 schema: title: "Default Schema" - description: - "The default schema tables are written to if the source does\ + description: "The default schema tables are written to if the source does\ \ not specify a namespace. The usual value for this field is \"airbyte\"\ . In Oracle, schemas and users are the same thing, so the \"user\" parameter\ \ is used as the login credentials and this is used for the default Airbyte\ \ message schema." type: "string" examples: - - "airbyte" + - "airbyte" default: "airbyte" order: 5 encryption: @@ -2004,180 +1915,169 @@ description: "Encryption method to use when communicating with the database" order: 6 oneOf: - - title: "Unencrypted" - additionalProperties: false - description: "Data transfer will not be encrypted." - required: - - "encryption_method" - properties: - encryption_method: - type: "string" - const: "unencrypted" - enum: - - "unencrypted" - default: "unencrypted" - - title: "Native Network Ecryption (NNE)" - additionalProperties: false - description: - "Native network encryption gives you the ability to encrypt\ - \ database connections, without the configuration overhead of TCP/IP\ - \ and SSL/TLS and without the need to open and listen on different ports." - required: - - "encryption_method" - properties: - encryption_method: - type: "string" - const: "client_nne" - enum: - - "client_nne" - default: "client_nne" - encryption_algorithm: - type: "string" - description: - "This parameter defines the encryption algorithm to be\ - \ used" - title: "Encryption Algorithm" - default: "AES256" - enum: - - "AES256" - - "RC4_56" - - "3DES168" - - title: "TLS Encrypted (verify certificate)" - additionalProperties: false - description: "Verify and use the cert provided by the server." - required: - - "encryption_method" - - "ssl_certificate" - properties: - encryption_method: - type: "string" - const: "encrypted_verify_certificate" - enum: - - "encrypted_verify_certificate" - default: "encrypted_verify_certificate" - ssl_certificate: - title: "SSL PEM file" - description: - "Privacy Enhanced Mail (PEM) files are concatenated certificate\ - \ containers frequently used in certificate installations" - type: "string" - airbyte_secret: true - multiline: true + - title: "Unencrypted" + additionalProperties: false + description: "Data transfer will not be encrypted." + required: + - "encryption_method" + properties: + encryption_method: + type: "string" + const: "unencrypted" + enum: + - "unencrypted" + default: "unencrypted" + - title: "Native Network Ecryption (NNE)" + additionalProperties: false + description: "Native network encryption gives you the ability to encrypt\ + \ database connections, without the configuration overhead of TCP/IP\ + \ and SSL/TLS and without the need to open and listen on different ports." + required: + - "encryption_method" + properties: + encryption_method: + type: "string" + const: "client_nne" + enum: + - "client_nne" + default: "client_nne" + encryption_algorithm: + type: "string" + description: "This parameter defines the encryption algorithm to be\ + \ used" + title: "Encryption Algorithm" + default: "AES256" + enum: + - "AES256" + - "RC4_56" + - "3DES168" + - title: "TLS Encrypted (verify certificate)" + additionalProperties: false + description: "Verify and use the cert provided by the server." 
+ required: + - "encryption_method" + - "ssl_certificate" + properties: + encryption_method: + type: "string" + const: "encrypted_verify_certificate" + enum: + - "encrypted_verify_certificate" + default: "encrypted_verify_certificate" + ssl_certificate: + title: "SSL PEM file" + description: "Privacy Enhanced Mail (PEM) files are concatenated certificate\ + \ containers frequently used in certificate installations" + type: "string" + airbyte_secret: true + multiline: true tunnel_method: type: "object" title: "SSH Tunnel Method" - description: - "Whether to initiate an SSH tunnel before connecting to the\ + description: "Whether to initiate an SSH tunnel before connecting to the\ \ database, and if so, which kind of authentication to use." oneOf: - - title: "No Tunnel" - required: - - "tunnel_method" - properties: - tunnel_method: - description: "No ssh tunnel needed to connect to database" - type: "string" - const: "NO_TUNNEL" - order: 0 - - title: "SSH Key Authentication" - required: - - "tunnel_method" - - "tunnel_host" - - "tunnel_port" - - "tunnel_user" - - "ssh_key" - properties: - tunnel_method: - description: - "Connect through a jump server tunnel host using username\ - \ and ssh key" - type: "string" - const: "SSH_KEY_AUTH" - order: 0 - tunnel_host: - title: "SSH Tunnel Jump Server Host" - description: - "Hostname of the jump server host that allows inbound\ - \ ssh tunnel." - type: "string" - order: 1 - tunnel_port: - title: "SSH Connection Port" - description: - "Port on the proxy/jump server that accepts inbound ssh\ - \ connections." - type: "integer" - minimum: 0 - maximum: 65536 - default: 22 - examples: - - "22" - order: 2 - tunnel_user: - title: "SSH Login Username" - description: "OS-level username for logging into the jump server host." - type: "string" - order: 3 - ssh_key: - title: "SSH Private Key" - description: - "OS-level user account ssh key credentials in RSA PEM\ - \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" - type: "string" - airbyte_secret: true - multiline: true - order: 4 - - title: "Password Authentication" - required: - - "tunnel_method" - - "tunnel_host" - - "tunnel_port" - - "tunnel_user" - - "tunnel_user_password" - properties: - tunnel_method: - description: - "Connect through a jump server tunnel host using username\ - \ and password authentication" - type: "string" - const: "SSH_PASSWORD_AUTH" - order: 0 - tunnel_host: - title: "SSH Tunnel Jump Server Host" - description: - "Hostname of the jump server host that allows inbound\ - \ ssh tunnel." - type: "string" - order: 1 - tunnel_port: - title: "SSH Connection Port" - description: - "Port on the proxy/jump server that accepts inbound ssh\ - \ connections." 
- type: "integer" - minimum: 0 - maximum: 65536 - default: 22 - examples: - - "22" - order: 2 - tunnel_user: - title: "SSH Login Username" - description: "OS-level username for logging into the jump server host" - type: "string" - order: 3 - tunnel_user_password: - title: "Password" - description: "OS-level password for logging into the jump server host" - type: "string" - airbyte_secret: true - order: 4 + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." 
+ type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 supportsIncremental: true supportsNormalization: false supportsDBT: true supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" - dockerImage: "airbyte/destination-postgres:0.3.12" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/postgres" @@ -2186,11 +2086,11 @@ title: "Postgres Destination Spec" type: "object" required: - - "host" - - "port" - - "username" - - "database" - - "schema" + - "host" + - "port" + - "username" + - "database" + - "schema" additionalProperties: true properties: host: @@ -2206,7 +2106,7 @@ maximum: 65536 default: 5432 examples: - - "5432" + - "5432" order: 1 database: title: "DB Name" @@ -2215,13 +2115,12 @@ order: 2 schema: title: "Default Schema" - description: - "The default schema tables are written to if the source does\ + description: "The default schema tables are written to if the source does\ \ not specify a namespace. The usual value for this field is \"public\"\ ." type: "string" examples: - - "public" + - "public" default: "public" order: 3 username: @@ -2244,119 +2143,111 @@ tunnel_method: type: "object" title: "SSH Tunnel Method" - description: - "Whether to initiate an SSH tunnel before connecting to the\ + description: "Whether to initiate an SSH tunnel before connecting to the\ \ database, and if so, which kind of authentication to use." oneOf: - - title: "No Tunnel" - required: - - "tunnel_method" - properties: - tunnel_method: - description: "No ssh tunnel needed to connect to database" - type: "string" - const: "NO_TUNNEL" - order: 0 - - title: "SSH Key Authentication" - required: - - "tunnel_method" - - "tunnel_host" - - "tunnel_port" - - "tunnel_user" - - "ssh_key" - properties: - tunnel_method: - description: - "Connect through a jump server tunnel host using username\ - \ and ssh key" - type: "string" - const: "SSH_KEY_AUTH" - order: 0 - tunnel_host: - title: "SSH Tunnel Jump Server Host" - description: - "Hostname of the jump server host that allows inbound\ - \ ssh tunnel." - type: "string" - order: 1 - tunnel_port: - title: "SSH Connection Port" - description: - "Port on the proxy/jump server that accepts inbound ssh\ - \ connections." - type: "integer" - minimum: 0 - maximum: 65536 - default: 22 - examples: - - "22" - order: 2 - tunnel_user: - title: "SSH Login Username" - description: "OS-level username for logging into the jump server host." 
- type: "string" - order: 3 - ssh_key: - title: "SSH Private Key" - description: - "OS-level user account ssh key credentials in RSA PEM\ - \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" - type: "string" - airbyte_secret: true - multiline: true - order: 4 - - title: "Password Authentication" - required: - - "tunnel_method" - - "tunnel_host" - - "tunnel_port" - - "tunnel_user" - - "tunnel_user_password" - properties: - tunnel_method: - description: - "Connect through a jump server tunnel host using username\ - \ and password authentication" - type: "string" - const: "SSH_PASSWORD_AUTH" - order: 0 - tunnel_host: - title: "SSH Tunnel Jump Server Host" - description: - "Hostname of the jump server host that allows inbound\ - \ ssh tunnel." - type: "string" - order: 1 - tunnel_port: - title: "SSH Connection Port" - description: - "Port on the proxy/jump server that accepts inbound ssh\ - \ connections." - type: "integer" - minimum: 0 - maximum: 65536 - default: 22 - examples: - - "22" - order: 2 - tunnel_user: - title: "SSH Login Username" - description: "OS-level username for logging into the jump server host" - type: "string" - order: 3 - tunnel_user_password: - title: "Password" - description: "OS-level password for logging into the jump server host" - type: "string" - airbyte_secret: true - order: 4 + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." 
+ type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 supportsIncremental: true supportsNormalization: true supportsDBT: true supported_destination_sync_modes: - - "overwrite" - - "append" - - "append_dedup" + - "overwrite" + - "append" + - "append_dedup" - dockerImage: "airbyte/destination-pulsar:0.1.0" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/pulsar" @@ -2365,30 +2256,29 @@ title: "Pulsar Destination Spec" type: "object" required: - - "brokers" - - "use_tls" - - "topic_type" - - "topic_tenant" - - "topic_namespace" - - "topic_pattern" - - "compression_type" - - "send_timeout_ms" - - "max_pending_messages" - - "max_pending_messages_across_partitions" - - "batching_enabled" - - "batching_max_messages" - - "batching_max_publish_delay" - - "block_if_queue_full" + - "brokers" + - "use_tls" + - "topic_type" + - "topic_tenant" + - "topic_namespace" + - "topic_pattern" + - "compression_type" + - "send_timeout_ms" + - "max_pending_messages" + - "max_pending_messages_across_partitions" + - "batching_enabled" + - "batching_max_messages" + - "batching_max_publish_delay" + - "block_if_queue_full" additionalProperties: true properties: brokers: title: "Pulsar brokers" - description: - "A list of host/port pairs to use for establishing the initial\ + description: "A list of host/port pairs to use for establishing the initial\ \ connection to the Pulsar cluster." type: "string" examples: - - "broker1:6650,broker2:6650" + - "broker1:6650,broker2:6650" use_tls: title: "Use TLS" description: "Whether to use TLS encryption on the connection." @@ -2396,8 +2286,7 @@ default: false topic_type: title: "Topic type" - description: - "It identifies type of topic. Pulsar supports two kind of topics:\ + description: "It identifies type of topic. Pulsar supports two kind of topics:\ \ persistent and non-persistent. In persistent topic, all messages are\ \ durably persisted on disk (that means on multiple disks unless the broker\ \ is standalone), whereas non-persistent topic does not persist message\ @@ -2405,52 +2294,48 @@ type: "string" default: "persistent" enum: - - "persistent" - - "non-persistent" + - "persistent" + - "non-persistent" topic_tenant: title: "Topic tenant" - description: - "The topic tenant within the instance. Tenants are essential\ + description: "The topic tenant within the instance. Tenants are essential\ \ to multi-tenancy in Pulsar, and spread across clusters." type: "string" default: "public" examples: - - "public" + - "public" topic_namespace: title: "Topic namespace" - description: - "The administrative unit of the topic, which acts as a grouping\ + description: "The administrative unit of the topic, which acts as a grouping\ \ mechanism for related topics. Most topic configuration is performed\ \ at the namespace level. Each tenant has one or multiple namespaces." type: "string" default: "default" examples: - - "default" + - "default" topic_pattern: title: "Topic pattern" - description: - "Topic pattern in which the records will be sent. You can use\ + description: "Topic pattern in which the records will be sent. 
You can use\ \ patterns like '{namespace}' and/or '{stream}' to send the message to\ \ a specific topic based on these values. Notice that the topic name will\ \ be transformed to a standard naming convention." type: "string" examples: - - "sample.topic" - - "{namespace}.{stream}.sample" + - "sample.topic" + - "{namespace}.{stream}.sample" topic_test: title: "Test topic" description: "Topic to test if Airbyte can produce messages." type: "string" examples: - - "test.topic" + - "test.topic" producer_name: title: "Producer name" - description: - "Name for the producer. If not filled, the system will generate\ + description: "Name for the producer. If not filled, the system will generate\ \ a globally unique name which can be accessed with." type: "string" examples: - - "airbyte-producer" + - "airbyte-producer" producer_sync: title: "Sync producer" description: "Wait synchronously until the record has been sent to Pulsar." @@ -2462,15 +2347,14 @@ type: "string" default: "NONE" enum: - - "NONE" - - "LZ4" - - "ZLIB" - - "ZSTD" - - "SNAPPY" + - "NONE" + - "LZ4" + - "ZLIB" + - "ZSTD" + - "SNAPPY" send_timeout_ms: title: "Message send timeout" - description: - "If a message is not acknowledged by a server before the send-timeout\ + description: "If a message is not acknowledged by a server before the send-timeout\ \ expires, an error occurs (in ms)." type: "integer" default: 30000 @@ -2486,8 +2370,7 @@ default: 50000 batching_enabled: title: "Enable batching" - description: - "Control whether automatic batching of messages is enabled\ + description: "Control whether automatic batching of messages is enabled\ \ for the producer." type: "boolean" default: true @@ -2498,15 +2381,13 @@ default: 1000 batching_max_publish_delay: title: "Batching max publish delay" - description: - " Time period in milliseconds within which the messages sent\ + description: " Time period in milliseconds within which the messages sent\ \ will be batched." type: "integer" default: 1 block_if_queue_full: title: "Block if queue is full" - description: - "If the send operation should block when the outgoing message\ + description: "If the send operation should block when the outgoing message\ \ queue is full." type: "boolean" default: false @@ -2514,7 +2395,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "append" + - "append" - dockerImage: "airbyte/destination-redshift:0.3.20" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/redshift" @@ -2523,17 +2404,16 @@ title: "Redshift Destination Spec" type: "object" required: - - "host" - - "port" - - "database" - - "username" - - "password" - - "schema" + - "host" + - "port" + - "database" + - "username" + - "password" + - "schema" additionalProperties: true properties: host: - description: - "Host Endpoint of the Redshift Cluster (must include the cluster-id,\ + description: "Host Endpoint of the Redshift Cluster (must include the cluster-id,\ \ region and end with .redshift.amazonaws.com)" type: "string" title: "Host" @@ -2544,7 +2424,7 @@ maximum: 65536 default: 5439 examples: - - "5439" + - "5439" title: "Port" username: description: "Username to use to access the database." @@ -2560,61 +2440,57 @@ type: "string" title: "Database" schema: - description: - "The default schema tables are written to if the source does\ + description: "The default schema tables are written to if the source does\ \ not specify a namespace. Unless specifically configured, the usual value\ \ for this field is \"public\"." 
type: "string" examples: - - "public" + - "public" default: "public" title: "Default Schema" s3_bucket_name: title: "S3 Bucket Name" type: "string" - description: - "The name of the staging S3 bucket to use if utilising a COPY\ + description: "The name of the staging S3 bucket to use if utilising a COPY\ \ strategy. COPY is recommended for production workloads for better speed\ \ and scalability. See AWS docs for more details." examples: - - "airbyte.staging" + - "airbyte.staging" s3_bucket_region: title: "S3 Bucket Region" type: "string" default: "" - description: - "The region of the S3 staging bucket to use if utilising a\ + description: "The region of the S3 staging bucket to use if utilising a\ \ copy strategy." enum: - - "" - - "us-east-1" - - "us-east-2" - - "us-west-1" - - "us-west-2" - - "af-south-1" - - "ap-east-1" - - "ap-south-1" - - "ap-northeast-1" - - "ap-northeast-2" - - "ap-northeast-3" - - "ap-southeast-1" - - "ap-southeast-2" - - "ca-central-1" - - "cn-north-1" - - "cn-northwest-1" - - "eu-central-1" - - "eu-north-1" - - "eu-south-1" - - "eu-west-1" - - "eu-west-2" - - "eu-west-3" - - "sa-east-1" - - "me-south-1" + - "" + - "us-east-1" + - "us-east-2" + - "us-west-1" + - "us-west-2" + - "af-south-1" + - "ap-east-1" + - "ap-south-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-southeast-1" + - "ap-southeast-2" + - "ca-central-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-north-1" + - "eu-south-1" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "sa-east-1" + - "me-south-1" access_key_id: type: "string" - description: - "The Access Key Id granting allow one to access the above S3\ + description: "The Access Key Id granting allow one to access the above S3\ \ staging bucket. Airbyte requires Read and Write permissions to the given\ \ bucket." title: "S3 Key Id" @@ -2629,9 +2505,8 @@ minimum: 10 maximum: 100 examples: - - "10" - description: - "Optional. Increase this if syncing tables larger than 100GB.\ + - "10" + description: "Optional. Increase this if syncing tables larger than 100GB.\ \ Only relevant for COPY. Files are streamed to S3 in parts. This determines\ \ the size of each part, in MBs. As S3 has a limit of 10,000 parts per\ \ file, part size affects the table size. This is 10MB by default, resulting\ @@ -2643,9 +2518,9 @@ supportsNormalization: true supportsDBT: true supported_destination_sync_modes: - - "overwrite" - - "append" - - "append_dedup" + - "overwrite" + - "append" + - "append_dedup" - dockerImage: "airbyte/destination-s3:0.1.13" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/s3" @@ -2654,330 +2529,316 @@ title: "S3 Destination Spec" type: "object" required: - - "s3_bucket_name" - - "s3_bucket_path" - - "s3_bucket_region" - - "access_key_id" - - "secret_access_key" - - "format" + - "s3_bucket_name" + - "s3_bucket_path" + - "s3_bucket_region" + - "access_key_id" + - "secret_access_key" + - "format" additionalProperties: false properties: s3_endpoint: title: "Endpoint" type: "string" default: "" - description: - "This is your S3 endpoint url.(if you are working with AWS\ + description: "This is your S3 endpoint url.(if you are working with AWS\ \ S3, just leave empty)." examples: - - "http://localhost:9000" + - "http://localhost:9000" s3_bucket_name: title: "S3 Bucket Name" type: "string" description: "The name of the S3 bucket." examples: - - "airbyte_sync" + - "airbyte_sync" s3_bucket_path: description: "Directory under the S3 bucket where data will be written." 
type: "string" examples: - - "data_sync/test" + - "data_sync/test" s3_bucket_region: title: "S3 Bucket Region" type: "string" default: "" description: "The region of the S3 bucket." enum: - - "" - - "us-east-1" - - "us-east-2" - - "us-west-1" - - "us-west-2" - - "af-south-1" - - "ap-east-1" - - "ap-south-1" - - "ap-northeast-1" - - "ap-northeast-2" - - "ap-northeast-3" - - "ap-southeast-1" - - "ap-southeast-2" - - "ca-central-1" - - "cn-north-1" - - "cn-northwest-1" - - "eu-central-1" - - "eu-north-1" - - "eu-south-1" - - "eu-west-1" - - "eu-west-2" - - "eu-west-3" - - "sa-east-1" - - "me-south-1" - - "us-gov-east-1" - - "us-gov-west-1" + - "" + - "us-east-1" + - "us-east-2" + - "us-west-1" + - "us-west-2" + - "af-south-1" + - "ap-east-1" + - "ap-south-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-southeast-1" + - "ap-southeast-2" + - "ca-central-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-north-1" + - "eu-south-1" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "sa-east-1" + - "me-south-1" + - "us-gov-east-1" + - "us-gov-west-1" access_key_id: type: "string" - description: - "The access key id to access the S3 bucket. Airbyte requires\ + description: "The access key id to access the S3 bucket. Airbyte requires\ \ Read and Write permissions to the given bucket." title: "S3 Key Id" airbyte_secret: true examples: - - "A012345678910EXAMPLE" + - "A012345678910EXAMPLE" secret_access_key: type: "string" description: "The corresponding secret to the access key id." title: "S3 Access Key" airbyte_secret: true examples: - - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" + - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" format: title: "Output Format" type: "object" description: "Output data format" oneOf: - - title: "Avro: Apache Avro" - required: - - "format_type" - - "compression_codec" - properties: - format_type: - type: "string" - enum: - - "Avro" - default: "Avro" - compression_codec: - title: "Compression Codec" - description: - "The compression algorithm used to compress data. Default\ - \ to no compression." - type: "object" - oneOf: - - title: "no compression" - required: - - "codec" - properties: - codec: - type: "string" - enum: - - "no compression" - default: "no compression" - - title: "Deflate" - required: - - "codec" - - "compression_level" - properties: - codec: - type: "string" - enum: - - "Deflate" - default: "Deflate" - compression_level: - title: "Deflate level" - description: - "0: no compression & fastest, 9: best compression\ - \ & slowest." - type: "integer" - default: 0 - minimum: 0 - maximum: 9 - - title: "bzip2" - required: - - "codec" - properties: - codec: - type: "string" - enum: - - "bzip2" - default: "bzip2" - - title: "xz" - required: - - "codec" - - "compression_level" - properties: - codec: - type: "string" - enum: - - "xz" - default: "xz" - compression_level: - title: "Compression level" - description: - "See here for details." - type: "integer" - default: 6 - minimum: 0 - maximum: 9 - - title: "zstandard" - required: - - "codec" - - "compression_level" - properties: - codec: - type: "string" - enum: - - "zstandard" - default: "zstandard" - compression_level: - title: "Compression level" - description: - "Negative levels are 'fast' modes akin to lz4 or\ - \ snappy, levels above 9 are generally for archival purposes,\ - \ and levels above 18 use a lot of memory." 
- type: "integer" - default: 3 - minimum: -5 - maximum: 22 - include_checksum: - title: "Include checksum" - description: "If true, include a checksum with each data block." - type: "boolean" - default: false - - title: "snappy" - required: - - "codec" - properties: - codec: - type: "string" - enum: - - "snappy" - default: "snappy" - part_size_mb: - title: "Block Size (MB) for Amazon S3 multipart upload" - description: - "This is the size of a \"Part\" being buffered in memory.\ - \ It limits the memory usage when writing. Larger values will allow\ - \ to upload a bigger files and improve the speed, but consumes9\ - \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB." - type: "integer" - default: 5 - examples: - - 5 - - title: "CSV: Comma-Separated Values" - required: - - "format_type" - - "flattening" - properties: - format_type: - type: "string" - enum: - - "CSV" - default: "CSV" - flattening: - type: "string" - title: "Normalization (Flattening)" - description: - "Whether the input json data should be normalized (flattened)\ - \ in the output CSV. Please refer to docs for details." - default: "No flattening" - enum: - - "No flattening" - - "Root level flattening" - part_size_mb: - title: "Block Size (MB) for Amazon S3 multipart upload" - description: - "This is the size of a \"Part\" being buffered in memory.\ - \ It limits the memory usage when writing. Larger values will allow\ - \ to upload a bigger files and improve the speed, but consumes9\ - \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB." - type: "integer" - default: 5 - examples: - - 5 - - title: "JSON Lines: newline-delimited JSON" - required: - - "format_type" - properties: - format_type: - type: "string" - enum: - - "JSONL" - default: "JSONL" - part_size_mb: - title: "Block Size (MB) for Amazon S3 multipart upload" - description: - "This is the size of a \"Part\" being buffered in memory.\ - \ It limits the memory usage when writing. Larger values will allow\ - \ to upload a bigger files and improve the speed, but consumes9\ - \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB." - type: "integer" - default: 5 - examples: - - 5 - - title: "Parquet: Columnar Storage" - required: - - "format_type" - properties: - format_type: - type: "string" - enum: - - "Parquet" - default: "Parquet" - compression_codec: - title: "Compression Codec" - description: "The compression algorithm used to compress data pages." - type: "string" - enum: - - "UNCOMPRESSED" - - "SNAPPY" - - "GZIP" - - "LZO" - - "BROTLI" - - "LZ4" - - "ZSTD" - default: "UNCOMPRESSED" - block_size_mb: - title: "Block Size (Row Group Size) (MB)" - description: - "This is the size of a row group being buffered in memory.\ - \ It limits the memory usage when writing. Larger values will improve\ - \ the IO when reading, but consume more memory when writing. Default:\ - \ 128 MB." - type: "integer" - default: 128 - examples: - - 128 - max_padding_size_mb: - title: "Max Padding Size (MB)" - description: - "Maximum size allowed as padding to align row groups.\ - \ This is also the minimum size of a row group. Default: 8 MB." - type: "integer" - default: 8 - examples: - - 8 - page_size_kb: - title: "Page Size (KB)" - description: - "The page size is for compression. A block is composed\ - \ of pages. A page is the smallest unit that must be read fully\ - \ to access a single record. If this value is too small, the compression\ - \ will deteriorate. Default: 1024 KB." 
- type: "integer" - default: 1024 - examples: - - 1024 - dictionary_page_size_kb: - title: "Dictionary Page Size (KB)" - description: - "There is one dictionary page per column per row group\ - \ when dictionary encoding is used. The dictionary page size works\ - \ like the page size but for dictionary. Default: 1024 KB." - type: "integer" - default: 1024 - examples: - - 1024 - dictionary_encoding: - title: "Dictionary Encoding" - description: "Default: true." - type: "boolean" - default: true + - title: "Avro: Apache Avro" + required: + - "format_type" + - "compression_codec" + properties: + format_type: + type: "string" + enum: + - "Avro" + default: "Avro" + compression_codec: + title: "Compression Codec" + description: "The compression algorithm used to compress data. Default\ + \ to no compression." + type: "object" + oneOf: + - title: "no compression" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "no compression" + default: "no compression" + - title: "Deflate" + required: + - "codec" + - "compression_level" + properties: + codec: + type: "string" + enum: + - "Deflate" + default: "Deflate" + compression_level: + title: "Deflate level" + description: "0: no compression & fastest, 9: best compression\ + \ & slowest." + type: "integer" + default: 0 + minimum: 0 + maximum: 9 + - title: "bzip2" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "bzip2" + default: "bzip2" + - title: "xz" + required: + - "codec" + - "compression_level" + properties: + codec: + type: "string" + enum: + - "xz" + default: "xz" + compression_level: + title: "Compression level" + description: "See here for details." + type: "integer" + default: 6 + minimum: 0 + maximum: 9 + - title: "zstandard" + required: + - "codec" + - "compression_level" + properties: + codec: + type: "string" + enum: + - "zstandard" + default: "zstandard" + compression_level: + title: "Compression level" + description: "Negative levels are 'fast' modes akin to lz4 or\ + \ snappy, levels above 9 are generally for archival purposes,\ + \ and levels above 18 use a lot of memory." + type: "integer" + default: 3 + minimum: -5 + maximum: 22 + include_checksum: + title: "Include checksum" + description: "If true, include a checksum with each data block." + type: "boolean" + default: false + - title: "snappy" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "snappy" + default: "snappy" + part_size_mb: + title: "Block Size (MB) for Amazon S3 multipart upload" + description: "This is the size of a \"Part\" being buffered in memory.\ + \ It limits the memory usage when writing. Larger values will allow\ + \ to upload a bigger files and improve the speed, but consumes9\ + \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB." + type: "integer" + default: 5 + examples: + - 5 + - title: "CSV: Comma-Separated Values" + required: + - "format_type" + - "flattening" + properties: + format_type: + type: "string" + enum: + - "CSV" + default: "CSV" + flattening: + type: "string" + title: "Normalization (Flattening)" + description: "Whether the input json data should be normalized (flattened)\ + \ in the output CSV. Please refer to docs for details." + default: "No flattening" + enum: + - "No flattening" + - "Root level flattening" + part_size_mb: + title: "Block Size (MB) for Amazon S3 multipart upload" + description: "This is the size of a \"Part\" being buffered in memory.\ + \ It limits the memory usage when writing. 
Larger values will allow\ + \ to upload a bigger files and improve the speed, but consumes9\ + \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB." + type: "integer" + default: 5 + examples: + - 5 + - title: "JSON Lines: newline-delimited JSON" + required: + - "format_type" + properties: + format_type: + type: "string" + enum: + - "JSONL" + default: "JSONL" + part_size_mb: + title: "Block Size (MB) for Amazon S3 multipart upload" + description: "This is the size of a \"Part\" being buffered in memory.\ + \ It limits the memory usage when writing. Larger values will allow\ + \ to upload a bigger files and improve the speed, but consumes9\ + \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB." + type: "integer" + default: 5 + examples: + - 5 + - title: "Parquet: Columnar Storage" + required: + - "format_type" + properties: + format_type: + type: "string" + enum: + - "Parquet" + default: "Parquet" + compression_codec: + title: "Compression Codec" + description: "The compression algorithm used to compress data pages." + type: "string" + enum: + - "UNCOMPRESSED" + - "SNAPPY" + - "GZIP" + - "LZO" + - "BROTLI" + - "LZ4" + - "ZSTD" + default: "UNCOMPRESSED" + block_size_mb: + title: "Block Size (Row Group Size) (MB)" + description: "This is the size of a row group being buffered in memory.\ + \ It limits the memory usage when writing. Larger values will improve\ + \ the IO when reading, but consume more memory when writing. Default:\ + \ 128 MB." + type: "integer" + default: 128 + examples: + - 128 + max_padding_size_mb: + title: "Max Padding Size (MB)" + description: "Maximum size allowed as padding to align row groups.\ + \ This is also the minimum size of a row group. Default: 8 MB." + type: "integer" + default: 8 + examples: + - 8 + page_size_kb: + title: "Page Size (KB)" + description: "The page size is for compression. A block is composed\ + \ of pages. A page is the smallest unit that must be read fully\ + \ to access a single record. If this value is too small, the compression\ + \ will deteriorate. Default: 1024 KB." + type: "integer" + default: 1024 + examples: + - 1024 + dictionary_page_size_kb: + title: "Dictionary Page Size (KB)" + description: "There is one dictionary page per column per row group\ + \ when dictionary encoding is used. The dictionary page size works\ + \ like the page size but for dictionary. Default: 1024 KB." + type: "integer" + default: 1024 + examples: + - 1024 + dictionary_encoding: + title: "Dictionary Encoding" + description: "Default: true." + type: "boolean" + default: true supportsIncremental: true supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" - dockerImage: "airbyte/destination-snowflake:0.3.17" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/snowflake" @@ -2986,58 +2847,56 @@ title: "Snowflake Destination Spec" type: "object" required: - - "host" - - "role" - - "warehouse" - - "database" - - "schema" - - "username" - - "password" + - "host" + - "role" + - "warehouse" + - "database" + - "schema" + - "username" + - "password" additionalProperties: true properties: host: - description: - "Host domain of the snowflake instance (must include the account,\ + description: "Host domain of the snowflake instance (must include the account,\ \ region, cloud environment, and end with snowflakecomputing.com)." 
examples: - - "accountname.us-east-2.aws.snowflakecomputing.com" + - "accountname.us-east-2.aws.snowflakecomputing.com" type: "string" title: "Host" order: 0 role: description: "The role you created for Airbyte to access Snowflake." examples: - - "AIRBYTE_ROLE" + - "AIRBYTE_ROLE" type: "string" title: "Role" order: 1 warehouse: description: "The warehouse you created for Airbyte to sync data into." examples: - - "AIRBYTE_WAREHOUSE" + - "AIRBYTE_WAREHOUSE" type: "string" title: "Warehouse" order: 2 database: description: "The database you created for Airbyte to sync data into." examples: - - "AIRBYTE_DATABASE" + - "AIRBYTE_DATABASE" type: "string" title: "Database" order: 3 schema: - description: - "The default Snowflake schema tables are written to if the\ + description: "The default Snowflake schema tables are written to if the\ \ source does not specify a namespace." examples: - - "AIRBYTE_SCHEMA" + - "AIRBYTE_SCHEMA" type: "string" title: "Default Schema" order: 4 username: description: "The username you created to allow Airbyte to access the database." examples: - - "AIRBYTE_USER" + - "AIRBYTE_USER" type: "string" title: "Username" order: 5 @@ -3053,148 +2912,140 @@ description: "Loading method used to send data to Snowflake." order: 7 oneOf: - - title: "Standard Inserts" - additionalProperties: false - description: - "Uses
INSERT statements to send batches of records\
-            \ to Snowflake. Easiest (no setup) but not recommended for large production\
-            \ workloads due to slow speed."
-          required:
-            - "method"
-          properties:
-            method:
-              type: "string"
-              enum:
-                - "Standard"
-              default: "Standard"
-        - title: "AWS S3 Staging"
-          additionalProperties: false
-          description:
-            "Writes large batches of records to a file, uploads the file\
-            \ to S3, then uses COPY INTO table to upload the file. Recommended\
-            \ for large production workloads for better speed and scalability."
-          required:
-            - "method"
-            - "s3_bucket_name"
-            - "access_key_id"
-            - "secret_access_key"
-          properties:
-            method:
-              type: "string"
-              enum:
-                - "S3 Staging"
-              default: "S3 Staging"
-              order: 0
-            s3_bucket_name:
-              title: "S3 Bucket Name"
-              type: "string"
-              description:
-                "The name of the staging S3 bucket. Airbyte will write\
-                \ files to this bucket and read them via COPY statements\
-                \ on Snowflake."
-              examples:
-                - "airbyte.staging"
-              order: 1
-            s3_bucket_region:
-              title: "S3 Bucket Region"
-              type: "string"
-              default: ""
-              description:
-                "The region of the S3 staging bucket to use if utilising\
-                \ a copy strategy."
-              enum:
-                - ""
-                - "us-east-1"
-                - "us-east-2"
-                - "us-west-1"
-                - "us-west-2"
-                - "af-south-1"
-                - "ap-east-1"
-                - "ap-south-1"
-                - "ap-northeast-1"
-                - "ap-northeast-2"
-                - "ap-northeast-3"
-                - "ap-southeast-1"
-                - "ap-southeast-2"
-                - "ca-central-1"
-                - "cn-north-1"
-                - "cn-northwest-1"
-                - "eu-central-1"
-                - "eu-west-1"
-                - "eu-west-2"
-                - "eu-west-3"
-                - "eu-south-1"
-                - "eu-north-1"
-                - "sa-east-1"
-                - "me-south-1"
-              order: 2
-            access_key_id:
-              type: "string"
-              description:
-                "The Access Key Id that grants access to the above\
-                \ S3 staging bucket. Airbyte requires Read and Write permissions\
-                \ to the given bucket."
-              title: "S3 Key Id"
-              airbyte_secret: true
-              order: 3
-            secret_access_key:
-              type: "string"
-              description: "The corresponding secret to the above access key id."
-              title: "S3 Access Key"
-              airbyte_secret: true
-              order: 4
-        - title: "GCS Staging"
-          additionalProperties: false
-          description:
-            "Writes large batches of records to a file, uploads the file\
-            \ to GCS, then uses COPY INTO table to upload the file. Recommended\
-            \ for large production workloads for better speed and scalability."
-          required:
-            - "method"
-            - "project_id"
-            - "bucket_name"
-            - "credentials_json"
-          properties:
-            method:
-              type: "string"
-              enum:
-                - "GCS Staging"
-              default: "GCS Staging"
-              order: 0
-            project_id:
-              title: "GCP Project ID"
-              type: "string"
-              description: "The name of the GCP project ID for your credentials."
-              examples:
-                - "my-project"
-              order: 1
-            bucket_name:
-              title: "GCS Bucket Name"
-              type: "string"
-              description:
-                "The name of the staging GCS bucket. Airbyte will write\
-                \ files to this bucket and read them via COPY statements\
-                \ on Snowflake."
-              examples:
-                - "airbyte-staging"
-              order: 2
-            credentials_json:
-              title: "Google Application Credentials"
-              type: "string"
-              description:
-                "The contents of the JSON key file that has read/write\
-                \ permissions to the staging GCS bucket. You will separately need\
-                \ to grant bucket access to your Snowflake GCP service account.\
-                \ See the GCP docs for more information on how to generate a JSON key\
-                \ for your service account."
-              airbyte_secret: true
-              multiline: true
-              order: 3
+      - title: "Standard Inserts"
+        additionalProperties: false
+        description: "Uses INSERT statements to send batches of records\
+          \ to Snowflake. Easiest (no setup) but not recommended for large production\
+          \ workloads due to slow speed."
+        required:
+        - "method"
+        properties:
+          method:
+            type: "string"
+            enum:
+            - "Standard"
+            default: "Standard"
+      - title: "AWS S3 Staging"
+        additionalProperties: false
+        description: "Writes large batches of records to a file, uploads the file\
+          \ to S3, then uses COPY INTO table to upload the file. Recommended\
+          \ for large production workloads for better speed and scalability."
+        required:
+        - "method"
+        - "s3_bucket_name"
+        - "access_key_id"
+        - "secret_access_key"
+        properties:
+          method:
+            type: "string"
+            enum:
+            - "S3 Staging"
+            default: "S3 Staging"
+            order: 0
+          s3_bucket_name:
+            title: "S3 Bucket Name"
+            type: "string"
+            description: "The name of the staging S3 bucket. Airbyte will write\
+              \ files to this bucket and read them via COPY statements\
+              \ on Snowflake."
+            examples:
+            - "airbyte.staging"
+            order: 1
+          s3_bucket_region:
+            title: "S3 Bucket Region"
+            type: "string"
+            default: ""
+            description: "The region of the S3 staging bucket to use if utilising\
+              \ a copy strategy."
+            enum:
+            - ""
+            - "us-east-1"
+            - "us-east-2"
+            - "us-west-1"
+            - "us-west-2"
+            - "af-south-1"
+            - "ap-east-1"
+            - "ap-south-1"
+            - "ap-northeast-1"
+            - "ap-northeast-2"
+            - "ap-northeast-3"
+            - "ap-southeast-1"
+            - "ap-southeast-2"
+            - "ca-central-1"
+            - "cn-north-1"
+            - "cn-northwest-1"
+            - "eu-central-1"
+            - "eu-west-1"
+            - "eu-west-2"
+            - "eu-west-3"
+            - "eu-south-1"
+            - "eu-north-1"
+            - "sa-east-1"
+            - "me-south-1"
+            order: 2
+          access_key_id:
+            type: "string"
+            description: "The Access Key Id that grants access to the above\
+              \ S3 staging bucket. Airbyte requires Read and Write permissions\
+              \ to the given bucket."
+            title: "S3 Key Id"
+            airbyte_secret: true
+            order: 3
+          secret_access_key:
+            type: "string"
+            description: "The corresponding secret to the above access key id."
+            title: "S3 Access Key"
+            airbyte_secret: true
+            order: 4
+      - title: "GCS Staging"
+        additionalProperties: false
+        description: "Writes large batches of records to a file, uploads the file\
+          \ to GCS, then uses COPY INTO table to upload the file. Recommended\
+          \ for large production workloads for better speed and scalability."
+        required:
+        - "method"
+        - "project_id"
+        - "bucket_name"
+        - "credentials_json"
+        properties:
+          method:
+            type: "string"
+            enum:
+            - "GCS Staging"
+            default: "GCS Staging"
+            order: 0
+          project_id:
+            title: "GCP Project ID"
+            type: "string"
+            description: "The name of the GCP project ID for your credentials."
+            examples:
+            - "my-project"
+            order: 1
+          bucket_name:
+            title: "GCS Bucket Name"
+            type: "string"
+            description: "The name of the staging GCS bucket. Airbyte will write\
+              \ files to this bucket and read them via COPY statements\
+              \ on Snowflake."
+            examples:
+            - "airbyte-staging"
+            order: 2
+          credentials_json:
+            title: "Google Application Credentials"
+            type: "string"
+            description: "The contents of the JSON key file that has read/write\
+              \ permissions to the staging GCS bucket. You will separately need\
+              \ to grant bucket access to your Snowflake GCP service account.\
+              \ See the GCP docs for more information on how to generate a JSON key\
+              \ for your service account."
+            airbyte_secret: true
+            multiline: true
+            order: 3
   supportsIncremental: true
   supportsNormalization: true
   supportsDBT: true
   supported_destination_sync_modes:
-    - "overwrite"
-    - "append"
-    - "append_dedup"
+  - "overwrite"
+  - "append"
+  - "append_dedup"
diff --git a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/buffered_stream_consumer/BufferedStreamConsumer.java b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/buffered_stream_consumer/BufferedStreamConsumer.java
index 0a7ad0d5d3f5..d2581f5321ac 100644
--- a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/buffered_stream_consumer/BufferedStreamConsumer.java
+++ b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/buffered_stream_consumer/BufferedStreamConsumer.java
@@ -142,8 +142,9 @@ protected void acceptTracked(final AirbyteMessage message) throws Exception {
       return;
     }

-    // TODO use a more efficient way to compute bytes that doesn't require double serialization (records are serialized again when writing to
-    // the destination
+    // TODO use a more efficient way to compute bytes that doesn't require double serialization (records
+    // are serialized again when writing to
+    // the destination
     // TODO use a smarter way of estimating byte size rather than always multiply by two
     long messageSizeInBytes = Jsons.serialize(recordMessage.getData()).length() * 2; // Strings serialize to UTF-8 by default
     if (bufferSizeInBytes + messageSizeInBytes >= maxQueueSizeInBytes) {
diff --git a/airbyte-migration/Dockerfile b/airbyte-migration/Dockerfile
index d591e22f0204..d320f6b5fd93 100644
--- a/airbyte-migration/Dockerfile
+++ b/airbyte-migration/Dockerfile
@@ -6,7 +6,7 @@ ENV APPLICATION airbyte-migration
 WORKDIR /app

 # Move and run scheduler
-COPY bin/${APPLICATION}-0.30.35-alpha.tar ${APPLICATION}.tar
+COPY bin/${APPLICATION}-0.30.36-alpha.tar ${APPLICATION}.tar

 RUN tar xf ${APPLICATION}.tar --strip-components=1
diff --git a/airbyte-scheduler/app/Dockerfile b/airbyte-scheduler/app/Dockerfile
index 7763f711f30d..d4557f3486da 100644
--- a/airbyte-scheduler/app/Dockerfile
+++ b/airbyte-scheduler/app/Dockerfile
@@ -5,7 +5,7 @@ ENV APPLICATION airbyte-scheduler

 WORKDIR /app

-ADD bin/${APPLICATION}-0.30.35-alpha.tar /app
+ADD bin/${APPLICATION}-0.30.36-alpha.tar /app

 # wait for upstream dependencies to become available before starting server
-ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.30.35-alpha/bin/${APPLICATION}"]
+ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.30.36-alpha/bin/${APPLICATION}"]
diff --git a/airbyte-server/Dockerfile b/airbyte-server/Dockerfile
index c807078aaa5c..baf264ea2432 100644
--- a/airbyte-server/Dockerfile
+++ b/airbyte-server/Dockerfile
@@ -7,7 +7,7 @@ ENV APPLICATION airbyte-server

 WORKDIR /app

-ADD bin/${APPLICATION}-0.30.35-alpha.tar /app
+ADD bin/${APPLICATION}-0.30.36-alpha.tar /app

 # wait for upstream dependencies to become available before starting server
-ENTRYPOINT ["/bin/bash", "-c",
"${APPLICATION}-0.30.35-alpha/bin/${APPLICATION}"] +ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.30.36-alpha/bin/${APPLICATION}"] diff --git a/airbyte-webapp/package-lock.json b/airbyte-webapp/package-lock.json index 9436454a7786..ebbd5d65064f 100644 --- a/airbyte-webapp/package-lock.json +++ b/airbyte-webapp/package-lock.json @@ -1,12 +1,12 @@ { "name": "airbyte-webapp", - "version": "0.30.35-alpha", + "version": "0.30.36-alpha", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "airbyte-webapp", - "version": "0.30.35-alpha", + "version": "0.30.36-alpha", "dependencies": { "@fortawesome/fontawesome-svg-core": "^1.2.36", "@fortawesome/free-brands-svg-icons": "^5.15.4", diff --git a/airbyte-webapp/package.json b/airbyte-webapp/package.json index 1038db3ce74d..ed29363d4724 100644 --- a/airbyte-webapp/package.json +++ b/airbyte-webapp/package.json @@ -1,6 +1,6 @@ { "name": "airbyte-webapp", - "version": "0.30.35-alpha", + "version": "0.30.36-alpha", "private": true, "scripts": { "start": "react-scripts start", diff --git a/airbyte-workers/Dockerfile b/airbyte-workers/Dockerfile index 8dfd807656ed..b3e25dc3fa12 100644 --- a/airbyte-workers/Dockerfile +++ b/airbyte-workers/Dockerfile @@ -23,7 +23,7 @@ ENV APPLICATION airbyte-workers WORKDIR /app # Move worker app -ADD bin/${APPLICATION}-0.30.35-alpha.tar /app +ADD bin/${APPLICATION}-0.30.36-alpha.tar /app # wait for upstream dependencies to become available before starting server -ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.30.35-alpha/bin/${APPLICATION}"] +ENTRYPOINT ["/bin/bash", "-c", "${APPLICATION}-0.30.36-alpha/bin/${APPLICATION}"] diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/normalization/NormalizationRunnerFactory.java b/airbyte-workers/src/main/java/io/airbyte/workers/normalization/NormalizationRunnerFactory.java index 02e81fa0878a..e26136b3cf1b 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/normalization/NormalizationRunnerFactory.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/normalization/NormalizationRunnerFactory.java @@ -7,9 +7,7 @@ import com.google.common.collect.ImmutableMap; import io.airbyte.workers.normalization.DefaultNormalizationRunner.DestinationType; import io.airbyte.workers.process.ProcessFactory; - import java.util.Map; - import org.apache.commons.lang3.tuple.ImmutablePair; public class NormalizationRunnerFactory { @@ -33,11 +31,11 @@ public class NormalizationRunnerFactory { .build(); public static NormalizationRunner create(final String connectorImageName, final ProcessFactory processFactory) { - final var valuePair = getNormalizationInfoForConnector(connectorImageName); - return new DefaultNormalizationRunner( - valuePair.getRight(), - processFactory, - String.format("%s:%s", valuePair.getLeft(), NORMALIZATION_VERSION)); + final var valuePair = getNormalizationInfoForConnector(connectorImageName); + return new DefaultNormalizationRunner( + valuePair.getRight(), + processFactory, + String.format("%s:%s", valuePair.getLeft(), NORMALIZATION_VERSION)); } public static ImmutablePair getNormalizationInfoForConnector(final String connectorImageName) { diff --git a/charts/airbyte/Chart.yaml b/charts/airbyte/Chart.yaml index fc5b368377a4..5f6e9c4488b3 100644 --- a/charts/airbyte/Chart.yaml +++ b/charts/airbyte/Chart.yaml @@ -21,7 +21,7 @@ version: 0.3.0 # incremented each time you make changes to the application. Versions are not expected to # follow Semantic Versioning. They should reflect the version the application is using. 
# It is recommended to use it with quotes.
-appVersion: "0.30.35-alpha"
+appVersion: "0.30.36-alpha"

 dependencies:
 - name: common
diff --git a/charts/airbyte/README.md b/charts/airbyte/README.md
index f887e9b02cdd..69f6dce0a783 100644
--- a/charts/airbyte/README.md
+++ b/charts/airbyte/README.md
@@ -29,7 +29,7 @@
 | `webapp.replicaCount` | Number of webapp replicas | `1` |
 | `webapp.image.repository` | The repository to use for the airbyte webapp image. | `airbyte/webapp` |
 | `webapp.image.pullPolicy` | the pull policy to use for the airbyte webapp image | `IfNotPresent` |
-| `webapp.image.tag` | The airbyte webapp image tag. Defaults to the chart's AppVersion | `0.30.35-alpha` |
+| `webapp.image.tag` | The airbyte webapp image tag. Defaults to the chart's AppVersion | `0.30.36-alpha` |
 | `webapp.podAnnotations` | Add extra annotations to the webapp pod(s) | `{}` |
 | `webapp.service.type` | The service type to use for the webapp service | `ClusterIP` |
 | `webapp.service.port` | The service port to expose the webapp on | `80` |
@@ -56,7 +56,7 @@
 | `scheduler.replicaCount` | Number of scheduler replicas | `1` |
 | `scheduler.image.repository` | The repository to use for the airbyte scheduler image. | `airbyte/scheduler` |
 | `scheduler.image.pullPolicy` | the pull policy to use for the airbyte scheduler image | `IfNotPresent` |
-| `scheduler.image.tag` | The airbyte scheduler image tag. Defaults to the chart's AppVersion | `0.30.35-alpha` |
+| `scheduler.image.tag` | The airbyte scheduler image tag. Defaults to the chart's AppVersion | `0.30.36-alpha` |
 | `scheduler.podAnnotations` | Add extra annotations to the scheduler pod | `{}` |
 | `scheduler.resources.limits` | The resources limits for the scheduler container | `{}` |
 | `scheduler.resources.requests` | The requested resources for the scheduler container | `{}` |
@@ -87,7 +87,7 @@
 | `server.replicaCount` | Number of server replicas | `1` |
 | `server.image.repository` | The repository to use for the airbyte server image. | `airbyte/server` |
 | `server.image.pullPolicy` | the pull policy to use for the airbyte server image | `IfNotPresent` |
-| `server.image.tag` | The airbyte server image tag. Defaults to the chart's AppVersion | `0.30.35-alpha` |
+| `server.image.tag` | The airbyte server image tag. Defaults to the chart's AppVersion | `0.30.36-alpha` |
 | `server.podAnnotations` | Add extra annotations to the server pod | `{}` |
 | `server.livenessProbe.enabled` | Enable livenessProbe on the server | `true` |
 | `server.livenessProbe.initialDelaySeconds` | Initial delay seconds for livenessProbe | `30` |
@@ -121,7 +121,7 @@
 | `worker.replicaCount` | Number of worker replicas | `1` |
 | `worker.image.repository` | The repository to use for the airbyte worker image. | `airbyte/worker` |
 | `worker.image.pullPolicy` | the pull policy to use for the airbyte worker image | `IfNotPresent` |
-| `worker.image.tag` | The airbyte worker image tag. Defaults to the chart's AppVersion | `0.30.35-alpha` |
+| `worker.image.tag` | The airbyte worker image tag. Defaults to the chart's AppVersion | `0.30.36-alpha` |
 | `worker.podAnnotations` | Add extra annotations to the worker pod(s) | `{}` |
 | `worker.livenessProbe.enabled` | Enable livenessProbe on the worker | `true` |
 | `worker.livenessProbe.initialDelaySeconds` | Initial delay seconds for livenessProbe | `30` |
diff --git a/charts/airbyte/values.yaml b/charts/airbyte/values.yaml
index 2edfa5c72001..efa1424e47ab 100644
--- a/charts/airbyte/values.yaml
+++ b/charts/airbyte/values.yaml
@@ -44,7 +44,7 @@ webapp:
   image:
     repository: airbyte/webapp
     pullPolicy: IfNotPresent
-    tag: 0.30.35-alpha
+    tag: 0.30.36-alpha

   ## @param webapp.podAnnotations [object] Add extra annotations to the webapp pod(s)
   ##
@@ -141,7 +141,7 @@ scheduler:
   image:
     repository: airbyte/scheduler
     pullPolicy: IfNotPresent
-    tag: 0.30.35-alpha
+    tag: 0.30.36-alpha

   ## @param scheduler.podAnnotations [object] Add extra annotations to the scheduler pod
   ##
@@ -248,7 +248,7 @@ server:
   image:
     repository: airbyte/server
     pullPolicy: IfNotPresent
-    tag: 0.30.35-alpha
+    tag: 0.30.36-alpha

   ## @param server.podAnnotations [object] Add extra annotations to the server pod
   ##
@@ -360,7 +360,7 @@ worker:
   image:
     repository: airbyte/worker
     pullPolicy: IfNotPresent
-    tag: 0.30.35-alpha
+    tag: 0.30.36-alpha

   ## @param worker.podAnnotations [object] Add extra annotations to the worker pod(s)
   ##
diff --git a/docs/operator-guides/upgrading-airbyte.md b/docs/operator-guides/upgrading-airbyte.md
index 0da21393d7a3..63d49b7c8657 100644
--- a/docs/operator-guides/upgrading-airbyte.md
+++ b/docs/operator-guides/upgrading-airbyte.md
@@ -82,7 +82,7 @@ If you are upgrading from \(i.e. your current version of Airbyte is\) Airbyte ve

 Here's an example of what it might look like with the values filled in. It assumes that the downloaded `airbyte_archive.tar.gz` is in `/tmp`.
```bash - docker run --rm -v /tmp:/config airbyte/migration:0.30.35-alpha --\ + docker run --rm -v /tmp:/config airbyte/migration:0.30.36-alpha --\ --input /config/airbyte_archive.tar.gz\ --output /config/airbyte_archive_migrated.tar.gz ``` diff --git a/kube/overlays/stable-with-resource-limits/.env b/kube/overlays/stable-with-resource-limits/.env index d0841315bfa1..503bc37aa8f0 100644 --- a/kube/overlays/stable-with-resource-limits/.env +++ b/kube/overlays/stable-with-resource-limits/.env @@ -1,4 +1,4 @@ -AIRBYTE_VERSION=0.30.35-alpha +AIRBYTE_VERSION=0.30.36-alpha # Airbyte Internal Database, see https://docs.airbyte.io/operator-guides/configuring-airbyte-db DATABASE_HOST=airbyte-db-svc diff --git a/kube/overlays/stable-with-resource-limits/kustomization.yaml b/kube/overlays/stable-with-resource-limits/kustomization.yaml index dae710a47bc1..26b5ab7e1525 100644 --- a/kube/overlays/stable-with-resource-limits/kustomization.yaml +++ b/kube/overlays/stable-with-resource-limits/kustomization.yaml @@ -8,15 +8,15 @@ bases: images: - name: airbyte/db - newTag: 0.30.35-alpha + newTag: 0.30.36-alpha - name: airbyte/scheduler - newTag: 0.30.35-alpha + newTag: 0.30.36-alpha - name: airbyte/server - newTag: 0.30.35-alpha + newTag: 0.30.36-alpha - name: airbyte/webapp - newTag: 0.30.35-alpha + newTag: 0.30.36-alpha - name: airbyte/worker - newTag: 0.30.35-alpha + newTag: 0.30.36-alpha - name: temporalio/auto-setup newTag: 1.7.0 diff --git a/kube/overlays/stable/.env b/kube/overlays/stable/.env index d0841315bfa1..503bc37aa8f0 100644 --- a/kube/overlays/stable/.env +++ b/kube/overlays/stable/.env @@ -1,4 +1,4 @@ -AIRBYTE_VERSION=0.30.35-alpha +AIRBYTE_VERSION=0.30.36-alpha # Airbyte Internal Database, see https://docs.airbyte.io/operator-guides/configuring-airbyte-db DATABASE_HOST=airbyte-db-svc diff --git a/kube/overlays/stable/kustomization.yaml b/kube/overlays/stable/kustomization.yaml index 927f3ec66f47..35e465187368 100644 --- a/kube/overlays/stable/kustomization.yaml +++ b/kube/overlays/stable/kustomization.yaml @@ -8,15 +8,15 @@ bases: images: - name: airbyte/db - newTag: 0.30.35-alpha + newTag: 0.30.36-alpha - name: airbyte/scheduler - newTag: 0.30.35-alpha + newTag: 0.30.36-alpha - name: airbyte/server - newTag: 0.30.35-alpha + newTag: 0.30.36-alpha - name: airbyte/webapp - newTag: 0.30.35-alpha + newTag: 0.30.36-alpha - name: airbyte/worker - newTag: 0.30.35-alpha + newTag: 0.30.36-alpha - name: temporalio/auto-setup newTag: 1.7.0
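
Editor's note on verifying a bump like this one: the version string is pinned in `.bumpversion.cfg`, `.env`, several Dockerfiles, the Helm chart, and both kube overlays, and `bumpversion` only rewrites the files listed in its config. Below is a minimal consistency-check sketch, assuming a checkout of this branch; the script itself is illustrative and not part of the patch.

```bash
#!/usr/bin/env bash
# Sketch only: fail if any stale reference to the previous tag survived the bump.
set -euo pipefail

OLD_VERSION="0.30.35-alpha"
NEW_VERSION="0.30.36-alpha"

# A hard-coded tag outside the .bumpversion.cfg file list (e.g. a Dockerfile
# ENTRYPOINT) would otherwise slip through unnoticed.
if git grep -n "${OLD_VERSION}"; then
  echo "stale references to ${OLD_VERSION} found" >&2
  exit 1
fi

# Spot-check a few of the files this patch touches.
grep -q "VERSION=${NEW_VERSION}" .env
grep -q "AIRBYTE_VERSION=${NEW_VERSION}" kube/overlays/stable/.env
grep -q "appVersion: \"${NEW_VERSION}\"" charts/airbyte/Chart.yaml
echo "version bump looks consistent"
```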
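
For completeness, the `newTag` changes in the kustomize overlays above only take effect once the overlay is re-applied to the cluster. A minimal rollout sketch, assuming kubectl >= 1.14 (with built-in kustomize) and an existing Airbyte deployment created from the same overlay:

```bash
# Render the overlay first and review the image tags that would be deployed.
kubectl kustomize kube/overlays/stable | grep "image: airbyte/"

# Re-apply the overlay; since only image tags changed, this triggers a
# rolling update of the Airbyte deployments to 0.30.36-alpha.
kubectl apply -k kube/overlays/stable
```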