diff --git a/airbyte-config/init/src/main/java/io/airbyte/config/init/SeedType.java b/airbyte-config/init/src/main/java/io/airbyte/config/init/SeedType.java index 47c4c419bcf1..373036962109 100644 --- a/airbyte-config/init/src/main/java/io/airbyte/config/init/SeedType.java +++ b/airbyte-config/init/src/main/java/io/airbyte/config/init/SeedType.java @@ -7,7 +7,9 @@ public enum SeedType { STANDARD_SOURCE_DEFINITION("/seed/source_definitions.yaml", "sourceDefinitionId"), - STANDARD_DESTINATION_DEFINITION("/seed/destination_definitions.yaml", "destinationDefinitionId"); + STANDARD_DESTINATION_DEFINITION("/seed/destination_definitions.yaml", "destinationDefinitionId"), + SOURCE_SPEC("/seed/source_specs.yaml", "dockerImage"), + DESTINATION_SPEC("/seed/destination_specs.yaml", "dockerImage"); final String resourcePath; // ID field name diff --git a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml new file mode 100644 index 000000000000..a248b4eff240 --- /dev/null +++ b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml @@ -0,0 +1,2752 @@ +# This file is generated by io.airbyte.config.specs.SeedConnectorSpecGenerator. +# Do NOT edit this file directly. See generator class for more details. +--- +- dockerImage: "airbyte/destination-azure-blob-storage:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/destinations/azureblobstorage" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "AzureBlobStorage Destination Spec" + type: "object" + required: + - "azure_blob_storage_account_name" + - "azure_blob_storage_account_key" + - "format" + additionalProperties: false + properties: + azure_blob_storage_endpoint_domain_name: + title: "Endpoint Domain Name" + type: "string" + default: "blob.core.windows.net" + description: "This is Azure Blob Storage endpoint domain name. Leave default\ + \ value (or leave it empty if run container from command line) to use\ + \ Microsoft native from example." + examples: + - "blob.core.windows.net" + azure_blob_storage_container_name: + title: "Azure blob storage container (Bucket) Name" + type: "string" + description: "The name of the Azure blob storage container. If not exists\ + \ - will be created automatically. May be empty, then will be created\ + \ automatically airbytecontainer+timestamp" + examples: + - "airbytetescontainername" + azure_blob_storage_account_name: + title: "Azure Blob Storage account name" + type: "string" + description: "The account's name of the Azure Blob Storage." + examples: + - "airbyte5storage" + azure_blob_storage_account_key: + description: "The Azure blob storage account key." + airbyte_secret: true + type: "string" + examples: + - "Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd==" + format: + title: "Output Format" + type: "object" + description: "Output data format" + oneOf: + - title: "CSV: Comma-Separated Values" + required: + - "format_type" + - "flattening" + properties: + format_type: + type: "string" + const: "CSV" + flattening: + type: "string" + title: "Normalization (Flattening)" + description: "Whether the input json data should be normalized (flattened)\ + \ in the output CSV. Please refer to docs for details." 
+ default: "No flattening" + enum: + - "No flattening" + - "Root level flattening" + - title: "JSON Lines: newline-delimited JSON" + required: + - "format_type" + properties: + format_type: + type: "string" + const: "JSONL" + supportsIncremental: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: + - "overwrite" + - "append" +- dockerImage: "airbyte/destination-bigquery:0.5.0" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/destinations/bigquery" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "BigQuery Destination Spec" + type: "object" + required: + - "project_id" + - "dataset_id" + additionalProperties: true + properties: + big_query_client_buffer_size_mb: + title: "Google BigQuery client chunk size" + description: "Google BigQuery client's chunk(buffer) size (MIN=1, MAX =\ + \ 15) for each table. The default 15MiB value is used if not set explicitly.\ + \ It's recommended to decrease value for big data sets migration for less\ + \ HEAP memory consumption and avoiding crashes. For more details refer\ + \ to https://googleapis.dev/python/bigquery/latest/generated/google.cloud.bigquery.client.Client.html" + type: "integer" + minimum: 1 + maximum: 15 + default: 15 + examples: + - "15" + project_id: + type: "string" + description: "The GCP project ID for the project containing the target BigQuery\ + \ dataset." + title: "Project ID" + dataset_id: + type: "string" + description: "Default BigQuery Dataset ID tables are replicated to if the\ + \ source does not specify a namespace." + title: "Default Dataset ID" + dataset_location: + type: "string" + description: "The location of the dataset. Warning: Changes made after creation\ + \ will not be applied." + title: "Dataset Location" + default: "US" + enum: + - "US" + - "EU" + - "asia-east1" + - "asia-east2" + - "asia-northeast1" + - "asia-northeast2" + - "asia-northeast3" + - "asia-south1" + - "asia-southeast1" + - "asia-southeast2" + - "australia-southeast1" + - "europe-central1" + - "europe-central2" + - "europe-north1" + - "europe-west1" + - "europe-west2" + - "europe-west3" + - "europe-west4" + - "europe-west5" + - "europe-west6" + - "northamerica-northeast1" + - "southamerica-east1" + - "us-central1" + - "us-east1" + - "us-east4" + - "us-west-1" + - "us-west-2" + - "us-west-3" + - "us-west-4" + credentials_json: + type: "string" + description: "The contents of the JSON service account key. Check out the\ + \ docs if you need help generating this key. Default credentials will\ + \ be used if this field is left empty." + title: "Credentials JSON" + airbyte_secret: true + transformation_priority: + type: "string" + description: "When running custom transformations or Basic normalization,\ + \ running queries on interactive mode can hit BQ limits, choosing batch\ + \ will solve those limitss." + title: "Transformation Query Run Type" + default: "interactive" + enum: + - "interactive" + - "batch" + loading_method: + type: "object" + title: "Loading Method" + description: "Loading method used to send select the way data will be uploaded\ + \ to BigQuery." + oneOf: + - title: "Standard Inserts" + additionalProperties: false + description: "Direct uploading using streams." + required: + - "method" + properties: + method: + type: "string" + const: "Standard" + - title: "GCS Staging" + additionalProperties: false + description: "Writes large batches of records to a file, uploads the file\ + \ to GCS, then uses
COPY INTO tableto upload the file. Recommended\ + \ for large production workloads for better speed and scalability." + required: + - "method" + - "gcs_bucket_name" + - "gcs_bucket_path" + - "credential" + properties: + method: + type: "string" + const: "GCS Staging" + gcs_bucket_name: + title: "GCS Bucket Name" + type: "string" + description: "The name of the GCS bucket." + examples: + - "airbyte_sync" + gcs_bucket_path: + description: "Directory under the GCS bucket where data will be written." + type: "string" + examples: + - "data_sync/test" + keep_files_in_gcs-bucket: + type: "string" + description: "This upload method is supposed to temporary store records\ + \ in GCS bucket. What do you want to do with data in GCS bucket\ + \ when migration has finished?" + title: "GCS tmp files afterward processing" + default: "Delete all tmp files from GCS" + enum: + - "Delete all tmp files from GCS" + - "Keep all tmp files in GCS" + credential: + title: "Credential" + type: "object" + oneOf: + - title: "HMAC key" + required: + - "credential_type" + - "hmac_key_access_id" + - "hmac_key_secret" + properties: + credential_type: + type: "string" + const: "HMAC_KEY" + hmac_key_access_id: + type: "string" + description: "HMAC key access ID. When linked to a service account,\ + \ this ID is 61 characters long; when linked to a user account,\ + \ it is 24 characters long." + title: "HMAC Key Access ID" + airbyte_secret: true + examples: + - "1234567890abcdefghij1234" + hmac_key_secret: + type: "string" + description: "The corresponding secret for the access ID. It\ + \ is a 40-character base-64 encoded string." + title: "HMAC Key Secret" + airbyte_secret: true + examples: + - "1234567890abcdefghij1234567890ABCDEFGHIJ" + supportsIncremental: true + supportsNormalization: true + supportsDBT: true + supported_destination_sync_modes: + - "overwrite" + - "append" + - "append_dedup" +- dockerImage: "airbyte/destination-bigquery-denormalized:0.1.7" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/destinations/bigquery" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "BigQuery Denormalized Typed Struct Destination Spec" + type: "object" + required: + - "project_id" + - "dataset_id" + additionalProperties: true + properties: + project_id: + type: "string" + description: "The GCP project ID for the project containing the target BigQuery\ + \ dataset." + title: "Project ID" + dataset_id: + type: "string" + description: "Default BigQuery Dataset ID tables are replicated to if the\ + \ source does not specify a namespace." + title: "Default Dataset ID" + dataset_location: + type: "string" + description: "The location of the dataset. Warning: Changes made after creation\ + \ will not be applied." + title: "Dataset Location" + default: "US" + enum: + - "US" + - "EU" + - "asia-east1" + - "asia-east2" + - "asia-northeast1" + - "asia-northeast2" + - "asia-northeast3" + - "asia-south1" + - "asia-southeast1" + - "asia-southeast2" + - "australia-southeast1" + - "europe-central1" + - "europe-central2" + - "europe-north1" + - "europe-west1" + - "europe-west2" + - "europe-west3" + - "europe-west4" + - "europe-west5" + - "europe-west6" + - "northamerica-northeast1" + - "southamerica-east1" + - "us-central1" + - "us-east1" + - "us-east4" + - "us-west-1" + - "us-west-2" + - "us-west-3" + - "us-west-4" + credentials_json: + type: "string" + description: "The contents of the JSON service account key. Check out the\ + \ docs if you need help generating this key. 
Default credentials will\ + \ be used if this field is left empty." + title: "Credentials JSON" + airbyte_secret: true + supportsIncremental: true + supportsNormalization: false + supportsDBT: true + supported_destination_sync_modes: + - "overwrite" + - "append" +- dockerImage: "airbyte/destination-keen:0.2.0" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/destinations/keen" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Keen Spec" + type: "object" + required: + - "project_id" + - "api_key" + additionalProperties: false + properties: + project_id: + description: "Keen Project ID" + type: "string" + examples: + - "58b4acc22ba938934e888322e" + api_key: + title: "API Key" + description: "Keen Master API key" + type: "string" + examples: + - "ABCDEFGHIJKLMNOPRSTUWXYZ" + airbyte_secret: true + infer_timestamp: + title: "Infer Timestamp" + description: "Allow connector to guess keen.timestamp value based on the\ + \ streamed data" + type: "boolean" + default: true + supportsIncremental: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: + - "overwrite" + - "append" +- dockerImage: "airbyte/destination-dynamodb:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/destinations/dynamodb" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "DynamoDB Destination Spec" + type: "object" + required: + - "dynamodb_table_name" + - "dynamodb_region" + - "access_key_id" + - "secret_access_key" + additionalProperties: false + properties: + dynamodb_endpoint: + title: "Endpoint" + type: "string" + default: "" + description: "This is your DynamoDB endpoint url.(if you are working with\ + \ AWS DynamoDB, just leave empty)." + examples: + - "http://localhost:9000" + dynamodb_table_name: + title: "DynamoDB Table Name" + type: "string" + description: "The name of the DynamoDB table." + examples: + - "airbyte_sync" + dynamodb_region: + title: "DynamoDB Region" + type: "string" + default: "" + description: "The region of the DynamoDB." + enum: + - "" + - "us-east-1" + - "us-east-2" + - "us-west-1" + - "us-west-2" + - "af-south-1" + - "ap-east-1" + - "ap-south-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-southeast-1" + - "ap-southeast-2" + - "ca-central-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-north-1" + - "eu-south-1" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "sa-east-1" + - "me-south-1" + - "us-gov-east-1" + - "us-gov-west-1" + access_key_id: + type: "string" + description: "The access key id to access the DynamoDB. Airbyte requires\ + \ Read and Write permissions to the DynamoDB." + title: "DynamoDB Key Id" + airbyte_secret: true + examples: + - "A012345678910EXAMPLE" + secret_access_key: + type: "string" + description: "The corresponding secret to the access key id." 
+ title: "DynamoDB Access Key" + airbyte_secret: true + examples: + - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" + supportsIncremental: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: + - "overwrite" + - "append" +- dockerImage: "airbyte/destination-gcs:0.1.2" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/destinations/gcs" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "GCS Destination Spec" + type: "object" + required: + - "gcs_bucket_name" + - "gcs_bucket_path" + - "gcs_bucket_region" + - "credential" + - "format" + additionalProperties: false + properties: + gcs_bucket_name: + title: "GCS Bucket Name" + type: "string" + description: "The name of the GCS bucket." + examples: + - "airbyte_sync" + gcs_bucket_path: + description: "Directory under the GCS bucket where data will be written." + type: "string" + examples: + - "data_sync/test" + gcs_bucket_region: + title: "GCS Bucket Region" + type: "string" + default: "" + description: "The region of the GCS bucket." + enum: + - "" + - "-- North America --" + - "northamerica-northeast1" + - "us-central1" + - "us-east1" + - "us-east4" + - "us-west1" + - "us-west2" + - "us-west3" + - "us-west4" + - "-- South America --" + - "southamerica-east1" + - "-- Europe --" + - "europe-central2" + - "europe-north1" + - "europe-west1" + - "europe-west2" + - "europe-west3" + - "europe-west4" + - "europe-west6" + - "-- Asia --" + - "asia-east1" + - "asia-east2" + - "asia-northeast1" + - "asia-northeast2" + - "asia-northeast3" + - "asia-south1" + - "asia-south2" + - "asia-southeast1" + - "asia-southeast2" + - "-- Australia --" + - "australia-southeast1" + - "australia-southeast2" + - "-- Multi-regions --" + - "asia" + - "eu" + - "us" + - "-- Dual-regions --" + - "asia1" + - "eur4" + - "nam4" + credential: + title: "Credential" + type: "object" + oneOf: + - title: "HMAC key" + required: + - "credential_type" + - "hmac_key_access_id" + - "hmac_key_secret" + properties: + credential_type: + type: "string" + enum: + - "HMAC_KEY" + default: "HMAC_KEY" + hmac_key_access_id: + type: "string" + description: "HMAC key access ID. When linked to a service account,\ + \ this ID is 61 characters long; when linked to a user account,\ + \ it is 24 characters long." + title: "HMAC Key Access ID" + airbyte_secret: true + examples: + - "1234567890abcdefghij1234" + hmac_key_secret: + type: "string" + description: "The corresponding secret for the access ID. It is a\ + \ 40-character base-64 encoded string." + title: "HMAC Key Secret" + airbyte_secret: true + examples: + - "1234567890abcdefghij1234567890ABCDEFGHIJ" + format: + title: "Output Format" + type: "object" + description: "Output data format" + oneOf: + - title: "Avro: Apache Avro" + required: + - "format_type" + - "compression_codec" + properties: + format_type: + type: "string" + enum: + - "Avro" + default: "Avro" + compression_codec: + title: "Compression Codec" + description: "The compression algorithm used to compress data. Default\ + \ to no compression." + type: "object" + oneOf: + - title: "no compression" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "no compression" + default: "no compression" + - title: "Deflate" + required: + - "codec" + - "compression_level" + properties: + codec: + type: "string" + enum: + - "Deflate" + default: "Deflate" + compression_level: + title: "Deflate level" + description: "0: no compression & fastest, 9: best compression\ + \ & slowest." 
+ type: "integer" + default: 0 + minimum: 0 + maximum: 9 + - title: "bzip2" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "bzip2" + default: "bzip2" + - title: "xz" + required: + - "codec" + - "compression_level" + properties: + codec: + type: "string" + enum: + - "xz" + default: "xz" + compression_level: + title: "Compression level" + description: "See here for details." + type: "integer" + default: 6 + minimum: 0 + maximum: 9 + - title: "zstandard" + required: + - "codec" + - "compression_level" + properties: + codec: + type: "string" + enum: + - "zstandard" + default: "zstandard" + compression_level: + title: "Compression level" + description: "Negative levels are 'fast' modes akin to lz4 or\ + \ snappy, levels above 9 are generally for archival purposes,\ + \ and levels above 18 use a lot of memory." + type: "integer" + default: 3 + minimum: -5 + maximum: 22 + include_checksum: + title: "Include checksum" + description: "If true, include a checksum with each data block." + type: "boolean" + default: false + - title: "snappy" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "snappy" + default: "snappy" + part_size_mb: + title: "Block Size (MB) for GCS multipart upload" + description: "This is the size of a \"Part\" being buffered in memory.\ + \ It limits the memory usage when writing. Larger values will allow\ + \ to upload a bigger files and improve the speed, but consumes9\ + \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB." + type: "integer" + default: 5 + examples: + - 5 + - title: "CSV: Comma-Separated Values" + required: + - "format_type" + - "flattening" + properties: + format_type: + type: "string" + enum: + - "CSV" + default: "CSV" + flattening: + type: "string" + title: "Normalization (Flattening)" + description: "Whether the input json data should be normalized (flattened)\ + \ in the output CSV. Please refer to docs for details." + default: "No flattening" + enum: + - "No flattening" + - "Root level flattening" + part_size_mb: + title: "Block Size (MB) for GCS multipart upload" + description: "This is the size of a \"Part\" being buffered in memory.\ + \ It limits the memory usage when writing. Larger values will allow\ + \ to upload a bigger files and improve the speed, but consumes9\ + \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB." + type: "integer" + default: 5 + examples: + - 5 + - title: "JSON Lines: newline-delimited JSON" + required: + - "format_type" + properties: + format_type: + type: "string" + enum: + - "JSONL" + default: "JSONL" + part_size_mb: + title: "Block Size (MB) for GCS multipart upload" + description: "This is the size of a \"Part\" being buffered in memory.\ + \ It limits the memory usage when writing. Larger values will allow\ + \ to upload a bigger files and improve the speed, but consumes9\ + \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB." + type: "integer" + default: 5 + examples: + - 5 + - title: "Parquet: Columnar Storage" + required: + - "format_type" + properties: + format_type: + type: "string" + enum: + - "Parquet" + default: "Parquet" + compression_codec: + title: "Compression Codec" + description: "The compression algorithm used to compress data pages." 
+ type: "string" + enum: + - "UNCOMPRESSED" + - "SNAPPY" + - "GZIP" + - "LZO" + - "BROTLI" + - "LZ4" + - "ZSTD" + default: "UNCOMPRESSED" + block_size_mb: + title: "Block Size (Row Group Size) (MB)" + description: "This is the size of a row group being buffered in memory.\ + \ It limits the memory usage when writing. Larger values will improve\ + \ the IO when reading, but consume more memory when writing. Default:\ + \ 128 MB." + type: "integer" + default: 128 + examples: + - 128 + max_padding_size_mb: + title: "Max Padding Size (MB)" + description: "Maximum size allowed as padding to align row groups.\ + \ This is also the minimum size of a row group. Default: 8 MB." + type: "integer" + default: 8 + examples: + - 8 + page_size_kb: + title: "Page Size (KB)" + description: "The page size is for compression. A block is composed\ + \ of pages. A page is the smallest unit that must be read fully\ + \ to access a single record. If this value is too small, the compression\ + \ will deteriorate. Default: 1024 KB." + type: "integer" + default: 1024 + examples: + - 1024 + dictionary_page_size_kb: + title: "Dictionary Page Size (KB)" + description: "There is one dictionary page per column per row group\ + \ when dictionary encoding is used. The dictionary page size works\ + \ like the page size but for dictionary. Default: 1024 KB." + type: "integer" + default: 1024 + examples: + - 1024 + dictionary_encoding: + title: "Dictionary Encoding" + description: "Default: true." + type: "boolean" + default: true + supportsIncremental: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: + - "overwrite" + - "append" + $schema: "http://json-schema.org/draft-07/schema#" +- dockerImage: "airbyte/destination-pubsub:0.1.1" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/destinations/pubsub" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Google PubSub Destination Spec" + type: "object" + required: + - "project_id" + - "topic_id" + - "credentials_json" + additionalProperties: true + properties: + project_id: + type: "string" + description: "The GCP project ID for the project containing the target PubSub" + title: "Project ID" + topic_id: + type: "string" + description: "PubSub topic ID in the given GCP project ID" + title: "PubSub Topic ID" + credentials_json: + type: "string" + description: "The contents of the JSON service account key. Check out the\ + \ docs if you need help generating this key." 
+ title: "Credentials JSON" + airbyte_secret: true + supportsIncremental: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: + - "append" +- dockerImage: "airbyte/destination-kafka:0.1.2" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/destinations/kafka" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Kafka Destination Spec" + type: "object" + required: + - "bootstrap_servers" + - "topic_pattern" + - "protocol" + - "acks" + - "enable_idempotence" + - "compression_type" + - "batch_size" + - "linger_ms" + - "max_in_flight_requests_per_connection" + - "client_dns_lookup" + - "buffer_memory" + - "max_request_size" + - "retries" + - "socket_connection_setup_timeout_ms" + - "socket_connection_setup_timeout_max_ms" + - "max_block_ms" + - "request_timeout_ms" + - "delivery_timeout_ms" + - "send_buffer_bytes" + - "receive_buffer_bytes" + additionalProperties: true + properties: + bootstrap_servers: + title: "Bootstrap servers" + description: "A list of host/port pairs to use for establishing the initial\ + \ connection to the Kafka cluster. The client will make use of all servers\ + \ irrespective of which servers are specified here for bootstrapping—this\ + \ list only impacts the initial hosts used to discover the full set of\ + \ servers. This list should be in the form
+ \ host1:port1,host2:port2,....\
+ \ Since these servers are just used for the initial connection to discover\
+ \ the full cluster membership (which may change dynamically), this list\
+ \ need not contain the full set of servers (you may want more than one,\
+ \ though, in case a server is down)."
+ type: "string"
+ examples:
+ - "kafka-broker1:9092,kafka-broker2:9092"
+ topic_pattern:
+ title: "Topic pattern"
+ description: "Topic pattern in which the records will be sent. You can use\
+ \ patterns like '{namespace}' and/or '{stream}' to send the message to\
+ \ a specific topic based on these values. Notice that the topic name will\
+ \ be transformed to a standard naming convention."
+ type: "string"
+ examples:
+ - "sample.topic"
+ - "{namespace}.{stream}.sample"
+ test_topic:
+ title: "Test topic"
+ description: "Topic to test if Airbyte can produce messages."
+ type: "string"
+ examples:
+ - "test.topic"
+ sync_producer:
+ title: "Sync producer"
+ description: "Wait synchronously until the record has been sent to Kafka."
+ type: "boolean"
+ default: false
+ protocol:
+ title: "Protocol"
+ type: "object"
+ description: "Protocol used to communicate with brokers."
+ oneOf:
+ - title: "PLAINTEXT"
+ required:
+ - "security_protocol"
+ properties:
+ security_protocol:
+ type: "string"
+ enum:
+ - "PLAINTEXT"
+ default: "PLAINTEXT"
+ - title: "SASL PLAINTEXT"
+ required:
+ - "security_protocol"
+ - "sasl_mechanism"
+ - "sasl_jaas_config"
+ properties:
+ security_protocol:
+ type: "string"
+ enum:
+ - "SASL_PLAINTEXT"
+ default: "SASL_PLAINTEXT"
+ sasl_mechanism:
+ title: "SASL mechanism"
+ description: "SASL mechanism used for client connections. This may\
+ \ be any mechanism for which a security provider is available."
+ type: "string"
+ default: "PLAIN"
+ enum:
+ - "PLAIN"
+ sasl_jaas_config:
+ title: "SASL JAAS config"
+ description: "JAAS login context parameters for SASL connections in\
+ \ the format used by JAAS configuration files."
+ type: "string"
+ default: ""
+ airbyte_secret: true
+ - title: "SASL SSL"
+ required:
+ - "security_protocol"
+ - "sasl_mechanism"
+ - "sasl_jaas_config"
+ properties:
+ security_protocol:
+ type: "string"
+ enum:
+ - "SASL_SSL"
+ default: "SASL_SSL"
+ sasl_mechanism:
+ title: "SASL mechanism"
+ description: "SASL mechanism used for client connections. This may\
+ \ be any mechanism for which a security provider is available."
+ type: "string"
+ default: "GSSAPI"
+ enum:
+ - "GSSAPI"
+ - "OAUTHBEARER"
+ - "SCRAM-SHA-256"
+ sasl_jaas_config:
+ title: "SASL JAAS config"
+ description: "JAAS login context parameters for SASL connections in\
+ \ the format used by JAAS configuration files."
+ type: "string"
+ default: ""
+ airbyte_secret: true
+ client_id:
+ title: "Client ID"
+ description: "An id string to pass to the server when making requests. The\
+ \ purpose of this is to be able to track the source of requests beyond\
+ \ just ip/port by allowing a logical application name to be included in\
+ \ server-side request logging."
+ type: "string"
+ examples:
+ - "airbyte-producer"
+ acks:
+ title: "ACKs"
+ description: "The number of acknowledgments the producer requires the leader\
+ \ to have received before considering a request complete. This controls\
+ \ the durability of records that are sent."
+ type: "string"
+ default: "1"
+ enum:
+ - "0"
+ - "1"
+ - "all"
+ enable_idempotence:
+ title: "Enable idempotence"
+ description: "When set to 'true', the producer will ensure that exactly\
+ \ one copy of each message is written in the stream. If 'false', producer\
+ \ retries due to broker failures, etc., may write duplicates of the retried\
+ \ message in the stream."
+ type: "boolean"
+ default: false
+ compression_type:
+ title: "Compression type"
+ description: "The compression type for all data generated by the producer."
+ type: "string"
+ default: "none"
+ enum:
+ - "none"
+ - "gzip"
+ - "snappy"
+ - "lz4"
+ - "zstd"
+ batch_size:
+ title: "Batch size"
+ description: "The producer will attempt to batch records together into fewer\
+ \ requests whenever multiple records are being sent to the same partition."
+ type: "integer"
+ examples:
+ - 16384
+ linger_ms:
+ title: "Linger ms"
+ description: "The producer groups together any records that arrive in between\
+ \ request transmissions into a single batched request."
+ type: "string"
+ examples:
+ - 0
+ max_in_flight_requests_per_connection:
+ title: "Max in flight requests per connection"
+ description: "The maximum number of unacknowledged requests the client will\
+ \ send on a single connection before blocking."
+ type: "integer"
+ examples:
+ - 5
+ client_dns_lookup:
+ title: "Client DNS lookup"
+ description: "Controls how the client uses DNS lookups. If set to use_all_dns_ips,\
+ \ connect to each returned IP address in sequence until a successful connection\
+ \ is established. After a disconnection, the next IP is used. Once all\
+ \ IPs have been used once, the client resolves the IP(s) from the hostname\
+ \ again. If set to resolve_canonical_bootstrap_servers_only, resolve each\
+ \ bootstrap address into a list of canonical names. After the bootstrap\
+ \ phase, this behaves the same as use_all_dns_ips. If set to default (deprecated),\
+ \ attempt to connect to the first IP address returned by the lookup, even\
+ \ if the lookup returns multiple IP addresses."
+ type: "string"
+ default: "use_all_dns_ips"
+ enum:
+ - "default"
+ - "use_all_dns_ips"
+ - "resolve_canonical_bootstrap_servers_only"
+ - "use_all_dns_ips"
+ buffer_memory:
+ title: "Buffer memory"
+ description: "The total bytes of memory the producer can use to buffer records\
+ \ waiting to be sent to the server."
+ type: "string"
+ examples:
+ - 33554432
+ max_request_size:
+ title: "Max request size"
+ description: "The maximum size of a request in bytes."
+ type: "integer"
+ examples:
+ - 1048576
+ retries:
+ title: "Retries"
+ description: "Setting a value greater than zero will cause the client to\
+ \ resend any record whose send fails with a potentially transient error."
+ type: "integer"
+ examples:
+ - 2147483647
+ socket_connection_setup_timeout_ms:
+ title: "Socket connection setup timeout"
+ description: "The amount of time the client will wait for the socket connection\
+ \ to be established."
+ type: "string"
+ examples:
+ - 10000
+ socket_connection_setup_timeout_max_ms:
+ title: "Socket connection setup max timeout"
+ description: "The maximum amount of time the client will wait for the socket\
+ \ connection to be established. The connection setup timeout will increase\
+ \ exponentially for each consecutive connection failure up to this maximum."
+ type: "string"
+ examples:
+ - 30000
+ max_block_ms:
+ title: "Max block ms"
+ description: "The configuration controls how long the KafkaProducer's send(),\
+ \ partitionsFor(), initTransactions(), sendOffsetsToTransaction(), commitTransaction()\
+ \ and abortTransaction() methods will block."
+ type: "string"
+ examples:
+ - 60000
+ request_timeout_ms:
+ title: "Request timeout"
+ description: "The configuration controls the maximum amount of time the\
+ \ client will wait for the response of a request. If the response is not\
+ \ received before the timeout elapses the client will resend the request\
+ \ if necessary or fail the request if retries are exhausted."
+ type: "integer"
+ examples:
+ - 30000
+ delivery_timeout_ms:
+ title: "Delivery timeout"
+ description: "An upper bound on the time to report success or failure after\
+ \ a call to 'send()' returns."
+ type: "integer"
+ examples:
+ - 120000
+ send_buffer_bytes:
+ title: "Send buffer bytes"
+ description: "The size of the TCP send buffer (SO_SNDBUF) to use when sending\
+ \ data. If the value is -1, the OS default will be used."
+ type: "integer"
+ examples:
+ - 131072
+ receive_buffer_bytes:
+ title: "Receive buffer bytes"
+ description: "The size of the TCP receive buffer (SO_RCVBUF) to use when\
+ \ reading data. If the value is -1, the OS default will be used."
+ type: "integer"
+ examples:
+ - 32768
+ supportsIncremental: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes:
+ - "append"
+- dockerImage: "airbyte/destination-csv:0.2.8"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/destinations/local-csv"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "CSV Destination Spec"
+ type: "object"
+ required:
+ - "destination_path"
+ additionalProperties: false
+ properties:
+ destination_path:
+ description: "Path to the directory where csv files will be written. The\
+ \ destination uses the local mount \"/local\" and any data files will\
+ \ be placed inside that local mount. For more information check out our\
+ \ docs"
+ type: "string"
+ examples:
+ - "/local"
+ supportsIncremental: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes:
+ - "overwrite"
+ - "append"
+- dockerImage: "airbyte/destination-local-json:0.2.8"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/destinations/local-json"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Local Json Destination Spec"
+ type: "object"
+ required:
+ - "destination_path"
+ additionalProperties: false
+ properties:
+ destination_path:
+ description: "Path to the directory where json files will be written. The\
+ \ files will be placed inside that local mount. For more information check\
+ \ out our docs"
+ type: "string"
+ examples:
+ - "/json_data"
+ supportsIncremental: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes:
+ - "overwrite"
+ - "append"
+- dockerImage: "airbyte/destination-mssql:0.1.10"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/destinations/mssql"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "MS SQL Server Destination Spec"
+ type: "object"
+ required:
+ - "host"
+ - "port"
+ - "username"
+ - "database"
+ - "schema"
+ additionalProperties: true
+ properties:
+ host:
+ title: "Host"
+ description: "Hostname of the database."
+ type: "string"
+ order: 0
+ port:
+ title: "Port"
+ description: "Port of the database."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 1433
+ examples:
+ - "1433"
+ order: 1
+ database:
+ title: "DB Name"
+ description: "Name of the database."
+ type: "string"
+ order: 2
+ schema:
+ title: "Default Schema"
+ description: "The default schema tables are written to if the source does\
+ \ not specify a namespace. The usual value for this field is \"public\"\
+ ."
+ type: "string"
+ examples:
+ - "public"
+ default: "public"
+ order: 3
+ username:
+ title: "User"
+ description: "Username to use to access the database."
+ type: "string"
+ order: 4
+ password:
+ title: "Password"
+ description: "Password associated with the username."
+ type: "string"
+ airbyte_secret: true
+ order: 5
+ ssl_method:
+ title: "SSL Method"
+ type: "object"
+ description: "Encryption method to use when communicating with the database"
+ order: 6
+ oneOf:
+ - title: "Unencrypted"
+ additionalProperties: false
+ description: "Data transfer will not be encrypted."
+ required:
+ - "ssl_method"
+ type: "object"
+ properties:
+ ssl_method:
+ type: "string"
+ enum:
+ - "unencrypted"
+ default: "unencrypted"
+ - title: "Encrypted (trust server certificate)"
+ additionalProperties: false
+ description: "Use the cert provided by the server without verification.\
+ \ (For testing purposes only!)"
+ required:
+ - "ssl_method"
+ type: "object"
+ properties:
+ ssl_method:
+ type: "string"
+ enum:
+ - "encrypted_trust_server_certificate"
+ default: "encrypted_trust_server_certificate"
+ - title: "Encrypted (verify certificate)"
+ additionalProperties: false
+ description: "Verify and use the cert provided by the server."
+ required:
+ - "ssl_method"
+ - "trustStoreName"
+ - "trustStorePassword"
+ type: "object"
+ properties:
+ ssl_method:
+ type: "string"
+ enum:
+ - "encrypted_verify_certificate"
+ default: "encrypted_verify_certificate"
+ hostNameInCertificate:
+ title: "Host Name In Certificate"
+ type: "string"
+ description: "Specifies the host name of the server. The value of\
+ \ this property must match the subject property of the certificate."
+ order: 7
+ tunnel_method:
+ type: "object"
+ title: "SSH Tunnel Method"
+ description: "Whether to initiate an SSH tunnel before connecting to the\
+ \ database, and if so, which kind of authentication to use."
+ oneOf:
+ - title: "No Tunnel"
+ required:
+ - "tunnel_method"
+ properties:
+ tunnel_method:
+ description: "No ssh tunnel needed to connect to database"
+ type: "string"
+ const: "NO_TUNNEL"
+ order: 0
+ - title: "SSH Key Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "ssh_key"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and ssh key"
+ type: "string"
+ const: "SSH_KEY_AUTH"
+ order: 0
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host."
+ type: "string"
+ order: 3
+ ssh_key:
+ title: "SSH Private Key"
+ description: "OS-level user account ssh key credentials in RSA PEM\
+ \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )"
+ type: "string"
+ airbyte_secret: true
+ multiline: true
+ order: 4
+ - title: "Password Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "tunnel_user_password"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and password authentication"
+ type: "string"
+ const: "SSH_PASSWORD_AUTH"
+ order: 0
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host"
+ type: "string"
+ order: 3
+ tunnel_user_password:
+ title: "Password"
+ description: "OS-level password for logging into the jump server host"
+ type: "string"
+ airbyte_secret: true
+ order: 4
+ supportsIncremental: true
+ supportsNormalization: true
+ supportsDBT: true
+ supported_destination_sync_modes:
+ - "overwrite"
+ - "append"
+ - "append_dedup"
+- dockerImage: "airbyte/destination-meilisearch:0.2.10"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/destinations/meilisearch"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "MeiliSearch Destination Spec"
+ type: "object"
+ required:
+ - "host"
+ additionalProperties: true
+ properties:
+ host:
+ title: "Host"
+ description: "Hostname of the MeiliSearch instance"
+ type: "string"
+ order: 0
+ api_key:
+ title: "API Key"
+ airbyte_secret: true
+ description: "MeiliSearch instance API Key"
+ type: "string"
+ order: 1
+ supportsIncremental: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes:
+ - "overwrite"
+ - "append"
+- dockerImage: "airbyte/destination-mongodb:0.1.2"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/destinations/mongodb"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "MongoDB Destination Spec"
+ type: "object"
+ required:
+ - "database"
+ - "auth_type"
+ additionalProperties: true
+ properties:
+ instance_type:
+ description: "MongoDb instance to connect to. For MongoDB Atlas and Replica\
+ \ Set TLS connection is used by default."
+ title: "MongoDb instance type"
+ type: "object"
+ order: 0
+ oneOf:
+ - title: "Standalone MongoDb Instance"
+ required:
+ - "instance"
+ - "host"
+ - "port"
+ properties:
+ instance:
+ type: "string"
+ enum:
+ - "standalone"
+ default: "standalone"
+ host:
+ title: "Host"
+ type: "string"
+ description: "Host of a Mongo database to be replicated."
+ order: 0
+ port:
+ title: "Port"
+ type: "integer"
+ description: "Port of a Mongo database to be replicated."
+ minimum: 0
+ maximum: 65536
+ default: 27017
+ examples:
+ - "27017"
+ order: 1
+ tls:
+ title: "TLS connection"
+ type: "boolean"
+ description: "Indicates whether TLS encryption protocol will be used\
+ \ to connect to MongoDB. It is recommended to use TLS connection\
+ \ if possible. For more information see documentation."
+ default: false
+ order: 2
+ - title: "Replica Set"
+ required:
+ - "instance"
+ - "server_addresses"
+ properties:
+ instance:
+ type: "string"
+ enum:
+ - "replica"
+ default: "replica"
+ server_addresses:
+ title: "Server addresses"
+ type: "string"
+ description: "The members of a replica set. Please specify `host`:`port`\
+ \ of each member seperated by comma."
+ examples:
+ - "host1:27017,host2:27017,host3:27017"
+ order: 0
+ replica_set:
+ title: "Replica Set"
+ type: "string"
+ description: "A replica set name."
+ order: 1
+ - title: "MongoDB Atlas"
+ additionalProperties: false
+ required:
+ - "instance"
+ - "cluster_url"
+ properties:
+ instance:
+ type: "string"
+ enum:
+ - "atlas"
+ default: "atlas"
+ cluster_url:
+ title: "Cluster URL"
+ type: "string"
+ description: "URL of a cluster to connect to."
+ order: 0
+ database:
+ title: "DB Name"
+ description: "Name of the database."
+ type: "string"
+ order: 2
+ auth_type:
+ title: "Authorization type"
+ type: "object"
+ description: "Authorization type."
+ oneOf:
+ - title: "None"
+ additionalProperties: false
+ description: "None."
+ required:
+ - "authorization"
+ type: "object"
+ properties:
+ authorization:
+ type: "string"
+ const: "none"
+ - title: "Login/Password"
+ additionalProperties: false
+ description: "Login/Password."
+ required:
+ - "authorization"
+ - "username"
+ - "password"
+ type: "object"
+ properties:
+ authorization:
+ type: "string"
+ const: "login/password"
+ username:
+ title: "User"
+ description: "Username to use to access the database."
+ type: "string"
+ order: 1
+ password:
+ title: "Password"
+ description: "Password associated with the username."
+ type: "string"
+ airbyte_secret: true
+ order: 2
+ supportsIncremental: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes:
+ - "overwrite"
+ - "append"
+- dockerImage: "airbyte/destination-mysql:0.1.13"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/destinations/mysql"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "MySQL Destination Spec"
+ type: "object"
+ required:
+ - "host"
+ - "port"
+ - "username"
+ - "database"
+ additionalProperties: true
+ properties:
+ host:
+ title: "Host"
+ description: "Hostname of the database."
+ type: "string"
+ order: 0
+ port:
+ title: "Port"
+ description: "Port of the database."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 3306
+ examples:
+ - "3306"
+ order: 1
+ database:
+ title: "DB Name"
+ description: "Name of the database."
+ type: "string"
+ order: 2
+ username:
+ title: "User"
+ description: "Username to use to access the database."
+ type: "string"
+ order: 3
+ password:
+ title: "Password"
+ description: "Password associated with the username."
+ type: "string"
+ airbyte_secret: true
+ order: 4
+ ssl:
+ title: "SSL Connection"
+ description: "Encrypt data using SSL."
+ type: "boolean"
+ default: true
+ order: 5
+ tunnel_method:
+ type: "object"
+ title: "SSH Tunnel Method"
+ description: "Whether to initiate an SSH tunnel before connecting to the\
+ \ database, and if so, which kind of authentication to use."
+ oneOf:
+ - title: "No Tunnel"
+ required:
+ - "tunnel_method"
+ properties:
+ tunnel_method:
+ description: "No ssh tunnel needed to connect to database"
+ type: "string"
+ const: "NO_TUNNEL"
+ order: 0
+ - title: "SSH Key Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "ssh_key"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and ssh key"
+ type: "string"
+ const: "SSH_KEY_AUTH"
+ order: 0
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host."
+ type: "string"
+ order: 3
+ ssh_key:
+ title: "SSH Private Key"
+ description: "OS-level user account ssh key credentials for logging\
+ \ into the jump server host."
+ type: "string"
+ airbyte_secret: true
+ multiline: true
+ order: 4
+ - title: "Password Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "tunnel_user_password"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and password authentication"
+ type: "string"
+ const: "SSH_PASSWORD_AUTH"
+ order: 0
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host"
+ type: "string"
+ order: 3
+ tunnel_user_password:
+ title: "Password"
+ description: "OS-level password for logging into the jump server host"
+ type: "string"
+ airbyte_secret: true
+ order: 4
+ supportsIncremental: true
+ supportsNormalization: true
+ supportsDBT: true
+ supported_destination_sync_modes:
+ - "overwrite"
+ - "append"
+- dockerImage: "airbyte/destination-oracle:0.1.11"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/destinations/oracle"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Oracle Destination Spec"
+ type: "object"
+ required:
+ - "host"
+ - "port"
+ - "username"
+ - "sid"
+ additionalProperties: true
+ properties:
+ host:
+ title: "Host"
+ description: "Hostname of the database."
+ type: "string"
+ order: 0
+ port:
+ title: "Port"
+ description: "Port of the database."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 1521
+ examples:
+ - "1521"
+ order: 1
+ sid:
+ title: "SID"
+ description: "SID"
+ type: "string"
+ order: 2
+ username:
+ title: "User"
+ description: "Username to use to access the database. This user must have\
+ \ CREATE USER privileges in the database."
+ type: "string"
+ order: 3
+ password:
+ title: "Password"
+ description: "Password associated with the username."
+ type: "string"
+ airbyte_secret: true
+ order: 4
+ schema:
+ title: "Default Schema"
+ description: "The default schema tables are written to if the source does\
+ \ not specify a namespace. The usual value for this field is \"airbyte\"\
+ . In Oracle, schemas and users are the same thing, so the \"user\" parameter\
+ \ is used as the login credentials and this is used for the default Airbyte\
+ \ message schema."
+ type: "string"
+ examples:
+ - "airbyte"
+ default: "airbyte"
+ order: 5
+ encryption:
+ title: "Encryption"
+ type: "object"
+ description: "Encryption method to use when communicating with the database"
+ order: 6
+ oneOf:
+ - title: "Unencrypted"
+ additionalProperties: false
+ description: "Data transfer will not be encrypted."
+ required:
+ - "encryption_method"
+ properties:
+ encryption_method:
+ type: "string"
+ const: "unencrypted"
+ enum:
+ - "unencrypted"
+ default: "unencrypted"
+ - title: "Native Network Ecryption (NNE)"
+ additionalProperties: false
+ description: "Native network encryption gives you the ability to encrypt\
+ \ database connections, without the configuration overhead of TCP/IP\
+ \ and SSL/TLS and without the need to open and listen on different ports."
+ required:
+ - "encryption_method"
+ properties:
+ encryption_method:
+ type: "string"
+ const: "client_nne"
+ enum:
+ - "client_nne"
+ default: "client_nne"
+ encryption_algorithm:
+ type: "string"
+ description: "This parameter defines the encryption algorithm to be\
+ \ used"
+ title: "Encryption Algorithm"
+ default: "AES256"
+ enum:
+ - "AES256"
+ - "RC4_56"
+ - "3DES168"
+ - title: "TLS Encrypted (verify certificate)"
+ additionalProperties: false
+ description: "Verify and use the cert provided by the server."
+ required:
+ - "encryption_method"
+ - "ssl_certificate"
+ properties:
+ encryption_method:
+ type: "string"
+ const: "encrypted_verify_certificate"
+ enum:
+ - "encrypted_verify_certificate"
+ default: "encrypted_verify_certificate"
+ ssl_certificate:
+ title: "SSL PEM file"
+ description: "Privacy Enhanced Mail (PEM) files are concatenated certificate\
+ \ containers frequently used in certificate installations"
+ type: "string"
+ airbyte_secret: true
+ multiline: true
+ tunnel_method:
+ type: "object"
+ title: "SSH Tunnel Method"
+ description: "Whether to initiate an SSH tunnel before connecting to the\
+ \ database, and if so, which kind of authentication to use."
+ oneOf:
+ - title: "No Tunnel"
+ required:
+ - "tunnel_method"
+ properties:
+ tunnel_method:
+ description: "No ssh tunnel needed to connect to database"
+ type: "string"
+ const: "NO_TUNNEL"
+ order: 0
+ - title: "SSH Key Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "ssh_key"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and ssh key"
+ type: "string"
+ const: "SSH_KEY_AUTH"
+ order: 0
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host."
+ type: "string"
+ order: 3
+ ssh_key:
+ title: "SSH Private Key"
+ description: "OS-level user account ssh key credentials in RSA PEM\
+ \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )"
+ type: "string"
+ airbyte_secret: true
+ multiline: true
+ order: 4
+ - title: "Password Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "tunnel_user_password"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and password authentication"
+ type: "string"
+ const: "SSH_PASSWORD_AUTH"
+ order: 0
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host"
+ type: "string"
+ order: 3
+ tunnel_user_password:
+ title: "Password"
+ description: "OS-level password for logging into the jump server host"
+ type: "string"
+ airbyte_secret: true
+ order: 4
+ supportsIncremental: true
+ supportsNormalization: false
+ supportsDBT: true
+ supported_destination_sync_modes:
+ - "overwrite"
+ - "append"
+- dockerImage: "airbyte/destination-postgres:0.3.11"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/destinations/postgres"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Postgres Destination Spec"
+ type: "object"
+ required:
+ - "host"
+ - "port"
+ - "username"
+ - "database"
+ - "schema"
+ additionalProperties: true
+ properties:
+ host:
+ title: "Host"
+ description: "Hostname of the database."
+ type: "string"
+ order: 0
+ port:
+ title: "Port"
+ description: "Port of the database."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 5432
+ examples:
+ - "5432"
+ order: 1
+ database:
+ title: "DB Name"
+ description: "Name of the database."
+ type: "string"
+ order: 2
+ schema:
+ title: "Default Schema"
+ description: "The default schema tables are written to if the source does\
+ \ not specify a namespace. The usual value for this field is \"public\"\
+ ."
+ type: "string"
+ examples:
+ - "public"
+ default: "public"
+ order: 3
+ username:
+ title: "User"
+ description: "Username to use to access the database."
+ type: "string"
+ order: 4
+ password:
+ title: "Password"
+ description: "Password associated with the username."
+ type: "string"
+ airbyte_secret: true
+ order: 5
+ ssl:
+ title: "SSL Connection"
+ description: "Encrypt data using SSL."
+ type: "boolean"
+ default: false
+ order: 6
+ tunnel_method:
+ type: "object"
+ title: "SSH Tunnel Method"
+ description: "Whether to initiate an SSH tunnel before connecting to the\
+ \ database, and if so, which kind of authentication to use."
+ oneOf:
+ - title: "No Tunnel"
+ required:
+ - "tunnel_method"
+ properties:
+ tunnel_method:
+ description: "No ssh tunnel needed to connect to database"
+ type: "string"
+ const: "NO_TUNNEL"
+ order: 0
+ - title: "SSH Key Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "ssh_key"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and ssh key"
+ type: "string"
+ const: "SSH_KEY_AUTH"
+ order: 0
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host."
+ type: "string"
+ order: 3
+ ssh_key:
+ title: "SSH Private Key"
+ description: "OS-level user account ssh key credentials for logging\
+ \ into the jump server host."
+ type: "string"
+ airbyte_secret: true
+ multiline: true
+ order: 4
+ - title: "Password Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "tunnel_user_password"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and password authentication"
+ type: "string"
+ const: "SSH_PASSWORD_AUTH"
+ order: 0
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host"
+ type: "string"
+ order: 3
+ tunnel_user_password:
+ title: "Password"
+ description: "OS-level password for logging into the jump server host"
+ type: "string"
+ airbyte_secret: true
+ order: 4
+ supportsIncremental: true
+ supportsNormalization: true
+ supportsDBT: true
+ supported_destination_sync_modes:
+ - "overwrite"
+ - "append"
+ - "append_dedup"
+- dockerImage: "airbyte/destination-redshift:0.3.19"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/destinations/redshift"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Redshift Destination Spec"
+ type: "object"
+ required:
+ - "host"
+ - "port"
+ - "database"
+ - "username"
+ - "password"
+ - "schema"
+ additionalProperties: true
+ properties:
+ host:
+ description: "Host Endpoint of the Redshift Cluster (must include the cluster-id,\
+ \ region and end with .redshift.amazonaws.com)"
+ type: "string"
+ title: "Host"
+ port:
+ description: "Port of the database."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 5439
+ examples:
+ - "5439"
+ title: "Port"
+ username:
+ description: "Username to use to access the database."
+ type: "string"
+ title: "Username"
+ password:
+ description: "Password associated with the username."
+ type: "string"
+ airbyte_secret: true
+ title: "Password"
+ database:
+ description: "Name of the database."
+ type: "string"
+ title: "Database"
+ schema:
+ description: "The default schema tables are written to if the source does\
+ \ not specify a namespace. Unless specifically configured, the usual value\
+ \ for this field is \"public\"."
+ type: "string"
+ examples:
+ - "public"
+ default: "public"
+ title: "Default Schema"
+ s3_bucket_name:
+ title: "S3 Bucket Name"
+ type: "string"
+ description: "The name of the staging S3 bucket to use if utilising a COPY\
+ \ strategy. COPY is recommended for production workloads for better speed\
+ \ and scalability. See AWS docs for more details."
+ examples:
+ - "airbyte.staging"
+ s3_bucket_region:
+ title: "S3 Bucket Region"
+ type: "string"
+ default: ""
+ description: "The region of the S3 staging bucket to use if utilising a\
+ \ copy strategy."
+ enum:
+ - ""
+ - "us-east-1"
+ - "us-east-2"
+ - "us-west-1"
+ - "us-west-2"
+ - "af-south-1"
+ - "ap-east-1"
+ - "ap-south-1"
+ - "ap-northeast-1"
+ - "ap-northeast-2"
+ - "ap-northeast-3"
+ - "ap-southeast-1"
+ - "ap-southeast-2"
+ - "ca-central-1"
+ - "cn-north-1"
+ - "cn-northwest-1"
+ - "eu-central-1"
+ - "eu-north-1"
+ - "eu-south-1"
+ - "eu-west-1"
+ - "eu-west-2"
+ - "eu-west-3"
+ - "sa-east-1"
+ - "me-south-1"
+ access_key_id:
+ type: "string"
+ description: "The Access Key Id granting allow one to access the above S3\
+ \ staging bucket. Airbyte requires Read and Write permissions to the given\
+ \ bucket."
+ title: "S3 Key Id"
+ airbyte_secret: true
+ secret_access_key:
+ type: "string"
+ description: "The corresponding secret to the above access key id."
+ title: "S3 Access Key"
+ airbyte_secret: true
+ part_size:
+ type: "integer"
+ minimum: 10
+ maximum: 100
+ examples:
+ - "10"
+ description: "Optional. Increase this if syncing tables larger than 100GB.\
+ \ Only relevant for COPY. Files are streamed to S3 in parts. This determines\
+ \ the size of each part, in MBs. As S3 has a limit of 10,000 parts per\
+ \ file, part size affects the table size. This is 10MB by default, resulting\
+ \ in a default limit of 100GB tables. Note, a larger part size will result\
+ \ in larger memory requirements. A rule of thumb is to multiply the part\
+ \ size by 10 to get the memory requirement. Modify this with care."
+ title: "Stream Part Size"
+ supportsIncremental: true
+ supportsNormalization: true
+ supportsDBT: true
+ supported_destination_sync_modes:
+ - "overwrite"
+ - "append"
+ - "append_dedup"
+- dockerImage: "airbyte/destination-s3:0.1.12"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/destinations/s3"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "S3 Destination Spec"
+ type: "object"
+ required:
+ - "s3_bucket_name"
+ - "s3_bucket_path"
+ - "s3_bucket_region"
+ - "access_key_id"
+ - "secret_access_key"
+ - "format"
+ additionalProperties: false
+ properties:
+ s3_endpoint:
+ title: "Endpoint"
+ type: "string"
+ default: ""
+ description: "This is your S3 endpoint url.(if you are working with AWS\
+ \ S3, just leave empty)."
+ examples:
+ - "http://localhost:9000"
+ s3_bucket_name:
+ title: "S3 Bucket Name"
+ type: "string"
+ description: "The name of the S3 bucket."
+ examples:
+ - "airbyte_sync"
+ s3_bucket_path:
+ description: "Directory under the S3 bucket where data will be written."
+ type: "string"
+ examples:
+ - "data_sync/test"
+ s3_bucket_region:
+ title: "S3 Bucket Region"
+ type: "string"
+ default: ""
+ description: "The region of the S3 bucket."
+ enum:
+ - ""
+ - "us-east-1"
+ - "us-east-2"
+ - "us-west-1"
+ - "us-west-2"
+ - "af-south-1"
+ - "ap-east-1"
+ - "ap-south-1"
+ - "ap-northeast-1"
+ - "ap-northeast-2"
+ - "ap-northeast-3"
+ - "ap-southeast-1"
+ - "ap-southeast-2"
+ - "ca-central-1"
+ - "cn-north-1"
+ - "cn-northwest-1"
+ - "eu-central-1"
+ - "eu-north-1"
+ - "eu-south-1"
+ - "eu-west-1"
+ - "eu-west-2"
+ - "eu-west-3"
+ - "sa-east-1"
+ - "me-south-1"
+ - "us-gov-east-1"
+ - "us-gov-west-1"
+ access_key_id:
+ type: "string"
+ description: "The access key id to access the S3 bucket. Airbyte requires\
+ \ Read and Write permissions to the given bucket."
+ title: "S3 Key Id"
+ airbyte_secret: true
+ examples:
+ - "A012345678910EXAMPLE"
+ secret_access_key:
+ type: "string"
+ description: "The corresponding secret to the access key id."
+ title: "S3 Access Key"
+ airbyte_secret: true
+ examples:
+ - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY"
+ format:
+ title: "Output Format"
+ type: "object"
+ description: "Output data format"
+ oneOf:
+ - title: "Avro: Apache Avro"
+ required:
+ - "format_type"
+ - "compression_codec"
+ properties:
+ format_type:
+ type: "string"
+ enum:
+ - "Avro"
+ default: "Avro"
+ compression_codec:
+ title: "Compression Codec"
+ description: "The compression algorithm used to compress data. Default\
+ \ to no compression."
+ type: "object"
+ oneOf:
+ - title: "no compression"
+ required:
+ - "codec"
+ properties:
+ codec:
+ type: "string"
+ enum:
+ - "no compression"
+ default: "no compression"
+ - title: "Deflate"
+ required:
+ - "codec"
+ - "compression_level"
+ properties:
+ codec:
+ type: "string"
+ enum:
+ - "Deflate"
+ default: "Deflate"
+ compression_level:
+ title: "Deflate level"
+ description: "0: no compression & fastest, 9: best compression\
+ \ & slowest."
+ type: "integer"
+ default: 0
+ minimum: 0
+ maximum: 9
+ - title: "bzip2"
+ required:
+ - "codec"
+ properties:
+ codec:
+ type: "string"
+ enum:
+ - "bzip2"
+ default: "bzip2"
+ - title: "xz"
+ required:
+ - "codec"
+ - "compression_level"
+ properties:
+ codec:
+ type: "string"
+ enum:
+ - "xz"
+ default: "xz"
+ compression_level:
+ title: "Compression level"
+ description: "See here for details."
+ type: "integer"
+ default: 6
+ minimum: 0
+ maximum: 9
+ - title: "zstandard"
+ required:
+ - "codec"
+ - "compression_level"
+ properties:
+ codec:
+ type: "string"
+ enum:
+ - "zstandard"
+ default: "zstandard"
+ compression_level:
+ title: "Compression level"
+ description: "Negative levels are 'fast' modes akin to lz4 or\
+ \ snappy, levels above 9 are generally for archival purposes,\
+ \ and levels above 18 use a lot of memory."
+ type: "integer"
+ default: 3
+ minimum: -5
+ maximum: 22
+ include_checksum:
+ title: "Include checksum"
+ description: "If true, include a checksum with each data block."
+ type: "boolean"
+ default: false
+ - title: "snappy"
+ required:
+ - "codec"
+ properties:
+ codec:
+ type: "string"
+ enum:
+ - "snappy"
+ default: "snappy"
+ part_size_mb:
+ title: "Block Size (MB) for Amazon S3 multipart upload"
+ description: "This is the size of a \"Part\" being buffered in memory.\
+ \ It limits the memory usage when writing. Larger values will allow\
+ \ to upload a bigger files and improve the speed, but consumes9\
+ \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB."
+ type: "integer"
+ default: 5
+ examples:
+ - 5
+ - title: "CSV: Comma-Separated Values"
+ required:
+ - "format_type"
+ - "flattening"
+ properties:
+ format_type:
+ type: "string"
+ enum:
+ - "CSV"
+ default: "CSV"
+ flattening:
+ type: "string"
+ title: "Normalization (Flattening)"
+ description: "Whether the input json data should be normalized (flattened)\
+ \ in the output CSV. Please refer to docs for details."
+ default: "No flattening"
+ enum:
+ - "No flattening"
+ - "Root level flattening"
+ part_size_mb:
+ title: "Block Size (MB) for Amazon S3 multipart upload"
+ description: "This is the size of a \"Part\" being buffered in memory.\
+ \ It limits the memory usage when writing. Larger values will allow\
+ \ to upload a bigger files and improve the speed, but consumes9\
+ \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB."
+ type: "integer"
+ default: 5
+ examples:
+ - 5
+ - title: "JSON Lines: newline-delimited JSON"
+ required:
+ - "format_type"
+ properties:
+ format_type:
+ type: "string"
+ enum:
+ - "JSONL"
+ default: "JSONL"
+ part_size_mb:
+ title: "Block Size (MB) for Amazon S3 multipart upload"
+ description: "This is the size of a \"Part\" being buffered in memory.\
+ \ It limits the memory usage when writing. Larger values will allow\
+ \ to upload a bigger files and improve the speed, but consumes9\
+ \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB."
+ type: "integer"
+ default: 5
+ examples:
+ - 5
+ - title: "Parquet: Columnar Storage"
+ required:
+ - "format_type"
+ properties:
+ format_type:
+ type: "string"
+ enum:
+ - "Parquet"
+ default: "Parquet"
+ compression_codec:
+ title: "Compression Codec"
+ description: "The compression algorithm used to compress data pages."
+ type: "string"
+ enum:
+ - "UNCOMPRESSED"
+ - "SNAPPY"
+ - "GZIP"
+ - "LZO"
+ - "BROTLI"
+ - "LZ4"
+ - "ZSTD"
+ default: "UNCOMPRESSED"
+ block_size_mb:
+ title: "Block Size (Row Group Size) (MB)"
+ description: "This is the size of a row group being buffered in memory.\
+ \ It limits the memory usage when writing. Larger values will improve\
+ \ the IO when reading, but consume more memory when writing. Default:\
+ \ 128 MB."
+ type: "integer"
+ default: 128
+ examples:
+ - 128
+ max_padding_size_mb:
+ title: "Max Padding Size (MB)"
+ description: "Maximum size allowed as padding to align row groups.\
+ \ This is also the minimum size of a row group. Default: 8 MB."
+ type: "integer"
+ default: 8
+ examples:
+ - 8
+ page_size_kb:
+ title: "Page Size (KB)"
+ description: "The page size is for compression. A block is composed\
+ \ of pages. A page is the smallest unit that must be read fully\
+ \ to access a single record. If this value is too small, the compression\
+ \ will deteriorate. Default: 1024 KB."
+ type: "integer"
+ default: 1024
+ examples:
+ - 1024
+ dictionary_page_size_kb:
+ title: "Dictionary Page Size (KB)"
+ description: "There is one dictionary page per column per row group\
+ \ when dictionary encoding is used. The dictionary page size works\
+ \ like the page size but for dictionary. Default: 1024 KB."
+ type: "integer"
+ default: 1024
+ examples:
+ - 1024
+ dictionary_encoding:
+ title: "Dictionary Encoding"
+ description: "Default: true."
+ type: "boolean"
+ default: true
+ supportsIncremental: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes:
+ - "overwrite"
+ - "append"
+- dockerImage: "airbyte/destination-snowflake:0.3.16"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/destinations/snowflake"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Snowflake Destination Spec"
+ type: "object"
+ required:
+ - "host"
+ - "role"
+ - "warehouse"
+ - "database"
+ - "schema"
+ - "username"
+ - "password"
+ additionalProperties: true
+ properties:
+ host:
+ description: "Host domain of the snowflake instance (must include the account,\
+ \ region, cloud environment, and end with snowflakecomputing.com)."
+ examples:
+ - "accountname.us-east-2.aws.snowflakecomputing.com"
+ type: "string"
+ title: "Host"
+ order: 0
+ role:
+ description: "The role you created for Airbyte to access Snowflake."
+ examples:
+ - "AIRBYTE_ROLE"
+ type: "string"
+ title: "Role"
+ order: 1
+ warehouse:
+ description: "The warehouse you created for Airbyte to sync data into."
+ examples:
+ - "AIRBYTE_WAREHOUSE"
+ type: "string"
+ title: "Warehouse"
+ order: 2
+ database:
+ description: "The database you created for Airbyte to sync data into."
+ examples:
+ - "AIRBYTE_DATABASE"
+ type: "string"
+ title: "Database"
+ order: 3
+ schema:
+ description: "The default Snowflake schema tables are written to if the\
+ \ source does not specify a namespace."
+ examples:
+ - "AIRBYTE_SCHEMA"
+ type: "string"
+ title: "Default Schema"
+ order: 4
+ username:
+ description: "The username you created to allow Airbyte to access the database."
+ examples:
+ - "AIRBYTE_USER"
+ type: "string"
+ title: "Username"
+ order: 5
+ password:
+ description: "Password associated with the username."
+ type: "string"
+ airbyte_secret: true
+ title: "Password"
+ order: 6
+ loading_method:
+ type: "object"
+ title: "Loading Method"
+ description: "Loading method used to send data to Snowflake."
+ order: 7
+ oneOf:
+ - title: "Standard Inserts"
+ additionalProperties: false
+ description: "Uses INSERTstatements to send batches of records\ + \ to Snowflake. Easiest (no setup) but not recommended for large production\ + \ workloads due to slow speed." + required: + - "method" + properties: + method: + type: "string" + enum: + - "Standard" + default: "Standard" + - title: "AWS S3 Staging" + additionalProperties: false + description: "Writes large batches of records to a file, uploads the file\ + \ to S3, then uses
COPY INTO tableto upload the file. Recommended\ + \ for large production workloads for better speed and scalability." + required: + - "method" + - "s3_bucket_name" + - "access_key_id" + - "secret_access_key" + properties: + method: + type: "string" + enum: + - "S3 Staging" + default: "S3 Staging" + order: 0 + s3_bucket_name: + title: "S3 Bucket Name" + type: "string" + description: "The name of the staging S3 bucket. Airbyte will write\ + \ files to this bucket and read them via
COPYstatements\ + \ on Snowflake." + examples: + - "airbyte.staging" + order: 1 + s3_bucket_region: + title: "S3 Bucket Region" + type: "string" + default: "" + description: "The region of the S3 staging bucket to use if utilising\ + \ a copy strategy." + enum: + - "" + - "us-east-1" + - "us-east-2" + - "us-west-1" + - "us-west-2" + - "af-south-1" + - "ap-east-1" + - "ap-south-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-southeast-1" + - "ap-southeast-2" + - "ca-central-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "eu-south-1" + - "eu-north-1" + - "sa-east-1" + - "me-south-1" + order: 2 + access_key_id: + type: "string" + description: "The Access Key Id granting allow one to access the above\ + \ S3 staging bucket. Airbyte requires Read and Write permissions\ + \ to the given bucket." + title: "S3 Key Id" + airbyte_secret: true + order: 3 + secret_access_key: + type: "string" + description: "The corresponding secret to the above access key id." + title: "S3 Access Key" + airbyte_secret: true + order: 4 + - title: "GCS Staging" + additionalProperties: false + description: "Writes large batches of records to a file, uploads the file\ + \ to GCS, then uses
COPY INTO tableto upload the file. Recommended\ + \ for large production workloads for better speed and scalability." + required: + - "method" + - "project_id" + - "bucket_name" + - "credentials_json" + properties: + method: + type: "string" + enum: + - "GCS Staging" + default: "GCS Staging" + order: 0 + project_id: + title: "GCP Project ID" + type: "string" + description: "The name of the GCP project ID for your credentials." + examples: + - "my-project" + order: 1 + bucket_name: + title: "GCS Bucket Name" + type: "string" + description: "The name of the staging GCS bucket. Airbyte will write\ + \ files to this bucket and read them via
COPYstatements\ + \ on Snowflake." + examples: + - "airbyte-staging" + order: 2 + credentials_json: + title: "Google Application Credentials" + type: "string" + description: "The contents of the JSON key file that has read/write\ + \ permissions to the staging GCS bucket. You will separately need\ + \ to grant bucket access to your Snowflake GCP service account.\ + \ See the GCP docs for more information on how to generate a JSON key\ + \ for your service account." + airbyte_secret: true + multiline: true + order: 3 + supportsIncremental: true + supportsNormalization: true + supportsDBT: true + supported_destination_sync_modes: + - "overwrite" + - "append" + - "append_dedup" diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml new file mode 100644 index 000000000000..71e4a45e6915 --- /dev/null +++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml @@ -0,0 +1,5836 @@ +# This file is generated by io.airbyte.config.specs.SeedConnectorSpecGenerator. +# Do NOT edit this file directly. See generator class for more details. +--- +- dockerImage: "airbyte/source-aws-cloudtrail:0.1.2" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/sources/aws-cloudtrail" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Aws CloudTrail Spec" + type: "object" + required: + - "aws_key_id" + - "aws_secret_key" + - "aws_region_name" + - "start_date" + additionalProperties: true + properties: + aws_key_id: + type: "string" + description: "Specifies an AWS access key associated with an IAM user or\ + \ role." + airbyte_secret: true + aws_secret_key: + type: "string" + description: "Specifies the secret key associated with the access key. This\ + \ is essentially the 'password' for the access key." + airbyte_secret: true + aws_region_name: + type: "string" + description: "The default AWS Region to use, for example, us-west-1 or us-west-2.\ + \ When specifying a Region inline during client initialization, this property\ + \ is named region_name." + start_date: + type: "string" + description: "The date you would like to replicate data. Data in ClouTraid\ + \ is available for last 90 days only. Format: YYYY-MM-DD." + examples: + - "2021-01-01" + default: "1970-01-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-amazon-ads:0.1.2" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/sources/amazon-ads" + connectionSpecification: + title: "Amazon Ads Spec" + type: "object" + properties: + client_id: + title: "Client Id" + description: "Oauth client id How to create your Login with Amazon" + name: "Client ID" + type: "string" + client_secret: + title: "Client Secret" + description: "Oauth client secret How to create your Login with Amazon" + name: "Client secret" + airbyte_secret: true + type: "string" + scope: + title: "Scope" + description: "By default its advertising::campaign_management, but customers\ + \ may need to set scope to cpc_advertising:campaign_management." 
+ default: "advertising::campaign_management" + name: "Client scope" + examples: + - "cpc_advertising:campaign_management" + type: "string" + refresh_token: + title: "Refresh Token" + description: "Oauth 2.0 refresh_token, read details here" + name: "Oauth refresh token" + airbyte_secret: true + type: "string" + start_date: + title: "Start Date" + description: "Start date for collectiong reports, should not be more than\ + \ 60 days in past. In YYYY-MM-DD format" + name: "Start date" + examples: + - "2022-10-10" + - "2022-10-22" + type: "string" + region: + description: "Region to pull data from (EU/NA/FE/SANDBOX)" + default: "NA" + name: "Region" + title: "AmazonAdsRegion" + enum: + - "NA" + - "EU" + - "FE" + - "SANDBOX" + type: "string" + profiles: + title: "Profiles" + description: "profile Ids you want to fetch data for" + name: "Profile Ids" + type: "array" + items: + type: "integer" + required: + - "client_id" + - "client_secret" + - "refresh_token" + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-amazon-seller-partner:0.2.0" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/sources/amazon-seller-partner" + changelogUrl: "https://docs.airbyte.io/integrations/sources/amazon-seller-partner" + connectionSpecification: + title: "Amazon Seller Partner Spec" + type: "object" + properties: + replication_start_date: + title: "Replication Start Date" + description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + type: "string" + refresh_token: + title: "Refresh Token" + description: "The refresh token used obtained via authorization (can be\ + \ passed to the client instead)" + airbyte_secret: true + type: "string" + lwa_app_id: + title: "Lwa App Id" + description: "Your login with amazon app id" + airbyte_secret: true + type: "string" + lwa_client_secret: + title: "Lwa Client Secret" + description: "Your login with amazon client secret" + airbyte_secret: true + type: "string" + aws_access_key: + title: "Aws Access Key" + description: "AWS user access key" + airbyte_secret: true + type: "string" + aws_secret_key: + title: "Aws Secret Key" + description: "AWS user secret key" + airbyte_secret: true + type: "string" + role_arn: + title: "Role Arn" + description: "The role's arn (needs permission to 'Assume Role' STS)" + airbyte_secret: true + type: "string" + aws_environment: + title: "AWSEnvironment" + description: "An enumeration." + enum: + - "PRODUCTION" + - "SANDBOX" + type: "string" + region: + title: "AWSRegion" + description: "An enumeration." + enum: + - "AE" + - "DE" + - "PL" + - "EG" + - "ES" + - "FR" + - "IN" + - "IT" + - "NL" + - "SA" + - "SE" + - "TR" + - "UK" + - "AU" + - "JP" + - "SG" + - "US" + - "BR" + - "CA" + - "MX" + - "GB" + type: "string" + required: + - "replication_start_date" + - "refresh_token" + - "lwa_app_id" + - "lwa_client_secret" + - "aws_access_key" + - "aws_secret_key" + - "role_arn" + - "aws_environment" + - "region" + definitions: + AWSEnvironment: + title: "AWSEnvironment" + description: "An enumeration." + enum: + - "PRODUCTION" + - "SANDBOX" + type: "string" + AWSRegion: + title: "AWSRegion" + description: "An enumeration." 
+ enum:
+ - "AE"
+ - "DE"
+ - "PL"
+ - "EG"
+ - "ES"
+ - "FR"
+ - "IN"
+ - "IT"
+ - "NL"
+ - "SA"
+ - "SE"
+ - "TR"
+ - "UK"
+ - "AU"
+ - "JP"
+ - "SG"
+ - "US"
+ - "BR"
+ - "CA"
+ - "MX"
+ - "GB"
+ type: "string"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-amplitude:0.1.3"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/amplitude"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Amplitude Spec"
+ type: "object"
+ required:
+ - "api_key"
+ - "secret_key"
+ - "start_date"
+ additionalProperties: false
+ properties:
+ api_key:
+ type: "string"
+ description: "This is the project’s API key, used for calling Amplitude’\
+ s APIs"
+ airbyte_secret: true
+ secret_key:
+ type: "string"
+ description: "This is the project's secret key, which is also used for calling\
+ \ Amplitude’s APIs"
+ airbyte_secret: true
+ start_date:
+ type: "string"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ description: "UTC date and time in the format 2021-01-25T00:00:00Z. Any\
+ \ data before this date will not be replicated."
+ examples:
+ - "2021-01-25T00:00:00Z"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-apify-dataset:0.1.1"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/apify-dataset"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Apify Dataset Spec"
+ type: "object"
+ required:
+ - "datasetId"
+ additionalProperties: false
+ properties:
+ datasetId:
+ type: "string"
+ description: "ID of the dataset you would like to load to Airbyte."
+ clean:
+ type: "boolean"
+ description: "If set to true, only clean items will be downloaded from the\
+ \ dataset. See description of what clean means in Apify API docs. If not sure, set clean to false."
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-appstore-singer:0.2.4"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/appstore"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Source Appstore Singer Spec"
+ type: "object"
+ required:
+ - "key_id"
+ - "private_key"
+ - "issuer_id"
+ - "vendor"
+ - "start_date"
+ additionalProperties: false
+ properties:
+ key_id:
+ type: "string"
+ description: "Key_id is the API key you use to connect to appstore's API."
+ private_key:
+ type: "string"
+ description: "Private_key is the contents of the key file you use to connect to appstore's API."
+ airbyte_secret: true
+ multiline: true
+ issuer_id:
+ type: "string"
+ description: "Issuer_id is used to generate the credentials to connect to appstore's\
+ \ API."
+ vendor:
+ type: "string"
+ description: "This is the Apple ID of your account."
+ start_date:
+ type: "string"
+ description: "Date from which to start pulling data."
+ examples:
+ - "2020-11-16T00:00:00Z"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-asana:0.1.3"
+ spec:
+ documentationUrl: "https://docsurl.com"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Asana Spec"
+ type: "object"
+ additionalProperties: true
+ properties:
+ credentials:
+ title: "Authentication mechanism"
+ description: "Choose how to authenticate to Asana"
+ type: "object"
+ oneOf:
+ - type: "object"
+ title: "Authenticate with Personal Access Token"
+ required:
+ - "personal_access_token"
+ properties:
+ option_title:
+ type: "string"
+ title: "Credentials title"
+ description: "PAT Credentials"
+ const: "PAT Credentials"
+ personal_access_token:
+ type: "string"
+ title: "Personal Access Token"
+ description: "Asana Personal Access Token (generate yours here)."
+ airbyte_secret: true
+ - type: "object"
+ title: "Authenticate via Asana (Oauth)"
+ required:
+ - "client_id"
+ - "client_secret"
+ - "refresh_token"
+ properties:
+ option_title:
+ type: "string"
+ title: "Credentials title"
+ description: "OAuth Credentials"
+ const: "OAuth Credentials"
+ client_id:
+ type: "string"
+ title: ""
+ description: ""
+ airbyte_secret: true
+ client_secret:
+ type: "string"
+ title: ""
+ description: ""
+ airbyte_secret: true
+ refresh_token:
+ type: "string"
+ title: ""
+ description: ""
+ airbyte_secret: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+ authSpecification:
+ auth_type: "oauth2.0"
+ oauth2Specification:
+ rootObject:
+ - "credentials"
+ - "1"
+ oauthFlowInitParameters:
+ - - "client_id"
+ - - "client_secret"
+ oauthFlowOutputParameters:
+ - - "refresh_token"
+- dockerImage: "airbyte/source-bamboo-hr:0.1.0"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/bamboo-hr"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Bamboo HR Spec"
+ type: "object"
+ required:
+ - "subdomain"
+ - "api_key"
+ additionalProperties: false
+ properties:
+ subdomain:
+ type: "string"
+ description: "Subdomain of Bamboo HR"
+ api_key:
+ type: "string"
+ description: "API key of Bamboo HR"
+ airbyte_secret: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-bigcommerce:0.1.0"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/bigcommerce"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "BigCommerce Source CDK Specifications"
+ type: "object"
+ required:
+ - "start_date"
+ - "store_hash"
+ - "access_token"
+ additionalProperties: false
+ properties:
+ start_date:
+ type: "string"
+ description: "The date you would like to replicate data. Format: YYYY-MM-DD."
+ examples:
+ - "2021-01-01"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$"
+ store_hash:
+ type: "string"
+ description: "The hash code of the store. For https://api.bigcommerce.com/stores/HASH_CODE/v3/,\
+ \ the store's hash code is 'HASH_CODE'."
+ access_token:
+ type: "string"
+ description: "The API Access Token."
+ airbyte_secret: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-bigquery:0.1.4"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/source/bigquery"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "BigQuery Source Spec"
+ type: "object"
+ required:
+ - "project_id"
+ - "credentials_json"
+ additionalProperties: false
+ properties:
+ project_id:
+ type: "string"
+ description: "The GCP project ID for the project containing the target BigQuery\
+ \ dataset."
+ title: "Project ID"
+ dataset_id:
+ type: "string"
+ description: "The BigQuery Dataset ID to look for tables to replicate from."
+ title: "Default Dataset ID"
+ credentials_json:
+ type: "string"
+ description: "The contents of the JSON service account key. Check out the\
+ \ docs\
+ \ if you need help generating this key."
+ title: "Credentials JSON"
+ airbyte_secret: true
+ supportsIncremental: true
+ supportsNormalization: true
+ supportsDBT: true
+ supported_destination_sync_modes: []
+ supported_sync_modes:
+ - "overwrite"
+ - "append"
+ - "append_dedup"
+- dockerImage: "airbyte/source-bing-ads:0.1.1"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/bing-ads"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Bing Ads Spec"
+ type: "object"
+ required:
+ - "accounts"
+ - "client_id"
+ - "client_secret"
+ - "customer_id"
+ - "developer_token"
+ - "refresh_token"
+ - "user_id"
+ - "reports_start_date"
+ - "hourly_reports"
+ - "daily_reports"
+ - "weekly_reports"
+ - "monthly_reports"
+ additionalProperties: false
+ properties:
+ accounts:
+ title: "Accounts"
+ type: "object"
+ description: "Account selection strategy."
+ oneOf:
+ - title: "All accounts assigned to your user"
+ additionalProperties: false
+ description: "Fetch data for all available accounts."
+ required:
+ - "selection_strategy"
+ properties:
+ selection_strategy:
+ type: "string"
+ enum:
+ - "all"
+ const: "all"
+ - title: "Subset of your accounts"
+ additionalProperties: false
+ description: "Fetch data for a subset of account ids."
+ required:
+ - "ids"
+ - "selection_strategy"
+ properties:
+ selection_strategy:
+ type: "string"
+ enum:
+ - "subset"
+ const: "subset"
+ ids:
+ type: "array"
+ description: "List of accounts from which data will be fetched."
+ items:
+ type: "string"
+ minItems: 1
+ uniqueItems: true
+ client_id:
+ type: "string"
+ description: "ID of your Microsoft Advertising client application."
+ airbyte_secret: true
+ client_secret:
+ type: "string"
+ description: "Secret of your Microsoft Advertising client application."
+ airbyte_secret: true
+ customer_id:
+ type: "string"
+ description: "User's customer ID."
+ developer_token:
+ type: "string"
+ description: "Developer token associated with user."
+ airbyte_secret: true
+ refresh_token:
+ type: "string"
+ description: "The long-lived Refresh token received via grant_type=refresh_token\
+ \ request."
+ airbyte_secret: true
+ user_id:
+ type: "string"
+ description: "Unique user identifier."
+ reports_start_date:
+ type: "string"
+ format: "date"
+ default: "2020-01-01"
+ description: "From which date to perform the initial sync for report-related streams.\
+ \ In YYYY-MM-DD format"
+ hourly_reports:
+ title: "Hourly reports"
+ type: "boolean"
+ description: "The report data will be aggregated by each hour of the day."
+ default: false
+ daily_reports:
+ title: "Daily reports"
+ type: "boolean"
+ description: "The report data will be aggregated by each day."
+ default: false
+ weekly_reports:
+ title: "Weekly reports"
+ type: "boolean"
+ description: "The report data will be aggregated by each week running from\
+ \ Sunday through Saturday."
+ default: false
+ monthly_reports:
+ title: "Monthly reports"
+ type: "boolean"
+ description: "The report data will be aggregated by each month."
+ default: false
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-braintree:0.1.0"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/braintree"
+ connectionSpecification:
+ title: "Braintree Spec"
+ type: "object"
+ properties:
+ merchant_id:
+ title: "Merchant Id"
+ description: "Merchant ID is the unique identifier for the entire gateway account."
+ name: "Merchant ID"
+ type: "string"
+ public_key:
+ title: "Public Key"
+ description: "This is your user-specific public identifier for Braintree."
+ name: "Public key"
+ type: "string"
+ private_key:
+ title: "Private Key"
+ description: "This is your user-specific private identifier."
+ name: "Private Key"
+ airbyte_secret: true
+ type: "string"
+ start_date:
+ title: "Start Date"
+ description: "The date from which you'd like to replicate data for Braintree\
+ \ API for UTC timezone. All data generated after this date will be replicated."
+ name: "Start date"
+ examples:
+ - "2020"
+ - "2020-12-30"
+ - "2020-11-22 20:20:05"
+ type: "string"
+ format: "date-time"
+ environment:
+ description: "Environment specifies where the data will come from."
+ name: "Environment"
+ examples:
+ - "sandbox"
+ - "production"
+ - "qa"
+ - "development"
+ allOf:
+ - $ref: "#/definitions/Environment"
+ required:
+ - "merchant_id"
+ - "public_key"
+ - "private_key"
+ - "environment"
+ definitions:
+ Environment:
+ title: "Environment"
+ description: "An enumeration."
+ enum:
+ - "Development"
+ - "Sandbox"
+ - "Qa"
+ - "Production"
+ type: "string"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-cart:0.1.3"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/cart"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Cart Spec"
+ type: "object"
+ required:
+ - "access_token"
+ - "start_date"
+ - "store_name"
+ additionalProperties: true
+ properties:
+ access_token:
+ type: "string"
+ airbyte_secret: true
+ description: "API Key. See the docs for information on how to generate this key."
+ store_name:
+ type: "string"
+ description: "Store name. All API URLs start with https://[mystorename.com]/api/v1/,\
+ \ where [mystorename.com] is the domain name of your store."
+ start_date:
+ title: "Start Date"
+ type: "string"
+ description: "The date from which you'd like to replicate the data"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ examples:
+ - "2021-01-01T00:00:00Z"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-chargebee:0.1.4"
+ spec:
+ documentationUrl: "https://apidocs.chargebee.com/docs/api"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Chargebee Spec"
+ type: "object"
+ required:
+ - "site"
+ - "site_api_key"
+ - "start_date"
+ - "product_catalog"
+ additionalProperties: false
+ properties:
+ site:
+ type: "string"
+ title: "Site"
+ description: "The site prefix for your Chargebee instance."
+ examples:
+ - "airbyte-test"
+ site_api_key:
+ type: "string"
+ title: "API Key"
+ description: "The API key from your Chargebee instance."
+ examples:
+ - "test_3yzfanAXF66USdWC9wQcM555DQJkSYoppu"
+ airbyte_secret: true
+ start_date:
+ type: "string"
+ title: "Start Date"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ description: "UTC date and time in the format 2021-01-25T00:00:00Z. Any\
+ \ data before this date will not be replicated."
+ examples:
+ - "2021-01-25T00:00:00Z"
+ product_catalog:
+ title: "Product Catalog"
+ type: "string"
+ description: "Product Catalog version of your Chargebee site. Instructions\
+ \ on how to find your version can be found here under the `API Version` section."
+ enum:
+ - "1.0"
+ - "2.0"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-clickhouse:0.1.4"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/destinations/clickhouse"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "ClickHouse Source Spec"
+ type: "object"
+ required:
+ - "host"
+ - "port"
+ - "database"
+ - "username"
+ additionalProperties: false
+ properties:
+ host:
+ description: "Host Endpoint of the Clickhouse Cluster"
+ type: "string"
+ port:
+ description: "Port of the database."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 8123
+ examples:
+ - "8123"
+ database:
+ description: "Name of the database."
+ type: "string"
+ examples:
+ - "default"
+ username:
+ description: "Username to use to access the database."
+ type: "string"
+ password:
+ description: "Password associated with the username."
+ type: "string"
+ airbyte_secret: true
+ ssl:
+ title: "SSL Connection"
+ description: "Encrypt data using SSL."
+ type: "boolean"
+ default: true
+ tunnel_method:
+ type: "object"
+ title: "SSH Tunnel Method"
+ description: "Whether to initiate an SSH tunnel before connecting to the\
+ \ database, and if so, which kind of authentication to use."
+ oneOf:
+ - title: "No Tunnel"
+ required:
+ - "tunnel_method"
+ properties:
+ tunnel_method:
+ description: "No ssh tunnel needed to connect to database"
+ type: "string"
+ const: "NO_TUNNEL"
+ order: 0
+ - title: "SSH Key Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "ssh_key"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and ssh key"
+ type: "string"
+ const: "SSH_KEY_AUTH"
+ order: 0
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-close-com:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/sources/close-com" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Close.com Spec" + type: "object" + required: + - "api_key" + additionalProperties: false + properties: + api_key: + type: "string" + description: "Close.com API key (usually starts with 'api_'; find yours\ + \ here)." + airbyte_secret: true + start_date: + type: "string" + description: "The start date to sync data. Leave blank for full sync. Format:\ + \ YYYY-MM-DD." + examples: + - "2021-01-01" + default: "2021-01-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-cockroachdb:0.1.2" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/sources/postgres" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Cockroach Source Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + additionalProperties: false + properties: + host: + title: "Host" + description: "Hostname of the database." + type: "string" + order: 0 + port: + title: "Port" + description: "Port of the database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 5432 + examples: + - "5432" + order: 1 + database: + title: "DB Name" + description: "Name of the database." + type: "string" + order: 2 + username: + title: "User" + description: "Username to use to access the database." + type: "string" + order: 3 + password: + title: "Password" + description: "Password associated with the username." 
+ type: "string" + airbyte_secret: true + order: 4 + ssl: + title: "Connect using SSL" + description: "Encrypt client/server communications for increased security." + type: "boolean" + default: false + order: 5 + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-dixa:0.1.1" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/sources/dixa" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Dixa Spec" + type: "object" + required: + - "api_token" + - "start_date" + additionalProperties: false + properties: + api_token: + type: "string" + description: "Dixa API token" + airbyte_secret: true + start_date: + type: "string" + description: "The connector pulls records updated from this date onwards." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "YYYY-MM-DD" + batch_size: + type: "integer" + description: "Number of days to batch into one request. Max 31." + pattern: "^[0-9]{1,2}$" + examples: + - 1 + - 31 + default: 31 + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-drift:0.2.3" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/sources/drift" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Drift Spec" + type: "object" + required: + - "access_token" + additionalProperties: false + properties: + access_token: + type: "string" + description: "Drift Access Token. See the docs for more information on how to generate this key." + airbyte_secret: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-exchange-rates:0.2.3" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/sources/exchangeratesapi" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "ratesapi.io Source Spec" + type: "object" + required: + - "start_date" + - "access_key" + additionalProperties: false + properties: + start_date: + type: "string" + description: "Start getting data from that date." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "YYYY-MM-DD" + access_key: + type: "string" + description: "Your API Access Key. See here. The key is case sensitive." + airbyte_secret: true + base: + type: "string" + description: "ISO reference currency. See here. Free plan doesn't support Source Currency Switching, default\ + \ base currency is EUR" + examples: + - "EUR" + - "USD" + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-facebook-marketing:0.2.21" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/sources/facebook-marketing" + changelogUrl: "https://docs.airbyte.io/integrations/sources/facebook-marketing" + connectionSpecification: + title: "Source Facebook Marketing" + type: "object" + properties: + account_id: + title: "Account Id" + description: "The Facebook Ad account ID to use when pulling data from the\ + \ Facebook Marketing API." + type: "string" + access_token: + title: "Access Token" + description: "The value of the access token generated. See the docs\ + \ for more information" + airbyte_secret: true + type: "string" + start_date: + title: "Start Date" + description: "The date from which you'd like to replicate data for AdCreatives\ + \ and AdInsights APIs, in the format YYYY-MM-DDT00:00:00Z. 
All data generated\ + \ after this date will be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + type: "string" + format: "date-time" + end_date: + title: "End Date" + description: "The date until which you'd like to replicate data for AdCreatives\ + \ and AdInsights APIs, in the format YYYY-MM-DDT00:00:00Z. All data generated\ + \ between start_date and this date will be replicated. Not setting this\ + \ option will result in always syncing the latest data." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-26T00:00:00Z" + type: "string" + format: "date-time" + include_deleted: + title: "Include Deleted" + description: "Include data from deleted campaigns, ads, and adsets." + default: false + type: "boolean" + insights_lookback_window: + title: "Insights Lookback Window" + description: "The attribution window for the actions" + default: 28 + minimum: 0 + maximum: 28 + type: "integer" + insights_days_per_job: + title: "Insights Days Per Job" + description: "Number of days to sync in one job. The more data you have\ + \ - the smaller you want this parameter to be." + default: 7 + minimum: 1 + maximum: 30 + type: "integer" + custom_insights: + title: "Custom Insights" + description: "A list wich contains insights entries, each entry must have\ + \ a name and can contains fields, breakdowns or action_breakdowns)" + type: "array" + items: + title: "InsightConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name value of insight" + type: "string" + fields: + title: "Fields" + description: "A list of chosen fields for fields parameter" + default: [] + type: "array" + items: + type: "string" + breakdowns: + title: "Breakdowns" + description: "A list of chosen breakdowns for breakdowns" + default: [] + type: "array" + items: + type: "string" + action_breakdowns: + title: "Action Breakdowns" + description: "A list of chosen action_breakdowns for action_breakdowns" + default: [] + type: "array" + items: + type: "string" + required: + - "name" + required: + - "account_id" + - "access_token" + - "start_date" + definitions: + InsightConfig: + title: "InsightConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name value of insight" + type: "string" + fields: + title: "Fields" + description: "A list of chosen fields for fields parameter" + default: [] + type: "array" + items: + type: "string" + breakdowns: + title: "Breakdowns" + description: "A list of chosen breakdowns for breakdowns" + default: [] + type: "array" + items: + type: "string" + action_breakdowns: + title: "Action Breakdowns" + description: "A list of chosen action_breakdowns for action_breakdowns" + default: [] + type: "array" + items: + type: "string" + required: + - "name" + supportsIncremental: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: + - "append" + authSpecification: + auth_type: "oauth2.0" + oauth2Specification: + rootObject: [] + oauthFlowInitParameters: [] + oauthFlowOutputParameters: + - - "access_token" +- dockerImage: "airbyte/source-facebook-pages:0.1.2" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/sources/facebook-pages" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Facebook Pages Spec" + type: "object" + required: + - "access_token" + - "page_id" + additionalProperties: false + properties: + access_token: + type: "string" + 
description: "Facebook Page Access Token" + airbyte_secret: true + page_id: + type: "string" + description: "Page ID" + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] + authSpecification: + auth_type: "oauth2.0" + oauth2Specification: + rootObject: [] + oauthFlowInitParameters: [] + oauthFlowOutputParameters: + - - "access_token" +- dockerImage: "airbyte/source-file:0.2.6" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/sources/file" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "File Source Spec" + type: "object" + additionalProperties: false + required: + - "dataset_name" + - "format" + - "url" + - "provider" + properties: + dataset_name: + type: "string" + description: "Name of the final table where to replicate this file (should\ + \ include only letters, numbers dash and underscores)" + format: + type: "string" + enum: + - "csv" + - "json" + - "jsonl" + - "excel" + - "feather" + - "parquet" + default: "csv" + description: "File Format of the file to be replicated (Warning: some format\ + \ may be experimental, please refer to docs)." + reader_options: + type: "string" + description: "This should be a valid JSON string used by each reader/parser\ + \ to provide additional options and tune its behavior" + examples: + - "{}" + - "{'sep': ' '}" + url: + type: "string" + description: "URL path to access the file to be replicated" + provider: + type: "object" + description: "Storage Provider or Location of the file(s) to be replicated." + default: "Public Web" + oneOf: + - title: "HTTPS: Public Web" + required: + - "storage" + properties: + storage: + type: "string" + enum: + - "HTTPS" + default: "HTTPS" + - title: "GCS: Google Cloud Storage" + required: + - "storage" + properties: + storage: + type: "string" + enum: + - "GCS" + default: "GCS" + service_account_json: + type: "string" + description: "In order to access private Buckets stored on Google\ + \ Cloud, this connector would need a service account json credentials\ + \ with the proper permissions as described here. Please generate the credentials.json\ + \ file and copy/paste its content to this field (expecting JSON\ + \ formats). If accessing publicly available data, this field is\ + \ not necessary." + - title: "S3: Amazon Web Services" + required: + - "storage" + properties: + storage: + type: "string" + enum: + - "S3" + default: "S3" + aws_access_key_id: + type: "string" + description: "In order to access private Buckets stored on AWS S3,\ + \ this connector would need credentials with the proper permissions.\ + \ If accessing publicly available data, this field is not necessary." + aws_secret_access_key: + type: "string" + description: "In order to access private Buckets stored on AWS S3,\ + \ this connector would need credentials with the proper permissions.\ + \ If accessing publicly available data, this field is not necessary." + airbyte_secret: true + - title: "AzBlob: Azure Blob Storage" + required: + - "storage" + - "storage_account" + properties: + storage: + type: "string" + enum: + - "AzBlob" + default: "AzBlob" + storage_account: + type: "string" + description: "The globally unique name of the storage account that\ + \ the desired blob sits within. See here for more details." + sas_token: + type: "string" + description: "To access Azure Blob Storage, this connector would need\ + \ credentials with the proper permissions. One option is a SAS (Shared\ + \ Access Signature) token. 
If accessing publicly available data,\ + \ this field is not necessary." + airbyte_secret: true + shared_key: + type: "string" + description: "To access Azure Blob Storage, this connector would need\ + \ credentials with the proper permissions. One option is a storage\ + \ account shared key (aka account key or access key). If accessing\ + \ publicly available data, this field is not necessary." + airbyte_secret: true + - title: "SSH: Secure Shell" + required: + - "storage" + - "user" + - "host" + properties: + storage: + type: "string" + enum: + - "SSH" + default: "SSH" + user: + type: "string" + password: + type: "string" + airbyte_secret: true + host: + type: "string" + port: + type: "string" + default: "22" + - title: "SCP: Secure copy protocol" + required: + - "storage" + - "user" + - "host" + properties: + storage: + type: "string" + enum: + - "SCP" + default: "SCP" + user: + type: "string" + password: + type: "string" + airbyte_secret: true + host: + type: "string" + port: + type: "string" + default: "22" + - title: "SFTP: Secure File Transfer Protocol" + required: + - "storage" + - "user" + - "host" + properties: + storage: + type: "string" + enum: + - "SFTP" + default: "SFTP" + user: + type: "string" + password: + type: "string" + airbyte_secret: true + host: + type: "string" + port: + type: "string" + default: "22" + - title: "Local Filesystem (limited)" + required: + - "storage" + properties: + storage: + type: "string" + description: "WARNING: Note that local storage URL available for read\ + \ must start with the local mount \"/local/\" at the moment until\ + \ we implement more advanced docker mounting options..." + enum: + - "local" + default: "local" + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-freshdesk:0.2.7" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/sources/freshdesk" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Freshdesk Spec" + type: "object" + required: + - "domain" + - "api_key" + additionalProperties: false + properties: + domain: + type: "string" + description: "Freshdesk domain" + examples: + - "myaccount.freshdesk.com" + pattern: + - "^[a-zA-Z0-9._-]*\\.freshdesk\\.com$" + api_key: + type: "string" + description: "Freshdesk API Key. See the docs for more information on how to obtain this key." + airbyte_secret: true + requests_per_minute: + title: "Requests per minute" + type: "integer" + description: "Number of requests per minute that this source allowed to\ + \ use." + start_date: + title: "Start date" + description: "Date from which to start pulling data." + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2020-12-01T00:00:00Z" + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-freshservice:0.1.0" + spec: + documentationUrl: "https://hub.docker.com/r/airbyte/source-freshservice" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Freshservice Spec" + type: "object" + required: + - "domain_name" + - "api_key" + - "start_date" + additionalProperties: false + properties: + domain_name: + type: "string" + description: "Freshservice domain" + examples: + - "mydomain.freshservice.com" + api_key: + title: "Api Key" + type: "string" + description: "Your API Access Key. See here. The key is case sensitive." 
+ airbyte_secret: true
+ start_date:
+ title: "Replication Start Date"
+ type: "string"
+ description: "UTC date and time in the format 2020-10-01T00:00:00Z. Any\
+ \ data before this date will not be replicated."
+ examples:
+ - "2020-10-01T00:00:00Z"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-github:0.2.3"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/github"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Github Source Spec"
+ type: "object"
+ required:
+ - "start_date"
+ - "repository"
+ additionalProperties: true
+ properties:
+ credentials:
+ title: "Authentication mechanism"
+ description: "Choose how to authenticate to Github"
+ type: "object"
+ oneOf:
+ - type: "object"
+ title: "Authenticate via Github (Oauth)"
+ required:
+ - "access_token"
+ properties:
+ option_title:
+ type: "string"
+ title: "Credentials title"
+ description: "OAuth Credentials"
+ const: "OAuth Credentials"
+ access_token:
+ type: "string"
+ title: "Access Token"
+ description: "Oauth access token"
+ airbyte_secret: true
+ - type: "object"
+ title: "Authenticate with Personal Access Token"
+ required:
+ - "personal_access_token"
+ properties:
+ option_title:
+ type: "string"
+ title: "Credentials title"
+ description: "PAT Credentials"
+ const: "PAT Credentials"
+ personal_access_token:
+ type: "string"
+ title: "Personal Access Tokens"
+ description: "Log into Github and then generate a personal access token. To load balance your API quota consumption\
+ \ across multiple API tokens, input multiple tokens separated with\
+ \ \",\""
+ airbyte_secret: true
+ repository:
+ type: "string"
+ examples:
+ - "airbytehq/airbyte"
+ - "airbytehq/*"
+ title: "Github repositories"
+ description: "Space-delimited list of GitHub repositories/organizations,\
+ \ e.g. `airbytehq/airbyte` for a single repository and `airbytehq/*` to\
+ \ get all repositories from the organization"
+ start_date:
+ type: "string"
+ title: "Start date"
+ description: "The date from which you'd like to replicate data for GitHub\
+ \ in the format YYYY-MM-DDT00:00:00Z. All data generated after this date\
+ \ will be replicated. Note that it will be used only in the following\
+ \ incremental streams: comments, commits and issues."
+ examples:
+ - "2021-03-01T00:00:00Z"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ branch:
+ type: "string"
+ title: "Branch"
+ examples:
+ - "airbytehq/airbyte/master"
+ description: "Space-delimited list of GitHub repository branches to pull\
+ \ commits for, e.g. `airbytehq/airbyte/master`. If no branches are specified\
+ \ for a repository, the default branch will be pulled."
+ supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] + authSpecification: + auth_type: "oauth2.0" + oauth2Specification: + rootObject: + - "credentials" + - "0" + oauthFlowInitParameters: [] + oauthFlowOutputParameters: + - - "access_token" +- dockerImage: "airbyte/source-gitlab:0.1.2" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/sources/gitlab" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Source Gitlab Singer Spec" + type: "object" + required: + - "api_url" + - "private_token" + - "start_date" + additionalProperties: false + properties: + api_url: + type: "string" + examples: + - "gitlab.com" + description: "Please enter your basic URL from Gitlab instance" + private_token: + type: "string" + description: "Log into your Gitlab account and then generate a personal\ + \ Access Token." + airbyte_secret: true + groups: + type: "string" + examples: + - "airbyte.io" + description: "Space-delimited list of groups. e.g. airbyte.io" + projects: + type: "string" + examples: + - "airbyte.io/documentation" + description: "Space-delimited list of projects. e.g. airbyte.io/documentation\ + \ meltano/tap-gitlab" + start_date: + type: "string" + description: "The date from which you'd like to replicate data for Gitlab\ + \ API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this\ + \ date will be replicated." + examples: + - "2021-03-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-google-ads:0.1.15" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/sources/google-ads" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Google Ads Spec" + type: "object" + required: + - "credentials" + - "start_date" + - "customer_id" + additionalProperties: true + properties: + credentials: + type: "object" + title: "Google Credentials" + required: + - "developer_token" + - "client_id" + - "client_secret" + - "refresh_token" + properties: + developer_token: + type: "string" + title: "Developer Token" + description: "Developer token granted by Google to use their APIs. More\ + \ instruction on how to find this value in our docs" + airbyte_secret: true + client_id: + type: "string" + title: "Client Id" + description: "Google client id. More instruction on how to find this\ + \ value in our docs" + client_secret: + type: "string" + title: "Client Secret" + description: "Google client secret. More instruction on how to find\ + \ this value in our docs" + airbyte_secret: true + access_token: + type: "string" + title: "Access Token" + description: "Access token generated using developer_token, oauth_client_id,\ + \ and oauth_client_secret. More instruction on how to find this value\ + \ in our docs" + airbyte_secret: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "Refresh token generated using developer_token, oauth_client_id,\ + \ and oauth_client_secret. More instruction on how to find this value\ + \ in our docs" + airbyte_secret: true + customer_id: + title: "Customer Id" + type: "string" + description: "Customer id must be specified as a 10-digit number without\ + \ dashes. 
More instruction on how to find this value in our docs" + login_customer_id: + type: "string" + title: "Login Customer ID" + description: "If your access to the customer account is through a manager\ + \ account, this field is required and must be set to the customer ID of\ + \ the manager account (10-digit number without dashes). More information\ + \ about this field you can see here" + start_date: + type: "string" + title: "Start Date" + description: "UTC date and time in the format 2017-01-25. Any data before\ + \ this date will not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "2017-01-25" + conversion_window_days: + title: "Conversion Window" + type: "integer" + description: "Define the historical replication lookback window in days" + minimum: 0 + maximum: 1095 + default: 14 + examples: + - 14 + custom_queries: + type: "array" + title: "Custom GAQL Queries" + items: + type: "object" + properties: + query: + type: "string" + title: "Custom query" + description: "A custom defined GAQL query for building the report.\ + \ Should not contain segments.date expression as it used by incremental\ + \ streams" + examples: + - "SELECT segments.ad_destination_type, campaign.advertising_channel_sub_type\ + \ FROM campaign WHERE campaign.status = 'PAUSED'" + table_name: + type: "string" + title: "Destination table name" + description: "The table name in your destination database for choosen\ + \ query." + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] + authSpecification: + auth_type: "oauth2.0" + oauth2Specification: + rootObject: + - "credentials" + oauthFlowInitParameters: + - - "client_id" + - - "client_secret" + - - "developer_token" + oauthFlowOutputParameters: + - - "access_token" + - - "refresh_token" +- dockerImage: "airbyte/source-google-analytics-v4:0.1.9" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/sources/google-analytics-v4" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Google Analytics V4 Spec" + type: "object" + required: + - "view_id" + - "start_date" + additionalProperties: true + properties: + view_id: + type: "string" + title: "View ID" + description: "The ID for the Google Analytics View you want to fetch data\ + \ from. This can be found from the Google Analytics Account Explorer." + airbyte_secret: true + start_date: + type: "string" + title: "Start Date" + description: "A date in the format YYYY-MM-DD." + examples: + - "2020-06-01" + window_in_days: + type: "integer" + description: "The amount of days for each data-chunk begining from start_date.\ + \ Bigger the value - faster the fetch. (Min=1, as for a Day; Max=364,\ + \ as for a Year)." + examples: + - 30 + - 60 + - 90 + - 120 + - 200 + - 364 + default: 90 + custom_reports: + title: "Custom Reports" + type: "string" + description: "A JSON array describing the custom reports you want to sync\ + \ from GA. Check out the docs to get more information about this field." 
+ credentials: + type: "object" + oneOf: + - title: "Authenticate via Google (Oauth)" + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_type: + type: "string" + const: "Client" + enum: + - "Client" + default: "Client" + order: 0 + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your developer application" + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: "The client secret of your developer application" + airbyte_secret: true + refresh_token: + title: "Refresh Token" + type: "string" + description: "A refresh token generated using the above client ID\ + \ and secret" + airbyte_secret: true + access_token: + title: "Access Token" + type: "string" + description: "A access token generated using the above client ID,\ + \ secret and refresh_token" + airbyte_secret: true + - type: "object" + title: "Service Account Key Authentication" + required: + - "credentials_json" + properties: + auth_type: + type: "string" + const: "Service" + enum: + - "Service" + default: "Service" + order: 0 + credentials_json: + type: "string" + description: "The JSON key of the service account to use for authorization" + examples: + - "{ \"type\": \"service_account\", \"project_id\": YOUR_PROJECT_ID,\ + \ \"private_key_id\": YOUR_PRIVATE_KEY, ... }" + airbyte_secret: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] + authSpecification: + auth_type: "oauth2.0" + oauth2Specification: + rootObject: + - "credentials" + - "0" + oauthFlowInitParameters: + - - "client_id" + - - "client_secret" + oauthFlowOutputParameters: + - - "access_token" + - - "refresh_token" +- dockerImage: "airbyte/source-google-directory:0.1.5" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/sources/google-directory" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Google Directory Spec" + type: "object" + required: + - "credentials_json" + - "email" + additionalProperties: false + properties: + credentials_json: + type: "string" + description: "The contents of the JSON service account key. See the docs for more information on how to generate this key." + airbyte_secret: true + email: + type: "string" + description: "The email of the user, which has permissions to access the\ + \ Google Workspace Admin APIs." + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-google-search-console:0.1.6" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/sources/google-search-console" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Google Search Console Spec" + type: "object" + additionalProperties: false + required: + - "site_urls" + - "start_date" + - "authorization" + properties: + site_urls: + type: "array" + items: + type: "string" + description: "Website URLs property; do not include the domain-level property\ + \ in the list" + examples: + - "https://example1.com" + - "https://example2.com" + start_date: + type: "string" + description: "The date from which you'd like to replicate data in the format\ + \ YYYY-MM-DD." + examples: + - "2021-01-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + end_date: + type: "string" + description: "The date from which you'd like to replicate data in the format\ + \ YYYY-MM-DD. 
Must be greater or equal start_date field" + examples: + - "2021-12-12" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + authorization: + type: "object" + title: "Authentication Type" + oneOf: + - title: "Authenticate via Google (Oauth)" + type: "object" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_type: + type: "string" + const: "Client" + enum: + - "Client" + default: "Client" + order: 0 + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your developer application" + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: "The client secret of your developer application" + airbyte_secret: true + access_token: + title: "Access Token" + type: "string" + description: "An access token generated using the above client ID\ + \ and secret" + airbyte_secret: true + refresh_token: + title: "Refresh Token" + type: "string" + description: "A refresh token generated using the above client ID\ + \ and secret" + airbyte_secret: true + - type: "object" + title: "Service Account Key Authentication" + required: + - "auth_type" + - "service_account_info" + - "email" + properties: + auth_type: + type: "string" + const: "Service" + enum: + - "Service" + default: "Service" + order: 0 + service_account_info: + title: "Service Account JSON Key" + type: "string" + description: "The JSON key of the service account to use for authorization" + examples: + - "{ \"type\": \"service_account\", \"project_id\": YOUR_PROJECT_ID,\ + \ \"private_key_id\": YOUR_PRIVATE_KEY, ... }" + email: + title: "Admin Email" + type: "string" + description: "The email of the user which has permissions to access\ + \ the Google Workspace Admin APIs." + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] + authSpecification: + auth_type: "oauth2.0" + oauth2Specification: + rootObject: + - "authorization" + - "0" + oauthFlowInitParameters: + - - "client_id" + - - "client_secret" + oauthFlowOutputParameters: + - - "access_token" + - - "refresh_token" +- dockerImage: "airbyte/source-google-sheets:0.2.6" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/sources/google-sheets" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Stripe Source Spec" + type: "object" + required: + - "spreadsheet_id" + additionalProperties: true + properties: + spreadsheet_id: + type: "string" + description: "The ID of the spreadsheet to be replicated." 
+ credentials: + type: "object" + oneOf: + - title: "Authenticate via Google (Oauth)" + type: "object" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_type: + type: "string" + const: "Client" + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your developer application" + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: "The client secret of your developer application" + airbyte_secret: true + refresh_token: + title: "Refresh Token" + type: "string" + description: "A refresh token generated using the above client ID\ + \ and secret" + airbyte_secret: true + - title: "Service Account Key Authentication" + type: "object" + required: + - "auth_type" + - "service_account_info" + properties: + auth_type: + type: "string" + const: "Service" + service_account_info: + type: "string" + description: "The JSON key of the service account to use for authorization" + examples: + - "{ \"type\": \"service_account\", \"project_id\": YOUR_PROJECT_ID,\ + \ \"private_key_id\": YOUR_PRIVATE_KEY, ... }" + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] + authSpecification: + auth_type: "oauth2.0" + oauth2Specification: + rootObject: + - "credentials" + - 0 + oauthFlowInitParameters: + - - "client_id" + - - "client_secret" + oauthFlowOutputParameters: + - - "refresh_token" +- dockerImage: "airbyte/source-google-workspace-admin-reports:0.1.5" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/sources/google-workspace-admin-reports" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Google Directory Spec" + type: "object" + required: + - "credentials_json" + - "email" + additionalProperties: false + properties: + credentials_json: + type: "string" + description: "The contents of the JSON service account key. See the docs for more information on how to generate this key." + airbyte_secret: true + email: + type: "string" + description: "The email of the user, which has permissions to access the\ + \ Google Workspace Admin APIs." + lookback: + type: "integer" + minimum: 0 + maximum: 180 + description: "Sets the range of time shown in the report. Reports API allows\ + \ from up to 180 days ago. " + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-greenhouse:0.2.5" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/sources/greenhouse" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Greenhouse Spec" + type: "object" + required: + - "api_key" + additionalProperties: false + properties: + api_key: + type: "string" + description: "Greenhouse API Key. See the docs for more information on how to generate this key." + airbyte_secret: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-harvest:0.1.5" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/sources/harvest" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Harvest Spec" + type: "object" + required: + - "api_token" + - "account_id" + - "replication_start_date" + additionalProperties: false + properties: + api_token: + title: "API Token" + description: "Harvest API Token." + airbyte_secret: true + type: "string" + account_id: + title: "Account ID" + description: "Harvest account ID. 
Required for all Harvest requests in pair\ + \ with API Key" + airbyte_secret: true + type: "string" + replication_start_date: + title: "Replication Start Date" + description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + type: "string" + supportsIncremental: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: + - "append" +- dockerImage: "airbyte/source-hubspot:0.1.21" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/sources/hubspot" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Hubspot Source Spec" + type: "object" + required: + - "start_date" + - "credentials" + additionalProperties: false + properties: + start_date: + type: "string" + title: "Replication start date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + examples: + - "2017-01-25T00:00:00Z" + credentials: + title: "Authentication mechanism" + description: "Choose either to provide the API key or the OAuth2.0 credentials" + type: "object" + oneOf: + - type: "object" + title: "Authenticate via Hubspot (Oauth)" + required: + - "redirect_uri" + - "client_id" + - "client_secret" + - "refresh_token" + - "access_token" + - "credentials_title" + properties: + credentials_title: + type: "string" + title: "Credentials title" + description: "Name of the credentials set" + const: "OAuth Credentials" + enum: + - "OAuth Credentials" + default: "OAuth Credentials" + order: 0 + client_id: + title: "Client ID" + description: "Hubspot client_id. See our docs if you need help finding this id." + type: "string" + examples: + - "123456789000" + client_secret: + title: "Client Secret" + description: "Hubspot client_secret. See our docs if you need help finding this secret." + type: "string" + examples: + - "secret" + airbyte_secret: true + refresh_token: + title: "Refresh token" + description: "Hubspot refresh_token. See our docs if you need help generating the token." + type: "string" + examples: + - "refresh_token" + airbyte_secret: true + - type: "object" + title: "API key" + required: + - "api_key" + - "credentials_title" + properties: + credentials_title: + type: "string" + title: "Credentials title" + description: "Name of the credentials set" + const: "API Key Credentials" + enum: + - "API Key Credentials" + default: "API Key Credentials" + order: 0 + api_key: + title: "API key" + description: "Hubspot API Key. See our docs if you need help finding this key." 
+ type: "string" + airbyte_secret: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] + authSpecification: + auth_type: "oauth2.0" + oauth2Specification: + rootObject: + - "credentials" + - "0" + oauthFlowInitParameters: + - - "client_id" + - - "client_secret" + - - "refresh_token" + oauthFlowOutputParameters: + - - "refresh_token" +- dockerImage: "airbyte/source-db2:0.1.1" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/sources/db2" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "IBM Db2 Source Spec" + type: "object" + required: + - "host" + - "port" + - "db" + - "username" + - "password" + additionalProperties: false + properties: + host: + description: "Host of the Db2." + type: "string" + port: + description: "Port of the database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 8123 + examples: + - "8123" + db: + description: "Name of the database." + type: "string" + examples: + - "default" + username: + description: "Username to use to access the database." + type: "string" + password: + description: "Password associated with the username." + type: "string" + airbyte_secret: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-instagram:0.1.9" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/sources/instagram" + changelogUrl: "https://docs.airbyte.io/integrations/sources/instagram" + connectionSpecification: + title: "Source Instagram" + type: "object" + properties: + start_date: + title: "Start Date" + description: "The date from which you'd like to replicate data for User\ + \ Insights, in the format YYYY-MM-DDT00:00:00Z. All data generated after\ + \ this date will be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + type: "string" + format: "date-time" + access_token: + title: "Access Token" + description: "The value of the access token generated. See the docs for\ + \ more information" + airbyte_secret: true + type: "string" + required: + - "start_date" + - "access_token" + supportsIncremental: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: + - "append" + authSpecification: + auth_type: "oauth2.0" + oauth2Specification: + rootObject: [] + oauthFlowInitParameters: [] + oauthFlowOutputParameters: + - - "access_token" +- dockerImage: "airbyte/source-intercom:0.1.6" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/sources/intercom" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Source Intercom Spec" + type: "object" + required: + - "access_token" + - "start_date" + additionalProperties: false + properties: + access_token: + type: "string" + description: "Intercom Access Token. See the docs for more information on how to obtain this key." + airbyte_secret: true + start_date: + type: "string" + description: "The date from which you'd like to replicate data for Intercom\ + \ API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this\ + \ date will be replicated." 
+ examples: + - "2020-11-16T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-iterable:0.1.9" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/sources/iterable" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Iterable Spec" + type: "object" + required: + - "start_date" + - "api_key" + additionalProperties: false + properties: + start_date: + type: "string" + description: "The date from which you'd like to replicate data for Iterable,\ + \ in the format YYYY-MM-DDT00:00:00Z. All data generated after this date\ + \ will be replicated." + examples: + - "2021-04-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + api_key: + type: "string" + description: "Iterable API Key. See the docs for more information on how to obtain this key." + airbyte_secret: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-jira:0.2.14" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/sources/jira" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Jira Spec" + type: "object" + required: + - "api_token" + - "domain" + - "email" + additionalProperties: true + properties: + api_token: + type: "string" + description: "Jira API Token. See the docs for more information on how to generate this key." + airbyte_secret: true + domain: + type: "string" + examples: + - "domainname.atlassian.net" + pattern: "^[a-zA-Z0-9._-]*\\.atlassian\\.net$" + description: "Domain for your Jira account, e.g. airbyteio.atlassian.net" + email: + type: "string" + description: "The user email for your Jira account" + projects: + type: "array" + title: "Projects" + items: + type: "string" + examples: + - "PROJ1" + - "PROJ2" + description: "Comma-separated list of Jira project keys to replicate data\ + \ for" + start_date: + type: "string" + title: "Start Date" + description: "The date from which you'd like to replicate data for Jira\ + \ in the format YYYY-MM-DDT00:00:00Z. All data generated after this date\ + \ will be replicated. Note that it will be used only in the following\ + \ incremental streams: issues." + examples: + - "2021-03-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + additional_fields: + type: "array" + title: "Additional Fields" + items: + type: "string" + description: "Comma-separated list of additional fields to include in replicating\ + \ issues" + examples: + - "Field A" + - "Field B" + expand_issue_changelog: + type: "boolean" + title: "Expand Issue Changelog" + description: "Expand the changelog when replicating issues" + default: false + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-kafka:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/sources/kafka" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Kafka Source Spec" + type: "object" + required: + - "bootstrap_servers" + - "subscription" + - "protocol" + additionalProperties: false + properties: + bootstrap_servers: + title: "Bootstrap servers" + description: "A list of host/port pairs to use for establishing the initial\ + \ connection to the Kafka cluster. 
The client will make use of all servers\
+ \ irrespective of which servers are specified here for bootstrapping—this\
+ \ list only impacts the initial hosts used to discover the full set of\
+ \ servers. This list should be in the form host1:port1,host2:port2,....\
+ \ Since these servers are just used for the initial connection to discover\
+ \ the full cluster membership (which may change dynamically), this list\
+ \ need not contain the full set of servers (you may want more than one,\
+ \ though, in case a server is down)."
+ type: "string"
+ examples:
+ - "kafka-broker1:9092,kafka-broker2:9092"
+ subscription:
+ title: "Subscribe method"
+ type: "object"
+ description: "You can choose to manually assign a list of partitions, or\
+ \ subscribe to all topics matching a specified pattern to get dynamically\
+ \ assigned partitions"
+ oneOf:
+ - title: "Manually assign a list of partitions"
+ required:
+ - "subscription_type"
+ - "topic_partitions"
+ properties:
+ subscription_type:
+ description: "Manually assign a list of partitions to this consumer.\
+ \ This interface does not allow for incremental assignment and will\
+ \ replace the previous assignment (if there is one).\nIf the given\
+ \ list of topic partitions is empty, it is treated the same as unsubscribe()."
+ type: "string"
+ const: "assign"
+ enum:
+ - "assign"
+ default: "assign"
+ topic_partitions:
+ title: "List of topic:partition pairs"
+ type: "string"
+ examples:
+ - "sample.topic:0, sample.topic:1"
+ - title: "Subscribe to all topics matching specified pattern"
+ required:
+ - "subscription_type"
+ - "topic_pattern"
+ properties:
+ subscription_type:
+ description: "Topic pattern from which the records will be read."
+ type: "string"
+ const: "subscribe"
+ enum:
+ - "subscribe"
+ default: "subscribe"
+ topic_pattern:
+ title: "Topic pattern"
+ type: "string"
+ examples:
+ - "sample.topic"
+ test_topic:
+ title: "Test topic"
+ description: "Topic to test if Airbyte can consume messages."
+ type: "string"
+ examples:
+ - "test.topic"
+ group_id:
+ title: "Group ID"
+ description: "Group id."
+ type: "string"
+ examples:
+ - "group.id"
+ max_poll_records:
+ title: "Max poll records"
+ description: "The maximum number of records returned in a single call to\
+ \ poll(). Note that max_poll_records does not impact the underlying fetching\
+ \ behavior. The consumer will cache the records from each fetch request\
+ \ and return them incrementally from each poll."
+ type: "integer"
+ default: 500
+ protocol:
+ title: "Protocol"
+ type: "object"
+ description: "Protocol used to communicate with brokers."
+ oneOf:
+ - title: "PLAINTEXT"
+ required:
+ - "security_protocol"
+ properties:
+ security_protocol:
+ type: "string"
+ enum:
+ - "PLAINTEXT"
+ default: "PLAINTEXT"
+ - title: "SASL PLAINTEXT"
+ required:
+ - "security_protocol"
+ - "sasl_mechanism"
+ - "sasl_jaas_config"
+ properties:
+ security_protocol:
+ type: "string"
+ enum:
+ - "SASL_PLAINTEXT"
+ default: "SASL_PLAINTEXT"
+ sasl_mechanism:
+ title: "SASL mechanism"
+ description: "SASL mechanism used for client connections. This may\
+ \ be any mechanism for which a security provider is available."
+ type: "string"
+ default: "PLAIN"
+ enum:
+ - "PLAIN"
+ sasl_jaas_config:
+ title: "SASL JAAS config"
+ description: "JAAS login context parameters for SASL connections in\
+ \ the format used by JAAS configuration files."
+ type: "string"
+ default: ""
+ airbyte_secret: true
+ - title: "SASL SSL"
+ required:
+ - "security_protocol"
+ - "sasl_mechanism"
+ - "sasl_jaas_config"
+ properties:
+ security_protocol:
+ type: "string"
+ enum:
+ - "SASL_SSL"
+ default: "SASL_SSL"
+ sasl_mechanism:
+ title: "SASL mechanism"
+ description: "SASL mechanism used for client connections. This may\
+ \ be any mechanism for which a security provider is available."
+ type: "string"
+ default: "GSSAPI"
+ enum:
+ - "GSSAPI"
+ - "OAUTHBEARER"
+ - "SCRAM-SHA-256"
+ sasl_jaas_config:
+ title: "SASL JAAS config"
+ description: "JAAS login context parameters for SASL connections in\
+ \ the format used by JAAS configuration files."
+ type: "string"
+ default: ""
+ airbyte_secret: true
+ client_id:
+ title: "Client ID"
+ description: "An id string to pass to the server when making requests. The\
+ \ purpose of this is to be able to track the source of requests beyond\
+ \ just ip/port by allowing a logical application name to be included in\
+ \ server-side request logging."
+ type: "string"
+ examples:
+ - "airbyte-consumer"
+ enable_auto_commit:
+ title: "Enable auto commit"
+ description: "If true the consumer's offset will be periodically committed\
+ \ in the background."
+ type: "boolean"
+ default: true
+ auto_commit_interval_ms:
+ title: "Auto commit interval ms"
+ description: "The frequency in milliseconds that the consumer offsets are\
+ \ auto-committed to Kafka if enable.auto.commit is set to true."
+ type: "integer"
+ default: 5000
+ client_dns_lookup:
+ title: "Client DNS lookup"
+ description: "Controls how the client uses DNS lookups. If set to use_all_dns_ips,\
+ \ connect to each returned IP address in sequence until a successful connection\
+ \ is established. After a disconnection, the next IP is used. Once all\
+ \ IPs have been used once, the client resolves the IP(s) from the hostname\
+ \ again. If set to resolve_canonical_bootstrap_servers_only, resolve each\
+ \ bootstrap address into a list of canonical names. After the bootstrap\
+ \ phase, this behaves the same as use_all_dns_ips. If set to default (deprecated),\
+ \ attempt to connect to the first IP address returned by the lookup, even\
+ \ if the lookup returns multiple IP addresses."
+ type: "string"
+ default: "use_all_dns_ips"
+ enum:
+ - "default"
+ - "use_all_dns_ips"
+ - "resolve_canonical_bootstrap_servers_only"
+ retry_backoff_ms:
+ title: "Retry backoff ms"
+ description: "The amount of time to wait before attempting to retry a failed\
+ \ request to a given topic partition. This avoids repeatedly sending requests\
+ \ in a tight loop under some failure scenarios."
+ type: "integer"
+ default: 100
+ request_timeout_ms:
+ title: "Request timeout ms"
+ description: "The configuration controls the maximum amount of time the\
+ \ client will wait for the response of a request. If the response is not\
+ \ received before the timeout elapses the client will resend the request\
+ \ if necessary or fail the request if retries are exhausted."
+ type: "integer"
+ default: 30000
+ receive_buffer_bytes:
+ title: "Receive buffer bytes"
+ description: "The size of the TCP receive buffer (SO_RCVBUF) to use when\
+ \ reading data. If the value is -1, the OS default will be used."
+ type: "integer"
+ default: 32768
+ auto_offset_reset:
+ title: "Auto offset reset"
+ description: "What to do when there is no initial offset in Kafka or if\
+ \ the current offset does not exist any more on the server - earliest:\
+ \ automatically reset the offset to the earliest offset, latest: automatically\
+ \ reset the offset to the latest offset, none: throw exception to the\
+ \ consumer if no previous offset is found for the consumer's group, anything\
+ \ else: throw exception to the consumer."
+ type: "string"
+ default: "latest"
+ enum:
+ - "latest"
+ - "earliest"
+ - "none"
+ repeated_calls:
+ title: "Repeated calls"
+ description: "The number of repeated calls to poll() if no messages were\
+ \ received."
+ type: "integer"
+ default: 3
+ supportsIncremental: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+ supported_source_sync_modes:
+ - "append"
+- dockerImage: "airbyte/source-klaviyo:0.1.2"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/klaviyo"
+ changelogUrl: "https://docs.airbyte.io/integrations/sources/klaviyo"
+ connectionSpecification:
+ title: "Klaviyo Spec"
+ type: "object"
+ properties:
+ api_key:
+ title: "Api Key"
+ description: "Klaviyo API Key. See our docs if you need help finding this key."
+ airbyte_secret: true
+ type: "string"
+ start_date:
+ title: "Start Date"
+ description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\
+ \ data before this date will not be replicated."
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ examples:
+ - "2017-01-25T00:00:00Z"
+ type: "string"
+ required:
+ - "api_key"
+ - "start_date"
+ supportsIncremental: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes:
+ - "append"
+- dockerImage: "airbyte/source-lever-hiring:0.1.0"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/lever-hiring"
+ changelogUrl: "https://docs.airbyte.io/integrations/sources/lever-hiring#changelog"
+ connectionSpecification:
+ title: "Lever Hiring Spec"
+ type: "object"
+ properties:
+ client_id:
+ title: "Client Id"
+ description: "The client application id as provided when registering the\
+ \ application with Lever."
+ type: "string"
+ client_secret:
+ title: "Client Secret"
+ description: "The application secret as provided when registering the application\
+ \ with Lever."
+ airbyte_secret: true
+ type: "string"
+ refresh_token:
+ title: "Refresh Token"
+ description: "The refresh token your application will need to submit to\
+ \ get a new access token after it's expired."
+ type: "string"
+ environment:
+ title: "Environment"
+ description: "Sandbox or Production environment."
+ default: "Production"
+ enum:
+ - "Sandbox"
+ - "Production"
+ type: "string"
+ start_date:
+ title: "Start Date"
+ description: "UTC date and time in the format 2019-02-25T00:00:00Z. Any\
+ \ data before this date will not be replicated."
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ examples:
+ - "2021-04-25T00:00:00Z"
+ type: "string"
+ required:
+ - "client_id"
+ - "client_secret"
+ - "refresh_token"
+ - "start_date"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+ authSpecification:
+ auth_type: "oauth2.0"
+ oauth2Specification:
+ rootObject: []
+ oauthFlowInitParameters:
+ - - "client_id"
+ - - "client_secret"
+ - - "refresh_token"
+ oauthFlowOutputParameters: []
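
The `oauth2Specification` block recurs throughout this file. As far as these spec shapes suggest, `rootObject` locates where the OAuth fields live inside the connection config, with a trailing index such as "0" selecting a oneOf variant (as in the GitHub entry above), while the init/output parameter paths are relative to that root. A hedged sketch of that path resolution, with a hypothetical helper name:

```python
from typing import List

def oauth_param_path(root_object: List[str], param_path: List[str]) -> List[str]:
    """Hypothetical helper illustrating how an oauth2Specification's
    rootObject and a parameter path might combine into the field's location
    in the connector config. Numeric elements like "0" pick a oneOf variant
    in the spec and are assumed not to appear in the config path itself."""
    prefix = [part for part in root_object if not str(part).isdigit()]
    return prefix + list(param_path)

# Lever Hiring (rootObject: []): init parameters sit at the top level.
assert oauth_param_path([], ["client_id"]) == ["client_id"]
# Entries above that use rootObject: ["credentials", "0"]:
assert oauth_param_path(["credentials", "0"], ["access_token"]) == [
    "credentials", "access_token"]
```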
+- dockerImage: "airbyte/source-linkedin-ads:0.1.1"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/linkedin-ads"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Linkedin Ads Spec"
+ type: "object"
+ required:
+ - "start_date"
+ - "access_token"
+ additionalProperties: false
+ properties:
+ start_date:
+ type: "string"
+ title: "Start Date"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$"
+ description: "Date in the format 2020-09-17. Any data before this date will\
+ \ not be replicated."
+ examples:
+ - "2021-05-17"
+ access_token:
+ type: "string"
+ title: "Access Token"
+ description: "The token value ganerated using Auth Code"
+ airbyte_secret: true
+ account_ids:
+ title: "Account IDs"
+ type: "array"
+ description: "Specify the Account IDs separated by space, from which to\
+ \ pull the data. Leave empty to pull from all associated accounts."
+ items:
+ type: "integer"
+ default: []
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-looker:0.2.5"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/looker"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Looker Spec"
+ type: "object"
+ required:
+ - "domain"
+ - "client_id"
+ - "client_secret"
+ additionalProperties: false
+ properties:
+ domain:
+ type: "string"
+ examples:
+ - "domainname.looker.com"
+ - "looker.clientname.com"
+ - "123.123.124.123:8000"
+ description: "Domain for your Looker account, e.g. airbyte.cloud.looker.com,looker.[clientname].com,IP\
+ \ address"
+ client_id:
+ title: "Client ID"
+ type: "string"
+ description: "The Client ID is first part of an API3 key that is specific\
+ \ to each Looker user. See the docs for more information on how to generate this key."
+ client_secret:
+ title: "Client Secret"
+ type: "string"
+ description: "The Client Secret is second part of an API3 key."
+ run_look_ids:
+ title: "Look IDs to Run"
+ type: "array"
+ items:
+ type: "string"
+ pattern: "^[0-9]*$"
+ description: "The IDs of any Looks to run (optional)"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-mailchimp:0.2.8"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/mailchimp"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Mailchimp Spec"
+ type: "object"
+ required:
+ - "username"
+ - "apikey"
+ additionalProperties: false
+ properties:
+ username:
+ type: "string"
+ description: "The Username or email you use to sign into Mailchimp"
+ apikey:
+ type: "string"
+ airbyte_secret: true
+ description: "API Key. See the docs for information on how to generate this key."
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-marketo:0.1.0"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/marketo"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Source Marketo Spec"
+ type: "object"
+ required:
+ - "domain_url"
+ - "client_id"
+ - "client_secret"
+ - "start_date"
+ additionalProperties: false
+ properties:
+ domain_url:
+ type: "string"
+ description: "Your Marketo Base URL. See the docs for info on how to obtain this."
+ examples:
+ - "https://000-AAA-000.mktorest.com"
+ airbyte_secret: true
+ client_id:
+ type: "string"
+ description: "Your Marketo client_id. See the docs for info on how to obtain this."
+ airbyte_secret: true
+ client_secret:
+ type: "string"
+ description: "Your Marketo client secret. See the docs for info on how to obtain this."
+ airbyte_secret: true
+ start_date:
+ type: "string"
+ description: "Data generated in Marketo after this date will be replicated.\
+ \ This date must be specified in the format YYYY-MM-DDT00:00:00Z."
+ examples:
+ - "2020-09-25T00:00:00Z"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ window_in_days:
+ type: "integer"
+ description: "The amount of days for each data-chunk begining from start_date.\
+ \ (Min=1, as for a Day; Max=30, as for a Month)."
+ examples:
+ - 1
+ - 5
+ - 10
+ - 15
+ - 30
+ default: 30
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-mssql:0.3.6"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/destinations/mssql"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "MSSQL Source Spec"
+ type: "object"
+ required:
+ - "host"
+ - "port"
+ - "database"
+ - "username"
+ additionalProperties: false
+ properties:
+ host:
+ description: "Hostname of the database."
+ type: "string"
+ port:
+ description: "Port of the database."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ examples:
+ - "1433"
+ database:
+ description: "Name of the database."
+ type: "string"
+ examples:
+ - "master"
+ username:
+ description: "Username to use to access the database."
+ type: "string"
+ password:
+ description: "Password associated with the username."
+ type: "string"
+ airbyte_secret: true
+ ssl_method:
+ title: "SSL Method"
+ type: "object"
+ description: "Encryption method to use when communicating with the database"
+ order: 6
+ oneOf:
+ - title: "Unencrypted"
+ additionalProperties: false
+ description: "Data transfer will not be encrypted."
+ required:
+ - "ssl_method"
+ properties:
+ ssl_method:
+ type: "string"
+ const: "unencrypted"
+ enum:
+ - "unencrypted"
+ default: "unencrypted"
+ - title: "Encrypted (trust server certificate)"
+ additionalProperties: false
+ description: "Use the cert provided by the server without verification.\
+ \ (For testing purposes only!)"
+ required:
+ - "ssl_method"
+ properties:
+ ssl_method:
+ type: "string"
+ const: "encrypted_trust_server_certificate"
+ enum:
+ - "encrypted_trust_server_certificate"
+ default: "encrypted_trust_server_certificate"
+ - title: "Encrypted (verify certificate)"
+ additionalProperties: false
+ description: "Verify and use the cert provided by the server."
+ required:
+ - "ssl_method"
+ - "trustStoreName"
+ - "trustStorePassword"
+ properties:
+ ssl_method:
+ type: "string"
+ const: "encrypted_verify_certificate"
+ enum:
+ - "encrypted_verify_certificate"
+ default: "encrypted_verify_certificate"
+ hostNameInCertificate:
+ title: "Host Name In Certificate"
+ type: "string"
+ description: "Specifies the host name of the server. The value of\
+ \ this property must match the subject property of the certificate."
+ order: 7
+ replication_method:
+ type: "string"
+ title: "Replication Method"
+ description: "Replication method to use for extracting data from the database.\
+ \ STANDARD replication requires no setup on the DB side but will not be\
+ \ able to represent deletions incrementally. CDC uses {TBC} to detect\
+ \ inserts, updates, and deletes. This needs to be configured on the source\
+ \ database itself."
+ default: "STANDARD"
+ enum:
+ - "STANDARD"
+ - "CDC"
+ tunnel_method:
+ type: "object"
+ title: "SSH Tunnel Method"
+ description: "Whether to initiate an SSH tunnel before connecting to the\
+ \ database, and if so, which kind of authentication to use."
+ oneOf:
+ - title: "No Tunnel"
+ required:
+ - "tunnel_method"
+ properties:
+ tunnel_method:
+ description: "No ssh tunnel needed to connect to database"
+ type: "string"
+ const: "NO_TUNNEL"
+ order: 0
+ - title: "SSH Key Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "ssh_key"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and ssh key"
+ type: "string"
+ const: "SSH_KEY_AUTH"
+ order: 0
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host."
+ type: "string"
+ order: 3
+ ssh_key:
+ title: "SSH Private Key"
+ description: "OS-level user account ssh key credentials for logging\
+ \ into the jump server host."
+ type: "string"
+ airbyte_secret: true
+ multiline: true
+ order: 4
+ - title: "Password Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "tunnel_user_password"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and password authentication"
+ type: "string"
+ const: "SSH_PASSWORD_AUTH"
+ order: 0
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host"
+ type: "string"
+ order: 3
+ tunnel_user_password:
+ title: "Password"
+ description: "OS-level password for logging into the jump server host"
+ type: "string"
+ airbyte_secret: true
+ order: 4
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
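
The `tunnel_method` object above recurs across the database sources in this file (the MySQL entry below repeats it almost verbatim); each oneOf branch is keyed by the `tunnel_method` const. Illustrative payloads for the three branches, with hypothetical host and credential values:

```python
# Hypothetical tunnel_method payloads for the three oneOf branches above.
no_tunnel = {"tunnel_method": "NO_TUNNEL"}

ssh_key_auth = {
    "tunnel_method": "SSH_KEY_AUTH",
    "tunnel_host": "bastion.example.com",  # example host, not from the spec
    "tunnel_port": 22,
    "tunnel_user": "airbyte",
    "ssh_key": "-----BEGIN RSA PRIVATE KEY-----\n...",  # truncated placeholder
}

password_auth = {
    "tunnel_method": "SSH_PASSWORD_AUTH",
    "tunnel_host": "bastion.example.com",
    "tunnel_port": 22,
    "tunnel_user": "airbyte",
    "tunnel_user_password": "example-password",  # example value only
}
```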
+- dockerImage: "airbyte/source-microsoft-teams:0.2.2"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/microsoft-teams"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Microsoft Teams Spec"
+ type: "object"
+ required:
+ - "tenant_id"
+ - "client_id"
+ - "client_secret"
+ - "period"
+ additionalProperties: false
+ properties:
+ tenant_id:
+ title: "Directory (tenant) ID"
+ type: "string"
+ description: "Directory (tenant) ID"
+ client_id:
+ title: "Application (client) ID"
+ type: "string"
+ description: "Application (client) ID"
+ client_secret:
+ title: "Client Secret"
+ type: "string"
+ description: "Client secret"
+ airbyte_secret: true
+ period:
+ type: "string"
+ description: "Specifies the length of time over which the Team Device Report\
+ \ stream is aggregated. The supported values are: D7, D30, D90, and D180."
+ examples:
+ - "D7"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-mixpanel:0.1.1"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/mixpanel"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Source Mixpanel Spec"
+ type: "object"
+ required:
+ - "api_secret"
+ additionalProperties: true
+ properties:
+ api_secret:
+ type: "string"
+ description: "Mixpanel API Secret. See the docs for more information on how to obtain this key."
+ airbyte_secret: true
+ attribution_window:
+ type: "integer"
+ description: "Latency minimum number of days to look-back to account for\
+ \ delays in attributing accurate results. Default attribution window is\
+ \ 5 days."
+ default: 5
+ date_window_size:
+ type: "integer"
+ description: "Number of days for date window looping through transactional\
+ \ endpoints with from_date and to_date. Default date_window_size is 30\
+ \ days. Clients with large volumes of events may want to decrease this\
+ \ to 14, 7, or even down to 1-2 days."
+ default: 30
+ project_timezone:
+ type: "string"
+ description: "Time zone in which integer date times are stored. The project\
+ \ timezone may be found in the project settings in the Mixpanel console."
+ default: "US/Pacific"
+ examples:
+ - "US/Pacific"
+ - "UTC"
+ select_properties_by_default:
+ type: "boolean"
+ description: "Setting this config parameter to true ensures that new properties\
+ \ on events and engage records are captured. Otherwise new properties\
+ \ will be ignored"
+ default: true
+ start_date:
+ type: "string"
+ description: "The default value to use if no bookmark exists for an endpoint.\
+ \ Default is 1 year ago."
+ examples:
+ - "2021-11-16"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}(T[0-9]{2}:[0-9]{2}:[0-9]{2}Z)?$"
+ region:
+ type: "string"
+ enum:
+ - "US"
+ - "EU"
+ default: "US"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
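
The `date_window_size` option above describes chunked replication: the overall date range is walked in fixed-size from_date/to_date windows. A small sketch of that windowing idea, not the connector's actual implementation:

```python
from datetime import date, timedelta
from typing import Iterator, Tuple

def date_windows(start: date, end: date,
                 window_days: int = 30) -> Iterator[Tuple[date, date]]:
    """Split a replication range into inclusive from_date/to_date chunks of
    at most window_days days each (illustrative only)."""
    cursor = start
    step = timedelta(days=window_days)
    while cursor <= end:
        chunk_end = min(cursor + step - timedelta(days=1), end)
        yield cursor, chunk_end
        cursor = chunk_end + timedelta(days=1)

# With the default 30-day window this yields three chunks for Nov-Dec 2021.
for frm, to in date_windows(date(2021, 11, 1), date(2021, 12, 31)):
    print(frm.isoformat(), "->", to.isoformat())
```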
+- dockerImage: "airbyte/source-mongodb-v2:0.1.3"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/mongodb-v2"
+ changelogUrl: "https://docs.airbyte.io/integrations/sources/mongodb-v2"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "MongoDb Source Spec"
+ type: "object"
+ required:
+ - "database"
+ additionalProperties: true
+ properties:
+ instance_type:
+ type: "object"
+ title: "MongoDb instance type"
+ description: "MongoDb instance to connect to. For MongoDB Atlas and Replica\
+ \ Set TLS connection is used by default."
+ order: 0
+ oneOf:
+ - title: "Standalone MongoDb Instance"
+ required:
+ - "instance"
+ - "host"
+ - "port"
+ properties:
+ instance:
+ type: "string"
+ enum:
+ - "standalone"
+ default: "standalone"
+ host:
+ title: "Host"
+ type: "string"
+ description: "Host of a Mongo database to be replicated."
+ order: 0
+ port:
+ title: "Port"
+ type: "integer"
+ description: "Port of a Mongo database to be replicated."
+ minimum: 0
+ maximum: 65536
+ default: 27017
+ examples:
+ - "27017"
+ order: 1
+ tls:
+ title: "TLS connection"
+ type: "boolean"
+ description: "Indicates whether TLS encryption protocol will be used\
+ \ to connect to MongoDB. It is recommended to use TLS connection\
+ \ if possible. For more information see documentation."
+ default: false
+ order: 2
+ - title: "Replica Set"
+ required:
+ - "instance"
+ - "server_addresses"
+ properties:
+ instance:
+ type: "string"
+ enum:
+ - "replica"
+ default: "replica"
+ server_addresses:
+ title: "Server addresses"
+ type: "string"
+ description: "The members of a replica set. Please specify `host`:`port`\
+ \ of each member, separated by commas."
+ examples:
+ - "host1:27017,host2:27017,host3:27017"
+ order: 0
+ replica_set:
+ title: "Replica Set"
+ type: "string"
+ description: "A replica set name."
+ order: 1
+ - title: "MongoDB Atlas"
+ additionalProperties: false
+ required:
+ - "instance"
+ - "cluster_url"
+ properties:
+ instance:
+ type: "string"
+ enum:
+ - "atlas"
+ default: "atlas"
+ cluster_url:
+ title: "Cluster URL"
+ type: "string"
+ description: "URL of a cluster to connect to."
+ order: 0
+ database:
+ title: "Database name"
+ type: "string"
+ description: "Database to be replicated."
+ order: 1
+ user:
+ title: "User"
+ type: "string"
+ description: "User"
+ order: 2
+ password:
+ title: "Password"
+ type: "string"
+ description: "Password"
+ airbyte_secret: true
+ order: 3
+ auth_source:
+ title: "Authentication source"
+ type: "string"
+ description: "Authentication source where user information is stored"
+ default: "admin"
+ examples:
+ - "admin"
+ order: 4
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
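
The three `instance_type` branches above differ mainly in how hosts are addressed. A rough sketch of how each branch could map onto a MongoDB connection string; this is simplified and not necessarily how the connector builds it:

```python
# Illustrative mapping from the instance_type oneOf branches to MongoDB URIs.
def mongo_uri(instance_type: dict, database: str) -> str:
    kind = instance_type["instance"]
    if kind == "standalone":
        scheme = "mongodb"
        hosts = f"{instance_type['host']}:{instance_type['port']}"
        suffix = "?tls=true" if instance_type.get("tls") else ""
    elif kind == "replica":
        scheme = "mongodb"
        hosts = instance_type["server_addresses"]
        suffix = (f"?replicaSet={instance_type['replica_set']}"
                  if instance_type.get("replica_set") else "")
    elif kind == "atlas":
        scheme = "mongodb+srv"  # Atlas clusters use SRV-based URLs
        hosts = instance_type["cluster_url"]
        suffix = ""
    else:
        raise ValueError(f"unknown instance type: {kind}")
    return f"{scheme}://{hosts}/{database}{suffix}"

print(mongo_uri({"instance": "replica",
                 "server_addresses": "host1:27017,host2:27017",
                 "replica_set": "rs0"}, "mydb"))
```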
+- dockerImage: "airbyte/source-mysql:0.4.8"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/mysql"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "MySql Source Spec"
+ type: "object"
+ required:
+ - "host"
+ - "port"
+ - "database"
+ - "username"
+ - "replication_method"
+ additionalProperties: false
+ properties:
+ host:
+ description: "Hostname of the database."
+ type: "string"
+ order: 0
+ port:
+ description: "Port of the database."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 3306
+ examples:
+ - "3306"
+ order: 1
+ database:
+ description: "Name of the database."
+ type: "string"
+ order: 2
+ username:
+ description: "Username to use to access the database."
+ type: "string"
+ order: 3
+ password:
+ description: "Password associated with the username."
+ type: "string"
+ airbyte_secret: true
+ order: 4
+ jdbc_url_params:
+ description: "Additional properties to pass to the jdbc url string when\
+ \ connecting to the database formatted as 'key=value' pairs separated\
+ \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)"
+ type: "string"
+ order: 5
+ ssl:
+ title: "SSL Connection"
+ description: "Encrypt data using SSL."
+ type: "boolean"
+ default: true
+ order: 7
+ replication_method:
+ type: "string"
+ title: "Replication Method"
+ description: "Replication method to use for extracting data from the database.\
+ \ STANDARD replication requires no setup on the DB side but will not be\
+ \ able to represent deletions incrementally. CDC uses the Binlog to detect\
+ \ inserts, updates, and deletes. This needs to be configured on the source\
+ \ database itself."
+ order: 6
+ default: "STANDARD"
+ enum:
+ - "STANDARD"
+ - "CDC"
+ tunnel_method:
+ type: "object"
+ title: "SSH Tunnel Method"
+ description: "Whether to initiate an SSH tunnel before connecting to the\
+ \ database, and if so, which kind of authentication to use."
+ oneOf:
+ - title: "No Tunnel"
+ required:
+ - "tunnel_method"
+ properties:
+ tunnel_method:
+ description: "No ssh tunnel needed to connect to database"
+ type: "string"
+ const: "NO_TUNNEL"
+ order: 0
+ - title: "SSH Key Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "ssh_key"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and ssh key"
+ type: "string"
+ const: "SSH_KEY_AUTH"
+ order: 0
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host."
+ type: "string"
+ order: 3
+ ssh_key:
+ title: "SSH Private Key"
+ description: "OS-level user account ssh key credentials in RSA PEM\
+ \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )"
+ type: "string"
+ airbyte_secret: true
+ multiline: true
+ order: 4
+ - title: "Password Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "tunnel_user_password"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and password authentication"
+ type: "string"
+ const: "SSH_PASSWORD_AUTH"
+ order: 0
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host"
+ type: "string"
+ order: 3
+ tunnel_user_password:
+ title: "Password"
+ description: "OS-level password for logging into the jump server host"
+ type: "string"
+ airbyte_secret: true
+ order: 4
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-okta:0.1.2"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/okta"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Okta Spec"
+ type: "object"
+ required:
+ - "token"
+ - "base_url"
+ additionalProperties: false
+ properties:
+ token:
+ type: "string"
+ title: "API Token"
+ description: "A Okta token. See the docs for instructions on how to generate it."
+ airbyte_secret: true
+ base_url:
+ type: "string"
+ title: "Base URL"
+ description: "The Okta base URL."
+ airbyte_secret: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-onesignal:0.1.0"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/onesignal"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "OneSignal Source Spec"
+ type: "object"
+ required:
+ - "user_auth_key"
+ - "start_date"
+ - "outcome_names"
+ additionalProperties: false
+ properties:
+ user_auth_key:
+ type: "string"
+ description: "OneSignal User Auth Key, see the docs for more information on how to obtain this key."
+ airbyte_secret: true
+ start_date:
+ type: "string"
+ description: "The date from which you'd like to replicate data for OneSignal\
+ \ API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this\
+ \ date will be replicated."
+ examples:
+ - "2020-11-16T00:00:00Z"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ outcome_names:
+ type: "string"
+ description: "Comma-separated list of names and the value (sum/count) for\
+ \ the returned outcome data. See the docs for more details"
+ examples:
+ - "os__session_duration.count,os__click.count,CustomOutcomeName.sum"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-oracle:0.3.8"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/oracle"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Oracle Source Spec"
+ type: "object"
+ required:
+ - "host"
+ - "port"
+ - "sid"
+ - "username"
+ additionalProperties: false
+ properties:
+ host:
+ title: "Host"
+ description: "Hostname of the database."
+ type: "string"
+ port:
+ title: "Port"
+ description: "Port of the database.\nOracle Corporations recommends the\
+ \ following port numbers:\n1521 - Default listening port for client connections\
+ \ to the listener. \n2484 - Recommended and officially registered listening\
+ \ port for client connections to the listener using TCP/IP with SSL"
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 1521
+ sid:
+ title: "SID (Oracle System Identifier)"
+ type: "string"
+ username:
+ title: "User"
+ description: "Username to use to access the database."
+ type: "string"
+ password:
+ title: "Password"
+ description: "Password associated with the username."
+ type: "string"
+ airbyte_secret: true
+ schemas:
+ title: "Schemas"
+ description: "List of schemas to sync from. Defaults to user. Case sensitive."
+ type: "array"
+ items:
+ type: "string"
+ minItems: 1
+ uniqueItems: true
+ encryption:
+ title: "Encryption"
+ type: "object"
+ description: "Encryption method to use when communicating with the database"
+ order: 6
+ oneOf:
+ - title: "Unencrypted"
+ additionalProperties: false
+ description: "Data transfer will not be encrypted."
+ required:
+ - "encryption_method"
+ properties:
+ encryption_method:
+ type: "string"
+ const: "unencrypted"
+ enum:
+ - "unencrypted"
+ default: "unencrypted"
+ - title: "Native Network Ecryption (NNE)"
+ additionalProperties: false
+ description: "Native network encryption gives you the ability to encrypt\
+ \ database connections, without the configuration overhead of TCP/IP\
+ \ and SSL/TLS and without the need to open and listen on different ports."
+ required:
+ - "encryption_method"
+ properties:
+ encryption_method:
+ type: "string"
+ const: "client_nne"
+ enum:
+ - "client_nne"
+ default: "client_nne"
+ encryption_algorithm:
+ type: "string"
+ description: "This parameter defines the encryption algorithm to be\
+ \ used"
+ title: "Encryption Algorithm"
+ default: "AES256"
+ enum:
+ - "AES256"
+ - "RC4_56"
+ - "3DES168"
+ - title: "TLS Encrypted (verify certificate)"
+ additionalProperties: false
+ description: "Verify and use the cert provided by the server."
+ required:
+ - "encryption_method"
+ - "ssl_certificate"
+ properties:
+ encryption_method:
+ type: "string"
+ const: "encrypted_verify_certificate"
+ enum:
+ - "encrypted_verify_certificate"
+ default: "encrypted_verify_certificate"
+ ssl_certificate:
+ title: "SSL PEM file"
+ description: "Privacy Enhanced Mail (PEM) files are concatenated certificate\
+ \ containers frequently used in certificate installations"
+ type: "string"
+ airbyte_secret: true
+ multiline: true
+ order: 4
+ tunnel_method:
+ type: "object"
+ title: "SSH Tunnel Method"
+ description: "Whether to initiate an SSH tunnel before connecting to the\
+ \ database, and if so, which kind of authentication to use."
+ oneOf:
+ - title: "No Tunnel"
+ required:
+ - "tunnel_method"
+ properties:
+ tunnel_method:
+ description: "No ssh tunnel needed to connect to database"
+ type: "string"
+ const: "NO_TUNNEL"
+ order: 0
+ - title: "SSH Key Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "ssh_key"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and ssh key"
+ type: "string"
+ const: "SSH_KEY_AUTH"
+ order: 0
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host."
+ type: "string"
+ order: 3
+ ssh_key:
+ title: "SSH Private Key"
+ description: "OS-level user account ssh key credentials in RSA PEM\
+ \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )"
+ type: "string"
+ airbyte_secret: true
+ multiline: true
+ order: 4
+ - title: "Password Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "tunnel_user_password"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and password authentication"
+ type: "string"
+ const: "SSH_PASSWORD_AUTH"
+ order: 0
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host"
+ type: "string"
+ order: 3
+ tunnel_user_password:
+ title: "Password"
+ description: "OS-level password for logging into the jump server host"
+ type: "string"
+ airbyte_secret: true
+ order: 4
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
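+# Illustrative sketch, not generated output (host, user, and key values are invented):
+# each tunnel_method option above is discriminated by its "tunnel_method" const, so a
+# connector config that selects SSH key authentication carries a fragment like:
+#   tunnel_method:
+#     tunnel_method: "SSH_KEY_AUTH"
+#     tunnel_host: "bastion.example.com"
+#     tunnel_port: 22
+#     tunnel_user: "airbyte"
+#     ssh_key: "-----BEGIN RSA PRIVATE KEY-----\n..."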
+- dockerImage: "airbyte/source-paypal-transaction:0.1.1"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/paypal-transactions"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Paypal Transaction Search"
+ type: "object"
+ required:
+ - "client_id"
+ - "secret"
+ - "start_date"
+ - "is_sandbox"
+ additionalProperties: true
+ properties:
+ client_id:
+ title: "Client ID"
+ type: "string"
+ description: "The Paypal Client ID for API credentials"
+ secret:
+ title: "Secret"
+ type: "string"
+ description: "The Secret for a given Client ID."
+ airbyte_secret: true
+ start_date:
+ type: "string"
+ title: "Start Date"
+ description: "Start Date for data extraction in ISO format. Date must be in range from 3 years till 12 hrs before\
+ \ present time"
+ examples:
+ - "2021-06-11T23:59:59-00:00"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}[+-][0-9]{2}:[0-9]{2}$"
+ is_sandbox:
+ title: "Is Sandbox"
+ description: "Whether or not to Sandbox or Production environment to extract\
+ \ data from"
+ type: "boolean"
+ default: false
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-pipedrive:0.1.6"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/pipedrive"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Pipedrive Spec"
+ type: "object"
+ required:
+ - "replication_start_date"
+ additionalProperties: true
+ properties:
+ authorization:
+ type: "object"
+ title: "Authentication Type"
+ oneOf:
+ - title: "Sign in via Pipedrive (OAuth)"
+ type: "object"
+ required:
+ - "auth_type"
+ - "client_id"
+ - "client_secret"
+ - "refresh_token"
+ properties:
+ auth_type:
+ type: "string"
+ const: "Client"
+ enum:
+ - "Client"
+ default: "Client"
+ order: 0
+ client_id:
+ title: "Client ID"
+ type: "string"
+ description: "The Client ID of your developer application"
+ airbyte_secret: true
+ client_secret:
+ title: "Client Secret"
+ type: "string"
+ description: "The client secret of your developer application"
+ airbyte_secret: true
+ access_token:
+ title: "Access Token"
+ type: "string"
+ description: "An access token generated using the above client ID\
+ \ and secret"
+ airbyte_secret: true
+ refresh_token:
+ title: "Refresh Token"
+ type: "string"
+ description: "A refresh token generated using the above client ID\
+ \ and secret"
+ airbyte_secret: true
+ - type: "object"
+ title: "API Key Authentication"
+ required:
+ - "auth_type"
+ - "api_token"
+ properties:
+ auth_type:
+ type: "string"
+ const: "Token"
+ enum:
+ - "Token"
+ default: "Token"
+ order: 0
+ api_token:
+ title: "API Token"
+ type: "string"
+ description: "Pipedrive API Token"
+ airbyte_secret: true
+ replication_start_date:
+ title: "Replication Start Date"
+ description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\
+ \ data before this date will not be replicated. When specified and not\
+ \ None, then stream will behave as incremental"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ examples:
+ - "2017-01-25T00:00:00Z"
+ type: "string"
+ supportsIncremental: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes:
+ - "append"
+- dockerImage: "airbyte/source-plaid:0.2.1"
+ spec:
+ documentationUrl: "https://plaid.com/docs/api/"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ type: "object"
+ required:
+ - "access_token"
+ - "api_key"
+ - "client_id"
+ additionalProperties: false
+ properties:
+ access_token:
+ type: "string"
+ title: "Access Token"
+ description: "The end-user's Link access token."
+ api_key:
+ title: "API Key"
+ type: "string"
+ description: "The Plaid API key to use to hit the API."
+ airbyte_secret: true
+ client_id:
+ title: "Client ID"
+ type: "string"
+ description: "The Plaid client id"
+ plaid_env:
+ title: "Plaid Environment"
+ type: "string"
+ enum:
+ - "sandbox"
+ - "development"
+ - "production"
+ description: "The Plaid environment"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-pokeapi:0.1.1"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/pokeapi"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Pokeapi Spec"
+ type: "object"
+ required:
+ - "pokemon_name"
+ additionalProperties: false
+ properties:
+ pokemon_name:
+ type: "string"
+ description: "Pokemon requested from the API."
+ pattern: "^[a-z0-9_\\-]+$"
+ examples:
+ - "ditto, luxray, snorlax"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-posthog:0.1.4"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/posthog"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "PostHog Spec"
+ type: "object"
+ required:
+ - "api_key"
+ - "start_date"
+ additionalProperties: false
+ properties:
+ start_date:
+ title: "Start Date"
+ type: "string"
+ description: "The date from which you'd like to replicate the data"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ examples:
+ - "2021-01-01T00:00:00Z"
+ api_key:
+ type: "string"
+ airbyte_secret: true
+ description: "API Key. See the docs for information on how to generate this key."
+ base_url:
+ type: "string"
+ default: "https://app.posthog.com"
+ description: "Base PostHog url. Defaults to PostHog Cloud (https://app.posthog.com)."
+ examples:
+ - "https://posthog.example.com"
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-postgres:0.3.11"
+ spec:
+ documentationUrl: "https://docs.airbyte.io/integrations/sources/postgres"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Postgres Source Spec"
+ type: "object"
+ required:
+ - "host"
+ - "port"
+ - "database"
+ - "username"
+ additionalProperties: false
+ properties:
+ host:
+ title: "Host"
+ description: "Hostname of the database."
+ type: "string"
+ order: 0
+ port:
+ title: "Port"
+ description: "Port of the database."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 5432
+ examples:
+ - "5432"
+ order: 1
+ database:
+ title: "DB Name"
+ description: "Name of the database."
+ type: "string"
+ order: 2
+ username:
+ title: "User"
+ description: "Username to use to access the database."
+ type: "string"
+ order: 3
+ password:
+ title: "Password"
+ description: "Password associated with the username."
+ type: "string"
+ airbyte_secret: true
+ order: 4
+ ssl:
+ title: "Connect using SSL"
+ description: "Encrypt client/server communications for increased security."
+ type: "boolean"
+ default: false
+ order: 5
+ replication_method:
+ type: "object"
+ title: "Replication Method"
+ description: "Replication method to use for extracting data from the database."
+ order: 6
+ oneOf:
+ - title: "Standard"
+ additionalProperties: false
+ description: "Standard replication requires no setup on the DB side but\
+ \ will not be able to represent deletions incrementally."
+ required:
+ - "method"
+ properties:
+ method:
+ type: "string"
+ const: "Standard"
+ enum:
+ - "Standard"
+ default: "Standard"
+ order: 0
+ - title: "Logical Replication (CDC)"
+ additionalProperties: false
+ description: "Logical replication uses the Postgres write-ahead log (WAL)\
+ \ to detect inserts, updates, and deletes. This needs to be configured\
+ \ on the source database itself. Only available on Postgres 10 and above.\
+ \ Read the Postgres Source docs for more information."
+ required:
+ - "method"
+ - "replication_slot"
+ - "publication"
+ properties:
+ method:
+ type: "string"
+ const: "CDC"
+ enum:
+ - "CDC"
+ default: "CDC"
+ order: 0
+ plugin:
+ type: "string"
+ description: "A logical decoding plug-in installed on the PostgreSQL\
+ \ server. `pgoutput` plug-in is used by default.\nIf replication\
+ \ table contains a lot of big jsonb values it is recommended to\
+ \ use `wal2json` plug-in. For more information about `wal2json`\
+ \ plug-in read Postgres Source docs."
+ enum:
+ - "pgoutput"
+ - "wal2json"
+ default: "pgoutput"
+ order: 1
+ replication_slot:
+ type: "string"
+ description: "A plug-in logical replication slot."
+ order: 2
+ publication:
+ type: "string"
+ description: "A Postgres publication used for consuming changes."
+ order: 3
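+# Illustrative sketch, not generated output (slot and publication names are invented):
+# the CDC option above expects the replication slot and publication to already exist
+# on the source database, created along the lines of:
+#   SELECT pg_create_logical_replication_slot('airbyte_slot', 'pgoutput');
+#   CREATE PUBLICATION airbyte_publication FOR ALL TABLES;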
+ tunnel_method:
+ type: "object"
+ title: "SSH Tunnel Method"
+ description: "Whether to initiate an SSH tunnel before connecting to the\
+ \ database, and if so, which kind of authentication to use."
+ oneOf:
+ - title: "No Tunnel"
+ required:
+ - "tunnel_method"
+ properties:
+ tunnel_method:
+ description: "No ssh tunnel needed to connect to database"
+ type: "string"
+ const: "NO_TUNNEL"
+ order: 0
+ - title: "SSH Key Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "ssh_key"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and ssh key"
+ type: "string"
+ const: "SSH_KEY_AUTH"
+ order: 0
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host."
+ type: "string"
+ order: 3
+ ssh_key:
+ title: "SSH Private Key"
+ description: "OS-level user account ssh key credentials for logging\
+ \ into the jump server host."
+ type: "string"
+ airbyte_secret: true
+ multiline: true
+ order: 4
+ - title: "Password Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "tunnel_user_password"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and password authentication"
+ type: "string"
+ const: "SSH_PASSWORD_AUTH"
+ order: 0
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host"
+ type: "string"
+ order: 3
+ tunnel_user_password:
+ title: "Password"
+ description: "OS-level password for logging into the jump server host"
+ type: "string"
+ airbyte_secret: true
+ order: 4
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-prestashop:0.1.0"
+ spec:
+ documentationUrl: "https://docsurl.com"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "PrestaShop Spec"
+ type: "object"
+ required:
+ - "url"
+ - "access_key"
+ additionalProperties: false
+ properties:
+ url:
+ type: "string"
+ description: "Shop URL without trailing slash (domain name or IP address)"
+ access_key:
+ type: "string"
+ description: "Your PrestaShop access key. See the docs for info on how to obtain this."
+ airbyte_secret: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-quickbooks-singer:0.1.3"
+ spec:
+ documentationUrl: "https://docsurl.com"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Source Quickbooks Singer Spec"
+ type: "object"
+ required:
+ - "client_id"
+ - "client_secret"
+ - "refresh_token"
+ - "realm_id"
+ - "user_agent"
+ - "start_date"
+ - "sandbox"
+ additionalProperties: false
+ properties:
+ client_id:
+ type: "string"
+ description: "Identifies which app is making the request. Obtain this value\
+ \ from the Keys tab on the app profile via My Apps on the developer site.\
+ \ There are two versions of this key: development and production"
+ client_secret:
+ description: " Obtain this value from the Keys tab on the app profile via\
+ \ My Apps on the developer site. There are two versions of this key: development\
+ \ and production"
+ type: "string"
+ airbyte_secret: true
+ refresh_token:
+ description: "A token used when refreshing the access token."
+ type: "string"
+ airbyte_secret: true
+ realm_id:
+ description: "Labeled Company ID. The Make API Calls panel is populated\
+ \ with the realm id and the current access token"
+ type: "string"
+ airbyte_secret: true
+ user_agent:
+ type: "string"
+ description: "Process and email for API logging purposes. Example: tap-quickbooks\
+ \ + * Specs are stored in a separate file from the definitions in an effort to keep the definitions + * yaml files human-readable and easily-editable, as specs can be rather large. + *
+ * Specs are fetched from the GCS spec cache bucket, so if any specs are missing from the bucket
+ * then this will fail. Note that this script only pulls specs from the bucket cache; it never
+ * pushes specs to the bucket. Since this script runs at build time, the decision was to depend on
+ * the bucket cache rather than running a docker container to fetch the spec during the build which
+ * could be slow and unwieldy. If there is a failure, check the bucket cache and figure out how to
+ * get the correct spec in there.
+ */
+public class SeedConnectorSpecGenerator {
+
+ private static final String DOCKER_REPOSITORY_FIELD = "dockerRepository";
+ private static final String DOCKER_IMAGE_TAG_FIELD = "dockerImageTag";
+ private static final String DOCKER_IMAGE_FIELD = "dockerImage";
+ private static final String SPEC_FIELD = "spec";
+ private static final String SPEC_BUCKET_NAME = new EnvConfigs().getSpecCacheBucket();
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(SeedConnectorSpecGenerator.class);
+
+ private static final Option SEED_ROOT_OPTION = Option.builder("s").longOpt("seed-root").hasArg(true).required(true)
+ .desc("path to where seed resource files are stored").build();
+ private static final Options OPTIONS = new Options().addOption(SEED_ROOT_OPTION);
+
+ private final GcsBucketSpecFetcher bucketSpecFetcher;
+
+ public SeedConnectorSpecGenerator(final GcsBucketSpecFetcher bucketSpecFetcher) {
+ this.bucketSpecFetcher = bucketSpecFetcher;
+ }
+
+ public static void main(final String[] args) throws Exception {
+ final CommandLine parsed = Clis.parse(args, OPTIONS);
+ final Path outputRoot = Path.of(parsed.getOptionValue(SEED_ROOT_OPTION.getOpt()));
+
+ final GcsBucketSpecFetcher bucketSpecFetcher = new GcsBucketSpecFetcher(StorageOptions.getDefaultInstance().getService(), SPEC_BUCKET_NAME);
+ final SeedConnectorSpecGenerator seedConnectorSpecGenerator = new SeedConnectorSpecGenerator(bucketSpecFetcher);
+ seedConnectorSpecGenerator.run(outputRoot, SeedConnectorType.SOURCE);
+ seedConnectorSpecGenerator.run(outputRoot, SeedConnectorType.DESTINATION);
+ }
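+
+ // Hypothetical invocation for illustration (classpath setup omitted; the seed path is
+ // the seed resource directory targeted by this change):
+ //   java io.airbyte.config.specs.SeedConnectorSpecGenerator \
+ //     --seed-root airbyte-config/init/src/main/resources/seed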
+
+ public void run(final Path seedRoot, final SeedConnectorType seedConnectorType) throws IOException {
+ LOGGER.info("Updating seeded {} definition specs if necessary...", seedConnectorType.name());
+
+ final JsonNode seedDefinitionsJson = yamlToJson(seedRoot, seedConnectorType.getDefinitionFileName());
+ final JsonNode seedSpecsJson = yamlToJson(seedRoot, seedConnectorType.getSpecFileName());
+
+ final List