diff --git a/docs/01-homepage.md b/docs/01-homepage.md new file mode 100644 index 000000000000..7f9bf5a57222 --- /dev/null +++ b/docs/01-homepage.md @@ -0,0 +1,29 @@ +--- +slug: / +--- + +# Welcome to Airbyte Docs + +Whether you are an Airbyte user or contributor, we have docs for you! + +### For Airbyte Cloud users + +Browse the [connector catalog](02-integrations/README.md) to find the connector you want. If the connector is not yet supported on Airbyte Cloud, consider using [Airbyte Open Source](#for-airbyte-open-source-users). + +Next, check out the [step-by-step tutorial](03-cloud/README.md) to sign up for Airbyte Cloud, understand Airbyte [concepts](03-cloud/01-core-concepts.md), and run your first sync. Then learn how to [manage your Airbyte Cloud account](03-cloud/02-managing-airbyte-cloud.md). + +### For Airbyte Open Source users + +Browse the [connector catalog](02-integrations/README.md) to find the connector you want. If the connector is not yet supported on Airbyte Open Source, [build your own connector](08-connector-development/README.md). + +Next, check out the [Airbyte Open Source QuickStart](04-quickstart/01-deploy-airbyte.md). Then learn how to [deploy](05-deploying-airbyte/01-local-deployment.md) and [manage](06-operator-guides/01-upgrading-airbyte.md) Airbyte Open Source in your cloud infrastructure. + + +To get help with Airbyte deployments, check out the [Troubleshooting & FAQ](07-troubleshooting/README.md), chat with Support on [Discourse](https://discuss.airbyte.io/), or join us on [Community Slack](https://slack.airbyte.io/). + +### For Airbyte contributors + +To contribute to Airbyte code, connectors, and documentation, refer to our [Contributing Guide](09-contributing-to-airbyte/README.md). + +[![GitHub stars](https://img.shields.io/github/stars/airbytehq/airbyte?style=social&label=Star&maxAge=2592000)](https://GitHub.com/airbytehq/airbyte/stargazers/) [![GitHub Workflow Status](https://img.shields.io/github/workflow/status/airbytehq/airbyte/Airbyte%20CI)](https://github.com/airbytehq/airbyte/actions/workflows/gradle.yml) [![License](https://img.shields.io/static/v1?label=license&message=MIT&color=brightgreen)](https://github.com/airbytehq/airbyte/tree/a9b1c6c0420550ad5069aca66c295223e0d05e27/LICENSE/README.md) [![License](https://img.shields.io/static/v1?label=license&message=ELv2&color=brightgreen)](https://github.com/airbytehq/airbyte/tree/a9b1c6c0420550ad5069aca66c295223e0d05e27/LICENSE/README.md) + diff --git a/docs/integrations/sources/README.md b/docs/02-integrations/01-sources/README.md similarity index 100% rename from docs/integrations/sources/README.md rename to docs/02-integrations/01-sources/README.md diff --git a/docs/integrations/sources/airtable.md b/docs/02-integrations/01-sources/airtable.md similarity index 100% rename from docs/integrations/sources/airtable.md rename to docs/02-integrations/01-sources/airtable.md diff --git a/docs/integrations/sources/amazon-ads.md b/docs/02-integrations/01-sources/amazon-ads.md similarity index 100% rename from docs/integrations/sources/amazon-ads.md rename to docs/02-integrations/01-sources/amazon-ads.md diff --git a/docs/integrations/sources/amazon-seller-partner.md b/docs/02-integrations/01-sources/amazon-seller-partner.md similarity index 100% rename from docs/integrations/sources/amazon-seller-partner.md rename to docs/02-integrations/01-sources/amazon-seller-partner.md diff --git a/docs/integrations/sources/amazon-sqs.md b/docs/02-integrations/01-sources/amazon-sqs.md similarity index 100% rename
from docs/integrations/sources/amazon-sqs.md rename to docs/02-integrations/01-sources/amazon-sqs.md diff --git a/docs/integrations/sources/amplitude.md b/docs/02-integrations/01-sources/amplitude.md similarity index 100% rename from docs/integrations/sources/amplitude.md rename to docs/02-integrations/01-sources/amplitude.md diff --git a/docs/integrations/sources/apify-dataset.md b/docs/02-integrations/01-sources/apify-dataset.md similarity index 100% rename from docs/integrations/sources/apify-dataset.md rename to docs/02-integrations/01-sources/apify-dataset.md diff --git a/docs/integrations/sources/appstore.md b/docs/02-integrations/01-sources/appstore.md similarity index 100% rename from docs/integrations/sources/appstore.md rename to docs/02-integrations/01-sources/appstore.md diff --git a/docs/integrations/sources/asana.md b/docs/02-integrations/01-sources/asana.md similarity index 100% rename from docs/integrations/sources/asana.md rename to docs/02-integrations/01-sources/asana.md diff --git a/docs/integrations/sources/aws-cloudtrail.md b/docs/02-integrations/01-sources/aws-cloudtrail.md similarity index 100% rename from docs/integrations/sources/aws-cloudtrail.md rename to docs/02-integrations/01-sources/aws-cloudtrail.md diff --git a/docs/integrations/sources/azure-table.md b/docs/02-integrations/01-sources/azure-table.md similarity index 100% rename from docs/integrations/sources/azure-table.md rename to docs/02-integrations/01-sources/azure-table.md diff --git a/docs/integrations/sources/bamboo-hr.md b/docs/02-integrations/01-sources/bamboo-hr.md similarity index 100% rename from docs/integrations/sources/bamboo-hr.md rename to docs/02-integrations/01-sources/bamboo-hr.md diff --git a/docs/integrations/sources/bigcommerce.md b/docs/02-integrations/01-sources/bigcommerce.md similarity index 100% rename from docs/integrations/sources/bigcommerce.md rename to docs/02-integrations/01-sources/bigcommerce.md diff --git a/docs/integrations/sources/bigquery.md b/docs/02-integrations/01-sources/bigquery.md similarity index 100% rename from docs/integrations/sources/bigquery.md rename to docs/02-integrations/01-sources/bigquery.md diff --git a/docs/integrations/sources/bing-ads.md b/docs/02-integrations/01-sources/bing-ads.md similarity index 100% rename from docs/integrations/sources/bing-ads.md rename to docs/02-integrations/01-sources/bing-ads.md diff --git a/docs/integrations/sources/braintree.md b/docs/02-integrations/01-sources/braintree.md similarity index 100% rename from docs/integrations/sources/braintree.md rename to docs/02-integrations/01-sources/braintree.md diff --git a/docs/integrations/sources/cart.md b/docs/02-integrations/01-sources/cart.md similarity index 100% rename from docs/integrations/sources/cart.md rename to docs/02-integrations/01-sources/cart.md diff --git a/docs/integrations/sources/chargebee.md b/docs/02-integrations/01-sources/chargebee.md similarity index 100% rename from docs/integrations/sources/chargebee.md rename to docs/02-integrations/01-sources/chargebee.md diff --git a/docs/integrations/sources/chargify.md b/docs/02-integrations/01-sources/chargify.md similarity index 100% rename from docs/integrations/sources/chargify.md rename to docs/02-integrations/01-sources/chargify.md diff --git a/docs/integrations/sources/chartmogul.md b/docs/02-integrations/01-sources/chartmogul.md similarity index 100% rename from docs/integrations/sources/chartmogul.md rename to docs/02-integrations/01-sources/chartmogul.md diff --git 
a/docs/integrations/sources/clickhouse.md b/docs/02-integrations/01-sources/clickhouse.md similarity index 100% rename from docs/integrations/sources/clickhouse.md rename to docs/02-integrations/01-sources/clickhouse.md diff --git a/docs/integrations/sources/close-com.md b/docs/02-integrations/01-sources/close-com.md similarity index 100% rename from docs/integrations/sources/close-com.md rename to docs/02-integrations/01-sources/close-com.md diff --git a/docs/integrations/sources/cockroachdb.md b/docs/02-integrations/01-sources/cockroachdb.md similarity index 100% rename from docs/integrations/sources/cockroachdb.md rename to docs/02-integrations/01-sources/cockroachdb.md diff --git a/docs/integrations/sources/commercetools.md b/docs/02-integrations/01-sources/commercetools.md similarity index 100% rename from docs/integrations/sources/commercetools.md rename to docs/02-integrations/01-sources/commercetools.md diff --git a/docs/integrations/sources/confluence.md b/docs/02-integrations/01-sources/confluence.md similarity index 100% rename from docs/integrations/sources/confluence.md rename to docs/02-integrations/01-sources/confluence.md diff --git a/docs/integrations/sources/customer-io.md b/docs/02-integrations/01-sources/customer-io.md similarity index 100% rename from docs/integrations/sources/customer-io.md rename to docs/02-integrations/01-sources/customer-io.md diff --git a/docs/integrations/sources/db2.md b/docs/02-integrations/01-sources/db2.md similarity index 100% rename from docs/integrations/sources/db2.md rename to docs/02-integrations/01-sources/db2.md diff --git a/docs/integrations/sources/delighted.md b/docs/02-integrations/01-sources/delighted.md similarity index 100% rename from docs/integrations/sources/delighted.md rename to docs/02-integrations/01-sources/delighted.md diff --git a/docs/integrations/sources/dixa.md b/docs/02-integrations/01-sources/dixa.md similarity index 100% rename from docs/integrations/sources/dixa.md rename to docs/02-integrations/01-sources/dixa.md diff --git a/docs/integrations/sources/dockerhub.md b/docs/02-integrations/01-sources/dockerhub.md similarity index 100% rename from docs/integrations/sources/dockerhub.md rename to docs/02-integrations/01-sources/dockerhub.md diff --git a/docs/integrations/sources/drift.md b/docs/02-integrations/01-sources/drift.md similarity index 100% rename from docs/integrations/sources/drift.md rename to docs/02-integrations/01-sources/drift.md diff --git a/docs/integrations/sources/drupal.md b/docs/02-integrations/01-sources/drupal.md similarity index 100% rename from docs/integrations/sources/drupal.md rename to docs/02-integrations/01-sources/drupal.md diff --git a/docs/integrations/sources/e2e-test.md b/docs/02-integrations/01-sources/e2e-test.md similarity index 100% rename from docs/integrations/sources/e2e-test.md rename to docs/02-integrations/01-sources/e2e-test.md diff --git a/docs/integrations/sources/exchangeratesapi.md b/docs/02-integrations/01-sources/exchangeratesapi.md similarity index 100% rename from docs/integrations/sources/exchangeratesapi.md rename to docs/02-integrations/01-sources/exchangeratesapi.md diff --git a/docs/integrations/sources/facebook-marketing.md b/docs/02-integrations/01-sources/facebook-marketing.md similarity index 100% rename from docs/integrations/sources/facebook-marketing.md rename to docs/02-integrations/01-sources/facebook-marketing.md diff --git a/docs/integrations/sources/facebook-pages.md b/docs/02-integrations/01-sources/facebook-pages.md similarity index 100% 
rename from docs/integrations/sources/facebook-pages.md rename to docs/02-integrations/01-sources/facebook-pages.md diff --git a/docs/integrations/sources/faker.md b/docs/02-integrations/01-sources/faker.md similarity index 100% rename from docs/integrations/sources/faker.md rename to docs/02-integrations/01-sources/faker.md diff --git a/docs/integrations/sources/file.md b/docs/02-integrations/01-sources/file.md similarity index 98% rename from docs/integrations/sources/file.md rename to docs/02-integrations/01-sources/file.md index d6c98e2465b1..64c134e3006e 100644 --- a/docs/integrations/sources/file.md +++ b/docs/02-integrations/01-sources/file.md @@ -61,7 +61,7 @@ Setup through Airbyte Cloud will be exactly the same as the open-source setup, e #### Provider Specific Information -* In case of GCS, it is necessary to provide the content of the service account keyfile to access private buckets. See settings of [BigQuery Destination](../destinations/bigquery.md) +* In case of GCS, it is necessary to provide the content of the service account keyfile to access private buckets. See settings of [BigQuery Destination](../02-destinations/bigquery.md) * In case of AWS S3, the pair of `aws_access_key_id` and `aws_secret_access_key` is necessary to access private S3 buckets. * In case of AzBlob, it is necessary to provide the `storage_account` in which the blob you want to access resides. Either `sas_token` [(info)](https://docs.microsoft.com/en-us/azure/storage/blobs/sas-service-create?tabs=dotnet) or `shared_key` [(info)](https://docs.microsoft.com/en-us/azure/storage/common/storage-account-keys-manage?tabs=azure-portal) is necessary to access private blobs. @@ -119,7 +119,7 @@ Please see (or add) more at `airbyte-integrations/connectors/source-file/integra In order to read large files from a remote location, this connector uses the [smart\_open](https://pypi.org/project/smart-open/) library. However, it is possible to switch to either [GCSFS](https://gcsfs.readthedocs.io/en/latest/) or [S3FS](https://s3fs.readthedocs.io/en/latest/) implementations as it is natively supported by the `pandas` library. This choice is made possible through the optional `reader_impl` parameter. -* Note that for local filesystem, the file probably have to be stored somewhere in the `/tmp/airbyte_local` folder with the same limitations as the [CSV Destination](../destinations/local-csv.md) so the `URL` should also starts with `/local/`. +* Note that for the local filesystem, the file probably has to be stored somewhere in the `/tmp/airbyte_local` folder with the same limitations as the [CSV Destination](../02-destinations/local-csv.md), so the `URL` should also start with `/local/`. * The JSON implementation needs to be tweaked in order to produce more complex catalog and is still in an experimental state: Simple JSON schemas should work at this point but may not be well handled when there are multiple layers of nesting. 
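To make the provider options and `reader_impl` parameter described in the bullets above more concrete, here is a minimal sketch of what a File source configuration for a private S3 bucket could look like, written as a Python dict. Only `aws_access_key_id`, `aws_secret_access_key`, and `reader_impl` come from the text above; every other key and value is an illustrative assumption and should be checked against the connector's actual specification.

```python
# Illustrative sketch only -- field names other than aws_access_key_id,
# aws_secret_access_key, and reader_impl are assumptions, not the connector spec.
file_source_config = {
    "dataset_name": "orders",      # assumed: name of the resulting stream
    "format": "csv",               # assumed: format of the remote file
    "url": "s3://my-private-bucket/exports/orders.csv",  # assumed example location
    "provider": {
        "storage": "S3",                      # assumed provider discriminator
        "aws_access_key_id": "<key id>",      # required for private S3 buckets
        "aws_secret_access_key": "<secret>",  # required for private S3 buckets
    },
    "reader_impl": "smart_open",   # optional; the text names smart_open, GCSFS, and S3FS
}
```

For GCS, the provider block would instead carry the service account keyfile content, and for AzBlob the `storage_account` plus either a `sas_token` or a `shared_key`, as described in the bullets above.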
## Changelog diff --git a/docs/integrations/sources/firebolt.md b/docs/02-integrations/01-sources/firebolt.md similarity index 100% rename from docs/integrations/sources/firebolt.md rename to docs/02-integrations/01-sources/firebolt.md diff --git a/docs/integrations/sources/flexport.md b/docs/02-integrations/01-sources/flexport.md similarity index 100% rename from docs/integrations/sources/flexport.md rename to docs/02-integrations/01-sources/flexport.md diff --git a/docs/integrations/sources/freshdesk.md b/docs/02-integrations/01-sources/freshdesk.md similarity index 100% rename from docs/integrations/sources/freshdesk.md rename to docs/02-integrations/01-sources/freshdesk.md diff --git a/docs/integrations/sources/freshsales.md b/docs/02-integrations/01-sources/freshsales.md similarity index 100% rename from docs/integrations/sources/freshsales.md rename to docs/02-integrations/01-sources/freshsales.md diff --git a/docs/integrations/sources/freshservice.md b/docs/02-integrations/01-sources/freshservice.md similarity index 100% rename from docs/integrations/sources/freshservice.md rename to docs/02-integrations/01-sources/freshservice.md diff --git a/docs/integrations/sources/github.md b/docs/02-integrations/01-sources/github.md similarity index 100% rename from docs/integrations/sources/github.md rename to docs/02-integrations/01-sources/github.md diff --git a/docs/integrations/sources/gitlab.md b/docs/02-integrations/01-sources/gitlab.md similarity index 100% rename from docs/integrations/sources/gitlab.md rename to docs/02-integrations/01-sources/gitlab.md diff --git a/docs/integrations/sources/google-ads.md b/docs/02-integrations/01-sources/google-ads.md similarity index 100% rename from docs/integrations/sources/google-ads.md rename to docs/02-integrations/01-sources/google-ads.md diff --git a/docs/integrations/sources/google-analytics-universal-analytics.md b/docs/02-integrations/01-sources/google-analytics-universal-analytics.md similarity index 100% rename from docs/integrations/sources/google-analytics-universal-analytics.md rename to docs/02-integrations/01-sources/google-analytics-universal-analytics.md diff --git a/docs/integrations/sources/google-analytics-v4.md b/docs/02-integrations/01-sources/google-analytics-v4.md similarity index 100% rename from docs/integrations/sources/google-analytics-v4.md rename to docs/02-integrations/01-sources/google-analytics-v4.md diff --git a/docs/integrations/sources/google-directory.md b/docs/02-integrations/01-sources/google-directory.md similarity index 100% rename from docs/integrations/sources/google-directory.md rename to docs/02-integrations/01-sources/google-directory.md diff --git a/docs/integrations/sources/google-search-console.md b/docs/02-integrations/01-sources/google-search-console.md similarity index 100% rename from docs/integrations/sources/google-search-console.md rename to docs/02-integrations/01-sources/google-search-console.md diff --git a/docs/integrations/sources/google-sheets.md b/docs/02-integrations/01-sources/google-sheets.md similarity index 97% rename from docs/integrations/sources/google-sheets.md rename to docs/02-integrations/01-sources/google-sheets.md index 1a3ffbb6a8bf..3938f1462407 100644 --- a/docs/integrations/sources/google-sheets.md +++ b/docs/02-integrations/01-sources/google-sheets.md @@ -43,7 +43,7 @@ To set up Google Sheets as a source in Airbyte OSS: Each sheet in the selected spreadsheet is synced as a separate stream. Each selected column in the sheet is synced as a string field. 
-**Note: Sheet names and column headers must contain only alphanumeric characters or `_`, as specified in the** [**Airbyte Protocol**](../../understanding-airbyte/airbyte-protocol.md). For example, if your sheet or column header is named `the data`, rename it to `the_data`. This restriction does not apply to non-header cell values. +**Note: Sheet names and column headers must contain only alphanumeric characters or `_`, as specified in the** [**Airbyte Protocol**](../../10-understanding-airbyte/02-airbyte-protocol.md). For example, if your sheet or column header is named `the data`, rename it to `the_data`. This restriction does not apply to non-header cell values. Airbyte only supports replicating [Grid](https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets/sheets#SheetType) sheets. diff --git a/docs/integrations/sources/google-workspace-admin-reports.md b/docs/02-integrations/01-sources/google-workspace-admin-reports.md similarity index 100% rename from docs/integrations/sources/google-workspace-admin-reports.md rename to docs/02-integrations/01-sources/google-workspace-admin-reports.md diff --git a/docs/integrations/sources/greenhouse.md b/docs/02-integrations/01-sources/greenhouse.md similarity index 100% rename from docs/integrations/sources/greenhouse.md rename to docs/02-integrations/01-sources/greenhouse.md diff --git a/docs/integrations/sources/harness.md b/docs/02-integrations/01-sources/harness.md similarity index 100% rename from docs/integrations/sources/harness.md rename to docs/02-integrations/01-sources/harness.md diff --git a/docs/integrations/sources/harvest.md b/docs/02-integrations/01-sources/harvest.md similarity index 100% rename from docs/integrations/sources/harvest.md rename to docs/02-integrations/01-sources/harvest.md diff --git a/docs/integrations/sources/hellobaton.md b/docs/02-integrations/01-sources/hellobaton.md similarity index 100% rename from docs/integrations/sources/hellobaton.md rename to docs/02-integrations/01-sources/hellobaton.md diff --git a/docs/integrations/sources/http-request.md b/docs/02-integrations/01-sources/http-request.md similarity index 81% rename from docs/integrations/sources/http-request.md rename to docs/02-integrations/01-sources/http-request.md index ebefc687a7bd..a3bbbca40ce3 100644 --- a/docs/integrations/sources/http-request.md +++ b/docs/02-integrations/01-sources/http-request.md @@ -2,13 +2,13 @@ :::caution -This connector is graveyarded and will not be receiving any updates from the Airbyte team. Its functionalities have been replaced by the [Airbyte CDK](../../connector-development/cdk-python/README.md), which allows you to create source connectors for any HTTP API. +This connector is graveyarded and will not be receiving any updates from the Airbyte team. Its functionalities have been replaced by the [Airbyte CDK](../../08-connector-development/07-cdk-python/README.md), which allows you to create source connectors for any HTTP API. ::: ## Overview -This connector allows you to generally connect to any HTTP API. In order to use this connector, you must manually bring it in as a custom connector. The steps to do this can be found [here](../../connector-development/tutorials/cdk-tutorial-python-http/7-use-connector-in-airbyte.md). +This connector allows you to generally connect to any HTTP API. In order to use this connector, you must manually bring it in as a custom connector. The steps to do this can be found [here](../../08-connector-development/02-cdk-tutorial-python-http/7-use-connector-in-airbyte.md). 
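Since the caution note above points users of the graveyarded HTTP Request connector at the Airbyte CDK instead, the following is a rough, non-authoritative sketch of what a minimal CDK stream for some HTTP API might look like. The endpoint, record fields, and response shape are invented for illustration, and the `HttpStream` method signatures should be verified against the CDK version you install.

```python
from typing import Any, Iterable, Mapping, Optional

import requests
from airbyte_cdk.sources.streams.http import HttpStream


class ExampleItems(HttpStream):
    """Sketch of a CDK stream for a hypothetical HTTP API."""

    url_base = "https://api.example.com/v1/"  # hypothetical endpoint
    primary_key = "id"                        # assumed unique field in each record

    def path(self, **kwargs) -> str:
        # Relative path appended to url_base for this stream.
        return "items"

    def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]:
        # No pagination in this sketch; returning None stops after one request.
        return None

    def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping[str, Any]]:
        # Assumes the API returns records under a top-level "results" key.
        yield from response.json().get("results", [])
```

The CDK tutorial linked above walks through wiring a stream like this into a full source and bringing it in as a custom connector.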
## Where do I find the Docker image? diff --git a/docs/integrations/sources/hubspot.md b/docs/02-integrations/01-sources/hubspot.md similarity index 100% rename from docs/integrations/sources/hubspot.md rename to docs/02-integrations/01-sources/hubspot.md diff --git a/docs/integrations/sources/instagram.md b/docs/02-integrations/01-sources/instagram.md similarity index 100% rename from docs/integrations/sources/instagram.md rename to docs/02-integrations/01-sources/instagram.md diff --git a/docs/integrations/sources/intercom.md b/docs/02-integrations/01-sources/intercom.md similarity index 100% rename from docs/integrations/sources/intercom.md rename to docs/02-integrations/01-sources/intercom.md diff --git a/docs/integrations/sources/iterable.md b/docs/02-integrations/01-sources/iterable.md similarity index 100% rename from docs/integrations/sources/iterable.md rename to docs/02-integrations/01-sources/iterable.md diff --git a/docs/integrations/sources/jenkins.md b/docs/02-integrations/01-sources/jenkins.md similarity index 100% rename from docs/integrations/sources/jenkins.md rename to docs/02-integrations/01-sources/jenkins.md diff --git a/docs/integrations/sources/jira.md b/docs/02-integrations/01-sources/jira.md similarity index 100% rename from docs/integrations/sources/jira.md rename to docs/02-integrations/01-sources/jira.md diff --git a/docs/integrations/sources/kafka.md b/docs/02-integrations/01-sources/kafka.md similarity index 100% rename from docs/integrations/sources/kafka.md rename to docs/02-integrations/01-sources/kafka.md diff --git a/docs/integrations/sources/klaviyo.md b/docs/02-integrations/01-sources/klaviyo.md similarity index 100% rename from docs/integrations/sources/klaviyo.md rename to docs/02-integrations/01-sources/klaviyo.md diff --git a/docs/integrations/sources/kustomer.md b/docs/02-integrations/01-sources/kustomer.md similarity index 100% rename from docs/integrations/sources/kustomer.md rename to docs/02-integrations/01-sources/kustomer.md diff --git a/docs/integrations/sources/lemlist.md b/docs/02-integrations/01-sources/lemlist.md similarity index 100% rename from docs/integrations/sources/lemlist.md rename to docs/02-integrations/01-sources/lemlist.md diff --git a/docs/integrations/sources/lever-hiring.md b/docs/02-integrations/01-sources/lever-hiring.md similarity index 100% rename from docs/integrations/sources/lever-hiring.md rename to docs/02-integrations/01-sources/lever-hiring.md diff --git a/docs/integrations/sources/linkedin-ads.md b/docs/02-integrations/01-sources/linkedin-ads.md similarity index 100% rename from docs/integrations/sources/linkedin-ads.md rename to docs/02-integrations/01-sources/linkedin-ads.md diff --git a/docs/integrations/sources/linnworks.md b/docs/02-integrations/01-sources/linnworks.md similarity index 100% rename from docs/integrations/sources/linnworks.md rename to docs/02-integrations/01-sources/linnworks.md diff --git a/docs/integrations/sources/looker.md b/docs/02-integrations/01-sources/looker.md similarity index 100% rename from docs/integrations/sources/looker.md rename to docs/02-integrations/01-sources/looker.md diff --git a/docs/integrations/sources/magento.md b/docs/02-integrations/01-sources/magento.md similarity index 100% rename from docs/integrations/sources/magento.md rename to docs/02-integrations/01-sources/magento.md diff --git a/docs/integrations/sources/mailchimp.md b/docs/02-integrations/01-sources/mailchimp.md similarity index 100% rename from docs/integrations/sources/mailchimp.md rename to 
docs/02-integrations/01-sources/mailchimp.md diff --git a/docs/integrations/sources/marketo.md b/docs/02-integrations/01-sources/marketo.md similarity index 100% rename from docs/integrations/sources/marketo.md rename to docs/02-integrations/01-sources/marketo.md diff --git a/docs/integrations/sources/metabase.md b/docs/02-integrations/01-sources/metabase.md similarity index 100% rename from docs/integrations/sources/metabase.md rename to docs/02-integrations/01-sources/metabase.md diff --git a/docs/integrations/sources/microsoft-dynamics-ax.md b/docs/02-integrations/01-sources/microsoft-dynamics-ax.md similarity index 100% rename from docs/integrations/sources/microsoft-dynamics-ax.md rename to docs/02-integrations/01-sources/microsoft-dynamics-ax.md diff --git a/docs/integrations/sources/microsoft-dynamics-customer-engagement.md b/docs/02-integrations/01-sources/microsoft-dynamics-customer-engagement.md similarity index 100% rename from docs/integrations/sources/microsoft-dynamics-customer-engagement.md rename to docs/02-integrations/01-sources/microsoft-dynamics-customer-engagement.md diff --git a/docs/integrations/sources/microsoft-dynamics-gp.md b/docs/02-integrations/01-sources/microsoft-dynamics-gp.md similarity index 100% rename from docs/integrations/sources/microsoft-dynamics-gp.md rename to docs/02-integrations/01-sources/microsoft-dynamics-gp.md diff --git a/docs/integrations/sources/microsoft-dynamics-nav.md b/docs/02-integrations/01-sources/microsoft-dynamics-nav.md similarity index 100% rename from docs/integrations/sources/microsoft-dynamics-nav.md rename to docs/02-integrations/01-sources/microsoft-dynamics-nav.md diff --git a/docs/integrations/sources/microsoft-teams.md b/docs/02-integrations/01-sources/microsoft-teams.md similarity index 100% rename from docs/integrations/sources/microsoft-teams.md rename to docs/02-integrations/01-sources/microsoft-teams.md diff --git a/docs/integrations/sources/mixpanel.md b/docs/02-integrations/01-sources/mixpanel.md similarity index 100% rename from docs/integrations/sources/mixpanel.md rename to docs/02-integrations/01-sources/mixpanel.md diff --git a/docs/integrations/sources/monday.md b/docs/02-integrations/01-sources/monday.md similarity index 100% rename from docs/integrations/sources/monday.md rename to docs/02-integrations/01-sources/monday.md diff --git a/docs/integrations/sources/mongodb-v2.md b/docs/02-integrations/01-sources/mongodb-v2.md similarity index 100% rename from docs/integrations/sources/mongodb-v2.md rename to docs/02-integrations/01-sources/mongodb-v2.md diff --git a/docs/integrations/sources/mongodb.md b/docs/02-integrations/01-sources/mongodb.md similarity index 100% rename from docs/integrations/sources/mongodb.md rename to docs/02-integrations/01-sources/mongodb.md diff --git a/docs/integrations/sources/mssql.md b/docs/02-integrations/01-sources/mssql.md similarity index 98% rename from docs/integrations/sources/mssql.md rename to docs/02-integrations/01-sources/mssql.md index e9b683052454..fa88e4d3300b 100644 --- a/docs/integrations/sources/mssql.md +++ b/docs/02-integrations/01-sources/mssql.md @@ -48,7 +48,7 @@ We use [SQL Server's change data capture feature](https://docs.microsoft.com/en- Some extra setup requiring at least _db\_owner_ permissions on the database\(s\) you intend to sync from will be required \(detailed [below](mssql.md#setting-up-cdc-for-mssql)\). -Please read the [CDC docs](../../understanding-airbyte/cdc.md) for an overview of how Airbyte approaches CDC. 
+Please read the [CDC docs](../../10-understanding-airbyte/10-cdc.md) for an overview of how Airbyte approaches CDC. ### Should I use CDC for MSSQL? @@ -66,7 +66,7 @@ Please read the [CDC docs](../../understanding-airbyte/cdc.md) for an overview o #### CDC Limitations -* Make sure to read our [CDC docs](../../understanding-airbyte/cdc.md) to see limitations that impact all databases using CDC replication. +* Make sure to read our [CDC docs](../../10-understanding-airbyte/10-cdc.md) to see limitations that impact all databases using CDC replication. * There are some critical issues regarding certain datatypes. Please find detailed info in [this Github issue](https://github.com/airbytehq/airbyte/issues/4542). * CDC is only available for SQL Server 2016 Service Pack 1 \(SP1\) and later. * _db\_owner_ \(or higher\) permissions are required to perform the [neccessary setup](mssql.md#setting-up-cdc-for-mssql) for CDC. diff --git a/docs/integrations/sources/my-hours.md b/docs/02-integrations/01-sources/my-hours.md similarity index 100% rename from docs/integrations/sources/my-hours.md rename to docs/02-integrations/01-sources/my-hours.md diff --git a/docs/integrations/sources/mysql.md b/docs/02-integrations/01-sources/mysql.md similarity index 99% rename from docs/integrations/sources/mysql.md rename to docs/02-integrations/01-sources/mysql.md index d7041ce61f67..2769dd468c04 100644 --- a/docs/integrations/sources/mysql.md +++ b/docs/02-integrations/01-sources/mysql.md @@ -78,7 +78,7 @@ Your database user should now be ready for use with Airbyte. #### CDC Limitations -* Make sure to read our [CDC docs](../../understanding-airbyte/cdc.md) to see limitations that impact all databases using CDC replication. +* Make sure to read our [CDC docs](../../10-understanding-airbyte/10-cdc.md) to see limitations that impact all databases using CDC replication. * Our CDC implementation uses at least once delivery for all change records. **1. 
Enable binary logging** diff --git a/docs/integrations/sources/notion.md b/docs/02-integrations/01-sources/notion.md similarity index 100% rename from docs/integrations/sources/notion.md rename to docs/02-integrations/01-sources/notion.md diff --git a/docs/integrations/sources/okta.md b/docs/02-integrations/01-sources/okta.md similarity index 100% rename from docs/integrations/sources/okta.md rename to docs/02-integrations/01-sources/okta.md diff --git a/docs/integrations/sources/onesignal.md b/docs/02-integrations/01-sources/onesignal.md similarity index 100% rename from docs/integrations/sources/onesignal.md rename to docs/02-integrations/01-sources/onesignal.md diff --git a/docs/integrations/sources/openweather.md b/docs/02-integrations/01-sources/openweather.md similarity index 100% rename from docs/integrations/sources/openweather.md rename to docs/02-integrations/01-sources/openweather.md diff --git a/docs/integrations/sources/oracle-peoplesoft.md b/docs/02-integrations/01-sources/oracle-peoplesoft.md similarity index 100% rename from docs/integrations/sources/oracle-peoplesoft.md rename to docs/02-integrations/01-sources/oracle-peoplesoft.md diff --git a/docs/integrations/sources/oracle-siebel-crm.md b/docs/02-integrations/01-sources/oracle-siebel-crm.md similarity index 100% rename from docs/integrations/sources/oracle-siebel-crm.md rename to docs/02-integrations/01-sources/oracle-siebel-crm.md diff --git a/docs/integrations/sources/oracle.md b/docs/02-integrations/01-sources/oracle.md similarity index 100% rename from docs/integrations/sources/oracle.md rename to docs/02-integrations/01-sources/oracle.md diff --git a/docs/integrations/sources/orb.md b/docs/02-integrations/01-sources/orb.md similarity index 100% rename from docs/integrations/sources/orb.md rename to docs/02-integrations/01-sources/orb.md diff --git a/docs/integrations/sources/orbit.md b/docs/02-integrations/01-sources/orbit.md similarity index 100% rename from docs/integrations/sources/orbit.md rename to docs/02-integrations/01-sources/orbit.md diff --git a/docs/integrations/sources/outreach.md b/docs/02-integrations/01-sources/outreach.md similarity index 100% rename from docs/integrations/sources/outreach.md rename to docs/02-integrations/01-sources/outreach.md diff --git a/docs/integrations/sources/pagerduty.md b/docs/02-integrations/01-sources/pagerduty.md similarity index 100% rename from docs/integrations/sources/pagerduty.md rename to docs/02-integrations/01-sources/pagerduty.md diff --git a/docs/integrations/sources/paypal-transaction.md b/docs/02-integrations/01-sources/paypal-transaction.md similarity index 100% rename from docs/integrations/sources/paypal-transaction.md rename to docs/02-integrations/01-sources/paypal-transaction.md diff --git a/docs/integrations/sources/paystack.md b/docs/02-integrations/01-sources/paystack.md similarity index 100% rename from docs/integrations/sources/paystack.md rename to docs/02-integrations/01-sources/paystack.md diff --git a/docs/integrations/sources/persistiq.md b/docs/02-integrations/01-sources/persistiq.md similarity index 100% rename from docs/integrations/sources/persistiq.md rename to docs/02-integrations/01-sources/persistiq.md diff --git a/docs/integrations/sources/pinterest.md b/docs/02-integrations/01-sources/pinterest.md similarity index 100% rename from docs/integrations/sources/pinterest.md rename to docs/02-integrations/01-sources/pinterest.md diff --git a/docs/integrations/sources/pipedrive.md b/docs/02-integrations/01-sources/pipedrive.md similarity 
index 100% rename from docs/integrations/sources/pipedrive.md rename to docs/02-integrations/01-sources/pipedrive.md diff --git a/docs/integrations/sources/pivotal-tracker.md b/docs/02-integrations/01-sources/pivotal-tracker.md similarity index 100% rename from docs/integrations/sources/pivotal-tracker.md rename to docs/02-integrations/01-sources/pivotal-tracker.md diff --git a/docs/integrations/sources/plaid.md b/docs/02-integrations/01-sources/plaid.md similarity index 100% rename from docs/integrations/sources/plaid.md rename to docs/02-integrations/01-sources/plaid.md diff --git a/docs/integrations/sources/pokeapi.md b/docs/02-integrations/01-sources/pokeapi.md similarity index 96% rename from docs/integrations/sources/pokeapi.md rename to docs/02-integrations/01-sources/pokeapi.md index 797a58d71857..7d2c65415ed8 100644 --- a/docs/integrations/sources/pokeapi.md +++ b/docs/02-integrations/01-sources/pokeapi.md @@ -4,8 +4,8 @@ The PokéAPI is primarly used as a tutorial and educational resource, as it requires zero dependencies. Learn how Airbyte and this connector works with these tutorials: -- [Airbyte Quickstart: An Introduction to Deploying and Syncing](../../quickstart/deploy-airbyte.md) -- [Airbyte CDK Speedrun: A Quick Primer on Building Source Connectors](../../connector-development/tutorials/cdk-speedrun.md) +- [Airbyte Quickstart: An Introduction to Deploying and Syncing](../../04-quickstart/01-deploy-airbyte.md) +- [Airbyte CDK Speedrun: A Quick Primer on Building Source Connectors](../../08-connector-development/01-cdk-speedrun.md) - [How to Build ETL Sources in Under 30 Minutes: A Video Tutorial](https://www.youtube.com/watch?v=kJ3hLoNfz_E&t=13s&ab_channel=Airbyte) ## Features diff --git a/docs/integrations/sources/postgres.md b/docs/02-integrations/01-sources/postgres.md similarity index 99% rename from docs/integrations/sources/postgres.md rename to docs/02-integrations/01-sources/postgres.md index a87f5ad10149..61409eaea82c 100644 --- a/docs/integrations/sources/postgres.md +++ b/docs/02-integrations/01-sources/postgres.md @@ -107,7 +107,7 @@ We use [logical replication](https://www.postgresql.org/docs/10/logical-replicat We use `pgoutput` as a default plugin, which is included in Postgres 10+. Also `wal2json` plugin is supported, please read [the section on replication plugins below](postgres.md#select-replication-plugin) for more information. -Please read the [CDC docs](../../understanding-airbyte/cdc.md) for an overview of how Airbyte approaches CDC. +Please read the [CDC docs](../../10-understanding-airbyte/10-cdc.md) for an overview of how Airbyte approaches CDC. ### Should I use CDC for Postgres? @@ -118,7 +118,7 @@ Please read the [CDC docs](../../understanding-airbyte/cdc.md) for an overview o #### CDC Limitations -* Make sure to read our [CDC docs](../../understanding-airbyte/cdc.md) to see limitations that impact all databases using CDC replication. +* Make sure to read our [CDC docs](../../10-understanding-airbyte/10-cdc.md) to see limitations that impact all databases using CDC replication. * CDC is only available for Postgres 10+. * Airbyte requires a replication slot configured only for its use. Only one source should be configured that uses this replication slot. Instructions on how to set up a replication slot can be found below. * Log-based replication only works for master instances of Postgres. 
diff --git a/docs/integrations/sources/posthog.md b/docs/02-integrations/01-sources/posthog.md similarity index 100% rename from docs/integrations/sources/posthog.md rename to docs/02-integrations/01-sources/posthog.md diff --git a/docs/integrations/sources/presta-shop.md b/docs/02-integrations/01-sources/presta-shop.md similarity index 100% rename from docs/integrations/sources/presta-shop.md rename to docs/02-integrations/01-sources/presta-shop.md diff --git a/docs/integrations/sources/qualaroo.md b/docs/02-integrations/01-sources/qualaroo.md similarity index 100% rename from docs/integrations/sources/qualaroo.md rename to docs/02-integrations/01-sources/qualaroo.md diff --git a/docs/integrations/sources/quickbooks.md b/docs/02-integrations/01-sources/quickbooks.md similarity index 100% rename from docs/integrations/sources/quickbooks.md rename to docs/02-integrations/01-sources/quickbooks.md diff --git a/docs/integrations/sources/recharge.md b/docs/02-integrations/01-sources/recharge.md similarity index 100% rename from docs/integrations/sources/recharge.md rename to docs/02-integrations/01-sources/recharge.md diff --git a/docs/integrations/sources/recurly.md b/docs/02-integrations/01-sources/recurly.md similarity index 100% rename from docs/integrations/sources/recurly.md rename to docs/02-integrations/01-sources/recurly.md diff --git a/docs/integrations/sources/redshift.md b/docs/02-integrations/01-sources/redshift.md similarity index 100% rename from docs/integrations/sources/redshift.md rename to docs/02-integrations/01-sources/redshift.md diff --git a/docs/integrations/sources/retently.md b/docs/02-integrations/01-sources/retently.md similarity index 100% rename from docs/integrations/sources/retently.md rename to docs/02-integrations/01-sources/retently.md diff --git a/docs/integrations/sources/rki-covid.md b/docs/02-integrations/01-sources/rki-covid.md similarity index 100% rename from docs/integrations/sources/rki-covid.md rename to docs/02-integrations/01-sources/rki-covid.md diff --git a/docs/integrations/sources/s3.md b/docs/02-integrations/01-sources/s3.md similarity index 100% rename from docs/integrations/sources/s3.md rename to docs/02-integrations/01-sources/s3.md diff --git a/docs/integrations/sources/salesforce.md b/docs/02-integrations/01-sources/salesforce.md similarity index 100% rename from docs/integrations/sources/salesforce.md rename to docs/02-integrations/01-sources/salesforce.md diff --git a/docs/integrations/sources/salesloft.md b/docs/02-integrations/01-sources/salesloft.md similarity index 100% rename from docs/integrations/sources/salesloft.md rename to docs/02-integrations/01-sources/salesloft.md diff --git a/docs/integrations/sources/sap-business-one.md b/docs/02-integrations/01-sources/sap-business-one.md similarity index 100% rename from docs/integrations/sources/sap-business-one.md rename to docs/02-integrations/01-sources/sap-business-one.md diff --git a/docs/integrations/sources/search-metrics.md b/docs/02-integrations/01-sources/search-metrics.md similarity index 100% rename from docs/integrations/sources/search-metrics.md rename to docs/02-integrations/01-sources/search-metrics.md diff --git a/docs/integrations/sources/sendgrid.md b/docs/02-integrations/01-sources/sendgrid.md similarity index 100% rename from docs/integrations/sources/sendgrid.md rename to docs/02-integrations/01-sources/sendgrid.md diff --git a/docs/integrations/sources/sentry.md b/docs/02-integrations/01-sources/sentry.md similarity index 100% rename from 
docs/integrations/sources/sentry.md rename to docs/02-integrations/01-sources/sentry.md diff --git a/docs/integrations/sources/sftp.md b/docs/02-integrations/01-sources/sftp.md similarity index 100% rename from docs/integrations/sources/sftp.md rename to docs/02-integrations/01-sources/sftp.md diff --git a/docs/integrations/sources/shopify.md b/docs/02-integrations/01-sources/shopify.md similarity index 100% rename from docs/integrations/sources/shopify.md rename to docs/02-integrations/01-sources/shopify.md diff --git a/docs/integrations/sources/shortio.md b/docs/02-integrations/01-sources/shortio.md similarity index 100% rename from docs/integrations/sources/shortio.md rename to docs/02-integrations/01-sources/shortio.md diff --git a/docs/integrations/sources/slack.md b/docs/02-integrations/01-sources/slack.md similarity index 100% rename from docs/integrations/sources/slack.md rename to docs/02-integrations/01-sources/slack.md diff --git a/docs/integrations/sources/smartsheets.md b/docs/02-integrations/01-sources/smartsheets.md similarity index 97% rename from docs/integrations/sources/smartsheets.md rename to docs/02-integrations/01-sources/smartsheets.md index 91e71f498581..4f93bce3f4ee 100644 --- a/docs/integrations/sources/smartsheets.md +++ b/docs/02-integrations/01-sources/smartsheets.md @@ -68,7 +68,7 @@ For example, having a spreadsheet `Customers`, the connector would introduce a s ## Important highlights The Smartsheet Source is written to pull data from a single Smartsheet spreadsheet. Unlike Google Sheets, Smartsheets only allows one sheet per Smartsheet - so a given Airbyte connector instance can sync only one sheet at a time. To replicate multiple spreadsheets, you can create multiple instances of the Smartsheet Source in Airbyte, reusing the API token for all your sheets that you need to sync. -**Note: Column headers must contain only alphanumeric characters or `_` , as specified in the** [**Airbyte Protocol**](../../understanding-airbyte/airbyte-protocol.md). +**Note: Column headers must contain only alphanumeric characters or `_` , as specified in the** [**Airbyte Protocol**](../../10-understanding-airbyte/02-airbyte-protocol.md). ## Data type map The data type mapping adopted by this connector is based on the Smartsheet [documentation](https://smartsheet-platform.github.io/api-docs/index.html?python#column-types). 
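The Google Sheets and Smartsheets notes above both reference the same Airbyte Protocol restriction on field names. If you would rather pre-clean headers than rename them by hand, a small helper along these lines is usually enough; it simply replaces anything that is not alphanumeric or `_` with an underscore, matching the `the data` -> `the_data` example given earlier (this is a convenience sketch, not part of the connectors themselves).

```python
import re


def sanitize_header(name: str) -> str:
    """Replace characters that are not alphanumeric or '_' with '_'.

    Mirrors the documented restriction on sheet names and column headers,
    e.g. "the data" becomes "the_data".
    """
    return re.sub(r"[^0-9A-Za-z_]", "_", name)


assert sanitize_header("the data") == "the_data"
```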
diff --git a/docs/integrations/sources/snapchat-marketing.md b/docs/02-integrations/01-sources/snapchat-marketing.md similarity index 100% rename from docs/integrations/sources/snapchat-marketing.md rename to docs/02-integrations/01-sources/snapchat-marketing.md diff --git a/docs/integrations/sources/snowflake.md b/docs/02-integrations/01-sources/snowflake.md similarity index 100% rename from docs/integrations/sources/snowflake.md rename to docs/02-integrations/01-sources/snowflake.md diff --git a/docs/integrations/sources/spree-commerce.md b/docs/02-integrations/01-sources/spree-commerce.md similarity index 100% rename from docs/integrations/sources/spree-commerce.md rename to docs/02-integrations/01-sources/spree-commerce.md diff --git a/docs/integrations/sources/square.md b/docs/02-integrations/01-sources/square.md similarity index 100% rename from docs/integrations/sources/square.md rename to docs/02-integrations/01-sources/square.md diff --git a/docs/integrations/sources/strava.md b/docs/02-integrations/01-sources/strava.md similarity index 100% rename from docs/integrations/sources/strava.md rename to docs/02-integrations/01-sources/strava.md diff --git a/docs/integrations/sources/stripe.md b/docs/02-integrations/01-sources/stripe.md similarity index 100% rename from docs/integrations/sources/stripe.md rename to docs/02-integrations/01-sources/stripe.md diff --git a/docs/integrations/sources/sugar-crm.md b/docs/02-integrations/01-sources/sugar-crm.md similarity index 100% rename from docs/integrations/sources/sugar-crm.md rename to docs/02-integrations/01-sources/sugar-crm.md diff --git a/docs/integrations/sources/surveymonkey.md b/docs/02-integrations/01-sources/surveymonkey.md similarity index 100% rename from docs/integrations/sources/surveymonkey.md rename to docs/02-integrations/01-sources/surveymonkey.md diff --git a/docs/integrations/sources/talkdesk-explore.md b/docs/02-integrations/01-sources/talkdesk-explore.md similarity index 100% rename from docs/integrations/sources/talkdesk-explore.md rename to docs/02-integrations/01-sources/talkdesk-explore.md diff --git a/docs/integrations/sources/tempo.md b/docs/02-integrations/01-sources/tempo.md similarity index 100% rename from docs/integrations/sources/tempo.md rename to docs/02-integrations/01-sources/tempo.md diff --git a/docs/integrations/sources/tidb.md b/docs/02-integrations/01-sources/tidb.md similarity index 100% rename from docs/integrations/sources/tidb.md rename to docs/02-integrations/01-sources/tidb.md diff --git a/docs/integrations/sources/tiktok-marketing.md b/docs/02-integrations/01-sources/tiktok-marketing.md similarity index 100% rename from docs/integrations/sources/tiktok-marketing.md rename to docs/02-integrations/01-sources/tiktok-marketing.md diff --git a/docs/integrations/sources/tplcentral.md b/docs/02-integrations/01-sources/tplcentral.md similarity index 100% rename from docs/integrations/sources/tplcentral.md rename to docs/02-integrations/01-sources/tplcentral.md diff --git a/docs/integrations/sources/trello.md b/docs/02-integrations/01-sources/trello.md similarity index 100% rename from docs/integrations/sources/trello.md rename to docs/02-integrations/01-sources/trello.md diff --git a/docs/integrations/sources/twilio.md b/docs/02-integrations/01-sources/twilio.md similarity index 100% rename from docs/integrations/sources/twilio.md rename to docs/02-integrations/01-sources/twilio.md diff --git a/docs/integrations/sources/typeform.md b/docs/02-integrations/01-sources/typeform.md similarity index 
100% rename from docs/integrations/sources/typeform.md rename to docs/02-integrations/01-sources/typeform.md diff --git a/docs/integrations/sources/us-census.md b/docs/02-integrations/01-sources/us-census.md similarity index 100% rename from docs/integrations/sources/us-census.md rename to docs/02-integrations/01-sources/us-census.md diff --git a/docs/integrations/sources/victorops.md b/docs/02-integrations/01-sources/victorops.md similarity index 100% rename from docs/integrations/sources/victorops.md rename to docs/02-integrations/01-sources/victorops.md diff --git a/docs/integrations/sources/webflow.md b/docs/02-integrations/01-sources/webflow.md similarity index 100% rename from docs/integrations/sources/webflow.md rename to docs/02-integrations/01-sources/webflow.md diff --git a/docs/integrations/sources/woocommerce.md b/docs/02-integrations/01-sources/woocommerce.md similarity index 100% rename from docs/integrations/sources/woocommerce.md rename to docs/02-integrations/01-sources/woocommerce.md diff --git a/docs/integrations/sources/wordpress.md b/docs/02-integrations/01-sources/wordpress.md similarity index 100% rename from docs/integrations/sources/wordpress.md rename to docs/02-integrations/01-sources/wordpress.md diff --git a/docs/integrations/sources/youtube-analytics.md b/docs/02-integrations/01-sources/youtube-analytics.md similarity index 100% rename from docs/integrations/sources/youtube-analytics.md rename to docs/02-integrations/01-sources/youtube-analytics.md diff --git a/docs/integrations/sources/zencart.md b/docs/02-integrations/01-sources/zencart.md similarity index 100% rename from docs/integrations/sources/zencart.md rename to docs/02-integrations/01-sources/zencart.md diff --git a/docs/integrations/sources/zendesk-chat.md b/docs/02-integrations/01-sources/zendesk-chat.md similarity index 100% rename from docs/integrations/sources/zendesk-chat.md rename to docs/02-integrations/01-sources/zendesk-chat.md diff --git a/docs/integrations/sources/zendesk-sunshine.md b/docs/02-integrations/01-sources/zendesk-sunshine.md similarity index 100% rename from docs/integrations/sources/zendesk-sunshine.md rename to docs/02-integrations/01-sources/zendesk-sunshine.md diff --git a/docs/integrations/sources/zendesk-support.md b/docs/02-integrations/01-sources/zendesk-support.md similarity index 100% rename from docs/integrations/sources/zendesk-support.md rename to docs/02-integrations/01-sources/zendesk-support.md diff --git a/docs/integrations/sources/zendesk-talk.md b/docs/02-integrations/01-sources/zendesk-talk.md similarity index 100% rename from docs/integrations/sources/zendesk-talk.md rename to docs/02-integrations/01-sources/zendesk-talk.md diff --git a/docs/integrations/sources/zenloop.md b/docs/02-integrations/01-sources/zenloop.md similarity index 100% rename from docs/integrations/sources/zenloop.md rename to docs/02-integrations/01-sources/zenloop.md diff --git a/docs/integrations/sources/zoho-crm.md b/docs/02-integrations/01-sources/zoho-crm.md similarity index 100% rename from docs/integrations/sources/zoho-crm.md rename to docs/02-integrations/01-sources/zoho-crm.md diff --git a/docs/integrations/sources/zoom.md b/docs/02-integrations/01-sources/zoom.md similarity index 100% rename from docs/integrations/sources/zoom.md rename to docs/02-integrations/01-sources/zoom.md diff --git a/docs/integrations/sources/zuora.md b/docs/02-integrations/01-sources/zuora.md similarity index 100% rename from docs/integrations/sources/zuora.md rename to 
docs/02-integrations/01-sources/zuora.md diff --git a/docs/02-integrations/02-destinations/README.md b/docs/02-integrations/02-destinations/README.md new file mode 100644 index 000000000000..861bb9095ee6 --- /dev/null +++ b/docs/02-integrations/02-destinations/README.md @@ -0,0 +1 @@ +# Destinations \ No newline at end of file diff --git a/docs/integrations/destinations/amazon-sqs.md b/docs/02-integrations/02-destinations/amazon-sqs.md similarity index 100% rename from docs/integrations/destinations/amazon-sqs.md rename to docs/02-integrations/02-destinations/amazon-sqs.md diff --git a/docs/integrations/destinations/aws-datalake.md b/docs/02-integrations/02-destinations/aws-datalake.md similarity index 100% rename from docs/integrations/destinations/aws-datalake.md rename to docs/02-integrations/02-destinations/aws-datalake.md diff --git a/docs/integrations/destinations/azureblobstorage.md b/docs/02-integrations/02-destinations/azureblobstorage.md similarity index 100% rename from docs/integrations/destinations/azureblobstorage.md rename to docs/02-integrations/02-destinations/azureblobstorage.md diff --git a/docs/integrations/destinations/bigquery.md b/docs/02-integrations/02-destinations/bigquery.md similarity index 100% rename from docs/integrations/destinations/bigquery.md rename to docs/02-integrations/02-destinations/bigquery.md diff --git a/docs/integrations/destinations/cassandra.md b/docs/02-integrations/02-destinations/cassandra.md similarity index 100% rename from docs/integrations/destinations/cassandra.md rename to docs/02-integrations/02-destinations/cassandra.md diff --git a/docs/integrations/destinations/chargify.md b/docs/02-integrations/02-destinations/chargify.md similarity index 100% rename from docs/integrations/destinations/chargify.md rename to docs/02-integrations/02-destinations/chargify.md diff --git a/docs/integrations/destinations/clickhouse.md b/docs/02-integrations/02-destinations/clickhouse.md similarity index 100% rename from docs/integrations/destinations/clickhouse.md rename to docs/02-integrations/02-destinations/clickhouse.md diff --git a/docs/integrations/destinations/databricks.md b/docs/02-integrations/02-destinations/databricks.md similarity index 100% rename from docs/integrations/destinations/databricks.md rename to docs/02-integrations/02-destinations/databricks.md diff --git a/docs/integrations/destinations/dynamodb.md b/docs/02-integrations/02-destinations/dynamodb.md similarity index 100% rename from docs/integrations/destinations/dynamodb.md rename to docs/02-integrations/02-destinations/dynamodb.md diff --git a/docs/integrations/destinations/e2e-test.md b/docs/02-integrations/02-destinations/e2e-test.md similarity index 100% rename from docs/integrations/destinations/e2e-test.md rename to docs/02-integrations/02-destinations/e2e-test.md diff --git a/docs/integrations/destinations/elasticsearch.md b/docs/02-integrations/02-destinations/elasticsearch.md similarity index 100% rename from docs/integrations/destinations/elasticsearch.md rename to docs/02-integrations/02-destinations/elasticsearch.md diff --git a/docs/integrations/destinations/firebolt.md b/docs/02-integrations/02-destinations/firebolt.md similarity index 100% rename from docs/integrations/destinations/firebolt.md rename to docs/02-integrations/02-destinations/firebolt.md diff --git a/docs/integrations/destinations/gcs.md b/docs/02-integrations/02-destinations/gcs.md similarity index 100% rename from docs/integrations/destinations/gcs.md rename to 
docs/02-integrations/02-destinations/gcs.md diff --git a/docs/integrations/destinations/google-sheets.md b/docs/02-integrations/02-destinations/google-sheets.md similarity index 100% rename from docs/integrations/destinations/google-sheets.md rename to docs/02-integrations/02-destinations/google-sheets.md diff --git a/docs/integrations/destinations/kafka.md b/docs/02-integrations/02-destinations/kafka.md similarity index 100% rename from docs/integrations/destinations/kafka.md rename to docs/02-integrations/02-destinations/kafka.md diff --git a/docs/integrations/destinations/keen.md b/docs/02-integrations/02-destinations/keen.md similarity index 100% rename from docs/integrations/destinations/keen.md rename to docs/02-integrations/02-destinations/keen.md diff --git a/docs/integrations/destinations/kinesis.md b/docs/02-integrations/02-destinations/kinesis.md similarity index 100% rename from docs/integrations/destinations/kinesis.md rename to docs/02-integrations/02-destinations/kinesis.md diff --git a/docs/integrations/destinations/local-csv.md b/docs/02-integrations/02-destinations/local-csv.md similarity index 96% rename from docs/integrations/destinations/local-csv.md rename to docs/02-integrations/02-destinations/local-csv.md index f0b02d405af1..a62a5ce57344 100644 --- a/docs/integrations/destinations/local-csv.md +++ b/docs/02-integrations/02-destinations/local-csv.md @@ -63,5 +63,5 @@ You can also copy the output file to your host machine, the following command wi docker cp airbyte-scheduler:/tmp/airbyte_local/{destination_path}/{filename}.csv . ``` -Note: If you are running Airbyte on Windows with Docker backed by WSL2, you have to use similar step as above or refer to this [link](../../operator-guides/locating-files-local-destination.md) for an alternative approach. +Note: If you are running Airbyte on Windows with Docker backed by WSL2, you have to use a similar step as above or refer to this [link](../../06-operator-guides/08-locating-files-local-destination.md) for an alternative approach. diff --git a/docs/integrations/destinations/local-json.md b/docs/02-integrations/02-destinations/local-json.md similarity index 96% rename from docs/integrations/destinations/local-json.md rename to docs/02-integrations/02-destinations/local-json.md index e885d9d3b9da..7ab8dadfe841 100644 --- a/docs/integrations/destinations/local-json.md +++ b/docs/02-integrations/02-destinations/local-json.md @@ -63,5 +63,5 @@ You can also copy the output file to your host machine, the following command wi docker cp airbyte-scheduler:/tmp/airbyte_local/{destination_path}/{filename}.jsonl . ``` -Note: If you are running Airbyte on Windows with Docker backed by WSL2, you have to use similar step as above or refer to this [link](../../operator-guides/locating-files-local-destination.md) for an alternative approach. +Note: If you are running Airbyte on Windows with Docker backed by WSL2, you have to use a similar step as above or refer to this [link](../../06-operator-guides/08-locating-files-local-destination.md) for an alternative approach. 
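Once the output has been copied out of the container with `docker cp` as shown above, it can be inspected with a few lines of Python. The paths below are placeholders standing in for whatever `{destination_path}/{filename}` you configured; the snippet only assumes that the JSON variant is newline-delimited (`.jsonl`) and that the CSV variant has a header row.

```python
import csv
import json

# Placeholder paths -- substitute the {destination_path}/{filename} you configured.
jsonl_path = "my_destination/my_stream.jsonl"
csv_path = "my_destination/my_stream.csv"

# The Local JSON destination writes one JSON record per line (JSON Lines).
with open(jsonl_path) as f:
    records = [json.loads(line) for line in f if line.strip()]
print(f"{len(records)} records in {jsonl_path}")

# The Local CSV destination writes a regular CSV file with a header row.
with open(csv_path, newline="") as f:
    rows = list(csv.DictReader(f))
print(f"{len(rows)} rows in {csv_path}")
```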
diff --git a/docs/integrations/destinations/mariadb-columnstore.md b/docs/02-integrations/02-destinations/mariadb-columnstore.md similarity index 100% rename from docs/integrations/destinations/mariadb-columnstore.md rename to docs/02-integrations/02-destinations/mariadb-columnstore.md diff --git a/docs/integrations/destinations/meilisearch.md b/docs/02-integrations/02-destinations/meilisearch.md similarity index 100% rename from docs/integrations/destinations/meilisearch.md rename to docs/02-integrations/02-destinations/meilisearch.md diff --git a/docs/integrations/destinations/mongodb.md b/docs/02-integrations/02-destinations/mongodb.md similarity index 100% rename from docs/integrations/destinations/mongodb.md rename to docs/02-integrations/02-destinations/mongodb.md diff --git a/docs/integrations/destinations/mqtt.md b/docs/02-integrations/02-destinations/mqtt.md similarity index 100% rename from docs/integrations/destinations/mqtt.md rename to docs/02-integrations/02-destinations/mqtt.md diff --git a/docs/integrations/destinations/mssql.md b/docs/02-integrations/02-destinations/mssql.md similarity index 100% rename from docs/integrations/destinations/mssql.md rename to docs/02-integrations/02-destinations/mssql.md diff --git a/docs/integrations/destinations/mysql.md b/docs/02-integrations/02-destinations/mysql.md similarity index 100% rename from docs/integrations/destinations/mysql.md rename to docs/02-integrations/02-destinations/mysql.md diff --git a/docs/integrations/destinations/oracle.md b/docs/02-integrations/02-destinations/oracle.md similarity index 100% rename from docs/integrations/destinations/oracle.md rename to docs/02-integrations/02-destinations/oracle.md diff --git a/docs/integrations/destinations/postgres.md b/docs/02-integrations/02-destinations/postgres.md similarity index 100% rename from docs/integrations/destinations/postgres.md rename to docs/02-integrations/02-destinations/postgres.md diff --git a/docs/integrations/destinations/pubsub.md b/docs/02-integrations/02-destinations/pubsub.md similarity index 100% rename from docs/integrations/destinations/pubsub.md rename to docs/02-integrations/02-destinations/pubsub.md diff --git a/docs/integrations/destinations/pulsar.md b/docs/02-integrations/02-destinations/pulsar.md similarity index 100% rename from docs/integrations/destinations/pulsar.md rename to docs/02-integrations/02-destinations/pulsar.md diff --git a/docs/integrations/destinations/rabbitmq.md b/docs/02-integrations/02-destinations/rabbitmq.md similarity index 100% rename from docs/integrations/destinations/rabbitmq.md rename to docs/02-integrations/02-destinations/rabbitmq.md diff --git a/docs/integrations/destinations/redis.md b/docs/02-integrations/02-destinations/redis.md similarity index 100% rename from docs/integrations/destinations/redis.md rename to docs/02-integrations/02-destinations/redis.md diff --git a/docs/integrations/destinations/redshift.md b/docs/02-integrations/02-destinations/redshift.md similarity index 100% rename from docs/integrations/destinations/redshift.md rename to docs/02-integrations/02-destinations/redshift.md diff --git a/docs/integrations/destinations/rockset.md b/docs/02-integrations/02-destinations/rockset.md similarity index 100% rename from docs/integrations/destinations/rockset.md rename to docs/02-integrations/02-destinations/rockset.md diff --git a/docs/integrations/destinations/s3.md b/docs/02-integrations/02-destinations/s3.md similarity index 100% rename from docs/integrations/destinations/s3.md rename to 
docs/02-integrations/02-destinations/s3.md diff --git a/docs/integrations/destinations/scylla.md b/docs/02-integrations/02-destinations/scylla.md similarity index 100% rename from docs/integrations/destinations/scylla.md rename to docs/02-integrations/02-destinations/scylla.md diff --git a/docs/integrations/destinations/sftp-json.md b/docs/02-integrations/02-destinations/sftp-json.md similarity index 100% rename from docs/integrations/destinations/sftp-json.md rename to docs/02-integrations/02-destinations/sftp-json.md diff --git a/docs/integrations/destinations/snowflake.md b/docs/02-integrations/02-destinations/snowflake.md similarity index 100% rename from docs/integrations/destinations/snowflake.md rename to docs/02-integrations/02-destinations/snowflake.md diff --git a/docs/integrations/destinations/streamr.md b/docs/02-integrations/02-destinations/streamr.md similarity index 100% rename from docs/integrations/destinations/streamr.md rename to docs/02-integrations/02-destinations/streamr.md diff --git a/docs/integrations/custom-connectors.md b/docs/02-integrations/03-custom-connectors.md similarity index 100% rename from docs/integrations/custom-connectors.md rename to docs/02-integrations/03-custom-connectors.md diff --git a/docs/02-integrations/README.md b/docs/02-integrations/README.md new file mode 100644 index 000000000000..dbc2f43570ae --- /dev/null +++ b/docs/02-integrations/README.md @@ -0,0 +1,215 @@ +# Connector Catalog + +## Connector Release Stages + +Airbyte uses a grading system for connectors to help you understand what to expect from a connector: + +**Generally Available**: A generally available connector has been deemed ready for use in a production environment and is officially supported by Airbyte. Its documentation is considered sufficient to support widespread adoption. + +**Beta**: A beta connector is considered stable and reliable with no backwards incompatible changes but has not been validated by a broader group of users. We expect to find and fix a few issues and bugs in the release before it’s ready for GA. + +**Alpha**: An alpha connector signifies a connector under development and helps Airbyte gather early feedback and issues reported by early adopters. We strongly discourage using alpha releases for production use cases and do not offer Cloud Support SLAs around these products, features, or connectors. + +For more information about the grading system, see [Product Release Stages](https://docs.airbyte.com/project-overview/product-release-stages) + +## Sources + +| Connector | Product Release Stage| Available in Cloud? 
| +|:--------------------------------------------------------------------------------------------| :------------------- | :------------------ | +| [3PL Central](01-sources/tplcentral.md) | Alpha | No | +| [Airtable](01-sources/airtable.md) | Alpha | Yes | +| [Amazon Ads](01-sources/amazon-ads.md) | Beta | Yes | +| [Amazon Seller Partner](01-sources/amazon-seller-partner.md) | Alpha | Yes | +| [Amazon SQS](01-sources/amazon-sqs.md) | Alpha | Yes | +| [Amplitude](01-sources/amplitude.md) | Generally Available | Yes | +| [Apify Dataset](01-sources/apify-dataset.md) | Alpha | Yes | +| [Appstore](01-sources/appstore.md) | Alpha | No | +| [Asana](01-sources/asana.md) | Alpha | No | +| [AWS CloudTrail](01-sources/aws-cloudtrail.md) | Alpha | Yes | +| [Azure Table Storage](01-sources/azure-table.md) | Alpha | Yes | +| [BambooHR](01-sources/bamboo-hr.md) | Alpha | No | +| [Baton](01-sources/hellobaton.md) | Alpha | No | +| [BigCommerce](01-sources/bigcommerce.md) | Alpha | Yes | +| [BigQuery](01-sources/bigquery.md) | Alpha | Yes | +| [Bing Ads](01-sources/bing-ads.md) | Generally Available | Yes | +| [Braintree](01-sources/braintree.md) | Alpha | Yes | +| [Cart.com](01-sources/cart.md) | Alpha | No | +| [Chargebee](01-sources/chargebee.md) | Alpha | Yes | +| [Chargify](01-sources/chargify.md) | Alpha | No | +| [Chartmogul](01-sources/chartmogul.md) | Alpha | Yes | +| [ClickHouse](01-sources/clickhouse.md) | Alpha | Yes | +| [Close.com](01-sources/close-com.md) | Alpha | Yes | +| [CockroachDB](01-sources/cockroachdb.md) | Alpha | No | +| [Commercetools](01-sources/commercetools.md) | Alpha | No | +| [Confluence](01-sources/confluence.md) | Alpha | No | +| [Customer.io](01-sources/customer-io.md) | Alpha | No | +| [Db2](01-sources/db2.md) | Alpha | No | +| [Delighted](01-sources/delighted.md) | Alpha | Yes | +| [Dixa](01-sources/dixa.md) | Alpha | Yes | +| [Dockerhub](01-sources/dockerhub.md) | Alpha | Yes | +| [Drift](01-sources/drift.md) | Alpha | No | +| [Drupal](01-sources/drupal.md) | Alpha | No | +| [End-to-End Testing](01-sources/e2e-test.md) | Alpha | Yes | +| [Exchange Rates API](01-sources/exchangeratesapi.md) | Alpha | Yes | +| [Facebook Marketing](01-sources/facebook-marketing.md) | Generally Available | Yes | +| [Facebook Pages](01-sources/facebook-pages.md) | Alpha | No | +| [Faker](01-sources/faker.md) | Alpha | Yes | +| [File](01-sources/file.md) | Alpha | Yes | +| [Firebolt](01-sources/firebolt.md) | Alpha | Yes | +| [Flexport](01-sources/flexport.md) | Alpha | No | +| [Freshdesk](01-sources/freshdesk.md) | Alpha | Yes | +| [Freshsales](01-sources/freshsales.md) | Alpha | No | +| [Freshservice](01-sources/freshservice.md) | Alpha | No | +| [GitHub](01-sources/github.md) | Generally Available | Yes | +| [GitLab](01-sources/gitlab.md) | Alpha | Yes | +| [Google Ads](01-sources/google-ads.md) | Generally Available | Yes | +| [Google Analytics (v4)](01-sources/google-analytics-v4.md) | Alpha | No | +| [Google Analytics (Universal Analytics)](01-sources/google-analytics-universal-analytics.md) | Generally Available | Yes | +| [Google Directory](01-sources/google-directory.md) | Alpha | Yes | +| [Google Search Console](01-sources/google-search-console.md) | Beta | Yes | +| [Google Sheets](01-sources/google-sheets.md) | Generally Available | Yes | +| [Google Workspace Admin Reports](01-sources/google-workspace-admin-reports.md) | Alpha | Yes | +| [Greenhouse](01-sources/greenhouse.md) | Alpha | Yes | +| [Harness](01-sources/harness.md) | Alpha | No | +| [Harvest](01-sources/harvest.md) | 
Alpha | No | +| [http-request](01-sources/http-request.md) | Alpha | No | +| [HubSpot](01-sources/hubspot.md) | Generally Available | Yes | +| [Instagram](01-sources/instagram.md) | Generally Available | Yes | +| [Intercom](01-sources/intercom.md) | Generally Available | Yes | +| [Iterable](01-sources/iterable.md) | Alpha | Yes | +| [Jenkins](01-sources/jenkins.md) | Alpha | No | +| [Jira](01-sources/jira.md) | Alpha | No | +| [Kafka](01-sources/kafka.md) | Alpha | No | +| [Klaviyo](01-sources/klaviyo.md) | Alpha | Yes | +| [Kustomer](01-sources/kustomer.md) | Alpha | Yes | +| [Lemlist](01-sources/lemlist.md) | Alpha | Yes | +| [Lever](01-sources/lever-hiring.md) | Alpha | No | +| [LinkedIn Ads](01-sources/linkedin-ads.md) | Generally Available | Yes | +| [Linnworks](01-sources/linnworks.md) | Alpha | Yes | +| [Looker](01-sources/looker.md) | Alpha | Yes | +| [Magento](01-sources/magento.md) | Alpha | No | +| [Mailchimp](01-sources/mailchimp.md) | Generally Available | Yes | +| [Marketo](01-sources/marketo.md) | Alpha | Yes | +| [Metabase](01-sources/metabase.md) | Alpha | Yes | +| [Microsoft Dynamics AX](01-sources/microsoft-dynamics-ax.md) | Alpha | No | +| [Microsoft Dynamics Customer Engagement](01-sources/microsoft-dynamics-customer-engagement.md) | Alpha | No | +| [Microsoft Dynamics GP](01-sources/microsoft-dynamics-gp.md) | Alpha | No | +| [Microsoft Dynamics NAV](01-sources/microsoft-dynamics-nav.md) | Alpha | No | +| [Microsoft SQL Server (MSSQL)](01-sources/mssql.md) | Alpha | Yes | +| [Microsoft Teams](01-sources/microsoft-teams.md) | Alpha | Yes | +| [Mixpanel](01-sources/mixpanel.md) | Beta | Yes | +| [Monday](01-sources/monday.md) | Alpha | Yes | +| [Mongo DB](01-sources/mongodb-v2.md) | Alpha | Yes | +| [My Hours](01-sources/my-hours.md) | Alpha | Yes | +| [MySQL](01-sources/mysql.md) | Alpha | Yes | +| [Notion](01-sources/notion.md) | Alpha | No | +| [Okta](01-sources/okta.md) | Alpha | Yes | +| [OneSignal](01-sources/onesignal.md) | Alpha | No | +| [OpenWeather](01-sources/openweather.md) | Alpha | No | +| [Oracle DB](01-sources/oracle.md) | Alpha | Yes | +| [Oracle PeopleSoft](01-sources/oracle-peoplesoft.md) | Alpha | No | +| [Oracle Siebel CRM](01-sources/oracle-siebel-crm.md) | Alpha | No | +| [Orb](01-sources/orb.md) | Alpha | Yes | +| [Outreach](./01-sources/outreach.md) | Alpha | No | +| [PagerDuty](01-sources/pagerduty.md) | Alpha | No | +| [PayPal Transaction](01-sources/paypal-transaction.md) | Alpha | No | +| [Paystack](01-sources/paystack.md) | Alpha | No | +| [PersistIq](01-sources/persistiq.md) | Alpha | Yes | +| [Pinterest](01-sources/pinterest.md) | Alpha | No | +| [Pipedrive](01-sources/pipedrive.md) | Alpha | No | +| [Pivotal Tracker](01-sources/pivotal-tracker.md) | Alpha | No | +| [Plaid](01-sources/plaid.md) | Alpha | No | +| [PokéAPI](01-sources/pokeapi.md) | Alpha | Yes | +| [Postgres](01-sources/postgres.md) | Beta | Yes | +| [PostHog](01-sources/posthog.md) | Alpha | Yes | +| [PrestaShop](01-sources/presta-shop.md) | Alpha | Yes | +| [Qualaroo](01-sources/qualaroo.md) | Alpha | Yes | +| [QuickBooks](01-sources/quickbooks.md) | Alpha | No | +| [Recharge](01-sources/recharge.md) | Alpha | Yes | +| [Recurly](01-sources/recurly.md) | Alpha | Yes | +| [Redshift](01-sources/redshift.md) | Alpha | Yes | +| [Retently](01-sources/retently.md) | Alpha | Yes | +| [S3](01-sources/s3.md) | Beta | Yes | +| [Salesforce](01-sources/salesforce.md) | Generally Available | Yes | +| [Salesloft](01-sources/salesloft.md) | Alpha | No | +| [SAP Business 
One](01-sources/sap-business-one.md) | Alpha | No | +| [SearchMetrics](./01-sources/search-metrics.md) | Alpha | No | +| [Sendgrid](01-sources/sendgrid.md) | Alpha | Yes | +| [Sentry](01-sources/sentry.md) | Alpha | Yes | +| [SFTP](01-sources/sftp.md) | Alpha | Yes | +| [Shopify](01-sources/shopify.md) | Alpha | No | +| [Short.io](01-sources/shortio.md) | Alpha | Yes | +| [Slack](01-sources/slack.md) | Alpha | No | +| [Smartsheets](01-sources/smartsheets.md) | Beta | Yes | +| [Snapchat Marketing](01-sources/snapchat-marketing.md) | Alpha | Yes | +| [Snowflake](01-sources/snowflake.md) | Alpha | Yes | +| [Spree Commerce](01-sources/spree-commerce.md) | Alpha | No | +| [Square](01-sources/square.md) | Alpha | Yes | +| [Strava](01-sources/strava.md) | Alpha | No | +| [Stripe](01-sources/stripe.md) | Generally Available | Yes | +| [Sugar CRM](01-sources/sugar-crm.md) | Alpha | No | +| [SurveyMonkey](01-sources/surveymonkey.md) | Alpha | No | +| [Tempo](01-sources/tempo.md) | Alpha | Yes | +| [TiDB](01-sources/tidb.md) | Alpha | No | +| [TikTok Marketing](./01-sources/tiktok-marketing.md) | Generally Available | Yes | +| [Trello](01-sources/trello.md) | Alpha | No | +| [Twilio](01-sources/twilio.md) | Alpha | Yes | +| [Typeform](01-sources/typeform.md) | Alpha | Yes | +| [US Census](01-sources/us-census.md) | Alpha | Yes | +| [VictorOps](01-sources/victorops.md) | Alpha | No | +| [Webflow](01-sources/webflow.md ) | Alpha | Yes | +| [WooCommerce](01-sources/woocommerce.md) | Alpha | No | +| [Wordpress](01-sources/wordpress.md) | Alpha | No | +| [YouTube Analytics](01-sources/youtube-analytics.md) | Alpha | No | +| [Zencart](01-sources/zencart.md) | Alpha | No | +| [Zendesk Chat](01-sources/zendesk-chat.md) | Alpha | Yes | +| [Zendesk Sunshine](01-sources/zendesk-sunshine.md) | Alpha | Yes | +| [Zendesk Support](01-sources/zendesk-support.md) | Generally Available | Yes | +| [Zendesk Talk](01-sources/zendesk-talk.md) | Alpha | No | +| [Zenloop](01-sources/zenloop.md) | Alpha | Yes | +| [Zoho CRM](01-sources/zoho-crm.md) | Alpha | No | +| [Zoom](01-sources/zoom.md) | Alpha | No | +| [Zuora](01-sources/zuora.md) | Alpha | Yes | + +## Destinations + +| Connector | Product Release Stage| Available in Cloud? 
| +|:-----------------------------------------------------------| :------------------- | :------------------ | +| [Amazon SQS](02-destinations/amazon-sqs.md) | Alpha | Yes | +| [Amazon Datalake](02-destinations/aws-datalake.md) | Alpha | No | +| [AzureBlobStorage](02-destinations/azureblobstorage.md) | Alpha | Yes | +| [BigQuery](02-destinations/bigquery.md) | Generally Available | Yes | +| [Cassandra](02-destinations/cassandra.md) | Alpha | Yes | +| [Chargify (Keen)](02-destinations/chargify.md) | Alpha | Yes | +| [ClickHouse](02-destinations/clickhouse.md) | Alpha | Yes | +| [Databricks](02-destinations/databricks.md) | Alpha | Yes | +| [DynamoDB](02-destinations/dynamodb.md) | Alpha | Yes | +| [Elasticsearch](02-destinations/elasticsearch.md) | Alpha | Yes | +| [End-to-End Testing](02-destinations/e2e-test.md) | Alpha | Yes | +| [Firebolt](02-destinations/firebolt.md) | Alpha | Yes | +| [Google Cloud Storage (GCS)](02-destinations/gcs.md) | Beta | Yes | +| [Google Pubsub](02-destinations/pubsub.md) | Alpha | Yes | +| [Google Sheets](02-destinations/google-sheets.md) | Alpha | Yes | +| [Kafka](02-destinations/kafka.md) | Alpha | No | +| [Keen](02-destinations/keen.md) | Alpha | No | +| [Kinesis](02-destinations/kinesis.md) | Alpha | No | +| [Local CSV](02-destinations/local-csv.md) | Alpha | No | +| [Local JSON](02-destinations/local-json.md) | Alpha | No | +| [MariaDB ColumnStore](02-destinations/mariadb-columnstore.md) | Alpha | Yes | +| [MeiliSearch](02-destinations/meilisearch.md) | Alpha | Yes | +| [MongoDB](02-destinations/mongodb.md) | Alpha | Yes | +| [MQTT](02-destinations/mqtt.md) | Alpha | Yes | +| [MS SQL Server](02-destinations/mssql.md) | Alpha | Yes | +| [MySQL](02-destinations/mysql.md) | Alpha | Yes | +| [Oracle](02-destinations/oracle.md) | Alpha | Yes | +| [Postgres](02-destinations/postgres.md) | Alpha | Yes | +| [Pulsar](02-destinations/pulsar.md) | Alpha | Yes | +| [RabbitMQ](02-destinations/rabbitmq.md) | Alpha | Yes | +| [Redis](02-destinations/redis.md) | Alpha | Yes | +| [Redshift](02-destinations/redshift.md) | Beta | Yes | +| [Rockset](02-destinations/rockset.md) | Alpha | Yes | +| [S3](02-destinations/s3.md) | Generally Available | Yes | +| [Scylla](02-destinations/scylla.md) | Alpha | Yes | +| [SFTP JSON](02-destinations/sftp-json.md) | Alpha | Yes | +| [Snowflake](02-destinations/snowflake.md) | Generally Available | Yes | +| [Streamr](02-destinations/streamr.md) | Alpha | No | diff --git a/docs/cloud/core-concepts.md b/docs/03-cloud/01-core-concepts.md similarity index 100% rename from docs/cloud/core-concepts.md rename to docs/03-cloud/01-core-concepts.md diff --git a/docs/cloud/managing-airbyte-cloud.md b/docs/03-cloud/02-managing-airbyte-cloud.md similarity index 100% rename from docs/cloud/managing-airbyte-cloud.md rename to docs/03-cloud/02-managing-airbyte-cloud.md diff --git a/docs/cloud/getting-started-with-airbyte-cloud.md b/docs/03-cloud/README.md similarity index 95% rename from docs/cloud/getting-started-with-airbyte-cloud.md rename to docs/03-cloud/README.md index 4959d697cf95..ffcf6fdfe8d7 100644 --- a/docs/cloud/getting-started-with-airbyte-cloud.md +++ b/docs/03-cloud/README.md @@ -1,4 +1,4 @@ -# Getting Started with Airbyte Cloud +# Airbyte Cloud This page guides you through setting up your Airbyte Cloud account, setting up a source, destination, and connection, verifying the sync, and allowlisting an IP address. @@ -8,7 +8,7 @@ To use Airbyte Cloud: 1. 
If you haven't already, [sign up for Airbyte Cloud](https://cloud.airbyte.io/signup?utm_campaign=22Q1_AirbyteCloudSignUpCampaign_Trial&utm_source=Docs&utm_content=SetupGuide). - Airbyte Cloud offers a 14-day free trial with $1000 worth of [credits](core-concepts.md#credits), whichever expires first. For more information, see [Pricing](https://airbyte.com/pricing). + Airbyte Cloud offers a 14-day free trial with $1000 worth of [credits](01-core-concepts.md#credits), whichever expires first. For more information, see [Pricing](https://airbyte.com/pricing). 2. Airbyte will send you an email with a verification link. On clicking the link, you'll be taken to your new workspace. @@ -96,7 +96,7 @@ Setting up a connection involves configuring the following parameters: -For more information, see [Connections and Sync Modes](../understanding-airbyte/connections/README.md) and [Namespaces](../understanding-airbyte/namespaces.md) +For more information, see [Connections and Sync Modes](../10-understanding-airbyte/05-connections/README.md) and [Namespaces](../10-understanding-airbyte/11-namespaces.md) To set up a connection: @@ -148,7 +148,7 @@ To set up a connection: :::tip -To better understand the destination namespace configurations, see [Destination Namespace example](../understanding-airbyte/namespaces.md#examples) +To better understand the destination namespace configurations, see [Destination Namespace example](../10-understanding-airbyte/11-namespaces.md#examples) ::: 6. (Optional) In the **Destination Stream Prefix (Optional)** field, add a prefix to stream names (for example, adding a prefix `airbyte_` renames `projects` to `airbyte_projects`). diff --git a/docs/quickstart/deploy-airbyte.md b/docs/04-quickstart/01-deploy-airbyte.md similarity index 95% rename from docs/quickstart/deploy-airbyte.md rename to docs/04-quickstart/01-deploy-airbyte.md index 38163974c68a..ba9d6ac15d07 100644 --- a/docs/quickstart/deploy-airbyte.md +++ b/docs/04-quickstart/01-deploy-airbyte.md @@ -13,7 +13,7 @@ docker-compose up Once you see an Airbyte banner, the UI is ready to go at [http://localhost:8000](http://localhost:8000)! -Alternatively, if you have an Airbyte Cloud invite, just follow [these steps.](../deploying-airbyte/on-cloud.md) +Alternatively, if you have an Airbyte Cloud invite, just follow [these steps.](../05-deploying-airbyte/09-on-cloud.md) ## FAQ diff --git a/docs/quickstart/add-a-source.md b/docs/04-quickstart/02-add-a-source.md similarity index 100% rename from docs/quickstart/add-a-source.md rename to docs/04-quickstart/02-add-a-source.md diff --git a/docs/quickstart/add-a-destination.md b/docs/04-quickstart/03-add-a-destination.md similarity index 100% rename from docs/quickstart/add-a-destination.md rename to docs/04-quickstart/03-add-a-destination.md diff --git a/docs/quickstart/set-up-a-connection.md b/docs/04-quickstart/04-set-up-a-connection.md similarity index 89% rename from docs/quickstart/set-up-a-connection.md rename to docs/04-quickstart/04-set-up-a-connection.md index 2c9add584812..7f02e41cee6f 100644 --- a/docs/quickstart/set-up-a-connection.md +++ b/docs/04-quickstart/04-set-up-a-connection.md @@ -33,7 +33,7 @@ jq '._airbyte_data | {abilities: .abilities, weight: .weight}' And there you have it. You've pulled data from an API directly into a file, with all of the actual configuration for this replication only taking place in the UI. 
-Note: If you are using Airbyte on Windows with WSL2 and Docker, refer to [this tutorial](../operator-guides/locating-files-local-destination.md) or [this section](../integrations/destinations/local-json.md#access-replicated-data-files) in the local-json destination guide to locate the replicated folder and file. +Note: If you are using Airbyte on Windows with WSL2 and Docker, refer to [this tutorial](../06-operator-guides/08-locating-files-local-destination.md) or [this section](../02-integrations/02-destinations/local-json.md#access-replicated-data-files) in the local-json destination guide to locate the replicated folder and file. ## That's it! diff --git a/docs/04-quickstart/README.md b/docs/04-quickstart/README.md new file mode 100644 index 000000000000..bad90214fbb5 --- /dev/null +++ b/docs/04-quickstart/README.md @@ -0,0 +1,6 @@ +# Airbyte Open Source QuickStart + +- [Deploy Airbyte](01-deploy-airbyte.md) +- [Add a source](02-add-a-source.md) +- [Add a destination](03-add-a-destination.md) +- [Set up a connection](04-set-up-a-connection.md) diff --git a/docs/deploying-airbyte/local-deployment.md b/docs/05-deploying-airbyte/01-local-deployment.md similarity index 94% rename from docs/deploying-airbyte/local-deployment.md rename to docs/05-deploying-airbyte/01-local-deployment.md index 91d433322237..76a440bdd16e 100644 --- a/docs/deploying-airbyte/local-deployment.md +++ b/docs/05-deploying-airbyte/01-local-deployment.md @@ -50,5 +50,5 @@ docker-compose up ## Troubleshooting -If you encounter any issues, just connect to our [Slack](https://slack.airbyte.io). Our community will help! We also have a [troubleshooting](../troubleshooting/on-deploying.md) section in our docs for common problems. +If you encounter any issues, just connect to our [Slack](https://slack.airbyte.io). Our community will help! We also have a [troubleshooting](../07-troubleshooting/01-on-deploying.md) section in our docs for common problems. diff --git a/docs/deploying-airbyte/on-aws-ec2.md b/docs/05-deploying-airbyte/02-on-aws-ec2.md similarity index 97% rename from docs/deploying-airbyte/on-aws-ec2.md rename to docs/05-deploying-airbyte/02-on-aws-ec2.md index 18da4e1608da..0e78813ebb25 100644 --- a/docs/deploying-airbyte/on-aws-ec2.md +++ b/docs/05-deploying-airbyte/02-on-aws-ec2.md @@ -137,5 +137,5 @@ If you want to get your logs from your Airbyte Docker containers in CloudWatch, ## Troubleshooting -If you encounter any issues, just connect to our [Slack](https://slack.airbyte.io). Our community will help! We also have a [FAQ](../troubleshooting/on-deploying.md) section in our docs for common problems. +If you encounter any issues, just connect to our [Slack](https://slack.airbyte.io). Our community will help! We also have a [FAQ](../07-troubleshooting/01-on-deploying.md) section in our docs for common problems. 
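As a condensed recap of the Open Source quickstart and local-deployment pages referenced above, a minimal sketch (assuming Git, Docker, and Docker Compose are already installed):

```bash
# Fetch Airbyte and start it with the bundled Docker Compose file.
git clone https://github.com/airbytehq/airbyte.git
cd airbyte
docker-compose up

# Once the Airbyte banner appears in the logs, the UI is available at
# http://localhost:8000.
```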
diff --git a/docs/deploying-airbyte/on-azure-vm-cloud-shell.md b/docs/05-deploying-airbyte/03-on-azure-vm-cloud-shell.md similarity index 95% rename from docs/deploying-airbyte/on-azure-vm-cloud-shell.md rename to docs/05-deploying-airbyte/03-on-azure-vm-cloud-shell.md index fbb3c450f863..472963fe17f3 100644 --- a/docs/deploying-airbyte/on-azure-vm-cloud-shell.md +++ b/docs/05-deploying-airbyte/03-on-azure-vm-cloud-shell.md @@ -58,7 +58,7 @@ Make sure to update the permissions on the private key, or you'll get an error t chmod 600 ./$YOUR_PATH_TO_DOWNLOADS/id_rsa ``` -Above command will generate download link and give you pop-up on right bottom side, click on `Click here to download your file.` to download private key. Note: Save this file, you will need it to connect to your VM in [Connect to Airbyte](on-azure-vm-cloud-shell.md#connect-to-airbyte) step. +The above command will generate a download link and show a pop-up at the bottom right; click `Click here to download your file.` to download the private key. Note: Save this file, you will need it to connect to your VM in the [Connect to Airbyte](03-on-azure-vm-cloud-shell.md#connect-to-airbyte) step. ![](../.gitbook/assets/azure_shell_download_ssh_key.png) @@ -145,5 +145,5 @@ This part assumes that you have access to a terminal on your workstation ## Troubleshooting -If you encounter any issues, just connect to our [Slack](https://slack.airbyte.io). Our community will help! We also have a [FAQ](../troubleshooting/on-deploying.md) section in our docs for common problems. +If you encounter any issues, just connect to our [Slack](https://slack.airbyte.io). Our community will help! We also have a [FAQ](../07-troubleshooting/01-on-deploying.md) section in our docs for common problems. diff --git a/docs/deploying-airbyte/on-gcp-compute-engine.md b/docs/05-deploying-airbyte/04-on-gcp-compute-engine.md similarity index 97% rename from docs/deploying-airbyte/on-gcp-compute-engine.md rename to docs/05-deploying-airbyte/04-on-gcp-compute-engine.md index f82df901476a..7bdb2db5dd39 100644 --- a/docs/deploying-airbyte/on-gcp-compute-engine.md +++ b/docs/05-deploying-airbyte/04-on-gcp-compute-engine.md @@ -145,5 +145,5 @@ gcloud --project=$PROJECT_ID beta compute ssh $INSTANCE_NAME -- -L 8000:localhos ## Troubleshooting -If you encounter any issues, just connect to our [Slack](https://slack.airbyte.io). Our community will help! We also have a [FAQ](../troubleshooting/on-deploying.md) section in our docs for common problems. +If you encounter any issues, just connect to our [Slack](https://slack.airbyte.io). Our community will help! We also have a [FAQ](../07-troubleshooting/01-on-deploying.md) section in our docs for common problems. diff --git a/docs/deploying-airbyte/on-kubernetes.md b/docs/05-deploying-airbyte/05-on-kubernetes.md similarity index 96% rename from docs/deploying-airbyte/on-kubernetes.md rename to docs/05-deploying-airbyte/05-on-kubernetes.md index 75f661a1e520..16087d8b74c5 100644 --- a/docs/deploying-airbyte/on-kubernetes.md +++ b/docs/05-deploying-airbyte/05-on-kubernetes.md @@ -6,7 +6,7 @@ Airbyte allows scaling sync workloads horizontally using Kubernetes. The core co ## Quickstart -If you don't want to configure your own K8s cluster and Airbyte instance, you can use the free, open-source project [Plural](https://www.plural.sh/) to bring up a K8s cluster and Airbyte for you. Use [this guide](on-plural.md) to get started.
+If you don't want to configure your own K8s cluster and Airbyte instance, you can use the free, open-source project [Plural](https://www.plural.sh/) to bring up a K8s cluster and Airbyte for you. Use [this guide](06-on-plural.md) to get started. ## Getting Started @@ -173,7 +173,7 @@ Airbyte publishes logs every minute. This means it is normal to see minute-long Each log file is named `{yyyyMMddHH24mmss}_{podname}_{UUID}` and is not compressed. Users can view logs simply by navigating to the relevant folder and downloading the file for the time period in question. -See the [Known Issues](on-kubernetes.md#known-issues) section for planned logging improvements. +See the [Known Issues](05-on-kubernetes.md#known-issues) section for planned logging improvements. ### Using an external DB @@ -221,11 +221,11 @@ Check out the [Helm Chart Readme](https://github.com/airbytehq/airbyte/tree/mast ### Connector Container Logs -Although all logs can be accessed by viewing the scheduler logs, connector container logs may be easier to understand when isolated by accessing from the Airbyte UI or the [Airbyte API](../api-documentation.md) for a specific job attempt. Connector pods launched by Airbyte will not relay logs directly to Kubernetes logging. You must access these logs through Airbyte. +Although all logs can be accessed by viewing the scheduler logs, connector container logs may be easier to understand when isolated by accessing from the Airbyte UI or the [Airbyte API](../11-api-documentation.md) for a specific job attempt. Connector pods launched by Airbyte will not relay logs directly to Kubernetes logging. You must access these logs through Airbyte. ### Upgrading Airbyte Kube -See [Upgrading K8s](../operator-guides/upgrading-airbyte.md). +See [Upgrading K8s](../06-operator-guides/01-upgrading-airbyte.md). ### Resizing Volumes diff --git a/docs/deploying-airbyte/on-plural.md b/docs/05-deploying-airbyte/06-on-plural.md similarity index 100% rename from docs/deploying-airbyte/on-plural.md rename to docs/05-deploying-airbyte/06-on-plural.md diff --git a/docs/deploying-airbyte/on-oci-vm.md b/docs/05-deploying-airbyte/07-on-oci-vm.md similarity index 100% rename from docs/deploying-airbyte/on-oci-vm.md rename to docs/05-deploying-airbyte/07-on-oci-vm.md diff --git a/docs/deploying-airbyte/on-digitalocean-droplet.md b/docs/05-deploying-airbyte/08-on-digitalocean-droplet.md similarity index 96% rename from docs/deploying-airbyte/on-digitalocean-droplet.md rename to docs/05-deploying-airbyte/08-on-digitalocean-droplet.md index 6db2bcd78d37..5833326432f4 100644 --- a/docs/deploying-airbyte/on-digitalocean-droplet.md +++ b/docs/05-deploying-airbyte/08-on-digitalocean-droplet.md @@ -82,5 +82,5 @@ docker-compose up -d ## Troubleshooting -If you encounter any issues, just connect to our [Slack](https://slack.airbyte.io). Our community will help! We also have a [FAQ](../troubleshooting/on-deploying.md) section in our docs for common problems. +If you encounter any issues, just connect to our [Slack](https://slack.airbyte.io). Our community will help! We also have a [FAQ](../07-troubleshooting/01-on-deploying.md) section in our docs for common problems. 
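For the Kubernetes deployment covered above, a rough sanity-check sketch follows. The namespace and resource names are placeholders rather than values taken from these docs; substitute the ones from your own install.

```bash
# Confirm the Airbyte pods are running (namespace "airbyte" is an assumption).
kubectl get pods -n airbyte

# Tail recent server logs if a pod is crash-looping or a sync looks stuck.
# "deploy/airbyte-server" is a placeholder for your actual deployment name.
kubectl logs -n airbyte deploy/airbyte-server --tail=100
```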
diff --git a/docs/deploying-airbyte/on-cloud.md b/docs/05-deploying-airbyte/09-on-cloud.md similarity index 100% rename from docs/deploying-airbyte/on-cloud.md rename to docs/05-deploying-airbyte/09-on-cloud.md diff --git a/docs/05-deploying-airbyte/README.md b/docs/05-deploying-airbyte/README.md new file mode 100644 index 000000000000..80dd6b08bae7 --- /dev/null +++ b/docs/05-deploying-airbyte/README.md @@ -0,0 +1,12 @@ +# Deploy Airbyte Open Source + +![not all who wander are lost](https://user-images.githubusercontent.com/2591516/170351002-0d054d06-c901-4794-8719-97569060408f.png) + +- [Local Deployment](01-local-deployment.md) +- [On Aws](02-on-aws-ec2.md) +- [On Azure VM Cloud Shell](03-on-azure-vm-cloud-shell.md) +- [On GCP](04-on-gcp-compute-engine.md) +- [On Kubernetes](05-on-kubernetes.md) +- [On Plural](06-on-plural.md) +- [On OCI VM](07-on-oci-vm.md) +- [On Digital Ocean Droplet](08-on-digitalocean-droplet.md) diff --git a/docs/operator-guides/upgrading-airbyte.md b/docs/06-operator-guides/01-upgrading-airbyte.md similarity index 97% rename from docs/operator-guides/upgrading-airbyte.md rename to docs/06-operator-guides/01-upgrading-airbyte.md index 17366b5a2e2c..cf82798eb526 100644 --- a/docs/operator-guides/upgrading-airbyte.md +++ b/docs/06-operator-guides/01-upgrading-airbyte.md @@ -10,7 +10,7 @@ When Airbyte is upgraded, it will attempt to upgrade some connector versions. It Airbyte intelligently performs upgrades automatically based off of your version defined in your `.env` file and will handle data migration for you. -If you are running [Airbyte on Kubernetes](../deploying-airbyte/on-kubernetes.md), you will need to use one of the two processes defined [here](#upgrading-on-k8s-0270-alpha-and-above) that differ based on your Airbyte version. +If you are running [Airbyte on Kubernetes](../05-deploying-airbyte/05-on-kubernetes.md), you will need to use one of the two processes defined [here](#upgrading-on-k8s-0270-alpha-and-above) that differ based on your Airbyte version. ## Mandatory Intermediate Upgrade diff --git a/docs/operator-guides/reset.md b/docs/06-operator-guides/02-reset.md similarity index 100% rename from docs/operator-guides/reset.md rename to docs/06-operator-guides/02-reset.md diff --git a/docs/operator-guides/configuring-airbyte-db.md b/docs/06-operator-guides/03-configuring-airbyte-db.md similarity index 100% rename from docs/operator-guides/configuring-airbyte-db.md rename to docs/06-operator-guides/03-configuring-airbyte-db.md diff --git a/docs/operator-guides/browsing-output-logs.md b/docs/06-operator-guides/04-browsing-output-logs.md similarity index 98% rename from docs/operator-guides/browsing-output-logs.md rename to docs/06-operator-guides/04-browsing-output-logs.md index 9dc7acb58b3c..d82c9b2439bf 100644 --- a/docs/operator-guides/browsing-output-logs.md +++ b/docs/06-operator-guides/04-browsing-output-logs.md @@ -72,7 +72,7 @@ normalize target_config.json ### Reading the content of the catalog.json file -For example, it is often useful to inspect the content of the [catalog](../understanding-airbyte/beginners-guide-to-catalog.md) file. You could do so by running a `cat` command: +For example, it is often useful to inspect the content of the [catalog](../10-understanding-airbyte/01-beginners-guide-to-catalog.md) file. 
You could do so by running a `cat` command: ```bash docker run -it --rm --volume airbyte_workspace:/data busybox cat /data/9/2/catalog.json diff --git a/docs/operator-guides/using-the-airflow-airbyte-operator.md b/docs/06-operator-guides/05-using-the-airflow-airbyte-operator.md similarity index 100% rename from docs/operator-guides/using-the-airflow-airbyte-operator.md rename to docs/06-operator-guides/05-using-the-airflow-airbyte-operator.md diff --git a/docs/operator-guides/using-prefect-task.md b/docs/06-operator-guides/06-using-prefect-task.md similarity index 100% rename from docs/operator-guides/using-prefect-task.md rename to docs/06-operator-guides/06-using-prefect-task.md diff --git a/docs/operator-guides/using-dagster-integration.md b/docs/06-operator-guides/07-using-dagster-integration.md similarity index 100% rename from docs/operator-guides/using-dagster-integration.md rename to docs/06-operator-guides/07-using-dagster-integration.md diff --git a/docs/operator-guides/locating-files-local-destination.md b/docs/06-operator-guides/08-locating-files-local-destination.md similarity index 100% rename from docs/operator-guides/locating-files-local-destination.md rename to docs/06-operator-guides/08-locating-files-local-destination.md diff --git a/docs/operator-guides/transformation-and-normalization/transformations-with-sql.md b/docs/06-operator-guides/09-transformation-and-normalization/01-transformations-with-sql.md similarity index 96% rename from docs/operator-guides/transformation-and-normalization/transformations-with-sql.md rename to docs/06-operator-guides/09-transformation-and-normalization/01-transformations-with-sql.md index 3f6c9357d2c1..7a1a51318696 100644 --- a/docs/operator-guides/transformation-and-normalization/transformations-with-sql.md +++ b/docs/06-operator-guides/09-transformation-and-normalization/01-transformations-with-sql.md @@ -6,7 +6,7 @@ This tutorial will describe how to integrate SQL based transformations with Airbyte syncs using plain SQL queries. -This is the first part of ELT tutorial. The second part goes deeper with [Transformations with dbt](transformations-with-dbt.md) and then wrap-up with a third part on [Transformations with Airbyte](transformations-with-airbyte.md). +This is the first part of ELT tutorial. The second part goes deeper with [Transformations with dbt](02-transformations-with-dbt.md) and then wrap-up with a third part on [Transformations with Airbyte](03-transformations-with-airbyte.md). ## \(Examples outputs are updated with Airbyte version 0.23.0-alpha from May 2021\) @@ -16,7 +16,7 @@ At its core, Airbyte is geared to handle the EL \(Extract Load\) steps of an ELT However, this is actually producing a table in the destination with a JSON blob column... For the typical analytics use case, you probably want this json blob normalized so that each field is its own column. -So, after EL, comes the T \(transformation\) and the first T step that Airbyte actually applies on top of the extracted data is called "Normalization". You can find more information about it [here](../../understanding-airbyte/basic-normalization.md). +So, after EL, comes the T \(transformation\) and the first T step that Airbyte actually applies on top of the extracted data is called "Normalization". You can find more information about it [here](../../10-understanding-airbyte/04-basic-normalization.md). Airbyte runs this step before handing the final data over to other tools that will manage further transformation down the line. 
@@ -34,7 +34,7 @@ This could be useful if: In order to do so, we will now describe how you can leverage the basic normalization outputs that Airbyte generates to build your own transformations if you don't want to start from scratch. -Note: We will rely on docker commands that we've gone over as part of another [Tutorial on Exploring Docker Volumes](../browsing-output-logs.md). +Note: We will rely on docker commands that we've gone over as part of another [Tutorial on Exploring Docker Volumes](../04-browsing-output-logs.md). ### \(Optional\) Configure some Covid \(data\) source and Postgres destinations @@ -315,5 +315,5 @@ create view "postgres"."public"."covid_epidemiology" as ( Then you can run in your preferred SQL editor or tool! -If you are familiar with dbt or want to learn more about it, you can continue with the following [tutorial using dbt](transformations-with-dbt.md)... +If you are familiar with dbt or want to learn more about it, you can continue with the following [tutorial using dbt](02-transformations-with-dbt.md)... diff --git a/docs/operator-guides/transformation-and-normalization/transformations-with-dbt.md b/docs/06-operator-guides/09-transformation-and-normalization/02-transformations-with-dbt.md similarity index 93% rename from docs/operator-guides/transformation-and-normalization/transformations-with-dbt.md rename to docs/06-operator-guides/09-transformation-and-normalization/02-transformations-with-dbt.md index e7ea6b4158bb..450ba65a488d 100644 --- a/docs/operator-guides/transformation-and-normalization/transformations-with-dbt.md +++ b/docs/06-operator-guides/09-transformation-and-normalization/02-transformations-with-dbt.md @@ -4,7 +4,7 @@ This tutorial will describe how to integrate SQL based transformations with Airbyte syncs using specialized transformation tool: dbt. -This tutorial is the second part of the previous tutorial [Transformations with SQL](transformations-with-sql.md). Next, we'll wrap-up with a third part on submitting transformations back in Airbyte: [Transformations with Airbyte](transformations-with-airbyte.md). +This tutorial is the second part of the previous tutorial [Transformations with SQL](01-transformations-with-sql.md). Next, we'll wrap-up with a third part on submitting transformations back in Airbyte: [Transformations with Airbyte](03-transformations-with-airbyte.md). \(Example outputs are updated with Airbyte version 0.23.0-alpha from May 2021\) @@ -16,7 +16,7 @@ Before generating the SQL files as we've seen in the previous tutorial, Airbyte ### Validate dbt project settings -Let's say we identified our workspace \(as shown in the previous tutorial [Transformations with SQL](transformations-with-sql.md)\), and we have a workspace ID of: +Let's say we identified our workspace \(as shown in the previous tutorial [Transformations with SQL](01-transformations-with-sql.md)\), and we have a workspace ID of: ```bash NORMALIZE_WORKSPACE="5/0/" @@ -88,7 +88,7 @@ Done. PASS=1 WARN=0 ERROR=0 SKIP=0 TOTAL=1 ### Exporting dbt normalization project outside Airbyte -As seen in the tutorial on [exploring workspace folder](../browsing-output-logs.md), it is possible to browse the `normalize` folder and examine further logs if an error occurs. +As seen in the tutorial on [exploring workspace folder](../04-browsing-output-logs.md), it is possible to browse the `normalize` folder and examine further logs if an error occurs. 
In particular, we can also take a look at the dbt models generated by Airbyte and export them to the local host filesystem: @@ -214,5 +214,5 @@ Done. PASS=1 WARN=0 ERROR=0 SKIP=0 TOTAL=1 Now, that you've exported the generated normalization models, you can edit and tweak them as necessary. -If you want to know how to push your modifications back to Airbyte and use your updated dbt project during Airbyte syncs, you can continue with the following [tutorial on importing transformations into Airbyte](transformations-with-airbyte.md)... +If you want to know how to push your modifications back to Airbyte and use your updated dbt project during Airbyte syncs, you can continue with the following [tutorial on importing transformations into Airbyte](03-transformations-with-airbyte.md)... diff --git a/docs/operator-guides/transformation-and-normalization/transformations-with-airbyte.md b/docs/06-operator-guides/09-transformation-and-normalization/03-transformations-with-airbyte.md similarity index 91% rename from docs/operator-guides/transformation-and-normalization/transformations-with-airbyte.md rename to docs/06-operator-guides/09-transformation-and-normalization/03-transformations-with-airbyte.md index ff3cdaf32c14..cb666dc2bd3f 100644 --- a/docs/operator-guides/transformation-and-normalization/transformations-with-airbyte.md +++ b/docs/06-operator-guides/09-transformation-and-normalization/03-transformations-with-airbyte.md @@ -4,7 +4,7 @@ This tutorial will describe how to push a custom dbt transformation project back to Airbyte to use during syncs. -This guide is the last part of the tutorial series on transformations, following [Transformations with SQL](transformations-with-sql.md) and [connecting EL with T using dbt](transformations-with-dbt.md). +This guide is the last part of the tutorial series on transformations, following [Transformations with SQL](01-transformations-with-sql.md) and [connecting EL with T using dbt](02-transformations-with-dbt.md). \(Example outputs are updated with Airbyte version 0.23.0-alpha from May 2021\) @@ -18,7 +18,7 @@ After replication of data from a source connector \(Extract\) to a destination c ## Public Git repository -In the connection settings page, I can add new Transformations steps to apply after [normalization](../../understanding-airbyte/basic-normalization.md). For example, I want to run my custom dbt project jaffle_shop, whenever my sync is done replicating and normalizing my data. +In the connection settings page, I can add new Transformations steps to apply after [normalization](../../10-understanding-airbyte/04-basic-normalization.md). For example, I want to run my custom dbt project jaffle_shop, whenever my sync is done replicating and normalizing my data. You can find the jaffle shop test repository by clicking [here](https://github.com/dbt-labs/jaffle_shop). 
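To make the workspace-browsing steps from the transformation tutorials above concrete, here is a small sketch that reuses the `airbyte_workspace` volume and the `NORMALIZE_WORKSPACE` value shown in those tutorials; the exact folder layout can vary between Airbyte versions, so treat the path as illustrative.

```bash
# Workspace (job/attempt) identified in the dbt tutorial.
NORMALIZE_WORKSPACE="5/0/"

# Browse the dbt project that normalization generated inside the shared workspace volume.
docker run -it --rm --volume airbyte_workspace:/data busybox \
  ls /data/${NORMALIZE_WORKSPACE}normalize
```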
diff --git a/docs/operator-guides/transformation-and-normalization/README.md b/docs/06-operator-guides/09-transformation-and-normalization/README.md similarity index 100% rename from docs/operator-guides/transformation-and-normalization/README.md rename to docs/06-operator-guides/09-transformation-and-normalization/README.md diff --git a/docs/operator-guides/sentry-integration.md b/docs/06-operator-guides/10-configuring-airbyte/01-sentry-integration.md similarity index 93% rename from docs/operator-guides/sentry-integration.md rename to docs/06-operator-guides/10-configuring-airbyte/01-sentry-integration.md index 74b074b47ddd..703d8876f8d9 100644 --- a/docs/operator-guides/sentry-integration.md +++ b/docs/06-operator-guides/10-configuring-airbyte/01-sentry-integration.md @@ -8,4 +8,4 @@ By default, this option is off. There are 2 possible mechanisms for its activati Most connectors written using the Airbyte Python or Java CDKs automatically detect this environment variable and activate Sentry profiling accordingly. ## UML diagram -![](../.gitbook/assets/sentry-flow-v1.png) +![](../../.gitbook/assets/sentry-flow-v1.png) diff --git a/docs/operator-guides/configuring-airbyte.md b/docs/06-operator-guides/10-configuring-airbyte/README.md similarity index 99% rename from docs/operator-guides/configuring-airbyte.md rename to docs/06-operator-guides/10-configuring-airbyte/README.md index 5744b67ac584..c9eee64c9e1a 100644 --- a/docs/operator-guides/configuring-airbyte.md +++ b/docs/06-operator-guides/10-configuring-airbyte/README.md @@ -84,7 +84,7 @@ The following variables are relevant to both Docker and Kubernetes. 2. `MAX_CHECK_WORKERS` - Define the maximum number of Check workers each Airbyte Worker container can support. Defaults to 5. 3. `MAX_SYNC_WORKERS` - Define the maximum number of Sync workers each Airbyte Worker container can support. Defaults to 5. 4. `MAX_DISCOVER_WORKERS` - Define the maximum number of Discover workers each Airbyte Worker container can support. Defaults to 5. -5. `SENTRY_DSN` - Define the [DSN](https://docs.sentry.io/product/sentry-basics/dsn-explainer/) of necessary Sentry instance. Defaults to empty. Integration with Sentry is explained [here](./sentry-integration.md) +5. `SENTRY_DSN` - Define the [DSN](https://docs.sentry.io/product/sentry-basics/dsn-explainer/) of the necessary Sentry instance. Defaults to empty. Integration with Sentry is explained [here](./01-sentry-integration.md) ### Docker-Only 1. `WORKSPACE_DOCKER_MOUNT` - Defines the name of the Airbyte docker volume. diff --git a/docs/operator-guides/using-custom-connectors.md b/docs/06-operator-guides/11-using-custom-connectors.md similarity index 100% rename from docs/operator-guides/using-custom-connectors.md rename to docs/06-operator-guides/11-using-custom-connectors.md diff --git a/docs/operator-guides/scaling-airbyte.md b/docs/06-operator-guides/12-scaling-airbyte.md similarity index 93% rename from docs/operator-guides/scaling-airbyte.md rename to docs/06-operator-guides/12-scaling-airbyte.md index 61ec4ef1f415..9e30f6d51768 100644 --- a/docs/operator-guides/scaling-airbyte.md +++ b/docs/06-operator-guides/12-scaling-airbyte.md @@ -1,6 +1,6 @@ # Scaling Airbyte -As depicted in our [High-Level View](../understanding-airbyte/high-level-view.md), Airbyte is made up of several components under the hood: 1. Scheduler 2. Server 3. Temporal 4. Webapp 5.
Database +As depicted in our [High-Level View](../10-understanding-airbyte/07-high-level-view.md), Airbyte is made up of several components under the hood: 1. Scheduler 2. Server 3. Temporal 4. Webapp 5. Database These components perform control plane operations that are low-scale, low-resource work. In addition to the work being low cost, these components are efficient and optimized for these jobs, meaning that only uncommonly large workloads will require deployments at scale. In general, you would only encounter scaling issues when running over a thousand connections. @@ -8,7 +8,7 @@ As a reference point, the typical Airbyte user has 5 - 20 connectors and 10 - 10 ## What To Scale -[Workers](../understanding-airbyte/jobs.md) do all the heavy lifting within Airbyte. A worker is responsible for executing Airbyte operations \(e.g. Discover, Read, Sync etc\), and is created on demand whenever these operations are requested. Thus, every job has a corresponding worker executing its work. +[Workers](../10-understanding-airbyte/08-jobs.md) do all the heavy lifting within Airbyte. A worker is responsible for executing Airbyte operations \(e.g. Discover, Read, Sync etc\), and is created on demand whenever these operations are requested. Thus, every job has a corresponding worker executing its work. How a worker executes work depends on the Airbyte deployment. In the Docker deployment, an Airbyte worker spins up at least one Docker container. In the Kubernetes deployment, an Airbyte worker will create at least one Kubernetes pod. The created resource \(Docker container or Kubernetes pod\) does all the actual work. diff --git a/docs/operator-guides/securing-airbyte.md b/docs/06-operator-guides/13-securing-airbyte.md similarity index 100% rename from docs/operator-guides/securing-airbyte.md rename to docs/06-operator-guides/13-securing-airbyte.md diff --git a/docs/operator-guides/collecting-metrics.md b/docs/06-operator-guides/14-collecting-metrics.md similarity index 100% rename from docs/operator-guides/collecting-metrics.md rename to docs/06-operator-guides/14-collecting-metrics.md diff --git a/docs/operator-guides/configuring-sync-notifications.md b/docs/06-operator-guides/15-configuring-sync-notifications.md similarity index 93% rename from docs/operator-guides/configuring-sync-notifications.md rename to docs/06-operator-guides/15-configuring-sync-notifications.md index 6418aa2ffab5..45a2635d61de 100644 --- a/docs/operator-guides/configuring-sync-notifications.md +++ b/docs/06-operator-guides/15-configuring-sync-notifications.md @@ -36,7 +36,7 @@ Click `Copy.` **Add the webhook to Airbyte.** -Assuming you have a [running instance of Airbyte](../deploying-airbyte/README.md), we can navigate to the UI. Click on Settings and then click on `Notifications`. +Assuming you have a [running instance of Airbyte](../05-deploying-airbyte/README.md), we can navigate to the UI. Click on Settings and then click on `Notifications`. 
![](../.gitbook/assets/notifications_airbyte_settings.png) diff --git a/docs/06-operator-guides/README.md b/docs/06-operator-guides/README.md new file mode 100644 index 000000000000..380a8f749302 --- /dev/null +++ b/docs/06-operator-guides/README.md @@ -0,0 +1,24 @@ +# Manage Airbyte Open Source + +- [Upgrading Airbyte](01-upgrading-airbyte.md) +- [Resetting Your Data](02-reset.md) +- [Configuring the Airbyte Database](03-configuring-airbyte-db.md) +- [Browsing output logs](04-browsing-output-logs.md) +- [Using the Airflow Airbyte Operator](05-using-the-airflow-airbyte-operator.md) +- [Using the Prefect Airbyte Task](06-using-prefect-task.md) +- [Using the Dagster Integration](07-using-dagster-integration.md) +- [Windows - Browsing Local File Output](08-locating-files-local-destination.md) +- [Transformations and normalization](09-transformation-and-normalization/01-transformations-with-sql.md) +- [Configuring Airbyte](10-configuring-airbyte/README.md) +- [Using custom connectors](11-using-custom-connectors.md) +- [Scaling Airbyte](12-scaling-airbyte.md) +- [Securing Airbyte access](13-securing-airbyte.md) + + + + + + + + + diff --git a/docs/troubleshooting/on-deploying.md b/docs/07-troubleshooting/01-on-deploying.md similarity index 100% rename from docs/troubleshooting/on-deploying.md rename to docs/07-troubleshooting/01-on-deploying.md diff --git a/docs/troubleshooting/new-connection.md b/docs/07-troubleshooting/02-new-connection.md similarity index 100% rename from docs/troubleshooting/new-connection.md rename to docs/07-troubleshooting/02-new-connection.md diff --git a/docs/troubleshooting/running-sync.md b/docs/07-troubleshooting/03-running-sync.md similarity index 92% rename from docs/troubleshooting/running-sync.md rename to docs/07-troubleshooting/03-running-sync.md index 2e95463ebd73..cdfb60bdf248 100644 --- a/docs/troubleshooting/running-sync.md +++ b/docs/07-troubleshooting/03-running-sync.md @@ -11,7 +11,7 @@ If the above workaround does not fix your problem, please report it [here](https ## Your incremental connection is not working -Our current version of incremental is [append](../understanding-airbyte/connections/incremental-append.md). It works from a cursor field. So you need to check which cursor field you're using and if it's well populated in every record in your table. +Our current version of incremental is [append](../10-understanding-airbyte/05-connections/03-incremental-append.md). It works from a cursor field. So you need to check which cursor field you're using and if it's well populated in every record in your table. If this is true, then, there are still several things to check: diff --git a/docs/troubleshooting/README.md b/docs/07-troubleshooting/README.md similarity index 92% rename from docs/troubleshooting/README.md rename to docs/07-troubleshooting/README.md index 951140693ca1..e285b2976e62 100644 --- a/docs/troubleshooting/README.md +++ b/docs/07-troubleshooting/README.md @@ -1,10 +1,10 @@ -# Troubleshooting & FAQ +# Troubleshoot Airbyte The troubleshooting section is aimed at collecting common issues users have to provide quick solutions. 
There are some sections you can find: -* [On Deploying](on-deploying.md) -* [On Setting up a New Connection](new-connection.md) -* [On Running a Sync](running-sync.md) +* [On Deploying](01-on-deploying.md) +* [On Setting up a New Connection](02-new-connection.md) +* [On Running a Sync](03-running-sync.md) * [On Upgrading](on-upgrading.md) If you did not find a solution in the previous sections please head over to our online [forum](https://discuss.airbyte.io/). diff --git a/docs/07-troubleshooting/on-upgrading.md b/docs/07-troubleshooting/on-upgrading.md new file mode 100644 index 000000000000..b7b12bdaeb6c --- /dev/null +++ b/docs/07-troubleshooting/on-upgrading.md @@ -0,0 +1 @@ +# On Upgrading \ No newline at end of file diff --git a/docs/connector-development/tutorials/cdk-speedrun.md b/docs/08-connector-development/01-cdk-speedrun.md similarity index 98% rename from docs/connector-development/tutorials/cdk-speedrun.md rename to docs/08-connector-development/01-cdk-speedrun.md index 4784d1a877be..213cec0418ac 100644 --- a/docs/connector-development/tutorials/cdk-speedrun.md +++ b/docs/08-connector-development/01-cdk-speedrun.md @@ -2,7 +2,7 @@ ## CDK Speedrun \(HTTP API Source Creation [Any%](https://en.wikipedia.org/wiki/Speedrun#:~:text=Any%25%2C%20or%20fastest%20completion%2C,the%20game%20to%20its%20fullest.&text=Specific%20requirements%20for%20a%20100,different%20depending%20on%20the%20game.) Route\) -This is a blazing fast guide to building an HTTP source connector. Think of it as the TL;DR version of [this tutorial.](cdk-tutorial-python-http/0-getting-started.md) +This is a blazing fast guide to building an HTTP source connector. Think of it as the TL;DR version of [this tutorial.](02-cdk-tutorial-python-http/0-getting-started.md) If you are a visual learner and want to see a video version of this guide going over each part in detail, check it out below. 
diff --git a/docs/connector-development/tutorials/cdk-tutorial-python-http/0-getting-started.md b/docs/08-connector-development/02-cdk-tutorial-python-http/0-getting-started.md similarity index 100% rename from docs/connector-development/tutorials/cdk-tutorial-python-http/0-getting-started.md rename to docs/08-connector-development/02-cdk-tutorial-python-http/0-getting-started.md diff --git a/docs/connector-development/tutorials/cdk-tutorial-python-http/1-creating-the-source.md b/docs/08-connector-development/02-cdk-tutorial-python-http/1-creating-the-source.md similarity index 100% rename from docs/connector-development/tutorials/cdk-tutorial-python-http/1-creating-the-source.md rename to docs/08-connector-development/02-cdk-tutorial-python-http/1-creating-the-source.md diff --git a/docs/connector-development/tutorials/cdk-tutorial-python-http/2-install-dependencies.md b/docs/08-connector-development/02-cdk-tutorial-python-http/2-install-dependencies.md similarity index 100% rename from docs/connector-development/tutorials/cdk-tutorial-python-http/2-install-dependencies.md rename to docs/08-connector-development/02-cdk-tutorial-python-http/2-install-dependencies.md diff --git a/docs/connector-development/tutorials/cdk-tutorial-python-http/3-define-inputs.md b/docs/08-connector-development/02-cdk-tutorial-python-http/3-define-inputs.md similarity index 100% rename from docs/connector-development/tutorials/cdk-tutorial-python-http/3-define-inputs.md rename to docs/08-connector-development/02-cdk-tutorial-python-http/3-define-inputs.md diff --git a/docs/connector-development/tutorials/cdk-tutorial-python-http/4-connection-checking.md b/docs/08-connector-development/02-cdk-tutorial-python-http/4-connection-checking.md similarity index 100% rename from docs/connector-development/tutorials/cdk-tutorial-python-http/4-connection-checking.md rename to docs/08-connector-development/02-cdk-tutorial-python-http/4-connection-checking.md diff --git a/docs/connector-development/tutorials/cdk-tutorial-python-http/5-declare-schema.md b/docs/08-connector-development/02-cdk-tutorial-python-http/5-declare-schema.md similarity index 97% rename from docs/connector-development/tutorials/cdk-tutorial-python-http/5-declare-schema.md rename to docs/08-connector-development/02-cdk-tutorial-python-http/5-declare-schema.md index 85c10e6a42c0..f7c024f4500d 100644 --- a/docs/connector-development/tutorials/cdk-tutorial-python-http/5-declare-schema.md +++ b/docs/08-connector-development/02-cdk-tutorial-python-http/5-declare-schema.md @@ -74,5 +74,5 @@ you should see some output like: It's that simple! Now the connector knows how to declare your connector's stream's schema. We declare only one stream since our source is simple, but the principle is exactly the same if you had many streams. -You can also dynamically define schemas, but that's beyond the scope of this tutorial. See the [schema docs](../../cdk-python/full-refresh-stream.md#defining-the-streams-schema) for more information. +You can also dynamically define schemas, but that's beyond the scope of this tutorial. See the [schema docs](../07-cdk-python/03-full-refresh-stream.md#defining-the-streams-schema) for more information. 
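After declaring the stream schema as described in the tutorial above, it can be sanity-checked from the connector directory. This is a sketch of the standard CDK invocation and assumes the `secrets/config.json` file created in the earlier steps.

```bash
# Emit the catalog and confirm the declared stream and its JSON schema appear.
python main.py discover --config secrets/config.json
```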
diff --git a/docs/connector-development/tutorials/cdk-tutorial-python-http/6-read-data.md b/docs/08-connector-development/02-cdk-tutorial-python-http/6-read-data.md similarity index 95% rename from docs/connector-development/tutorials/cdk-tutorial-python-http/6-read-data.md rename to docs/08-connector-development/02-cdk-tutorial-python-http/6-read-data.md index a10184720cbd..568eb0e47d63 100644 --- a/docs/connector-development/tutorials/cdk-tutorial-python-http/6-read-data.md +++ b/docs/08-connector-development/02-cdk-tutorial-python-http/6-read-data.md @@ -2,7 +2,7 @@ Describing schemas is good and all, but at some point we have to start reading data! So let's get to work. But before, let's describe what we're about to do: -The `HttpStream` superclass, like described in the [concepts documentation](../../cdk-python/http-streams.md), is facilitating reading data from HTTP endpoints. It contains built-in functions or helpers for: +The `HttpStream` superclass, like described in the [concepts documentation](../07-cdk-python/05-http-streams.md), is facilitating reading data from HTTP endpoints. It contains built-in functions or helpers for: * authentication * pagination @@ -98,7 +98,7 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: We're now ready to query the API! -To do this, we'll need a [ConfiguredCatalog](../../../understanding-airbyte/beginners-guide-to-catalog.md). We've prepared one [here](https://github.com/airbytehq/airbyte/blob/master/airbyte-cdk/python/docs/tutorials/http_api_source_assets/configured_catalog.json) -- download this and place it in `sample_files/configured_catalog.json`. Then run: +To do this, we'll need a [ConfiguredCatalog](../../10-understanding-airbyte/01-beginners-guide-to-catalog.md). We've prepared one [here](https://github.com/airbytehq/airbyte/blob/master/airbyte-cdk/python/docs/tutorials/http_api_source_assets/configured_catalog.json) -- download this and place it in `sample_files/configured_catalog.json`. Then run: ```text python main.py read --config secrets/config.json --catalog sample_files/configured_catalog.json @@ -125,7 +125,7 @@ To add incremental sync, we'll do a few things: 6. Update the `path` method to specify the date to pull exchange rates for. 7. Update the configured catalog to use `incremental` sync when we're testing the stream. -We'll describe what each of these methods do below. Before we begin, it may help to familiarize yourself with how incremental sync works in Airbyte by reading the [docs on incremental](../../../understanding-airbyte/connections/incremental-append.md). +We'll describe what each of these methods do below. Before we begin, it may help to familiarize yourself with how incremental sync works in Airbyte by reading the [docs on incremental](../../10-understanding-airbyte/05-connections/03-incremental-append.md). To keep things concise, we'll only show functions as we edit them one by one. @@ -211,7 +211,7 @@ We'll implement the `stream_slices` method to return a list of the dates for whi return self._chunk_date_range(start_date) ``` -Each slice will cause an HTTP request to be made to the API. We can then use the information present in the `stream_slice` parameter \(a single element from the list we constructed in `stream_slices` above\) to set other configurations for the outgoing request like `path` or `request_params`. For more info about stream slicing, see [the slicing docs](../../cdk-python/stream-slices.md). +Each slice will cause an HTTP request to be made to the API. 
We can then use the information present in the `stream_slice` parameter \(a single element from the list we constructed in `stream_slices` above\) to set other configurations for the outgoing request like `path` or `request_params`. For more info about stream slicing, see [the slicing docs](../07-cdk-python/07-stream-slices.md). In order to pull data for a specific date, the Exchange Rates API requires that we pass the date as the path component of the URL. Let's override the `path` method to achieve this: diff --git a/docs/connector-development/tutorials/cdk-tutorial-python-http/7-use-connector-in-airbyte.md b/docs/08-connector-development/02-cdk-tutorial-python-http/7-use-connector-in-airbyte.md similarity index 69% rename from docs/connector-development/tutorials/cdk-tutorial-python-http/7-use-connector-in-airbyte.md rename to docs/08-connector-development/02-cdk-tutorial-python-http/7-use-connector-in-airbyte.md index 19f204275b50..9ca392d1ee38 100644 --- a/docs/connector-development/tutorials/cdk-tutorial-python-http/7-use-connector-in-airbyte.md +++ b/docs/08-connector-development/02-cdk-tutorial-python-http/7-use-connector-in-airbyte.md @@ -1,6 +1,6 @@ # Step 7: Use the Connector in Airbyte -To use your connector in your own installation of Airbyte, build the docker image for your container by running `docker build . -t airbyte/source-python-http-example:dev`. Then, follow the instructions from the [building a Python source tutorial](../building-a-python-source.md#step-11-add-the-connector-to-the-api-ui) for using the connector in the Airbyte UI, replacing the name as appropriate. +To use your connector in your own installation of Airbyte, build the docker image for your container by running `docker build . -t airbyte/source-python-http-example:dev`. Then, follow the instructions from the [building a Python source tutorial](../03-building-a-python-source.md#step-11-add-the-connector-to-the-api-ui) for using the connector in the Airbyte UI, replacing the name as appropriate. Note: your built docker image must be accessible to the `docker` daemon running on the Airbyte node. If you're doing this tutorial locally, these instructions are sufficient. Otherwise you may need to push your Docker image to Dockerhub. diff --git a/docs/connector-development/tutorials/cdk-tutorial-python-http/8-test-your-connector.md b/docs/08-connector-development/02-cdk-tutorial-python-http/8-test-your-connector.md similarity index 91% rename from docs/connector-development/tutorials/cdk-tutorial-python-http/8-test-your-connector.md rename to docs/08-connector-development/02-cdk-tutorial-python-http/8-test-your-connector.md index f5fbcc07b4f0..3dc70b55a507 100644 --- a/docs/connector-development/tutorials/cdk-tutorial-python-http/8-test-your-connector.md +++ b/docs/08-connector-development/02-cdk-tutorial-python-http/8-test-your-connector.md @@ -16,7 +16,7 @@ More information on integration testing can be found on [the Testing Connectors ## Standard Tests -Standard tests are a fixed set of tests Airbyte provides that every Airbyte source connector must pass. While they're only required if you intend to submit your connector to Airbyte, you might find them helpful in any case. See [Testing your connectors](../../testing-connectors/) +Standard tests are a fixed set of tests Airbyte provides that every Airbyte source connector must pass. While they're only required if you intend to submit your connector to Airbyte, you might find them helpful in any case. 
See [Testing your connectors](../10-testing-connectors.md) -If you want to submit this connector to become a default connector within Airbyte, follow steps 8 onwards from the [Python source checklist](../building-a-python-source.md#step-8-set-up-standard-tests) +If you want to submit this connector to become a default connector within Airbyte, follow steps 8 onwards from the [Python source checklist](../03-building-a-python-source.md#step-8-set-up-standard-tests) diff --git a/docs/connector-development/tutorials/cdk-tutorial-python-http/README.md b/docs/08-connector-development/02-cdk-tutorial-python-http/README.md similarity index 100% rename from docs/connector-development/tutorials/cdk-tutorial-python-http/README.md rename to docs/08-connector-development/02-cdk-tutorial-python-http/README.md diff --git a/docs/connector-development/tutorials/building-a-python-source.md b/docs/08-connector-development/03-building-a-python-source.md similarity index 95% rename from docs/connector-development/tutorials/building-a-python-source.md rename to docs/08-connector-development/03-building-a-python-source.md index 6bfc80fb91d1..f6034a6a010e 100644 --- a/docs/connector-development/tutorials/building-a-python-source.md +++ b/docs/08-connector-development/03-building-a-python-source.md @@ -6,7 +6,7 @@ This article provides a checklist for how to create a python source. Each step i ## Requirements -Docker, Python, and Java with the versions listed in the [tech stack section](../../understanding-airbyte/tech-stack.md). +Docker, Python, and Java with the versions listed in the [tech stack section](../10-understanding-airbyte/09-tech-stack.md). :::info @@ -159,7 +159,7 @@ The nice thing about this approach is that you are running your source exactly a Each source contains a specification that describes what inputs it needs in order for it to pull data. This file can be found in `airbyte-integrations/connectors/source-/spec.yaml`. This is a good place to start when developing your source. Using JsonSchema define what the inputs are \(e.g. username and password\). Here's [an example](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/source-stripe/source_stripe/spec.yaml) of what the `spec.yaml` looks like for the stripe source. -For more details on what the spec is, you can read about the Airbyte Protocol [here](../../understanding-airbyte/airbyte-protocol.md). +For more details on what the spec is, you can read about the Airbyte Protocol [here](../10-understanding-airbyte/02-airbyte-protocol.md). The generated code that Airbyte provides, handles implementing the `spec` method for you. It assumes that there will be a file called `spec.yaml` in the same directory as `source.py`. If you have declared the necessary JsonSchema in `spec.yaml` you should be done with this step. @@ -173,15 +173,15 @@ While developing, we recommend storing this object in `secrets/config.json`. The As described in the template code, this method takes in the same config object as `check`. It then returns a json object called a `catalog` that describes what data is available and metadata on what options are available for how to replicate it. -For a brief overview on the catalog check out [Beginner's Guide to the Airbyte Catalog](../../understanding-airbyte/beginners-guide-to-catalog.md). +For a brief overview on the catalog check out [Beginner's Guide to the Airbyte Catalog](../10-understanding-airbyte/01-beginners-guide-to-catalog.md). 
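For a concrete picture of what `discover` hands back, a catalog for a single stream can be assembled with the protocol models roughly as below; the stream name and columns are invented for illustration and are not part of the checklist itself.

```python
from airbyte_cdk.models import AirbyteCatalog, AirbyteStream, SyncMode


def build_catalog() -> AirbyteCatalog:
    # One hypothetical "customers" stream with two columns described via JsonSchema.
    customers = AirbyteStream(
        name="customers",
        json_schema={
            "type": "object",
            "properties": {"id": {"type": "integer"}, "email": {"type": "string"}},
        },
        supported_sync_modes=[SyncMode.full_refresh],
    )
    return AirbyteCatalog(streams=[customers])
```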
### Step 7: Implement `read` -As described in the template code, this method takes in the same config object as the previous methods. It also takes in a "configured catalog". This object wraps the catalog emitted by the `discover` step and includes configuration on how the data should be replicated. For a brief overview on the configured catalog check out [Beginner's Guide to the Airbyte Catalog](../../understanding-airbyte/beginners-guide-to-catalog.md). It then returns a generator which returns each record in the stream. +As described in the template code, this method takes in the same config object as the previous methods. It also takes in a "configured catalog". This object wraps the catalog emitted by the `discover` step and includes configuration on how the data should be replicated. For a brief overview on the configured catalog check out [Beginner's Guide to the Airbyte Catalog](../10-understanding-airbyte/01-beginners-guide-to-catalog.md). It then returns a generator which returns each record in the stream. ### Step 8: Set up Standard Tests -The Standard Tests are a set of tests that run against all sources. These tests are run in the Airbyte CI to prevent regressions. They also can help you sanity check that your source works as expected. The following [article](../testing-connectors/source-acceptance-tests-reference.md) explains Standard Tests and how to run them. +The Standard Tests are a set of tests that run against all sources. These tests are run in the Airbyte CI to prevent regressions. They also can help you sanity check that your source works as expected. The following [article](11-source-acceptance-tests-reference.md) explains Standard Tests and how to run them. You can run the tests using `./gradlew :airbyte-integrations:connectors:source-:integrationTest`. Make sure to run this command from the Airbyte repository root. @@ -215,7 +215,7 @@ The template fills in most of the information for the readme for you. Unless the Open the following file: `airbyte-config/init/src/main/resources/seed/source_definitions.yaml`. You'll find a list of all the connectors that Airbyte displays in the UI. Pattern match to add your own connector. Make sure to generate a new _unique_ UUIDv4 for the `sourceDefinitionId` field. You can get one [here](https://www.uuidgenerator.net/). Note that modifications to source_definitions.yaml will only be picked-up the first time you start Airbyte, or when you upgrade Airbyte, or if you entirely wipe our your instance of Airbyte and start from scratch. -Note that for simple and quick testing use cases, you can also do this step [using the UI](../../integrations/custom-connectors.md#adding-your-connectors-in-the-ui). +Note that for simple and quick testing use cases, you can also do this step [using the UI](../02-integrations/03-custom-connectors.md#adding-your-connectors-in-the-ui). ### Step 12: Add docs diff --git a/docs/connector-development/tutorials/building-a-python-destination.md b/docs/08-connector-development/04-building-a-python-destination.md similarity index 98% rename from docs/connector-development/tutorials/building-a-python-destination.md rename to docs/08-connector-development/04-building-a-python-destination.md index b0086957ce48..6c6766f356ed 100644 --- a/docs/connector-development/tutorials/building-a-python-destination.md +++ b/docs/08-connector-development/04-building-a-python-destination.md @@ -6,7 +6,7 @@ This article provides a checklist for how to create a Python destination. 
Each s ## Requirements -Docker and Python with the versions listed in the [tech stack section](../../understanding-airbyte/tech-stack.md). You can use any Python version between 3.7 and 3.9, but this tutorial was tested with 3.7. +Docker and Python with the versions listed in the [tech stack section](../10-understanding-airbyte/09-tech-stack.md). You can use any Python version between 3.7 and 3.9, but this tutorial was tested with 3.7. ## Checklist @@ -77,7 +77,7 @@ Pretty much all it takes to create a destination is to implement the `Destinatio 2. `check`: tests if the user-provided configuration can be used to connect to the underlying data destination, and with the correct write permissions 3. `write`: writes data to the underlying destination by reading a configuration, a stream of records from stdin, and a configured catalog describing the schema of the data and how it should be written to the destination -The destination interface is described in detail in the [Airbyte Specification](../../understanding-airbyte/airbyte-protocol.md) reference. +The destination interface is described in detail in the [Airbyte Specification](../10-understanding-airbyte/02-airbyte-protocol.md) reference. The generated files fill in a lot of information for you and have docstrings describing what you need to do to implement each method. The next few steps are just implementing that interface. diff --git a/docs/connector-development/tutorials/building-a-java-destination.md b/docs/08-connector-development/05-building-a-java-destination.md similarity index 97% rename from docs/connector-development/tutorials/building-a-java-destination.md rename to docs/08-connector-development/05-building-a-java-destination.md index 7644c4346e44..18384989afc8 100644 --- a/docs/connector-development/tutorials/building-a-java-destination.md +++ b/docs/08-connector-development/05-building-a-java-destination.md @@ -6,7 +6,7 @@ This article provides a checklist for how to create a Java destination. Each ste ## Requirements -Docker and Java with the versions listed in the [tech stack section](../../understanding-airbyte/tech-stack.md). +Docker and Java with the versions listed in the [tech stack section](../10-understanding-airbyte/09-tech-stack.md). ## Checklist @@ -114,7 +114,7 @@ The nice thing about this approach is that you are running your destination exac #### Handling Exceptions -In order to best propagate user-friendly error messages and log error information to the platform, the [Airbyte Protocol](../../understanding-airbyte/airbyte-protocol.md#The Airbyte Protocol) implements AirbyteTraceMessage. +In order to best propagate user-friendly error messages and log error information to the platform, the [Airbyte Protocol](../10-understanding-airbyte/02-airbyte-protocol.md#The Airbyte Protocol) implements AirbyteTraceMessage. We recommend using AirbyteTraceMessages for known errors, as in these cases you can likely offer the user a helpful message as to what went wrong and suggest how they can resolve it. @@ -150,7 +150,7 @@ Each destination contains a specification written in JsonSchema that describes i Your generated template should have the spec file in `airbyte-integrations/connectors/destination-/src/main/resources/spec.json`. The generated connector will take care of reading this file and converting it to the correct output. Edit it and you should be done with this step. -For more details on what the spec is, you can read about the Airbyte Protocol [here](../../understanding-airbyte/airbyte-protocol.md). 
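For the Python destination interface summarized earlier in this changeset (spec, check, write), a minimal sketch using the CDK's `Destination` base class might look roughly like the following. The class name and the config key mentioned in the comments are invented, and the `write` body only marks where buffering and flushing would happen; it is not a working destination.

```python
from typing import Any, Iterable, Mapping

from airbyte_cdk import AirbyteLogger
from airbyte_cdk.destinations import Destination
from airbyte_cdk.models import (
    AirbyteConnectionStatus,
    AirbyteMessage,
    ConfiguredAirbyteCatalog,
    Status,
    Type,
)


class DestinationExample(Destination):
    def check(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> AirbyteConnectionStatus:
        try:
            # Attempt a trivial operation against the data store using the user's config,
            # e.g. authenticate with config["api_key"] (key name is illustrative).
            return AirbyteConnectionStatus(status=Status.SUCCEEDED)
        except Exception as e:
            return AirbyteConnectionStatus(status=Status.FAILED, message=repr(e))

    def write(
        self,
        config: Mapping[str, Any],
        configured_catalog: ConfiguredAirbyteCatalog,
        input_messages: Iterable[AirbyteMessage],
    ) -> Iterable[AirbyteMessage]:
        for message in input_messages:
            if message.type == Type.RECORD:
                pass  # buffer and eventually flush message.record.data to the destination
            elif message.type == Type.STATE:
                # Echo the state message back only after the records before it are durably written.
                yield message
```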
+For more details on what the spec is, you can read about the Airbyte Protocol [here](../10-understanding-airbyte/02-airbyte-protocol.md). See the `spec` operation in action: @@ -200,7 +200,7 @@ The Postgres destination leverages the `AbstractJdbcDestination` superclass whic ::: -For a brief overview on the Airbyte catalog check out [the Beginner's Guide to the Airbyte Catalog](../../understanding-airbyte/beginners-guide-to-catalog.md). +For a brief overview on the Airbyte catalog check out [the Beginner's Guide to the Airbyte Catalog](../10-understanding-airbyte/01-beginners-guide-to-catalog.md). ### Step 6: Set up Acceptance Tests diff --git a/docs/connector-development/tutorials/profile-java-connector-memory.md b/docs/08-connector-development/06-profile-java-connector-memory.md similarity index 100% rename from docs/connector-development/tutorials/profile-java-connector-memory.md rename to docs/08-connector-development/06-profile-java-connector-memory.md diff --git a/docs/connector-development/cdk-python/basic-concepts.md b/docs/08-connector-development/07-cdk-python/01-basic-concepts.md similarity index 99% rename from docs/connector-development/cdk-python/basic-concepts.md rename to docs/08-connector-development/07-cdk-python/01-basic-concepts.md index e446ccb42c2d..130e1c00325a 100644 --- a/docs/connector-development/cdk-python/basic-concepts.md +++ b/docs/08-connector-development/07-cdk-python/01-basic-concepts.md @@ -60,5 +60,5 @@ A summary of what we've covered so far on how to use the Airbyte CDK: ## HTTP Streams -We've covered how the `AbstractSource` works with the `Stream` interface in order to fulfill the Airbyte Specification. Although developers are welcome to implement their own object, the CDK saves developers the hassle of doing so in the case of HTTP APIs with the [`HTTPStream`](http-streams.md) object. +We've covered how the `AbstractSource` works with the `Stream` interface in order to fulfill the Airbyte Specification. Although developers are welcome to implement their own object, the CDK saves developers the hassle of doing so in the case of HTTP APIs with the [`HTTPStream`](05-http-streams.md) object. diff --git a/docs/connector-development/cdk-python/schemas.md b/docs/08-connector-development/07-cdk-python/02-schemas.md similarity index 100% rename from docs/connector-development/cdk-python/schemas.md rename to docs/08-connector-development/07-cdk-python/02-schemas.md diff --git a/docs/connector-development/cdk-python/full-refresh-stream.md b/docs/08-connector-development/07-cdk-python/03-full-refresh-stream.md similarity index 86% rename from docs/connector-development/cdk-python/full-refresh-stream.md rename to docs/08-connector-development/07-cdk-python/03-full-refresh-stream.md index 2caf62fff5da..d34ab26789ef 100644 --- a/docs/connector-development/cdk-python/full-refresh-stream.md +++ b/docs/08-connector-development/07-cdk-python/03-full-refresh-stream.md @@ -1,6 +1,6 @@ # Full Refresh Streams -As mentioned in the [Basic Concepts Overview](basic-concepts.md), a `Stream` is the atomic unit for reading data from a Source. A stream can read data from anywhere: a relational database, an API, or even scrape a web page! \(although that might be stretching the limits of what a connector should do\). +As mentioned in the [Basic Concepts Overview](01-basic-concepts.md), a `Stream` is the atomic unit for reading data from a Source. A stream can read data from anywhere: a relational database, an API, or even scrape a web page! 
\(although that might be stretching the limits of what a connector should do\). To implement a stream, there are two minimum requirements: 1. Define the stream's schema 2. Implement the logic for reading records from the underlying data source @@ -39,5 +39,5 @@ If custom functionality is required for reading a stream, you may need to overri ## Incremental Streams -We highly recommend implementing Incremental when feasible. See the [incremental streams page](incremental-stream.md) for more information. +We highly recommend implementing Incremental when feasible. See the [incremental streams page](04-incremental-stream.md) for more information. diff --git a/docs/connector-development/cdk-python/incremental-stream.md b/docs/08-connector-development/07-cdk-python/04-incremental-stream.md similarity index 100% rename from docs/connector-development/cdk-python/incremental-stream.md rename to docs/08-connector-development/07-cdk-python/04-incremental-stream.md diff --git a/docs/connector-development/cdk-python/http-streams.md b/docs/08-connector-development/07-cdk-python/05-http-streams.md similarity index 90% rename from docs/connector-development/cdk-python/http-streams.md rename to docs/08-connector-development/07-cdk-python/05-http-streams.md index e00c65256dc5..39eb12cd4ed5 100644 --- a/docs/connector-development/cdk-python/http-streams.md +++ b/docs/08-connector-development/07-cdk-python/05-http-streams.md @@ -7,7 +7,7 @@ The CDK offers base classes that greatly simplify writing HTTP API-based connect * Handling rate limiting with static or dynamic backoff timing * Caching -All these features have sane off-the-shelf defaults but are completely customizable depending on your use case. They can also be combined with other stream features described in the [full refresh streams](full-refresh-stream.md) and [incremental streams](incremental-stream.md) sections. +All these features have sane off-the-shelf defaults but are completely customizable depending on your use case. They can also be combined with other stream features described in the [full refresh streams](03-full-refresh-stream.md) and [incremental streams](04-incremental-stream.md) sections. ## Overview of HTTP Streams @@ -21,7 +21,7 @@ The `parse_response` function instructs the stream how to parse the API response Lastly, the `HTTPStream` must describe the schema of the records it outputs using JsonSchema. The simplest way to do this is by placing a `.json` file per stream in the `schemas` directory in the generated python module. The name of the `.json` file must match the lower snake case name of the corresponding Stream. Here are [examples](https://github.com/airbytehq/airbyte/tree/master/airbyte-integrations/connectors/source-stripe/source_stripe/schemas) from the Stripe API. -You can also dynamically set your schema. See the [schema docs](full-refresh-stream.md#defining-the-streams-schema) for more information. +You can also dynamically set your schema. See the [schema docs](03-full-refresh-stream.md#defining-the-streams-schema) for more information. These four elements - the `url_base` property, the `path` function, the `parse_response` function and the schema file - are the bare minimum required to implement the `HTTPStream`, and can be seen in the same [Stripe example](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/source-stripe/source_stripe/source.py#L38). 
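Putting those four elements together, a minimal `HTTPStream` subclass looks roughly like the sketch below. The endpoint, field names, and response shape are all assumptions rather than a real API; the matching schema file would live at `schemas/customers.json`, following the lower-snake-case naming rule described above.

```python
from typing import Any, Iterable, Mapping, Optional

import requests
from airbyte_cdk.sources.streams.http import HttpStream


class Customers(HttpStream):
    url_base = "https://api.example.com/v1/"  # illustrative base URL
    primary_key = "id"

    def path(self, **kwargs) -> str:
        return "customers"  # requests go to url_base + path

    def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]:
        return None  # treat the API as single-page for this sketch

    def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping[str, Any]]:
        # Assume the endpoint returns {"customers": [...]} and emit one record per entry.
        yield from response.json().get("customers", [])
```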
@@ -47,7 +47,7 @@ Note that Airbyte will always attempt to make as many requests as possible and o ### Stream Slicing -When implementing [stream slicing](incremental-stream.md#streamstream_slices) in an `HTTPStream` each Slice is equivalent to a HTTP request; the stream will make one request per element returned by the `stream_slices` function. The current slice being read is passed into every other method in `HttpStream` e.g: `request_params`, `request_headers`, `path`, etc.. to be injected into a request. This allows you to dynamically determine the output of the `request_params`, `path`, and other functions to read the input slice and return the appropriate value. +When implementing [stream slicing](04-incremental-stream.md#streamstream_slices) in an `HTTPStream` each Slice is equivalent to a HTTP request; the stream will make one request per element returned by the `stream_slices` function. The current slice being read is passed into every other method in `HttpStream` e.g: `request_params`, `request_headers`, `path`, etc.. to be injected into a request. This allows you to dynamically determine the output of the `request_params`, `path`, and other functions to read the input slice and return the appropriate value. ## Nested Streams & Caching It's possible to cache data from a stream onto a temporary file on disk. diff --git a/docs/connector-development/cdk-python/python-concepts.md b/docs/08-connector-development/07-cdk-python/06-python-concepts.md similarity index 100% rename from docs/connector-development/cdk-python/python-concepts.md rename to docs/08-connector-development/07-cdk-python/06-python-concepts.md diff --git a/docs/connector-development/cdk-python/stream-slices.md b/docs/08-connector-development/07-cdk-python/07-stream-slices.md similarity index 100% rename from docs/connector-development/cdk-python/stream-slices.md rename to docs/08-connector-development/07-cdk-python/07-stream-slices.md diff --git a/docs/connector-development/cdk-python/README.md b/docs/08-connector-development/07-cdk-python/README.md similarity index 92% rename from docs/connector-development/cdk-python/README.md rename to docs/08-connector-development/07-cdk-python/README.md index b7225e4697b3..bb39ad07f800 100644 --- a/docs/connector-development/cdk-python/README.md +++ b/docs/08-connector-development/07-cdk-python/README.md @@ -29,23 +29,23 @@ Additionally, you can follow [this tutorial](https://docs.airbyte.io/connector-d #### Basic Concepts -If you want to learn more about the classes required to implement an Airbyte Source, head to our [basic concepts doc](basic-concepts.md). +If you want to learn more about the classes required to implement an Airbyte Source, head to our [basic concepts doc](01-basic-concepts.md). #### Full Refresh Streams -If you have questions or are running into issues creating your first full refresh stream, head over to our [full refresh stream doc](full-refresh-stream.md). If you have questions about implementing a `path` or `parse_response` function, this doc is for you. +If you have questions or are running into issues creating your first full refresh stream, head over to our [full refresh stream doc](03-full-refresh-stream.md). If you have questions about implementing a `path` or `parse_response` function, this doc is for you. #### Incremental Streams -Having trouble figuring out how to write a `stream_slices` function or aren't sure what a `cursor_field` is? Head to our [incremental stream doc](incremental-stream.md). 
+Having trouble figuring out how to write a `stream_slices` function or aren't sure what a `cursor_field` is? Head to our [incremental stream doc](04-incremental-stream.md). #### Practical Tips Airbyte recommends using the CDK template generator to develop with the CDK. The template generates created all the required scaffolding, with convenient TODOs, allowing developers to truly focus on implementing the API. -For tips on useful Python knowledge, see the [Python Concepts](python-concepts.md) page. +For tips on useful Python knowledge, see the [Python Concepts](06-python-concepts.md) page. -You can find a complete tutorial for implementing an HTTP source connector in [this tutorial](../tutorials/cdk-tutorial-python-http/) +You can find a complete tutorial for implementing an HTTP source connector in [this tutorial](../02-cdk-tutorial-python-http/README.md) ### Example Connectors diff --git a/docs/connector-development/cdk-faros-js.md b/docs/08-connector-development/08-cdk-faros-js.md similarity index 85% rename from docs/connector-development/cdk-faros-js.md rename to docs/08-connector-development/08-cdk-faros-js.md index 4b977778923c..6bb7c529eab4 100644 --- a/docs/connector-development/cdk-faros-js.md +++ b/docs/08-connector-development/08-cdk-faros-js.md @@ -1,6 +1,6 @@ # Connector Development Kit (Javascript) -The [Faros AI TypeScript/JavaScript CDK](https://github.com/faros-ai/airbyte-connectors/tree/main/faros-airbyte-cdk) allows you to build Airbyte connectors quickly similarly to how our [Python CDK](cdk-python/) does. This CDK currently offers support for creating Airbyte source connectors for: +The [Faros AI TypeScript/JavaScript CDK](https://github.com/faros-ai/airbyte-connectors/tree/main/faros-airbyte-cdk) allows you to build Airbyte connectors quickly similarly to how our [Python CDK](07-cdk-python/README.md) does. This CDK currently offers support for creating Airbyte source connectors for: * HTTP APIs diff --git a/docs/connector-development/airbyte101.md b/docs/08-connector-development/09-airbyte101.md similarity index 71% rename from docs/connector-development/airbyte101.md rename to docs/08-connector-development/09-airbyte101.md index 258d85262a83..a6db4c697cd7 100644 --- a/docs/connector-development/airbyte101.md +++ b/docs/08-connector-development/09-airbyte101.md @@ -2,5 +2,5 @@ ## The Airbyte Catalog -The Airbyte catalog defines the relationship between your incoming data's schema and the schema of your output stream. This is an incredibly important concept to understand as a connector dev, so check out the AirbyteCatalog [here](../understanding-airbyte/beginners-guide-to-catalog.md). +The Airbyte catalog defines the relationship between your incoming data's schema and the schema of your output stream. This is an incredibly important concept to understand as a connector dev, so check out the AirbyteCatalog [here](../10-understanding-airbyte/01-beginners-guide-to-catalog.md). 
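To make the catalog concept above a bit more concrete, the distinction between a stream the source advertises and a *configured* stream can be sketched with the Python protocol models; the stream name and schema here are invented for illustration.

```python
from airbyte_cdk.models import (
    AirbyteStream,
    ConfiguredAirbyteCatalog,
    ConfiguredAirbyteStream,
    DestinationSyncMode,
    SyncMode,
)

# What the source advertises: a "ticker" stream and its schema.
ticker = AirbyteStream(
    name="ticker",
    json_schema={"type": "object", "properties": {"price": {"type": "number"}}},
    supported_sync_modes=[SyncMode.full_refresh],
)

# What the user configures for a sync: the same stream wrapped with replication choices.
configured_catalog = ConfiguredAirbyteCatalog(
    streams=[
        ConfiguredAirbyteStream(
            stream=ticker,
            sync_mode=SyncMode.full_refresh,
            destination_sync_mode=DestinationSyncMode.overwrite,
        )
    ]
)
```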
diff --git a/docs/connector-development/testing-connectors/README.md b/docs/08-connector-development/10-testing-connectors.md similarity index 100% rename from docs/connector-development/testing-connectors/README.md rename to docs/08-connector-development/10-testing-connectors.md diff --git a/docs/connector-development/testing-connectors/source-acceptance-tests-reference.md b/docs/08-connector-development/11-source-acceptance-tests-reference.md similarity index 98% rename from docs/connector-development/testing-connectors/source-acceptance-tests-reference.md rename to docs/08-connector-development/11-source-acceptance-tests-reference.md index d1f2f11c130f..95f5d0d216de 100644 --- a/docs/connector-development/testing-connectors/source-acceptance-tests-reference.md +++ b/docs/08-connector-development/11-source-acceptance-tests-reference.md @@ -1,6 +1,6 @@ # Source Acceptance Tests Reference -To ensure a minimum quality bar, Airbyte runs all connectors against the same set of integration tests \(sources & destinations have two different test suites\). Those tests ensure that each connector adheres to the [Airbyte Specification](../../understanding-airbyte/airbyte-protocol.md) and responds correctly to Airbyte commands when provided valid \(or invalid\) inputs. +To ensure a minimum quality bar, Airbyte runs all connectors against the same set of integration tests \(sources & destinations have two different test suites\). Those tests ensure that each connector adheres to the [Airbyte Specification](../10-understanding-airbyte/02-airbyte-protocol.md) and responds correctly to Airbyte commands when provided valid \(or invalid\) inputs. _Note: If you are looking for reference documentation for the deprecated first version of test suites, see_ [_Standard Tests \(Legacy\)_](https://github.com/airbytehq/airbyte/tree/e378d40236b6a34e1c1cb481c8952735ec687d88/docs/contributing-to-airbyte/building-new-connector/legacy-standard-source-tests.md)_._ @@ -8,7 +8,7 @@ _Note: If you are looking for reference documentation for the deprecated first v The Standard Test Suite runs its tests against the connector's Docker image. It takes as input the configuration file `acceptance-tests-config.yml`. -![Standard test sequence diagram](../../.gitbook/assets/standard_tests_sequence_diagram.png) +![Standard test sequence diagram](../.gitbook/assets/standard_tests_sequence_diagram.png) The Standard Test Suite use pytest as a test runner and was built as pytest plugin `source-acceptance-test`. This plugin adds a new configuration option `—acceptance-test-config` - it should points to the folder with `acceptance-tests-config.yml`. diff --git a/docs/connector-development/connector-specification-reference.md b/docs/08-connector-development/12-connector-specification-reference.md similarity index 91% rename from docs/connector-development/connector-specification-reference.md rename to docs/08-connector-development/12-connector-specification-reference.md index ce27b9189b97..496156d5039f 100644 --- a/docs/connector-development/connector-specification-reference.md +++ b/docs/08-connector-development/12-connector-specification-reference.md @@ -1,6 +1,6 @@ # Connector Specification Reference -The [connector specification](../understanding-airbyte/airbyte-protocol.md#spec) describes what inputs can be used to configure a connector. Like the rest of the Airbyte Protocol, it uses [JsonSchema](https://json-schema.org), but with some slight modifications. 
+The [connector specification](../10-understanding-airbyte/02-airbyte-protocol.md#spec) describes what inputs can be used to configure a connector. Like the rest of the Airbyte Protocol, it uses [JsonSchema](https://json-schema.org), but with some slight modifications. ## Demoing your specification @@ -117,7 +117,7 @@ In order for the Airbyte UI to correctly render a specification, however, a few 2. Each item in the `oneOf` array must be a property with `type: object`. 3. One `string` field with the same property name must be consistently present throughout each object inside the `oneOf` array. It is required to add a [`const`](https://json-schema.org/understanding-json-schema/reference/generic.html#constant-values) value unique to that `oneOf` option. -Let's look at the [source-file](../integrations/sources/file.md) implementation as an example. In this example, we have `provider` as a dropdown list option, which allows the user to select what provider their file is being hosted on. We note that the `oneOf` keyword lives under the `provider` object as follows: +Let's look at the [source-file](../02-integrations/01-sources/file.md) implementation as an example. In this example, we have `provider` as a dropdown list option, which allows the user to select what provider their file is being hosted on. We note that the `oneOf` keyword lives under the `provider` object as follows: In each item in the `oneOf` array, the `option_title` string field exists with the aforementioned `const` value unique to that item. This helps the UI and the connector distinguish between the option that was chosen by the user. This can be displayed with adapting the file source spec to this example: diff --git a/docs/connector-development/best-practices.md b/docs/08-connector-development/13-best-practices.md similarity index 94% rename from docs/connector-development/best-practices.md rename to docs/08-connector-development/13-best-practices.md index 21459e00132e..09f485c8ef01 100644 --- a/docs/connector-development/best-practices.md +++ b/docs/08-connector-development/13-best-practices.md @@ -31,7 +31,7 @@ When reviewing connectors, we'll use the following "checklist" to verify whether * **API connectors** should validate records that every stream outputs data * If this causes rate limiting problems, there should be a periodic CI build which tests this on a less frequent cadence to avoid rate limiting -**Thoroughly test edge cases.** While Airbyte provides a [Standard Test Suite](testing-connectors/source-acceptance-tests-reference.md) that all connectors must pass, it's not possible for the standard test suite to cover all edge cases. When in doubt about whether the standard tests provide sufficient evidence of functionality, write a custom test case for your connector. +**Thoroughly test edge cases.** While Airbyte provides a [Standard Test Suite](11-source-acceptance-tests-reference.md) that all connectors must pass, it's not possible for the standard test suite to cover all edge cases. When in doubt about whether the standard tests provide sufficient evidence of functionality, write a custom test case for your connector. 
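One lightweight form such a custom test case can take is a plain pytest unit test around a stream's parsing logic. Everything below is an invented stand-in rather than a real connector, just to show the shape of the test:

```python
from typing import Any, Iterable, Mapping, Optional
from unittest.mock import MagicMock

import requests
from airbyte_cdk.sources.streams.http import HttpStream


class RatesStub(HttpStream):
    """Tiny stand-in stream so the test stays self-contained."""

    url_base = "https://example.com/"
    primary_key = None

    def path(self, **kwargs) -> str:
        return "rates"

    def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]:
        return None

    def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping[str, Any]]:
        yield response.json()


def test_parse_response_returns_the_json_body():
    response = MagicMock(spec=requests.Response)
    response.json.return_value = {"base": "USD", "rates": {"EUR": 0.9}}
    records = list(RatesStub().parse_response(response=response))
    assert records == [{"base": "USD", "rates": {"EUR": 0.9}}]
```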
### Check Connection diff --git a/docs/connector-development/ux-handbook.md b/docs/08-connector-development/14-ux-handbook.md similarity index 100% rename from docs/connector-development/ux-handbook.md rename to docs/08-connector-development/14-ux-handbook.md diff --git a/docs/connector-development/cdk-dotnet/README.md b/docs/08-connector-development/15-cdk-dotnet.md similarity index 100% rename from docs/connector-development/cdk-dotnet/README.md rename to docs/08-connector-development/15-cdk-dotnet.md diff --git a/docs/connector-development/README.md b/docs/08-connector-development/README.md similarity index 90% rename from docs/connector-development/README.md rename to docs/08-connector-development/README.md index b1bcbd6a0072..c0840f430678 100644 --- a/docs/connector-development/README.md +++ b/docs/08-connector-development/README.md @@ -1,6 +1,6 @@ -# Connector Development +# Build a connector -Airbyte supports two types of connectors: Sources and Destinations. A connector takes the form of a Docker image which follows the [Airbyte specification](../understanding-airbyte/airbyte-protocol.md). +Airbyte supports two types of connectors: Sources and Destinations. A connector takes the form of a Docker image which follows the [Airbyte specification](../10-understanding-airbyte/02-airbyte-protocol.md). To build a new connector in Java or Python, we provide templates so you don't need to start everything from scratch. @@ -8,11 +8,11 @@ To build a new connector in Java or Python, we provide templates so you don't ne ## Python Connector-Development Kit \(CDK\) -You can build a connector very quickly in Python with the [Airbyte CDK](cdk-python/), which generates 75% of the code required for you. +You can build a connector very quickly in Python with the [Airbyte CDK](07-cdk-python/README.md), which generates 75% of the code required for you. ## C#/.NET Connector-Development Kit \(CDK\) -You can build a connector very quickly in C# .NET with the [Airbyte Dotnet CDK](cdk-dotnet/), which generates 75% of the code required for you. +You can build a connector very quickly in C# .NET with the [Airbyte Dotnet CDK](15-cdk-dotnet.md), which generates 75% of the code required for you. ## TS/JS Connector-Development Kit \(Faros AI Airbyte CDK\) @@ -20,7 +20,7 @@ You can build a connector in TypeScript/JavaScript with the [Faros AI CDK](https ## The Airbyte specification -Before building a new connector, review [Airbyte's data protocol specification](../understanding-airbyte/airbyte-protocol.md). +Before building a new connector, review [Airbyte's data protocol specification](../10-understanding-airbyte/02-airbyte-protocol.md). ## Adding a new connector @@ -29,7 +29,7 @@ Before building a new connector, review [Airbyte's data protocol specification]( To add a new connector you need to: 1. Implement & Package your connector in an Airbyte Protocol compliant Docker image -2. Add integration tests for your connector. At a minimum, all connectors must pass [Airbyte's standard test suite](testing-connectors/), but you can also add your own tests. +2. Add integration tests for your connector. At a minimum, all connectors must pass [Airbyte's standard test suite](10-testing-connectors.md), but you can also add your own tests. 3. Document how to build & test your connector 4. Publish the Docker image containing the connector @@ -63,17 +63,17 @@ and choose the relevant template by using the arrow keys. This will generate a n Search the generated directory for "TODO"s and follow them to implement your connector. 
For more detailed walkthroughs and instructions, follow the relevant tutorial: -* [Speedrun: Building a HTTP source with the CDK](tutorials/cdk-speedrun.md) -* [Building a HTTP source with the CDK](tutorials/cdk-tutorial-python-http/) -* [Building a Python source](tutorials/building-a-python-source.md) -* [Building a Python destination](tutorials/building-a-python-destination.md) -* [Building a Java destination](tutorials/building-a-java-destination.md) +* [Speedrun: Building a HTTP source with the CDK](01-cdk-speedrun.md) +* [Building a HTTP source with the CDK](02-cdk-tutorial-python-http/README.md) +* [Building a Python source](03-building-a-python-source.md) +* [Building a Python destination](04-building-a-python-destination.md) +* [Building a Java destination](05-building-a-java-destination.md) -As you implement your connector, make sure to review the [Best Practices for Connector Development](best-practices.md) guide. Following best practices is not a requirement for merging your contribution to Airbyte, but it certainly doesn't hurt ;\) +As you implement your connector, make sure to review the [Best Practices for Connector Development](13-best-practices.md) guide. Following best practices is not a requirement for merging your contribution to Airbyte, but it certainly doesn't hurt ;\) ### 2. Integration tests -At a minimum, your connector must implement the acceptance tests described in [Testing Connectors](testing-connectors/) +At a minimum, your connector must implement the acceptance tests described in [Testing Connectors](10-testing-connectors.md) **Note: Acceptance tests are not yet available for Python destination connectors. Coming** [**soon**](https://github.com/airbytehq/airbyte/issues/4698)**!** diff --git a/docs/contributing-to-airbyte/code-of-conduct.md b/docs/09-contributing-to-airbyte/01-code-of-conduct.md similarity index 100% rename from docs/contributing-to-airbyte/code-of-conduct.md rename to docs/09-contributing-to-airbyte/01-code-of-conduct.md diff --git a/docs/contributing-to-airbyte/maintainer-code-of-conduct.md b/docs/09-contributing-to-airbyte/02-maintainer-code-of-conduct.md similarity index 100% rename from docs/contributing-to-airbyte/maintainer-code-of-conduct.md rename to docs/09-contributing-to-airbyte/02-maintainer-code-of-conduct.md diff --git a/docs/contributing-to-airbyte/developing-locally.md b/docs/09-contributing-to-airbyte/03-developing-locally.md similarity index 100% rename from docs/contributing-to-airbyte/developing-locally.md rename to docs/09-contributing-to-airbyte/03-developing-locally.md diff --git a/docs/contributing-to-airbyte/developing-on-docker.md b/docs/09-contributing-to-airbyte/04-developing-on-docker.md similarity index 100% rename from docs/contributing-to-airbyte/developing-on-docker.md rename to docs/09-contributing-to-airbyte/04-developing-on-docker.md diff --git a/docs/contributing-to-airbyte/developing-on-kubernetes.md b/docs/09-contributing-to-airbyte/05-developing-on-kubernetes.md similarity index 95% rename from docs/contributing-to-airbyte/developing-on-kubernetes.md rename to docs/09-contributing-to-airbyte/05-developing-on-kubernetes.md index e4117893310f..1962ee40f474 100644 --- a/docs/contributing-to-airbyte/developing-on-kubernetes.md +++ b/docs/09-contributing-to-airbyte/05-developing-on-kubernetes.md @@ -1,6 +1,6 @@ # Developing on Kubernetes -Make sure to read [our docs for developing locally](developing-locally.md) first. +Make sure to read [our docs for developing locally](03-developing-locally.md) first. 
## Architecture diff --git a/docs/contributing-to-airbyte/monorepo-python-development.md b/docs/09-contributing-to-airbyte/06-monorepo-python-development.md similarity index 100% rename from docs/contributing-to-airbyte/monorepo-python-development.md rename to docs/09-contributing-to-airbyte/06-monorepo-python-development.md diff --git a/docs/contributing-to-airbyte/code-style.md b/docs/09-contributing-to-airbyte/07-code-style.md similarity index 100% rename from docs/contributing-to-airbyte/code-style.md rename to docs/09-contributing-to-airbyte/07-code-style.md diff --git a/docs/contributing-to-airbyte/gradle-cheatsheet.md b/docs/09-contributing-to-airbyte/08-gradle-cheatsheet.md similarity index 100% rename from docs/contributing-to-airbyte/gradle-cheatsheet.md rename to docs/09-contributing-to-airbyte/08-gradle-cheatsheet.md diff --git a/docs/contributing-to-airbyte/gradle-dependency-update.md b/docs/09-contributing-to-airbyte/09-gradle-dependency-update.md similarity index 100% rename from docs/contributing-to-airbyte/gradle-dependency-update.md rename to docs/09-contributing-to-airbyte/09-gradle-dependency-update.md diff --git a/docs/docusaurus/contributing_to_docs.md b/docs/09-contributing-to-airbyte/10-updating-documentation/01-contributing_to_docs.md similarity index 81% rename from docs/docusaurus/contributing_to_docs.md rename to docs/09-contributing-to-airbyte/10-updating-documentation/01-contributing_to_docs.md index 76cc8abcd512..72ee50d59076 100644 --- a/docs/docusaurus/contributing_to_docs.md +++ b/docs/09-contributing-to-airbyte/10-updating-documentation/01-contributing_to_docs.md @@ -1,6 +1,6 @@ # Contributions to docs -![hide easter egg text for images in these thingies](../assets/docs/docs-contribution-keyboard.jpg) +![hide easter egg text for images in these thingies](../../assets/docs/docs-contribution-keyboard.jpg) ## Did you just learn something?! - maybe write a doc for it! @@ -15,7 +15,7 @@ ## Testing your changes locally -[more in depth local change guide here](locally_testing_docusaurus.md) +[more in depth local change guide here](04-locally_testing_docusaurus.md) - You can see what the docs website will look like locally try: ```bash cd docusaurus diff --git a/docs/docusaurus/making_a_redirect.md b/docs/09-contributing-to-airbyte/10-updating-documentation/02-making_a_redirect.md similarity index 90% rename from docs/docusaurus/making_a_redirect.md rename to docs/09-contributing-to-airbyte/10-updating-documentation/02-making_a_redirect.md index f2504925215a..f7b65507278f 100644 --- a/docs/docusaurus/making_a_redirect.md +++ b/docs/09-contributing-to-airbyte/10-updating-documentation/02-making_a_redirect.md @@ -17,7 +17,7 @@ You will see a commented section the reads something like this // }, ``` -Copy this section, replace the values, and [test it locally](locally_testing_docusaurus.md) by going to the +Copy this section, replace the values, and [test it locally](04-locally_testing_docusaurus.md) by going to the path you created a redirect for and checked to see that the address changes to your new one. 
*Note:* Your path **needs* a leading slash `/` to work diff --git a/docs/docusaurus/deploying_and_reverting_docs.md b/docs/09-contributing-to-airbyte/10-updating-documentation/03-deploying_and_reverting_docs.md similarity index 93% rename from docs/docusaurus/deploying_and_reverting_docs.md rename to docs/09-contributing-to-airbyte/10-updating-documentation/03-deploying_and_reverting_docs.md index 801a8256004f..5464f92b42c9 100644 --- a/docs/docusaurus/deploying_and_reverting_docs.md +++ b/docs/09-contributing-to-airbyte/10-updating-documentation/03-deploying_and_reverting_docs.md @@ -1,10 +1,10 @@ # Deploying and Reverting Docs -![docs are fun](../assets/docs/docs.jpg) +![docs are fun](../../assets/docs/docs.jpg) Docusaurus has a strange deployment pattern. Luckily that pattern is abstracted away from you. -If you were looking for the contribution guide [check this doc out](contributing_to_docs.md) +If you were looking for the contribution guide [check this doc out](01-contributing_to_docs.md) Docs will deploy from whatever branch you are in. You will probably want to deploy from master, but that is at your discretion. diff --git a/docs/docusaurus/locally_testing_docusaurus.md b/docs/09-contributing-to-airbyte/10-updating-documentation/04-locally_testing_docusaurus.md similarity index 93% rename from docs/docusaurus/locally_testing_docusaurus.md rename to docs/09-contributing-to-airbyte/10-updating-documentation/04-locally_testing_docusaurus.md index 8e3f4a0a2434..61d205a7662c 100644 --- a/docs/docusaurus/locally_testing_docusaurus.md +++ b/docs/09-contributing-to-airbyte/10-updating-documentation/04-locally_testing_docusaurus.md @@ -1,6 +1,6 @@ # Locally testing your changes -![testing is fun and games until it blows up](../assets/docs/science-kid.jpg) +![testing is fun and games until it blows up](../../assets/docs/science-kid.jpg) You can test any change you make to see how it will look in production diff --git a/docs/docusaurus/docusaurus_settings.md b/docs/09-contributing-to-airbyte/10-updating-documentation/05-docusaurus/01-docusaurus_settings.md similarity index 92% rename from docs/docusaurus/docusaurus_settings.md rename to docs/09-contributing-to-airbyte/10-updating-documentation/05-docusaurus/01-docusaurus_settings.md index ba79876da010..de3417c4911d 100644 --- a/docs/docusaurus/docusaurus_settings.md +++ b/docs/09-contributing-to-airbyte/10-updating-documentation/05-docusaurus/01-docusaurus_settings.md @@ -25,8 +25,8 @@ Here are some quick relevant guides using docusaurus at Airbyte. 
label: 'A collection of useless websites', }, ``` -![its a funny website](../assets/docs/useless_example.jpg) -- test locally [following this guide](locally_testing_docusaurus.md) +![its a funny website](../../../assets/docs/useless_example.jpg) +- test locally [following this guide](../04-locally_testing_docusaurus.md) ### Updating docusaurus diff --git a/docs/docusaurus/readme.md b/docs/09-contributing-to-airbyte/10-updating-documentation/05-docusaurus/README.md similarity index 80% rename from docs/docusaurus/readme.md rename to docs/09-contributing-to-airbyte/10-updating-documentation/05-docusaurus/README.md index 400f248097e6..345b4584c8cb 100644 --- a/docs/docusaurus/readme.md +++ b/docs/09-contributing-to-airbyte/10-updating-documentation/05-docusaurus/README.md @@ -1,6 +1,6 @@ # Docusaurus / Docs -![isn't he cute?](../assets/docs/docusaurus.jpg) +![isn't he cute?](../../../assets/docs/docusaurus.jpg) Guides on how to use docusaurus and docs in airbyte diff --git a/docs/contributing-to-airbyte/updating-documentation.md b/docs/09-contributing-to-airbyte/10-updating-documentation/README.md similarity index 97% rename from docs/contributing-to-airbyte/updating-documentation.md rename to docs/09-contributing-to-airbyte/10-updating-documentation/README.md index f50fe6c8480d..b01d127c0085 100644 --- a/docs/contributing-to-airbyte/updating-documentation.md +++ b/docs/09-contributing-to-airbyte/10-updating-documentation/README.md @@ -33,7 +33,7 @@ To edit the sidebar you must [edit this JSON in this Javascript file](https://gi cd airbyte ``` - While cloning on Windows, you might encounter errors about long filenames. Refer to the instructions [here](../deploying-airbyte/local-deployment.md#handling-long-filename-error) to correct it. + While cloning on Windows, you might encounter errors about long filenames. Refer to the instructions [here](../../05-deploying-airbyte/01-local-deployment.md#handling-long-filename-error) to correct it. 3. Modify the documentation. 4. Create a pull request @@ -53,7 +53,7 @@ yarn serve You can now navigate to [http://localhost:3000/](http://localhost:3000/) to see your changes. You can stop the running server in OSX/Linux by pressing `control-c` in the terminal running the server ### Deploying the docs website -We use Github Pages for hosting this docs website, and Docusaurus as the docs framework. An [internal guide for deployment lives here](../docusaurus/deploying_and_reverting_docs.md). +We use Github Pages for hosting this docs website, and Docusaurus as the docs framework. An [internal guide for deployment lives here](03-deploying_and_reverting_docs.md). The source code for the docs lives in the [airbyte monorepo's `docs/` directory](https://github.com/airbytehq/airbyte/tree/master/docs). To publish the updated docs on this website after you've committed a change to the `docs/` markdown files, it is required to locally run a manual publish flow. Locally run `./tools/bin/deploy_docusaurus` from the `airbyte` monorepo project root to deploy this docs website. diff --git a/docs/contributing-to-airbyte/README.md b/docs/09-contributing-to-airbyte/README.md similarity index 92% rename from docs/contributing-to-airbyte/README.md rename to docs/09-contributing-to-airbyte/README.md index 330ae5addfc8..a6e91009f0ae 100644 --- a/docs/contributing-to-airbyte/README.md +++ b/docs/09-contributing-to-airbyte/README.md @@ -2,7 +2,7 @@ description: 'We love contributions to Airbyte, big or small.' 
--- -# Contributing to Airbyte +# Contribute to Airbyte Thank you for your interest in contributing! We love community contributions. Contribution guidelines are listed below. If you're unsure about how to start contributing or have any questions even after reading them, feel free to ask us on [Slack](https://slack.airbyte.io) in the \#dev or \#general channel. @@ -10,11 +10,11 @@ However, for those who want a bit more guidance on the best way to contribute to ## Code of conduct -Please follow our [Code of conduct](code-of-conduct.md) in the context of any contributions made to Airbyte. +Please follow our [Code of conduct](01-code-of-conduct.md) in the context of any contributions made to Airbyte. ## Airbyte specification -Before you can start contributing, you need to understand [Airbyte's data protocol specification](../understanding-airbyte/airbyte-protocol.md). +Before you can start contributing, you need to understand [Airbyte's data protocol specification](../10-understanding-airbyte/02-airbyte-protocol.md). ## First-time contributors, welcome! @@ -67,8 +67,8 @@ The CDK currently does not support creating destinations, but it will very soon. :::: * See [Building new connectors](../connector-development/) to get started. -* Since we frequently build connectors in Python, on top of Singer or in Java, we've created generator libraries to get you started quickly: [Build Python Source Connectors](../connector-development/tutorials/building-a-python-source.md) and [Build Java Destination Connectors](../connector-development/tutorials/building-a-java-destination.md) -* Integration tests \(tests that run a connector's image against an external resource\) can be run one of three ways, as detailed [here](../connector-development/testing-connectors/source-acceptance-tests-reference.md) +* Since we frequently build connectors in Python, on top of Singer or in Java, we've created generator libraries to get you started quickly: [Build Python Source Connectors](../08-connector-development/03-building-a-python-source.md) and [Build Java Destination Connectors](../08-connector-development/05-building-a-java-destination.md) +* Integration tests \(tests that run a connector's image against an external resource\) can be run one of three ways, as detailed [here](../08-connector-development/11-source-acceptance-tests-reference.md) **Please note that, at no point in time, we will ask you to maintain your connector.** The goal is that the Airbyte team and the community helps maintain the connector. @@ -81,7 +81,7 @@ Our goal is to keep our docs comprehensive and updated. If you would like to hel * Fix errors in existing docs * Help us in adding to the docs -The contributing guide for docs can be found [here](updating-documentation.md). +The contributing guide for docs can be found [here](10-updating-documentation/README.md). 
### **Community content** diff --git a/docs/contributing-to-airbyte/sonar-qube-workflow.md b/docs/09-contributing-to-airbyte/sonar-qube-workflow.md similarity index 100% rename from docs/contributing-to-airbyte/sonar-qube-workflow.md rename to docs/09-contributing-to-airbyte/sonar-qube-workflow.md diff --git a/docs/contributing-to-airbyte/templates/README.md b/docs/09-contributing-to-airbyte/templates/README.md similarity index 100% rename from docs/contributing-to-airbyte/templates/README.md rename to docs/09-contributing-to-airbyte/templates/README.md diff --git a/docs/contributing-to-airbyte/templates/integration-documentation-template.md b/docs/09-contributing-to-airbyte/templates/integration-documentation-template.md similarity index 100% rename from docs/contributing-to-airbyte/templates/integration-documentation-template.md rename to docs/09-contributing-to-airbyte/templates/integration-documentation-template.md diff --git a/docs/understanding-airbyte/beginners-guide-to-catalog.md b/docs/10-understanding-airbyte/01-beginners-guide-to-catalog.md similarity index 86% rename from docs/understanding-airbyte/beginners-guide-to-catalog.md rename to docs/10-understanding-airbyte/01-beginners-guide-to-catalog.md index 6da92698dc90..0c88d4e7fd28 100644 --- a/docs/understanding-airbyte/beginners-guide-to-catalog.md +++ b/docs/10-understanding-airbyte/01-beginners-guide-to-catalog.md @@ -2,21 +2,21 @@ ## Overview -The goal of this article is to make the `AirbyteCatalog` approachable to someone contributing to Airbyte for the first time. If you are looking to get deeper into the details of the catalog, you can read our technical specification on it [here](airbyte-protocol.md#catalog). +The goal of this article is to make the `AirbyteCatalog` approachable to someone contributing to Airbyte for the first time. If you are looking to get deeper into the details of the catalog, you can read our technical specification on it [here](02-airbyte-protocol.md#catalog). The goal of the `AirbyteCatalog` is to describe _what_ data is available in a source. The goal of the `ConfiguredAirbyteCatalog` is to, based on an `AirbyteCatalog`, specify _how_ data from the source is replicated. ## Contents -This article will illustrate how to use `AirbyteCatalog` via a series of examples. We recommend reading the [Database Example](beginners-guide-to-catalog.md#Database-Example) first. The other examples, will refer to knowledge described in that section. After that, jump around to whichever example is most pertinent to your inquiry. +This article will illustrate how to use `AirbyteCatalog` via a series of examples. We recommend reading the [Database Example](01-beginners-guide-to-catalog.md#Database-Example) first. The other examples, will refer to knowledge described in that section. After that, jump around to whichever example is most pertinent to your inquiry. 
-* [Postgres Example](beginners-guide-to-catalog.md#Database-Example) -* [API Example](beginners-guide-to-catalog.md#API-Examples) - * [Static Streams Example](beginners-guide-to-catalog.md#Static-Streams-Example) - * [Dynamic Streams Example](beginners-guide-to-catalog.md#Dynamic-Streams-Example) -* [Nested Schema Example](beginners-guide-to-catalog.md#Nested-Schema-Example) +* [Postgres Example](01-beginners-guide-to-catalog.md#Database-Example) +* [API Example](01-beginners-guide-to-catalog.md#API-Examples) + * [Static Streams Example](01-beginners-guide-to-catalog.md#Static-Streams-Example) + * [Dynamic Streams Example](01-beginners-guide-to-catalog.md#Dynamic-Streams-Example) +* [Nested Schema Example](01-beginners-guide-to-catalog.md#Nested-Schema-Example) -In order to understand in depth how to configure incremental data replication, head over to the [incremental replication docs](connections/incremental-append.md). +In order to understand in depth how to configure incremental data replication, head over to the [incremental replication docs](05-connections/03-incremental-append.md). ## Database Example @@ -87,12 +87,12 @@ We would represent this data in a catalog as follows: } ``` -The catalog is structured as a list of `AirbyteStream`. In the case of a database a "stream" is analogous to a table. \(For APIs the mapping can be a more creative; we will discuss it later in [API Examples](beginners-guide-to-catalog.md#API-Examples)\) +The catalog is structured as a list of `AirbyteStream`. In the case of a database a "stream" is analogous to a table. \(For APIs the mapping can be a more creative; we will discuss it later in [API Examples](01-beginners-guide-to-catalog.md#API-Examples)\) Let's walk through what each field in a stream means. * `name` - The name of the stream. -* `supported_sync_modes` - This field lists the type of data replication that this source supports. The possible values in this array include `FULL_REFRESH` \([docs](connections/full-refresh-overwrite.md)\) and `INCREMENTAL` \([docs](connections/incremental-append.md)\). +* `supported_sync_modes` - This field lists the type of data replication that this source supports. The possible values in this array include `FULL_REFRESH` \([docs](05-connections/01-full-refresh-overwrite.md)\) and `INCREMENTAL` \([docs](05-connections/03-incremental-append.md)\). * `source_defined_cursor` - If the stream supports `INCREMENTAL` replication, then this field signals whether the source can figure out how to detect new records on its own or not. * `json_schema` - This field is a [JsonSchema](https://json-schema.org/understanding-json-schema) object that describes the structure of the data. Notice that each key in the `properties` object corresponds to a column name in our database table. @@ -137,7 +137,7 @@ Let's walk through each field in the `ConfiguredAirbyteStream`: * `sync_mode` - This field must be one of the values that was in `supported_sync_modes` in the `AirbyteStream` - Configures which sync mode will be used when data is replicated. * `stream` - Hopefully this one looks familiar! This field contains an `AirbyteStream`. It should be _identical_ to the one we saw in the `AirbyteCatalog`. -* `cursor_field` - When `sync_mode` is `INCREMENTAL` and `source_defined_cursor = false`, this field configures which field in the stream will be used to determine if a record should be replicated or not. Read more about this concept in our [documentation of incremental replication](connections/incremental-append.md). 
+* `cursor_field` - When `sync_mode` is `INCREMENTAL` and `source_defined_cursor = false`, this field configures which field in the stream will be used to determine if a record should be replicated or not. Read more about this concept in our [documentation of incremental replication](05-connections/03-incremental-append.md). ### Summary of the Postgres Example @@ -149,7 +149,7 @@ The `AirbyteCatalog` offers the flexibility in how to model the data for an API. ### Static Streams Example -Let's imagine we want to create a basic Stock Ticker source. The goal of this source is to take in a single stock symbol and return a single stream. We will call the stream `ticker` and will contain the closing price of the stock. We will assume that you already have a rough understanding of the `AirbyteCatalog` and the `ConfiguredAirbyteCatalog` from the [previous database example](beginners-guide-to-catalog.md#Database-Example). +Let's imagine we want to create a basic Stock Ticker source. The goal of this source is to take in a single stock symbol and return a single stream. We will call the stream `ticker`, and it will contain the closing price of the stock. We will assume that you already have a rough understanding of the `AirbyteCatalog` and the `ConfiguredAirbyteCatalog` from the [previous database example](01-beginners-guide-to-catalog.md#Database-Example). #### AirbyteCatalog @@ -184,11 +184,11 @@ Here is what the `AirbyteCatalog` might look like. } ``` -This catalog looks pretty similar to the `AirbyteCatalog` that we created for the [Database Example](beginners-guide-to-catalog.md#Database-Example). For the data we've picked here, you can think about `ticker` as a table and then each field it returns in a record as a column, so it makes sense that these look pretty similar. +This catalog looks pretty similar to the `AirbyteCatalog` that we created for the [Database Example](01-beginners-guide-to-catalog.md#Database-Example). For the data we've picked here, you can think about `ticker` as a table and then each field it returns in a record as a column, so it makes sense that these look pretty similar. #### ConfiguredAirbyteCatalog -The `ConfiguredAirbyteCatalog` follows the same rules as we described in the [Database Example](beginners-guide-to-catalog.md#Database-Example). It just wraps the `AirbyteCatalog` described above. +The `ConfiguredAirbyteCatalog` follows the same rules as we described in the [Database Example](01-beginners-guide-to-catalog.md#Database-Example). It just wraps the `AirbyteCatalog` described above. ## Dynamic Streams Example diff --git a/docs/understanding-airbyte/airbyte-protocol.md b/docs/10-understanding-airbyte/02-airbyte-protocol.md similarity index 99% rename from docs/understanding-airbyte/airbyte-protocol.md rename to docs/10-understanding-airbyte/02-airbyte-protocol.md index 6c3b805d4891..ed2fe293759c 100644 --- a/docs/understanding-airbyte/airbyte-protocol.md +++ b/docs/10-understanding-airbyte/02-airbyte-protocol.md @@ -5,7 +5,7 @@ The Airbyte Protocol describes a series of standard components and all the inter This document describes the protocol as it exists in its CURRENT form. Stay tuned for an RFC on how the protocol will evolve. -This document is intended to contain ALL the rules of the Airbyte Protocol in one place. Anything not contained in this document is NOT part of the Protocol.
At the time of writing, there is one known exception, which is the [Supported Data Types](supported-data-types.md), which contains rules on data types that are part of the Protocol. That said, there are additional articles, e.g. [A Beginner's Guide to the Airbyte Catalog](beginners-guide-to-catalog.md) that repackage the information in this document for different audiences. +This document is intended to contain ALL the rules of the Airbyte Protocol in one place. Anything not contained in this document is NOT part of the Protocol. At the time of writing, there is one known exception, which is the [Supported Data Types](12-supported-data-types.md), which contains rules on data types that are part of the Protocol. That said, there are additional articles, e.g. [A Beginner's Guide to the Airbyte Catalog](01-beginners-guide-to-catalog.md) that repackage the information in this document for different audiences. ## Key Concepts There are 2 major components in the Airbyte Protocol: Source and Destination. These components are referred to as Actors. A source is an application that is described by a series of standard interfaces. This application extracts data from an underlying data store. A data store in this context refers to the tool where the data is actually stored. A data store includes: databases, APIs, anything that produces data, etc. For example, the Postgres Source is a Source that pulls from Postgres (which is a data store). A Destination is an application that is described by a series of standard interfaces that loads data into a data store. @@ -297,7 +297,7 @@ This section will document the meaning of each field in an `AirbyteStream` ### Data Types -Airbyte maintains a set of types that intersects with those of JSONSchema but also includes its own. More information on supported data types can be found in [Supported Data Types](supported-data-types.md). +Airbyte maintains a set of types that intersects with those of JSONSchema but also includes its own. More information on supported data types can be found in [Supported Data Types](12-supported-data-types.md). ## ConfiguredAirbyteStream diff --git a/docs/understanding-airbyte/airbyte-protocol-docker.md b/docs/10-understanding-airbyte/03-airbyte-protocol-docker.md similarity index 75% rename from docs/understanding-airbyte/airbyte-protocol-docker.md rename to docs/10-understanding-airbyte/03-airbyte-protocol-docker.md index bfc3404715fd..256ad91a9c68 100644 --- a/docs/understanding-airbyte/airbyte-protocol-docker.md +++ b/docs/10-understanding-airbyte/03-airbyte-protocol-docker.md @@ -1,7 +1,7 @@ # Airbyte Protocol Docker Interface ## Summary -The [Airbyte Protocol](airbyte-protocol.md) describes a series of structs and interfaces for building data pipelines. The Protocol article describes those interfaces in language agnostic pseudocode, this article transcribes those into docker commands. Airbyte's implementation of the protocol is all done in docker. Thus, this reference is helpful for getting a more concrete look at how the Protocol is used. It can also be used as a reference for interacting with Airbyte's implementation of the Protocol. +The [Airbyte Protocol](02-airbyte-protocol.md) describes a series of structs and interfaces for building data pipelines. The Protocol article describes those interfaces in language-agnostic pseudocode; this article transcribes them into Docker commands. Airbyte's implementation of the protocol is done entirely in Docker.
Thus, this reference is helpful for getting a more concrete look at how the Protocol is used. It can also be used as a reference for interacting with Airbyte's implementation of the Protocol. ## Source diff --git a/docs/understanding-airbyte/basic-normalization.md b/docs/10-understanding-airbyte/04-basic-normalization.md similarity index 90% rename from docs/understanding-airbyte/basic-normalization.md rename to docs/10-understanding-airbyte/04-basic-normalization.md index 8a739b119834..852ccb8bf069 100644 --- a/docs/understanding-airbyte/basic-normalization.md +++ b/docs/10-understanding-airbyte/04-basic-normalization.md @@ -69,7 +69,7 @@ Additional metadata columns can be added on some tables depending on the usage: - On de-duplicated (and SCD) tables: - `_airbyte_unique_key`: hash of primary keys used to de-duplicate the final table. -The [normalization rules](basic-normalization.md#Rules) are _not_ configurable. They are designed to pick a reasonable set of defaults to hit the 80/20 rule of data normalization. We respect that normalization is a detail-oriented problem and that with a fixed set of rules, we cannot normalize your data in such a way that covers all use cases. If this feature does not meet your normalization needs, we always put the full json blob in destination as well, so that you can parse that object however best meets your use case. We will be adding more advanced normalization functionality shortly. Airbyte is focused on the EL of ELT. If you need a really featureful tool for the transformations then, we suggest trying out dbt. +The [normalization rules](04-basic-normalization.md#Rules) are _not_ configurable. They are designed to pick a reasonable set of defaults to hit the 80/20 rule of data normalization. We respect that normalization is a detail-oriented problem and that with a fixed set of rules, we cannot normalize your data in such a way that covers all use cases. If this feature does not meet your normalization needs, we always put the full json blob in destination as well, so that you can parse that object however best meets your use case. We will be adding more advanced normalization functionality shortly. Airbyte is focused on the EL of ELT. If you need a really featureful tool for the transformations then, we suggest trying out dbt. Airbyte places the json blob version of your data in a table called `_airbyte_raw_`. If basic normalization is turned on, it will place a separate copy of the data in a table called ``. Under the hood, Airbyte is using dbt, which means that the data only ingresses into the data store one time. The normalization happens as a query within the datastore. This implementation avoids extra network time and costs. @@ -94,14 +94,14 @@ In Airbyte, the current normalization option is implemented using a dbt Transfor ## Destinations that Support Basic Normalization -* [BigQuery](../integrations/destinations/bigquery.md) -* [MS Server SQL](../integrations/destinations/mssql.md) -* [MySQL](../integrations/destinations/mysql.md) +* [BigQuery](../02-integrations/02-destinations/bigquery.md) +* [MS Server SQL](../02-integrations/02-destinations/mssql.md) +* [MySQL](../02-integrations/02-destinations/mysql.md) * The server must support the `WITH` keyword. * Require MySQL >= 8.0, or MariaDB >= 10.2.1. 
-* [Postgres](../integrations/destinations/postgres.md) -* [Redshift](../integrations/destinations/redshift.md) -* [Snowflake](../integrations/destinations/snowflake.md) +* [Postgres](../02-integrations/02-destinations/postgres.md) +* [Redshift](../02-integrations/02-destinations/redshift.md) +* [Snowflake](../02-integrations/02-destinations/snowflake.md) Basic Normalization can be configured when you're creating the connection between your Connection Setup and after in the Transformation Tab. Select the option: **Normalized tabular data**. @@ -122,8 +122,8 @@ Airbyte uses the types described in the catalog to determine the correct type fo | `bit` | boolean | | | `boolean` | boolean | | | `string` with format label `date-time`| timestamp with timezone | | -| `array` | new table | see [nesting](basic-normalization.md#Nesting) | -| `object` | new table | see [nesting](basic-normalization.md#Nesting) | +| `array` | new table | see [nesting](04-basic-normalization.md#Nesting) | +| `object` | new table | see [nesting](04-basic-normalization.md#Nesting) | ### Nesting @@ -321,7 +321,7 @@ To enable basic normalization \(which is optional\), you can toggle it on or dis ## Incremental runs -When the source is configured with sync modes compatible with incremental transformations (using append on destination) such as ( [full_refresh_append](connections/full-refresh-append.md), [incremental append](connections/incremental-append.md) or [incremental deduped history](connections/incremental-deduped-history.md)), only rows that have changed in the source are transferred over the network and written by the destination connector. +When the source is configured with sync modes compatible with incremental transformations (using append on destination) such as ( [full_refresh_append](05-connections/02-full-refresh-append.md), [incremental append](05-connections/03-incremental-append.md) or [incremental deduped history](05-connections/04-incremental-deduped-history.md)), only rows that have changed in the source are transferred over the network and written by the destination connector. Normalization will then try to build the normalized tables incrementally as the rows in the raw tables that have been created or updated since the last time dbt ran. As such, on each dbt run, the models get built incrementally. This limits the amount of data that needs to be transformed, vastly reducing the runtime of the transformations. This improves warehouse performance and reduces compute costs. Because normalization can be either run incrementally and, or, in full refresh, a technical column `_airbyte_normalized_at` can serve to track when was the last time a record has been transformed and written by normalization. This may greatly diverge from the `_airbyte_emitted_at` value as the normalized tables could be totally re-built at a latter time from the data stored in the `_airbyte_raw` tables. @@ -333,15 +333,15 @@ Normalization produces tables that are partitioned, clustered, sorted or indexed In general, normalization needs to do lookup on the last emitted_at column to know if a record is freshly produced and need to be incrementally processed or not. But in certain models, such as SCD tables for example, we also need to retrieve older data to update their type 2 SCD end_date and active_row flags, thus a different partitioning scheme is used to optimize that use case. 
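To make the incremental-normalization behaviour described above more concrete, here is a minimal, purely illustrative Python sketch of the cutoff rule: only raw rows emitted after the newest already-normalized row are reprocessed, and `_airbyte_normalized_at` is stamped at transform time, which is why it can diverge from `_airbyte_emitted_at`. The table layouts and column handling are simplified assumptions, not the dbt SQL that Airbyte actually generates.

```python
from datetime import datetime, timezone

# Hypothetical in-memory stand-ins for a raw table and its normalized counterpart.
raw_table = [
    {"_airbyte_emitted_at": datetime(2022, 5, 1, tzinfo=timezone.utc), "_airbyte_data": {"id": 1, "name": "old"}},
    {"_airbyte_emitted_at": datetime(2022, 5, 3, tzinfo=timezone.utc), "_airbyte_data": {"id": 2, "name": "new"}},
]
normalized_table = [
    {"id": 1, "name": "old",
     "_airbyte_emitted_at": datetime(2022, 5, 1, tzinfo=timezone.utc),
     "_airbyte_normalized_at": datetime(2022, 5, 2, tzinfo=timezone.utc)},
]

def incremental_normalize(raw_rows, normalized_rows):
    """Re-process only raw rows emitted after the newest row that was already normalized."""
    cutoff = max((r["_airbyte_emitted_at"] for r in normalized_rows), default=None)
    now = datetime.now(timezone.utc)
    for row in raw_rows:
        if cutoff is not None and row["_airbyte_emitted_at"] <= cutoff:
            continue  # already handled by a previous normalization run
        flattened = dict(row["_airbyte_data"])                  # unpack the JSON blob
        flattened["_airbyte_emitted_at"] = row["_airbyte_emitted_at"]
        flattened["_airbyte_normalized_at"] = now               # set at transform time, not at sync time
        normalized_rows.append(flattened)
    return normalized_rows

incremental_normalize(raw_table, normalized_table)  # only the 2022-05-03 row is processed
```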
-On Postgres destination, an additional table suffixed with `_stg` for every stream replicated in [incremental deduped history](connections/incremental-deduped-history.md) needs to be persisted (in a different staging schema) for incremental transformations to work because of a [limitation](https://github.com/dbt-labs/docs.getdbt.com/issues/335#issuecomment-694199569). +On Postgres destination, an additional table suffixed with `_stg` for every stream replicated in [incremental deduped history](05-connections/04-incremental-deduped-history.md) needs to be persisted (in a different staging schema) for incremental transformations to work because of a [limitation](https://github.com/dbt-labs/docs.getdbt.com/issues/335#issuecomment-694199569). ## Extending Basic Normalization Note that all the choices made by Normalization as described in this documentation page in terms of naming (and more) could be overridden by your own custom choices. To do so, you can follow the following tutorials: -* to build a [custom SQL view](../operator-guides/transformation-and-normalization/transformations-with-sql.md) with your own naming conventions -* to export, edit and run [custom dbt normalization](../operator-guides/transformation-and-normalization/transformations-with-dbt.md) yourself -* or further, you can configure the use of a custom dbt project within Airbyte by following [this guide](../operator-guides/transformation-and-normalization/transformations-with-airbyte.md). +* to build a [custom SQL view](../06-operator-guides/09-transformation-and-normalization/01-transformations-with-sql.md) with your own naming conventions +* to export, edit and run [custom dbt normalization](../06-operator-guides/09-transformation-and-normalization/02-transformations-with-dbt.md) yourself +* or further, you can configure the use of a custom dbt project within Airbyte by following [this guide](../06-operator-guides/09-transformation-and-normalization/03-transformations-with-airbyte.md). ## CHANGELOG diff --git a/docs/understanding-airbyte/connections/full-refresh-overwrite.md b/docs/10-understanding-airbyte/05-connections/01-full-refresh-overwrite.md similarity index 89% rename from docs/understanding-airbyte/connections/full-refresh-overwrite.md rename to docs/10-understanding-airbyte/05-connections/01-full-refresh-overwrite.md index f5c962da8ce3..8a6d799e8976 100644 --- a/docs/understanding-airbyte/connections/full-refresh-overwrite.md +++ b/docs/10-understanding-airbyte/05-connections/01-full-refresh-overwrite.md @@ -2,7 +2,7 @@ ## Overview -The **Full Refresh** modes are the simplest methods that Airbyte uses to sync data, as they always retrieve all available information requested from the source, regardless of whether it has been synced before. This contrasts with [**Incremental sync**](incremental-append.md), which does not sync data that has already been synced before. +The **Full Refresh** modes are the simplest methods that Airbyte uses to sync data, as they always retrieve all available information requested from the source, regardless of whether it has been synced before. This contrasts with [**Incremental sync**](03-incremental-append.md), which does not sync data that has already been synced before. In the **Overwrite** variant, new syncs will destroy all data in the existing destination table and then pull the new data in. Therefore, data that has been removed from the source after an old sync will be deleted in the destination table. 
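As a toy illustration of the two Full Refresh variants discussed here, the following Python sketch models the destination table as a plain list; the function names and record shapes are invented for the example and are not part of any Airbyte API.

```python
def full_refresh_overwrite(destination_table, new_records):
    """Full Refresh | Overwrite: drop everything previously synced and keep only the new snapshot."""
    destination_table.clear()             # rows deleted at the source disappear here too
    destination_table.extend(new_records)

def full_refresh_append(destination_table, new_records):
    """Full Refresh | Append: keep prior syncs, so repeated syncs duplicate unchanged rows."""
    destination_table.extend(new_records)

# With Overwrite, the second sync leaves no trace of the record deleted upstream.
table = []
full_refresh_overwrite(table, [{"id": 1}, {"id": 2}])
full_refresh_overwrite(table, [{"id": 1}])   # id=2 was removed from the source
assert table == [{"id": 1}]
```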
diff --git a/docs/understanding-airbyte/connections/full-refresh-append.md b/docs/10-understanding-airbyte/05-connections/02-full-refresh-append.md similarity index 91% rename from docs/understanding-airbyte/connections/full-refresh-append.md rename to docs/10-understanding-airbyte/05-connections/02-full-refresh-append.md index 56fdbaab4475..81dafc404bc8 100644 --- a/docs/understanding-airbyte/connections/full-refresh-append.md +++ b/docs/10-understanding-airbyte/05-connections/02-full-refresh-append.md @@ -2,7 +2,7 @@ ## Overview -The **Full Refresh** modes are the simplest methods that Airbyte uses to sync data, as they always retrieve all available data requested from the source, regardless of whether it has been synced before. This contrasts with [**Incremental sync**](incremental-append.md), which does not sync data that has already been synced before. +The **Full Refresh** modes are the simplest methods that Airbyte uses to sync data, as they always retrieve all available data requested from the source, regardless of whether it has been synced before. This contrasts with [**Incremental sync**](03-incremental-append.md), which does not sync data that has already been synced before. In the **Append** variant, new syncs will take all data from the sync and append it to the destination table. Therefore, if syncing similar information multiple times, every sync will create duplicates of already existing data. diff --git a/docs/understanding-airbyte/connections/incremental-append.md b/docs/10-understanding-airbyte/05-connections/03-incremental-append.md similarity index 87% rename from docs/understanding-airbyte/connections/incremental-append.md rename to docs/10-understanding-airbyte/05-connections/03-incremental-append.md index e5cec603b521..b7598e993877 100644 --- a/docs/understanding-airbyte/connections/incremental-append.md +++ b/docs/10-understanding-airbyte/05-connections/03-incremental-append.md @@ -2,7 +2,7 @@ ## Overview -Airbyte supports syncing data in **Incremental Append** mode i.e: syncing only replicate _new_ or _modified_ data. This prevents re-fetching data that you have already replicated from a source. If the sync is running for the first time, it is equivalent to a [Full Refresh](full-refresh-append.md) since all data will be considered as _new_. +Airbyte supports syncing data in **Incremental Append** mode, i.e., replicating only _new_ or _modified_ data. This prevents re-fetching data that you have already replicated from a source. If the sync is running for the first time, it is equivalent to a [Full Refresh](02-full-refresh-append.md) since all data will be considered as _new_. In this flavor of incremental, records in the warehouse destination will never be deleted or mutated. A copy of each new or updated record is _appended_ to the data in the warehouse. This means you can find multiple copies of the same record in the destination warehouse. We provide an "at least once" guarantee of replicating each record that is present when the sync runs. @@ -62,25 +62,25 @@ The output we expect to see in the warehouse is as follows: ## Source-Defined Cursor -Some sources are able to determine the cursor that they use without any user input. For example, in the [exchange rates source](../../integrations/sources/exchangeratesapi.md), the source knows that the date field should be used to determine the last record that was synced. In these cases, simply select the incremental option in the UI.
+Some sources are able to determine the cursor that they use without any user input. For example, in the [exchange rates source](../../02-integrations/01-sources/exchangeratesapi.md), the source knows that the date field should be used to determine the last record that was synced. In these cases, simply select the incremental option in the UI. ![](../../.gitbook/assets/incremental_source_defined.png) -\(You can find a more technical details about the configuration data model [here](../airbyte-protocol.md#catalog)\). +\(You can find a more technical details about the configuration data model [here](../02-airbyte-protocol.md#catalog)\). ## User-Defined Cursor -Some sources cannot define the cursor without user input. For example, in the [postgres source](../../integrations/sources/postgres.md), the user needs to choose which column in a database table they want to use as the `cursor field`. In these cases, select the column in the sync settings dropdown that should be used as the `cursor field`. +Some sources cannot define the cursor without user input. For example, in the [postgres source](../../02-integrations/01-sources/postgres.md), the user needs to choose which column in a database table they want to use as the `cursor field`. In these cases, select the column in the sync settings dropdown that should be used as the `cursor field`. ![](../../.gitbook/assets/incremental_user_defined.png) -\(You can find a more technical details about the configuration data model [here](../airbyte-protocol.md#catalog)\). +\(You can find a more technical details about the configuration data model [here](../02-airbyte-protocol.md#catalog)\). ## Getting the Latest Snapshot of data As demonstrated in the examples above, with **Incremental Append,** a record which was updated in the source will be appended to the destination rather than updated in-place. This means that if data in the source uses a primary key \(e.g: `user_id` in the `users` table\), then the destination will end up having multiple records with the same primary key value. -However, some use cases require only the latest snapshot of the data. This is available by using other flavors of sync modes such as [Incremental - Deduped History](incremental-deduped-history.md) instead. +However, some use cases require only the latest snapshot of the data. This is available by using other flavors of sync modes such as [Incremental - Deduped History](04-incremental-deduped-history.md) instead. Note that in **Incremental Append**, the size of the data in your warehouse increases monotonically since an updated record in the source is appended to the destination rather than updated in-place. @@ -122,9 +122,9 @@ At the end of the second incremental sync, the data warehouse would still contai Similarly, if multiple modifications are made during the same day to the same records. If the frequency of the sync is not granular enough \(for example, set for every 24h\), then intermediate modifications to the data are not going to be detected and emitted. Only the state of data at the time the sync runs will be reflected in the destination. -Those concerns could be solved by using a different incremental approach based on binary logs, Write-Ahead-Logs \(WAL\), or also called [Change Data Capture \(CDC\)](../cdc.md). +Those concerns could be solved by using a different incremental approach based on binary logs, Write-Ahead-Logs \(WAL\), or also called [Change Data Capture \(CDC\)](../10-cdc.md). 
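The cursor behaviour described above can be summarized with a small, hypothetical Python sketch. The field names, the state shape, and the `>=` comparison (which produces the "at least once" overlap) are simplifying assumptions; real source connectors implement this logic internally and emit state via the Airbyte Protocol.

```python
def incremental_append_sync(source_rows, destination_rows, cursor_field, state):
    """Cursor-based incremental read: emit only rows at or past the saved cursor value,
    then append them to the destination (records are never updated in place)."""
    last_seen = state.get("cursor_value")
    delta = [
        row for row in source_rows
        if last_seen is None or row[cursor_field] >= last_seen  # >= yields the "at least once" overlap
    ]
    destination_rows.extend(delta)                              # append-only: duplicates are expected
    if delta:
        state["cursor_value"] = max(row[cursor_field] for row in delta)
    return state

# Example with `updated_at` as a user-defined cursor field.
state, destination = {}, []
source = [{"id": 1, "updated_at": "2021-01-01"}, {"id": 2, "updated_at": "2021-02-01"}]
state = incremental_append_sync(source, destination, "updated_at", state)
source.append({"id": 1, "updated_at": "2021-03-01"})            # record 1 is modified upstream
state = incremental_append_sync(source, destination, "updated_at", state)
# destination now holds both versions of id=1 (plus a duplicate of id=2 from the cursor overlap).
```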
-The current behavior of **Incremental** is not able to handle source schema changes yet, for example, when a column is added, renamed or deleted from an existing table etc. It is recommended to trigger a [Full refresh - Overwrite](full-refresh-overwrite.md) to correctly replicate the data to the destination with the new schema changes. +The current behavior of **Incremental** is not able to handle source schema changes yet, for example, when a column is added, renamed or deleted from an existing table etc. It is recommended to trigger a [Full refresh - Overwrite](01-full-refresh-overwrite.md) to correctly replicate the data to the destination with the new schema changes. -If you are not satisfied with how transformations are applied on top of the appended data, you can find more relevant SQL transformations you might need to do on your data in the [Connecting EL with T using SQL \(part 1/2\)](incremental-append.md) +If you are not satisfied with how transformations are applied on top of the appended data, you can find more relevant SQL transformations you might need to do on your data in the [Connecting EL with T using SQL \(part 1/2\)](03-incremental-append.md) diff --git a/docs/understanding-airbyte/connections/incremental-deduped-history.md b/docs/10-understanding-airbyte/05-connections/04-incremental-deduped-history.md similarity index 79% rename from docs/understanding-airbyte/connections/incremental-deduped-history.md rename to docs/10-understanding-airbyte/05-connections/04-incremental-deduped-history.md index 00f61502bf40..fbbc73af520c 100644 --- a/docs/understanding-airbyte/connections/incremental-deduped-history.md +++ b/docs/10-understanding-airbyte/05-connections/04-incremental-deduped-history.md @@ -2,16 +2,16 @@ ## High-Level Context -This connector syncs data **incrementally**, which means that only new or modified data will be synced. In contrast with the [Incremental Append mode](./incremental-append.md), this mode updates rows that have been modified instead of adding a new version of the row with the updated data. Simply put, if you've synced a row before and it has since been updated, this mode will combine the two rows -in the destination and use the updated data. On the other hand, the [Incremental Append mode](./incremental-append.md) would just add a new row with the updated data. +This connector syncs data **incrementally**, which means that only new or modified data will be synced. In contrast with the [Incremental Append mode](./03-incremental-append.md), this mode updates rows that have been modified instead of adding a new version of the row with the updated data. Simply put, if you've synced a row before and it has since been updated, this mode will combine the two rows +in the destination and use the updated data. On the other hand, the [Incremental Append mode](./03-incremental-append.md) would just add a new row with the updated data. ## Overview Airbyte supports syncing data in **Incremental Deduped History** mode i.e: -1. **Incremental** means syncing only replicate _new_ or _modified_ data. This prevents re-fetching data that you have already replicated from a source. If the sync is running for the first time, it is equivalent to a [Full Refresh](full-refresh-append.md) since all data will be considered as _new_. -2. **Deduped** means that data in the final table will be unique per primary key \(unlike [Append modes](incremental-append.md)\). 
This is determined by sorting the data using the cursor field and keeping only the latest de-duplicated data row. In dimensional data warehouse jargon defined by Ralph Kimball, this is referred as a Slowly Changing Dimension \(SCD\) table of type 1. -3. **History** means that an additional intermediate table is created in which data is being continuously appended to \(with duplicates exactly like [Append modes](incremental-append.md)\). With the use of primary key fields, it is identifying effective `start` and `end` dates of each row of a record. In dimensional data warehouse jargon, this is referred as a Slowly Changing Dimension \(SCD\) table of type 2. +1. **Incremental** means syncing only replicate _new_ or _modified_ data. This prevents re-fetching data that you have already replicated from a source. If the sync is running for the first time, it is equivalent to a [Full Refresh](02-full-refresh-append.md) since all data will be considered as _new_. +2. **Deduped** means that data in the final table will be unique per primary key \(unlike [Append modes](03-incremental-append.md)\). This is determined by sorting the data using the cursor field and keeping only the latest de-duplicated data row. In dimensional data warehouse jargon defined by Ralph Kimball, this is referred as a Slowly Changing Dimension \(SCD\) table of type 1. +3. **History** means that an additional intermediate table is created in which data is being continuously appended to \(with duplicates exactly like [Append modes](03-incremental-append.md)\). With the use of primary key fields, it is identifying effective `start` and `end` dates of each row of a record. In dimensional data warehouse jargon, this is referred as a Slowly Changing Dimension \(SCD\) table of type 2. In this flavor of incremental, records in the warehouse destination will never be deleted in the history tables \(named with a `_scd` suffix\), but might not exist in the final table. A copy of each new or updated record is _appended_ to the history data in the warehouse. Only the `end` date column is mutated when a new version of the same record is inserted to denote effective date ranges of a row. This means you can find multiple copies of the same record in the destination warehouse. We provide an "at least once" guarantee of replicating each record that is present when the sync runs. @@ -25,7 +25,7 @@ A `cursor field` is the _field_ or _column_ in the data where that cursor can be We will refer to the set of records that the source identifies as being new or updated as a `delta`. -A `primary key` is one or multiple \(called `composite primary keys`\) _fields_ or _columns_ that is used to identify the unique entities of a table. Only one row per primary key value is permitted in a database table. In the data warehouse, just like in [incremental - Append](incremental-append.md), multiple rows for the same primary key can be found in the history table. The unique records per primary key behavior is mirrored in the final table with **incremental deduped** sync mode. The primary key is then used to refer to the entity which values should be updated. +A `primary key` is one or multiple \(called `composite primary keys`\) _fields_ or _columns_ that is used to identify the unique entities of a table. Only one row per primary key value is permitted in a database table. In the data warehouse, just like in [incremental - Append](03-incremental-append.md), multiple rows for the same primary key can be found in the history table. 
The unique records per primary key behavior is mirrored in the final table with **incremental deduped** sync mode. The primary key is then used to refer to the entity whose values should be updated. ## Rules @@ -85,19 +85,19 @@ In the final de-duplicated table: ## Source-Defined Cursor -Some sources are able to determine the cursor that they use without any user input. For example, in the [exchange rates source](../../integrations/sources/exchangeratesapi.md), the source knows that the date field should be used to determine the last record that was synced. In these cases, simply select the incremental option in the UI. +Some sources are able to determine the cursor that they use without any user input. For example, in the [exchange rates source](../../02-integrations/01-sources/exchangeratesapi.md), the source knows that the date field should be used to determine the last record that was synced. In these cases, simply select the incremental option in the UI. ![](../../.gitbook/assets/incremental_source_defined.png) -\(You can find a more technical details about the configuration data model [here](../airbyte-protocol.md#catalog)\). +\(You can find more technical details about the configuration data model [here](../02-airbyte-protocol.md#catalog)\). ## User-Defined Cursor -Some sources cannot define the cursor without user input. For example, in the [postgres source](../../integrations/sources/postgres.md), the user needs to choose which column in a database table they want to use as the `cursor field`. In these cases, select the column in the sync settings dropdown that should be used as the `cursor field`. +Some sources cannot define the cursor without user input. For example, in the [postgres source](../../02-integrations/01-sources/postgres.md), the user needs to choose which column in a database table they want to use as the `cursor field`. In these cases, select the column in the sync settings dropdown that should be used as the `cursor field`. ![](../../.gitbook/assets/incremental_user_defined.png) -\(You can find a more technical details about the configuration data model [here](../airbyte-protocol.md#catalog)\). +\(You can find more technical details about the configuration data model [here](../02-airbyte-protocol.md#catalog)\). ## Source-Defined Primary key @@ -147,11 +147,11 @@ At the end of the second incremental sync, the data warehouse would still contai Similarly, if multiple modifications are made during the same day to the same records. If the frequency of the sync is not granular enough \(for example, set for every 24h\), then intermediate modifications to the data are not going to be detected and emitted. Only the state of data at the time the sync runs will be reflected in the destination. -Those concerns could be solved by using a different incremental approach based on binary logs, Write-Ahead-Logs \(WAL\), or also called [Change Data Capture \(CDC\)](../cdc.md). +Those concerns could be solved by using a different incremental approach based on binary logs, Write-Ahead-Logs \(WAL\), or also called [Change Data Capture \(CDC\)](../10-cdc.md). -The current behavior of **Incremental** is not able to handle source schema changes yet, for example, when a column is added, renamed or deleted from an existing table etc. It is recommended to trigger a [Full refresh - Overwrite](full-refresh-overwrite.md) to correctly replicate the data to the destination with the new schema changes.
+The current behavior of **Incremental** is not able to handle source schema changes yet, for example, when a column is added, renamed or deleted from an existing table etc. It is recommended to trigger a [Full refresh - Overwrite](01-full-refresh-overwrite.md) to correctly replicate the data to the destination with the new schema changes. Additionally, this sync mode is only supported for destinations where dbt/normalization is possible for the moment. The de-duplicating logic is indeed implemented as dbt models as part of a sequence of transformations applied after the Extract and Load activities \(thus, an ELT approach\). Nevertheless, it is theoretically possible that destinations can handle directly this logic \(maybe in the future\) before actually writing records to the destination \(as in traditional ETL manner\), but that's not the way it is implemented at this time. -If you are not satisfied with how transformations are applied on top of the appended data, you can find more relevant SQL transformations you might need to do on your data in the [Connecting EL with T using SQL \(part 1/2\)](../../operator-guides/transformation-and-normalization/transformations-with-sql.md) +If you are not satisfied with how transformations are applied on top of the appended data, you can find more relevant SQL transformations you might need to do on your data in the [Connecting EL with T using SQL \(part 1/2\)](../../06-operator-guides/09-transformation-and-normalization/01-transformations-with-sql.md) diff --git a/docs/understanding-airbyte/connections/README.md b/docs/10-understanding-airbyte/05-connections/README.md similarity index 78% rename from docs/understanding-airbyte/connections/README.md rename to docs/10-understanding-airbyte/05-connections/README.md index 1add762cfb0d..d0bacecbe62b 100644 --- a/docs/understanding-airbyte/connections/README.md +++ b/docs/10-understanding-airbyte/05-connections/README.md @@ -3,14 +3,14 @@ A connection is a configuration for syncing data between a source and a destination. To setup a connection, a user must configure things such as: * Sync schedule: when to trigger a sync of the data. -* Destination [Namespace](../namespaces.md) and stream names: where the data will end up being written. -* A catalog selection: which [streams and fields](../airbyte-protocol.md#catalog) to replicate from the source +* Destination [Namespace](../11-namespaces.md) and stream names: where the data will end up being written. +* A catalog selection: which [streams and fields](../02-airbyte-protocol.md#catalog) to replicate from the source * Sync mode: how streams should be replicated \(read and write\): * Optional transformations: how to convert Airbyte protocol messages \(raw JSON blob\) data into some other data representations. ## Sync schedules -Sync schedules are explained below. For information about catalog selections, see [AirbyteCatalog & ConfiguredAirbyteCatalog](../airbyte-protocol.md#catalog). +Sync schedules are explained below. For information about catalog selections, see [AirbyteCatalog & ConfiguredAirbyteCatalog](../02-airbyte-protocol.md#catalog). Syncs will be triggered by either: @@ -28,7 +28,7 @@ When a scheduled connection is first created, a sync is executed as soon as poss ## Destination namespace -The location of where a connection replication will store data is referenced as the destination namespace. 
The destination connectors should create and write records \(for both raw and normalized tables\) in the specified namespace which should be configurable in the UI via the Namespace Configuration field \(or NamespaceDefinition in the API\). You can read more about configuring namespaces [here](../namespaces.md). +The location of where a connection replication will store data is referenced as the destination namespace. The destination connectors should create and write records \(for both raw and normalized tables\) in the specified namespace which should be configurable in the UI via the Namespace Configuration field \(or NamespaceDefinition in the API\). You can read more about configuring namespaces [here](../11-namespaces.md). ## Destination stream name @@ -38,7 +38,7 @@ Stream names refer to table names in a typical RDBMS. But it can also be the nam ## Stream-specific customization -All the customization of namespace and stream names described above will be equally applied to all streams selected for replication in a catalog per connection. If you need more granular customization, stream by stream, for example, or with different logic rules, then you could follow the tutorial on [customizing transformations with dbt](../../operator-guides/transformation-and-normalization/transformations-with-dbt.md). +All the customization of namespace and stream names described above will be equally applied to all streams selected for replication in a catalog per connection. If you need more granular customization, stream by stream, for example, or with different logic rules, then you could follow the tutorial on [customizing transformations with dbt](../../06-operator-guides/09-transformation-and-normalization/02-transformations-with-dbt.md). ## Sync modes @@ -47,7 +47,7 @@ A sync mode governs how Airbyte reads from a source and writes to a destination. 1. The first part of the name denotes how the source connector reads data from the source: 1. Incremental: Read records added to the source since the last sync job. \(The first sync using Incremental is equivalent to a Full Refresh\) * Method 1: Using a cursor. Generally supported by all connectors whose data source allows extracting records incrementally. - * Method 2: Using change data capture. Only supported by some sources. See [CDC](../cdc.md) for more info. + * Method 2: Using change data capture. Only supported by some sources. See [CDC](../10-cdc.md) for more info. 2. Full Refresh: Read everything in the source. 2. The second part of the sync mode name denotes how the destination connector writes data. This is not affected by how the source connector produced the data: 1. Overwrite: Overwrite by first deleting existing data in the destination. @@ -56,18 +56,18 @@ A sync mode governs how Airbyte reads from a source and writes to a destination. A sync mode is therefore, a combination of a source and destination mode together. The UI exposes the following options, whenever both source and destination connectors are capable to support it for the corresponding stream: -* [Full Refresh Overwrite](full-refresh-overwrite.md): Sync the whole stream and replace data in destination by overwriting it. -* [Full Refresh Append](full-refresh-append.md): Sync the whole stream and append data in destination. -* [Incremental Append](incremental-append.md): Sync new records from stream and append data in destination. 
-* [Incremental Deduped History](incremental-deduped-history.md): Sync new records from stream and append data in destination, also provides a de-duplicated view mirroring the state of the stream in the source. +* [Full Refresh Overwrite](01-full-refresh-overwrite.md): Sync the whole stream and replace data in destination by overwriting it. +* [Full Refresh Append](02-full-refresh-append.md): Sync the whole stream and append data in destination. +* [Incremental Append](03-incremental-append.md): Sync new records from stream and append data in destination. +* [Incremental Deduped History](04-incremental-deduped-history.md): Sync new records from stream and append data in destination, also provides a de-duplicated view mirroring the state of the stream in the source. ## Optional operations ### Airbyte basic normalization -As described by the [Airbyte Protocol from the Airbyte Specifications](../airbyte-protocol.md), a replication is composed of source connectors that are transmitting data in a JSON format. It is then written as such by the destination connectors. +As described by the [Airbyte Protocol from the Airbyte Specifications](../02-airbyte-protocol.md), a replication is composed of source connectors that are transmitting data in a JSON format. It is then written as such by the destination connectors. -On top of this replication, Airbyte provides the option to enable or disable an additional transformation step at the end of the sync called [basic normalization](../basic-normalization.md). This operation is: +On top of this replication, Airbyte provides the option to enable or disable an additional transformation step at the end of the sync called [basic normalization](../04-basic-normalization.md). This operation is: * only available for destinations that support dbt execution. * responsible for automatically generating a pipeline or a DAG of dbt transformation models to convert JSON blob objects into normalized tables. @@ -75,5 +75,5 @@ On top of this replication, Airbyte provides the option to enable or disable an ### Custom sync operations -Further operations can be included in a sync on top of Airbyte basic normalization \(or even to replace it completely\). See [operations](../operations.md) for more details. +Further operations can be included in a sync on top of Airbyte basic normalization \(or even to replace it completely\). See [operations](../06-operations.md) for more details. diff --git a/docs/understanding-airbyte/operations.md b/docs/10-understanding-airbyte/06-operations.md similarity index 95% rename from docs/understanding-airbyte/operations.md rename to docs/10-understanding-airbyte/06-operations.md index f3839499e39b..eabcf7f981b1 100644 --- a/docs/understanding-airbyte/operations.md +++ b/docs/10-understanding-airbyte/06-operations.md @@ -1,6 +1,6 @@ # Operations -Airbyte [connections](connections/) support configuring additional transformations that execute after the sync. Useful applications could be: +Airbyte [connections](05-connections/README.md) support configuring additional transformations that execute after the sync. Useful applications could be: * Customized normalization to better fit the requirements of your own business context. * Business transformations from a technical data representation into a more logical and business oriented data structure. This can facilitate usage by end-users, non-technical operators, and executives looking to generate Business Intelligence dashboards and reports. 
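To restate the idea above that a sync mode is simply a pairing of a source read mode with a destination write mode, here is an illustrative Python sketch; the enum and class names are invented for this example and do not correspond to types in the Airbyte codebase.

```python
from dataclasses import dataclass
from enum import Enum

class SourceMode(Enum):
    FULL_REFRESH = "Full Refresh"          # read everything in the source
    INCREMENTAL = "Incremental"            # read only records added since the last sync job

class DestinationMode(Enum):
    OVERWRITE = "Overwrite"                # delete existing data first
    APPEND = "Append"                      # add new data on top of what is already there
    DEDUPED_HISTORY = "Deduped + history"  # append to a history table, keep a de-duplicated final table

@dataclass(frozen=True)
class SyncMode:
    source: SourceMode
    destination: DestinationMode

    def label(self) -> str:
        return f"{self.source.value} | {self.destination.value}"

# The four combinations the UI exposes when both connectors support them for a stream:
UI_OPTIONS = [
    SyncMode(SourceMode.FULL_REFRESH, DestinationMode.OVERWRITE),
    SyncMode(SourceMode.FULL_REFRESH, DestinationMode.APPEND),
    SyncMode(SourceMode.INCREMENTAL, DestinationMode.APPEND),
    SyncMode(SourceMode.INCREMENTAL, DestinationMode.DEDUPED_HISTORY),
]
```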
diff --git a/docs/understanding-airbyte/high-level-view.md b/docs/10-understanding-airbyte/07-high-level-view.md similarity index 85% rename from docs/understanding-airbyte/high-level-view.md rename to docs/10-understanding-airbyte/07-high-level-view.md index ce45613163db..b1b2512a4598 100644 --- a/docs/understanding-airbyte/high-level-view.md +++ b/docs/10-understanding-airbyte/07-high-level-view.md @@ -8,7 +8,7 @@ Airbyte is conceptually composed of two parts: platform and connectors. The platform provides all the horizontal services required to configure and run data movement operations e.g: the UI, configuration API, job scheduling, logging, alerting, etc. and is structured as a set of microservices. -Connectors are independent modules which push/pull data to/from sources and destinations. Connectors are built in accordance with the [Airbyte Specification](./airbyte-protocol.md), which describes the interface with which data can be moved between a source and a destination using Airbyte. Connectors are packaged as Docker images, which allows total flexibility over the technologies used to implement them. +Connectors are independent modules which push/pull data to/from sources and destinations. Connectors are built in accordance with the [Airbyte Specification](./02-airbyte-protocol.md), which describes the interface with which data can be moved between a source and a destination using Airbyte. Connectors are packaged as Docker images, which allows total flexibility over the technologies used to implement them. A more concrete diagram can be seen below: diff --git a/docs/understanding-airbyte/jobs.md b/docs/10-understanding-airbyte/08-jobs.md similarity index 89% rename from docs/understanding-airbyte/jobs.md rename to docs/10-understanding-airbyte/08-jobs.md index 45dc16046be3..5eeb3d78fef8 100644 --- a/docs/understanding-airbyte/jobs.md +++ b/docs/10-understanding-airbyte/08-jobs.md @@ -12,9 +12,9 @@ In Airbyte, all interactions with connectors are run as jobs performed by a Work The worker has 4 main responsibilities in its lifecycle. 1. Spin up any connector docker containers that are needed for the job. -2. They facilitate message passing to or from a connector docker container \(more on this [below](jobs.md#message-passing)\). +2. They facilitate message passing to or from a connector docker container \(more on this [below](08-jobs.md#message-passing)\). 3. Shut down any connector docker containers that it started. -4. Return the output of the job. \(See [Airbyte Specification](airbyte-protocol.md) to understand the output of each worker type.\) +4. Return the output of the job. \(See [Airbyte Specification](02-airbyte-protocol.md) to understand the output of each worker type.\) ## Message Passing @@ -25,7 +25,7 @@ There are 2 flavors of workers: In the first case, the worker is generally extracting data from the connector and reporting it back to the scheduler. It does this by listening to STDOUT of the connector. In the second case, the worker is facilitating passing data \(via record messages\) from the source to the destination. It does this by listening on STDOUT of the source and writing to STDIN on the destination. -For more information on the schema of the messages that are passed, refer to [Airbyte Specification](airbyte-protocol.md). +For more information on the schema of the messages that are passed, refer to [Airbyte Specification](02-airbyte-protocol.md). 
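The STDOUT/STDIN hand-off described above can be sketched in a few lines of Python. This is a simplified illustration of what a replication worker does rather than the actual worker implementation, and the container names and arguments in the example invocation are placeholders.

```python
import subprocess

def run_sync(source_cmd, destination_cmd):
    """Minimal sketch of a replication worker: stream the source's STDOUT
    (serialized Airbyte protocol messages, one per line) into the destination's STDIN."""
    source = subprocess.Popen(source_cmd, stdout=subprocess.PIPE, text=True)
    destination = subprocess.Popen(destination_cmd, stdin=subprocess.PIPE, text=True)

    for line in source.stdout:           # one protocol message per line
        destination.stdin.write(line)    # a real worker would also inspect STATE and LOG messages here

    destination.stdin.close()            # signal EOF so the destination can flush and exit
    source.wait()
    destination.wait()

# Hypothetical invocation; the image names and arguments below are placeholders, not real commands.
run_sync(
    ["docker", "run", "--rm", "-i", "airbyte/source-example", "read", "--config", "config.json"],
    ["docker", "run", "--rm", "-i", "airbyte/destination-example", "write", "--config", "config.json"],
)
```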
## Worker Lifecycle @@ -37,7 +37,7 @@ Note: When a source has passed all of its messages, the docker process should au [Image Source](https://docs.google.com/drawings/d/1k4v_m2M5o2UUoNlYM7mwtZicRkQgoGLgb3eTOVH8QFo/edit) -See the [architecture overview](high-level-view.md) for more information about workers. +See the [architecture overview](07-high-level-view.md) for more information about workers. ## Worker parallelization Airbyte exposes the following environment variable to change the maximum number of each type of worker allowed to run in parallel. diff --git a/docs/understanding-airbyte/tech-stack.md b/docs/10-understanding-airbyte/09-tech-stack.md similarity index 98% rename from docs/understanding-airbyte/tech-stack.md rename to docs/10-understanding-airbyte/09-tech-stack.md index ba69157075e6..0e31b3e1fbd3 100644 --- a/docs/understanding-airbyte/tech-stack.md +++ b/docs/10-understanding-airbyte/09-tech-stack.md @@ -38,7 +38,7 @@ JDBC makes writing reusable database connector frameworks fairly easy, saving us ### _Why are most REST API connectors written in Python?_ -Most contributors felt comfortable writing in Python, so we created a [Python CDK](../connector-development/cdk-python/) to accelerate this development. You can write a connector from scratch in any language as long as it follows the [Airbyte Specification](airbyte-protocol.md). +Most contributors felt comfortable writing in Python, so we created a [Python CDK](../connector-development/cdk-python/) to accelerate this development. You can write a connector from scratch in any language as long as it follows the [Airbyte Specification](02-airbyte-protocol.md). ### _Why did we choose to build the server with Java?_ diff --git a/docs/understanding-airbyte/cdc.md b/docs/10-understanding-airbyte/10-cdc.md similarity index 92% rename from docs/understanding-airbyte/cdc.md rename to docs/10-understanding-airbyte/10-cdc.md index 290d23a09e37..843eb9dd8d72 100644 --- a/docs/understanding-airbyte/cdc.md +++ b/docs/10-understanding-airbyte/10-cdc.md @@ -30,9 +30,9 @@ We add some metadata columns for CDC sources: ## Current Support -* [Postgres](../integrations/sources/postgres.md) \(For a quick video overview of CDC on Postgres, click [here](https://www.youtube.com/watch?v=NMODvLgZvuE&ab_channel=Airbyte)\) -* [MySQL](../integrations/sources/mysql.md) -* [Microsoft SQL Server / MSSQL](../integrations/sources/mssql.md) +* [Postgres](../02-integrations/01-sources/postgres.md) \(For a quick video overview of CDC on Postgres, click [here](https://www.youtube.com/watch?v=NMODvLgZvuE&ab_channel=Airbyte)\) +* [MySQL](../02-integrations/01-sources/mysql.md) +* [Microsoft SQL Server / MSSQL](../02-integrations/01-sources/mssql.md) ## Coming Soon diff --git a/docs/understanding-airbyte/namespaces.md b/docs/10-understanding-airbyte/11-namespaces.md similarity index 96% rename from docs/understanding-airbyte/namespaces.md rename to docs/10-understanding-airbyte/11-namespaces.md index d5deac5d12fc..2a4055648abf 100644 --- a/docs/understanding-airbyte/namespaces.md +++ b/docs/10-understanding-airbyte/11-namespaces.md @@ -35,7 +35,7 @@ If the Destination does not support namespaces, the [namespace field](https://gi ## Destination namespace configuration -As part of the [connections sync settings](connections/), it is possible to configure the namespace used by: 1. destination connectors: to store the `_airbyte_raw_*` tables. 2. basic normalization: to store the final normalized tables. 
+As part of the [connections sync settings](05-connections/README.md), it is possible to configure the namespace used by: 1. destination connectors: to store the `_airbyte_raw_*` tables. 2. basic normalization: to store the final normalized tables. Note that custom transformation outputs are not affected by the namespace settings from Airbyte: It is up to the configuration of the custom dbt project, and how it is written to handle its [custom schemas](https://docs.getdbt.com/docs/building-a-dbt-project/building-models/using-custom-schemas). The default target schema for dbt in this case, will always be the destination namespace. diff --git a/docs/understanding-airbyte/supported-data-types.md b/docs/10-understanding-airbyte/12-supported-data-types.md similarity index 100% rename from docs/understanding-airbyte/supported-data-types.md rename to docs/10-understanding-airbyte/12-supported-data-types.md diff --git a/docs/understanding-airbyte/json-avro-conversion.md b/docs/10-understanding-airbyte/13-json-avro-conversion.md similarity index 100% rename from docs/understanding-airbyte/json-avro-conversion.md rename to docs/10-understanding-airbyte/13-json-avro-conversion.md diff --git a/docs/understanding-airbyte/glossary.md b/docs/10-understanding-airbyte/14-glossary.md similarity index 84% rename from docs/understanding-airbyte/glossary.md rename to docs/10-understanding-airbyte/14-glossary.md index 187bd806a28c..f7c0b9740c26 100644 --- a/docs/understanding-airbyte/glossary.md +++ b/docs/10-understanding-airbyte/14-glossary.md @@ -5,8 +5,8 @@ The Airbyte CDK (Connector Development Kit) allows you to create connectors for Sources or Destinations. If your source or destination doesn't exist, you can use the CDK to make the building process a lot easier. It generates all the tests and files you need and all you need to do is write the connector-specific code -for your source or destination. We created one in Python which you can check out [here](../connector-development/cdk-python/) -and the Faros AI team created a Javascript/Typescript one that you can check out [here](../connector-development/cdk-faros-js.md). +for your source or destination. We created one in Python which you can check out [here](../08-connector-development/07-cdk-python/README.md) +and the Faros AI team created a Javascript/Typescript one that you can check out [here](../08-connector-development/08-cdk-faros-js.md). ### DAG @@ -24,12 +24,12 @@ the execution of software based on different cases or states. Stands for **E**xtract, **T**ransform, and **L**oad and **E**xtract, **L**oad, and **T**ransform, respectively. -**Extract**: Retrieve data from a [source](../integrations/README.md#Sources), which can be an application, database, anything really. +**Extract**: Retrieve data from a [source](../02-integrations/README.md#Sources), which can be an application, database, anything really. -**Load**: Move data to your [destination](../integrations/README.md#Destinations). +**Load**: Move data to your [destination](../02-integrations/README.md#Destinations). -**Transform**: Clean up the data. This is referred to as [normalization](basic-normalization.md) -in Airbyte and involves [deduplication](connections/incremental-deduped-history.md), changing data types, formats, and more. +**Transform**: Clean up the data. This is referred to as [normalization](04-basic-normalization.md) +in Airbyte and involves [deduplication](05-connections/04-incremental-deduped-history.md), changing data types, formats, and more. 
### Full Refresh Sync @@ -69,7 +69,7 @@ This refers to how you define the data that you can retrieve from a Source. For if you want to retrieve information from an API, the data that you can receive needs to be defined clearly so that Airbyte can have a clear expectation of what endpoints are supported and what the objects that the streams return look like. This is represented as a sort of schema -that Airbyte can interpret. Learn more [here](beginners-guide-to-catalog.md). +that Airbyte can interpret. Learn more [here](01-beginners-guide-to-catalog.md). ### Airbyte Specification @@ -81,7 +81,7 @@ This is only relevant for individuals who want to create a connector. This refers to the functions that a Source or Destination must implement to successfully retrieve data and load it, respectively. Implementing these functions using the Airbyte -Specification makes a Source or Destination work correctly. Learn more [here](airbyte-protocol.md). +Specification makes a Source or Destination work correctly. Learn more [here](02-airbyte-protocol.md). ### Temporal diff --git a/docs/10-understanding-airbyte/README.md b/docs/10-understanding-airbyte/README.md new file mode 100644 index 000000000000..805dd27db87a --- /dev/null +++ b/docs/10-understanding-airbyte/README.md @@ -0,0 +1,16 @@ +# Understand Airbyte + +- [A Beginner's Guide to the AirbyteCatalog](01-beginners-guide-to-catalog.md) +- [Airbyte protocol](02-airbyte-protocol.md) +- [Airbyte Protocol Docker Interface](03-airbyte-protocol-docker.md) +- [Basic Normalization](04-basic-normalization.md) +- [Connections and Sync Modes](05-connections/README.md) +- [Operations](06-operations.md) +- [Architecture overview](07-high-level-view.md) +- [Workers & Jobs](08-jobs.md) +- [Technical Stack](09-tech-stack.md) +- [Change Data Capture (CDC)](10-cdc.md) +- [Namespaces](11-namespaces.md) +- [Data Types in Records](12-supported-data-types.md) +- [Json to Avro Conversion for Blob Storage Destinations](13-json-avro-conversion.md) +- [Glossary](14-glossary.md) \ No newline at end of file diff --git a/docs/api-documentation.md b/docs/11-api-documentation.md similarity index 100% rename from docs/api-documentation.md rename to docs/11-api-documentation.md diff --git a/docs/12-octavia-cli.md b/docs/12-octavia-cli.md new file mode 100644 index 000000000000..7dfb630899b1 --- /dev/null +++ b/docs/12-octavia-cli.md @@ -0,0 +1,3 @@ +# CLI documentation + +CLI documentation can be found here - [Octavia CLI](https://github.com/airbytehq/airbyte/blob/master/octavia-cli/README.md) \ No newline at end of file diff --git a/docs/13-project-overview/01-roadmap.md b/docs/13-project-overview/01-roadmap.md new file mode 100644 index 000000000000..a20654f563a2 --- /dev/null +++ b/docs/13-project-overview/01-roadmap.md @@ -0,0 +1,8 @@ +--- +description: 'Here''s what''s coming in the next few days, weeks, months, and years!'
+--- + +# Roadmap + +Roadmap can be found [here](https://app.harvestr.io/roadmap/view/pQU6gdCyc/airbyte-roadmap) + diff --git a/docs/project-overview/product-release-stages.md b/docs/13-project-overview/02-product-release-stages.md similarity index 100% rename from docs/project-overview/product-release-stages.md rename to docs/13-project-overview/02-product-release-stages.md diff --git a/docs/project-overview/changelog/README.md b/docs/13-project-overview/03-Changelog/01-changelog.md similarity index 96% rename from docs/project-overview/changelog/README.md rename to docs/13-project-overview/03-Changelog/01-changelog.md index 97baf9e0fae7..dce4dd615697 100644 --- a/docs/project-overview/changelog/README.md +++ b/docs/13-project-overview/03-Changelog/01-changelog.md @@ -464,7 +464,7 @@ For this week's update, we got... a few new connectors this week in 0.29.0. We f * 🐛 Shopify source: Fixed the products schema to be in accordance with the API * 🐛 BigQuery source: No longer fails with nested array data types. -View the full release highlights here: [Platform](platform.md), [Connectors](connectors.md) +View the full release highlights here: [Platform](02-platform.md), [Connectors](03-connectors.md) And as always, thank you to our wonderful contributors: Madison Swain-Bowden, Brian Krausz, Apostol Tegko, Matej Hamas, Vladimir Remar, Oren Haliva, satishblotout, jacqueskpoty, wallies @@ -482,7 +482,7 @@ What's going on? We just released 0.28.0 and here's the main highlights. * 🐛 Square source: The send\_request method is no longer broken due to CDK changes * 🐛 MySQL destination: Does not fail on columns with JSON data now. -View the full release highlights here: [Platform](platform.md), [Connectors](connectors.md) +View the full release highlights here: [Platform](02-platform.md), [Connectors](03-connectors.md) And as always, thank you to our wonderful contributors: Mario Molina, Daniel Mateus Pires \(Earnest Research\), gunu, Ankur Adhikari, Vladimir Remar, Madison Swain-Bowden, Maksym Pavlenok, Sam Crowder, mildbyte, avida, and gaart @@ -499,7 +499,7 @@ As for our changes this week... * 🐛 GitHub source: Now uses the correct cursor field for the IssueEvents stream * 🐛 Square source: The send\_request method is no longer broken due to CDK changes -View the full release highlights here: [Platform](platform.md), [Connectors](connectors.md) +View the full release highlights here: [Platform](02-platform.md), [Connectors](03-connectors.md) As usual, thank you to our awesome community contributors this week: Oliver Meyer, Varun, Brian Krausz, shadabshaukat, Serhii Lazebnyi, Juliano Benvenuto Piovezan, mildbyte, and Sam Crowder! @@ -523,7 +523,7 @@ As usual, thank you to our awesome community contributors this week: Oliver Meye * 📚 New document on how the CDK handles schemas. * 🏗️ Python CDK: Now allows setting of network adapter args on outgoing HTTP requests. -View the full release highlights here: [Platform](platform.md), [Connectors](connectors.md) +View the full release highlights here: [Platform](02-platform.md), [Connectors](03-connectors.md) As usual, thank you to our awesome community contributors this week: gunu, P.VAD, Rodrigo Parra, Mario Molina, Antonio Grass, sabifranjo, Jaime Farres, shadabshaukat, Rodrigo Menezes, dkelwa, Jonathan Duval, and Augustin Lafanechère. @@ -544,40 +544,40 @@ _The risks and issues with upgrading Airbyte are now gone..._ Starting from next week, our weekly office hours will now become demo days! Drop by to get sneak peeks and new feature demos. 
* We added the \#careers channel, so if you're hiring, post your job reqs there! -* We added a \#understanding-airbyte channel to mirror [this](../../understanding-airbyte/) section on our docs site. Ask any questions about our architecture or protocol there. +* We added a \#understanding-airbyte channel to mirror [this](../../10-understanding-airbyte/README.md) section on our docs site. Ask any questions about our architecture or protocol there. * We added a \#contributing-to-airbyte channel. A lot of people ask us about how to contribute to the project, so ask away there! -View the full release highlights here: [Platform](platform.md), [Connectors](connectors.md) +View the full release highlights here: [Platform](02-platform.md), [Connectors](03-connectors.md) As usual, thank you to our awesome community contributors this week: Harshith Mullapudi, Michael Irvine, and [sabifranjo](https://github.com/sabifranjo). ## 06/24/2021 Summary -* New Source: [IBM Db2](../../integrations/sources/db2.md) +* New Source: [IBM Db2](../../02-integrations/01-sources/db2.md) * 💎 We now support Avro and JSONL output for our S3 destination! 💎 * 💎 Brand new BigQuery destination flavor that now supports denormalized STRUCT types. * ✨ Looker source now supports self-hosted instances. * ✨ Facebook Marketing source is now migrated to the CDK, massively improving async job performance and error handling. -View the full connector release notes [here](connectors.md). +View the full connector release notes [here](03-connectors.md). As usual, thank you to some of our awesome community contributors this week: Harshith Mullapudi, Tyler DeLange, Daniel Mateus Pires, EdBizarro, Tyler Schroeder, and Konrad Schlatte! ## 06/18/2021 Summary -* New Source: [Snowflake](../../integrations/sources/snowflake.md) +* New Source: [Snowflake](../../02-integrations/01-sources/snowflake.md) * 💎 We now support custom dbt transformations! 💎 * ✨ We now support configuring your destination namespace at the table level when setting up a connection! * ✨ The S3 destination now supports Minio S3 and Parquet output! -View the full release notes here: [Platform](platform.md), [Connectors](connectors.md) +View the full release notes here: [Platform](02-platform.md), [Connectors](03-connectors.md) As usual, thank you to some of our awesome community contributors this week: Tyler DeLange, Mario Molina, Rodrigo Parra, Prashanth Patali, Christopher Wu, Itai Admi, Fred Reimer, and Konrad Schlatte! ## 06/10/2021 Summary -* New Destination: [S3!!](../../integrations/destinations/s3.md) -* New Sources: [Harvest](../../integrations/sources/harvest.md), [Amplitude](../../integrations/sources/amplitude.md), [Posthog](../../integrations/sources/posthog.md) +* New Destination: [S3!!](../../02-integrations/02-destinations/s3.md) +* New Sources: [Harvest](../../02-integrations/01-sources/harvest.md), [Amplitude](../../02-integrations/01-sources/amplitude.md), [Posthog](../../02-integrations/01-sources/posthog.md) * 🐛 Ensure that logs from threads created by replication workers are added to the log file. * 🐛 Handle TINYINT\(1\) and BOOLEAN correctly and fix target file comparison for MySQL CDC. * Jira source: now supports all available entities in Jira Cloud. @@ -589,7 +589,7 @@ As usual, thank you to some of our awesome community contributors this week: Tyl * Added JDBC source connector bootstrap template. * Added Java destination generator. 
-View the full release notes highlights here: [Platform](platform.md), [Connectors](connectors.md) +View the full release notes highlights here: [Platform](02-platform.md), [Connectors](03-connectors.md) As usual, thank you to some of our awesome community contributors this week \(I've noticed that we've had more contributors to our docs, which we really appreciate\). Ping, Harshith Mullapudi, Michael Irvine, Matheus di Paula, jacqueskpoty and P.VAD. @@ -597,8 +597,8 @@ As usual, thank you to some of our awesome community contributors this week \(I' Airbyte is comprised of 2 parts: -* Platform \(The scheduler, workers, api, web app, and the Airbyte protocol\). Here is the [changelog for Platform](platform.md). -* Connectors that run in Docker containers. Here is the [changelog for the connectors](connectors.md). +* Platform \(The scheduler, workers, api, web app, and the Airbyte protocol\). Here is the [changelog for Platform](02-platform.md). +* Connectors that run in Docker containers. Here is the [changelog for the connectors](03-connectors.md). ## Airbyte Platform Releases diff --git a/docs/project-overview/changelog/platform.md b/docs/13-project-overview/03-Changelog/02-platform.md similarity index 98% rename from docs/project-overview/changelog/platform.md rename to docs/13-project-overview/03-Changelog/02-platform.md index d27e9246e338..a682b506c71d 100644 --- a/docs/project-overview/changelog/platform.md +++ b/docs/13-project-overview/03-Changelog/02-platform.md @@ -4,7 +4,7 @@ description: Be sure to not miss out on new features and improvements! # Platform -This is the changelog for Airbyte Platform. For our connector changelog, please visit our [Connector Changelog](connectors.md) page. +This is the changelog for Airbyte Platform. For our connector changelog, please visit our [Connector Changelog](03-connectors.md) page. ## [20-12-2021 - 0.32.5](https://github.com/airbytehq/airbyte/releases/tag/v0.32.5-alpha) * Add an endpoint that specify that the feedback have been given after the first sync. @@ -27,7 +27,7 @@ This is the changelog for Airbyte Platform. For our connector changelog, please * Enhance API for use by cloud to provide per-connector billing info. ## [11-12-2021 - 0.32.0](https://github.com/airbytehq/airbyte/releases/tag/v0.32.0-alpha) -* This is a **MAJOR** version update. You need to [update to this version](../../operator-guides/upgrading-airbyte.md#mandatory-intermediate-upgrade) before updating to any version newer than `0.32.0` +* This is a **MAJOR** version update. You need to [update to this version](../../06-operator-guides/01-upgrading-airbyte.md#mandatory-intermediate-upgrade) before updating to any version newer than `0.32.0` ## [11-11-2021 - 0.31.0](https://github.com/airbytehq/airbyte/releases/tag/v0.31.0-alpha) * No major changes to Airbyte Core. @@ -442,7 +442,7 @@ This interim patch period mostly contained stability changes for Airbyte Cloud, * **Incremental - Append"** * We now allow sources to replicate only new or modified data. This enables to avoid re-fetching data that you have already replicated from a source. * The delta from a sync will be _appended_ to the existing data in the data warehouse. - * Here are [all the details of this feature](../../understanding-airbyte/connections/incremental-append.md). + * Here are [all the details of this feature](../../10-understanding-airbyte/05-connections/03-incremental-append.md). 
* It has been released for 15 connectors, including Postgres, MySQL, Intercom, Zendesk, Stripe, Twilio, Marketo, Shopify, GitHub, and all the destination connectors. We will expand it to all the connectors in the next couple of weeks. * **Other features:** * Improve interface for writing python sources \(should make writing new python sources easier and clearer\). diff --git a/docs/project-overview/changelog/connectors.md b/docs/13-project-overview/03-Changelog/03-connectors.md similarity index 91% rename from docs/project-overview/changelog/connectors.md rename to docs/13-project-overview/03-Changelog/03-connectors.md index 780517848b44..ff236fc24a88 100644 --- a/docs/project-overview/changelog/connectors.md +++ b/docs/13-project-overview/03-Changelog/03-connectors.md @@ -691,7 +691,7 @@ Other progress on connectors: ## 01/19/2021 -* **Our new** [**Connector Health Grade**](../../integrations/) **page** +* **Our new** [**Connector Health Grade**](../../02-integrations/README.md) **page** * **1 new source:** App Store \(thanks to [@Muriloo](https://github.com/Muriloo)\) * Fixes on connectors: * Bug fix writing boolean columns to Redshift @@ -701,14 +701,14 @@ Other progress on connectors: ## 01/12/2021 * **1 new source:** Tempo \(thanks to [@thomasvl](https://github.com/thomasvl)\) -* **Incremental support for 3 new source connectors:** [Salesforce](../../integrations/sources/salesforce.md), [Slack](../../integrations/sources/slack.md) and [Braintree](../../integrations/sources/braintree.md) +* **Incremental support for 3 new source connectors:** [Salesforce](../../02-integrations/01-sources/salesforce.md), [Slack](../../02-integrations/01-sources/slack.md) and [Braintree](../../02-integrations/01-sources/braintree.md) * Fixes on connectors: * Fix a bug in MSSQL and Redshift source connectors where custom SQL types weren't being handled correctly. 
* Improvement of the Snowflake connector from [@hudsondba](https://github.com/hudsondba) \(batch size and timeout sync\) ## 01/05/2021 -* **Incremental support for 2 new source connectors:** [Mixpanel](../../integrations/sources/mixpanel.md) and [HubSpot](../../integrations/sources/hubspot.md) +* **Incremental support for 2 new source connectors:** [Mixpanel](../../02-integrations/01-sources/mixpanel.md) and [HubSpot](../../02-integrations/01-sources/hubspot.md) * Fixes on connectors: * Fixed a bug in the github connector where the connector didn’t verify the provided API token was granted the correct permissions * Fixed a bug in the Google sheets connector where rate limits were not always respected @@ -716,64 +716,64 @@ Other progress on connectors: ## 12/30/2020 -**New sources:** [Plaid](../../integrations/sources/plaid.md) \(contributed by [tgiardina](https://github.com/tgiardina)\), [Looker](../../integrations/sources/looker.md) +**New sources:** [Plaid](../../02-integrations/01-sources/plaid.md) \(contributed by [tgiardina](https://github.com/tgiardina)\), [Looker](../../02-integrations/01-sources/looker.md) ## 12/18/2020 -**New sources:** [Drift](../../integrations/sources/drift.md), [Microsoft Teams](../../integrations/sources/microsoft-teams.md) +**New sources:** [Drift](../../02-integrations/01-sources/drift.md), [Microsoft Teams](../../02-integrations/01-sources/microsoft-teams.md) ## 12/10/2020 -**New sources:** [Intercom](../../integrations/sources/intercom.md), [Mixpanel](../../integrations/sources/mixpanel.md), [Jira Cloud](../../integrations/sources/jira.md), [Zoom](../../integrations/sources/zoom.md) +**New sources:** [Intercom](../../02-integrations/01-sources/intercom.md), [Mixpanel](../../02-integrations/01-sources/mixpanel.md), [Jira Cloud](../../02-integrations/01-sources/jira.md), [Zoom](../../02-integrations/01-sources/zoom.md) ## 12/07/2020 -**New sources:** [Slack](../../integrations/sources/slack.md), [Braintree](../../integrations/sources/braintree.md), [Zendesk Support](../../integrations/sources/zendesk-support.md) +**New sources:** [Slack](../../02-integrations/01-sources/slack.md), [Braintree](../../02-integrations/01-sources/braintree.md), [Zendesk Support](../../02-integrations/01-sources/zendesk-support.md) ## 12/04/2020 -**New sources:** [Redshift](../../integrations/sources/redshift.md), [Greenhouse](../../integrations/sources/greenhouse.md) **New destination:** [Redshift](../../integrations/destinations/redshift.md) +**New sources:** [Redshift](../../02-integrations/01-sources/redshift.md), [Greenhouse](../../02-integrations/01-sources/greenhouse.md) **New destination:** [Redshift](../../02-integrations/02-destinations/redshift.md) ## 11/30/2020 -**New sources:** [Freshdesk](../../integrations/sources/freshdesk.md), [Twilio](../../integrations/sources/twilio.md) +**New sources:** [Freshdesk](../../02-integrations/01-sources/freshdesk.md), [Twilio](../../02-integrations/01-sources/twilio.md) ## 11/25/2020 -**New source:** [Recurly](../../integrations/sources/recurly.md) +**New source:** [Recurly](../../02-integrations/01-sources/recurly.md) ## 11/23/2020 -**New source:** [Sendgrid](../../integrations/sources/sendgrid.md) +**New source:** [Sendgrid](../../02-integrations/01-sources/sendgrid.md) ## 11/18/2020 -**New source:** [Mailchimp](../../integrations/sources/mailchimp.md) +**New source:** [Mailchimp](../../02-integrations/01-sources/mailchimp.md) ## 11/13/2020 -**New source:** [MSSQL](../../integrations/sources/mssql.md) +**New source:** 
[MSSQL](../../02-integrations/01-sources/mssql.md) ## 11/11/2020 -**New source:** [Shopify](../../integrations/sources/shopify.md) +**New source:** [Shopify](../../02-integrations/01-sources/shopify.md) ## 11/09/2020 -**New sources:** [Files \(CSV, JSON, HTML...\)](../../integrations/sources/file.md) +**New sources:** [Files \(CSV, JSON, HTML...\)](../../02-integrations/01-sources/file.md) ## 11/04/2020 -**New sources:** [Facebook Ads](connectors.md), [Google Ads](../../integrations/sources/google-ads.md), [Marketo](../../integrations/sources/marketo.md) **New destination:** [Snowflake](../../integrations/destinations/snowflake.md) +**New sources:** [Facebook Ads](03-connectors.md), [Google Ads](../../02-integrations/01-sources/google-ads.md), [Marketo](../../02-integrations/01-sources/marketo.md) **New destination:** [Snowflake](../../02-integrations/02-destinations/snowflake.md) ## 10/30/2020 -**New sources:** [Salesforce](../../integrations/sources/salesforce.md), Google Analytics, [HubSpot](../../integrations/sources/hubspot.md), [GitHub](../../integrations/sources/github.md), [Google Sheets](../../integrations/sources/google-sheets.md), [Rest APIs](connectors.md), and [MySQL](../../integrations/sources/mysql.md) +**New sources:** [Salesforce](../../02-integrations/01-sources/salesforce.md), Google Analytics, [HubSpot](../../02-integrations/01-sources/hubspot.md), [GitHub](../../02-integrations/01-sources/github.md), [Google Sheets](../../02-integrations/01-sources/google-sheets.md), [Rest APIs](03-connectors.md), and [MySQL](../../02-integrations/01-sources/mysql.md) ## 10/21/2020 -**New destinations:** we built our own connectors for [BigQuery](../../integrations/destinations/bigquery.md) and [Postgres](../../integrations/destinations/postgres.md), to ensure they are of the highest quality. +**New destinations:** we built our own connectors for [BigQuery](../../02-integrations/02-destinations/bigquery.md) and [Postgres](../../02-integrations/02-destinations/postgres.md), to ensure they are of the highest quality. 
## 09/23/2020 -**New sources:** [Stripe](../../integrations/sources/stripe.md), [Postgres](../../integrations/sources/postgres.md) **New destinations:** [BigQuery](../../integrations/destinations/bigquery.md), [Postgres](../../integrations/destinations/postgres.md), [local CSV](../../integrations/destinations/local-csv.md) +**New sources:** [Stripe](../../02-integrations/01-sources/stripe.md), [Postgres](../../02-integrations/01-sources/postgres.md) **New destinations:** [BigQuery](../../02-integrations/02-destinations/bigquery.md), [Postgres](../../02-integrations/02-destinations/postgres.md), [local CSV](../../02-integrations/02-destinations/local-csv.md) diff --git a/docs/project-overview/slack-code-of-conduct.md b/docs/13-project-overview/04-slack-code-of-conduct.md similarity index 100% rename from docs/project-overview/slack-code-of-conduct.md rename to docs/13-project-overview/04-slack-code-of-conduct.md diff --git a/docs/project-overview/security.md b/docs/13-project-overview/05-security.md similarity index 100% rename from docs/project-overview/security.md rename to docs/13-project-overview/05-security.md diff --git a/docs/13-project-overview/06-airbyte-repo.md b/docs/13-project-overview/06-airbyte-repo.md new file mode 100644 index 000000000000..c222dd1b7253 --- /dev/null +++ b/docs/13-project-overview/06-airbyte-repo.md @@ -0,0 +1,3 @@ +# Airbyte Repository + +Airbyte Repository can be found [here](https://github.com/airbytehq/airbyte) \ No newline at end of file diff --git a/docs/project-overview/licenses/license-faq.md b/docs/13-project-overview/07-licenses/01-license-faq.md similarity index 99% rename from docs/project-overview/licenses/license-faq.md rename to docs/13-project-overview/07-licenses/01-license-faq.md index 837ae5a5fd3d..5862aa9b0226 100644 --- a/docs/project-overview/licenses/license-faq.md +++ b/docs/13-project-overview/07-licenses/01-license-faq.md @@ -17,7 +17,7 @@ ELv2 is a simple, non-copyleft license, allowing for the right to “use, copy, In case you want to work with Airbyte without these limitations, we offer alternative licenses. These licenses include maintenance, support, and customary commercial terms. If you need a different license, please get in touch with us at: contact@airbyte.io. -[View License](elv2-license.md) +[View License](02-elv2-license.md) ## FAQ ### What limitations does ELv2 impose on my use of Airbyte? 
diff --git a/docs/project-overview/licenses/elv2-license.md b/docs/13-project-overview/07-licenses/02-elv2-license.md similarity index 100% rename from docs/project-overview/licenses/elv2-license.md rename to docs/13-project-overview/07-licenses/02-elv2-license.md diff --git a/docs/project-overview/licenses/mit-license.md b/docs/13-project-overview/07-licenses/03-mit-license.md similarity index 100% rename from docs/project-overview/licenses/mit-license.md rename to docs/13-project-overview/07-licenses/03-mit-license.md diff --git a/docs/project-overview/licenses/examples.md b/docs/13-project-overview/07-licenses/04-examples.md similarity index 100% rename from docs/project-overview/licenses/examples.md rename to docs/13-project-overview/07-licenses/04-examples.md diff --git a/docs/project-overview/licenses/README.md b/docs/13-project-overview/07-licenses/README.md similarity index 86% rename from docs/project-overview/licenses/README.md rename to docs/13-project-overview/07-licenses/README.md index 6601c9e166e2..504d78d6ca83 100644 --- a/docs/project-overview/licenses/README.md +++ b/docs/13-project-overview/07-licenses/README.md @@ -9,9 +9,9 @@ The license for a particular work is defined with following prioritized rules: 3. First LICENSE found when exploring parent directories up to the project top level directory 4. Defaults to Elastic License 2.0 -If you have any question regarding licenses, just visit our [FAQ](license-faq.md) or [contact us](mailto:license@airbyte.io). +If you have any questions regarding licenses, just visit our [FAQ](01-license-faq.md) or [contact us](mailto:license@airbyte.io). -If you want to see a list of examples supported by ELv2, and not, to have a better understanding whether you should be concerned or not, check the [examples](examples.md). +If you want to see a list of examples of what is and is not permitted under ELv2, to better understand whether you should be concerned, check the [examples](04-examples.md). **TL;DR:** Unless you want to host Airbyte yourself and sell it as an ELT/ETL tool, or to sell a product that directly exposes Airbyte’s UI or API, you should be good to go!
diff --git a/docs/13-project-overview/README.md b/docs/13-project-overview/README.md new file mode 100644 index 000000000000..61b51428d314 --- /dev/null +++ b/docs/13-project-overview/README.md @@ -0,0 +1,9 @@ +# Project Overview + + - [Roadmap](https://app.harvestr.io/roadmap/view/pQU6gdCyc/airbyte-roadmap) + - [Product Release Stages](02-product-release-stages.md) + - [Changelog](03-Changelog/01-changelog.md) + - [Slack Code of Conduct](04-slack-code-of-conduct.md) + - [Security and Data Privacy at Airbyte](05-security.md) + - [Airbyte Repository](https://github.com/airbytehq/airbyte) + - [Licenses](07-licenses/README.md) diff --git a/docs/archive/examples/README.md b/docs/archive/examples/README.md deleted file mode 100644 index e62ee1c8eb21..000000000000 --- a/docs/archive/examples/README.md +++ /dev/null @@ -1,2 +0,0 @@ -# Example Use Cases - diff --git a/docs/archive/examples/build-a-slack-activity-dashboard.md b/docs/archive/examples/build-a-slack-activity-dashboard.md deleted file mode 100644 index f1a1969c7299..000000000000 --- a/docs/archive/examples/build-a-slack-activity-dashboard.md +++ /dev/null @@ -1,424 +0,0 @@ ---- -description: Using Airbyte and Apache Superset ---- - -# Build a Slack Activity Dashboard - -![](../../.gitbook/assets/46.png) - -This article will show how to use [Airbyte](http://airbyte.io) - an open-source data integration platform - and [Apache Superset](https://superset.apache.org/) - an open-source data exploration platform - in order to build a Slack activity dashboard showing: - -* Total number of members of a Slack workspace -* The evolution of the number of Slack workspace members -* Evolution of weekly messages -* Evolution of messages per channel -* Members per time zone - -Before we get started, let’s take a high-level look at how we are going to achieve creating a Slack dashboard using Airbyte and Apache Superset. - -1. We will use the Airbyte’s Slack connector to get the data off a Slack workspace \(we will be using Airbyte’s own Slack workspace for this tutorial\). -2. We will save the data onto a PostgreSQL database. -3. Finally, using Apache Superset, we will implement the various metrics we care about. - -Got it? Now let’s get started. - -## 1. Replicating Data from Slack to Postgres with Airbyte - -### a. Deploying Airbyte - -There are several easy ways to deploy Airbyte, as listed [here](https://docs.airbyte.io/). For this tutorial, I will just use the [Docker Compose method](https://docs.airbyte.io/deploying-airbyte/local-deployment) from my workstation: - -```text -# In your workstation terminal -git clone https://github.com/airbytehq/airbyte.git -cd airbyte -docker-compose up -``` - -The above command will make the Airbyte app available on `localhost:8000`. Visit the URL on your favorite browser, and you should see Airbyte’s dashboard \(if this is your first time, you will be prompted to enter your email to get started\). - -If you haven’t set Docker up, follow the [instructions here](https://docs.docker.com/desktop/) to set it up on your machine. - -### b. Setting Up Airbyte’s Slack Source Connector - -Airbyte’s Slack connector will give us access to the data. So, we are going to kick things off by setting this connector to be our data source in Airbyte’s web app. I am assuming you already have Airbyte and Docker set up on your local machine. We will be using Docker to create our PostgreSQL database container later on. - -Now, let’s proceed. 
If you already went through the onboarding, click on the “new source” button at the top right of the Sources section. If you're going through the onboarding, then follow the instructions. - -You will be requested to enter a name for the source you are about to create. You can call it “slack-source”. Then, in the Source Type combo box, look for “Slack,” and then select it. Airbyte will then present the configuration fields needed for the Slack connector. So you should be seeing something like this on the Airbyte App: - -![](../../.gitbook/assets/1.png) - -The first thing you will notice is that this connector requires a Slack token. So, we have to obtain one. If you are not a workspace admin, you will need to ask for permission. - -Let’s walk through how we would get the Slack token we need. - -Assuming you are a workspace admin, open the Slack workspace and navigate to \[Workspace Name\] > Administration > Customize \[Workspace Name\]. In our case, it will be Airbyte > Administration > Customize Airbyte \(as shown below\): - -![](../../.gitbook/assets/2.png) - -In the new page that opens up in your browser, you will then need to navigate to **Configure apps**. - -![](../../.gitbook/assets/3.png) - -In the new window that opens up, click on **Build** in the top right corner. - -![](../../.gitbook/assets/4.png) - -Click on the **Create an App** button. - -![](../../.gitbook/assets/5.png) - -In the modal form that follows, give your app a name - you can name it `airbyte_superset`, then select your workspace from the Development Slack Workspace. - -![](../../.gitbook/assets/6.png) - -Next, click on the **Create App** button. You will then be presented with a screen where we are going to set permissions for our `airbyte_superset` app, by clicking on the **Permissions** button on this page. - -![](../../.gitbook/assets/7.png) - -In the next screen, navigate to the scope section. Then, click on the **Add an OAuth Scope** button. This will allow you to add permission scopes for your app. At a minimum, your app should have the following permission scopes: - -![](../../.gitbook/assets/8.png) - -Then, we are going to add our created app to the workspace by clicking the **Install to Workspace** button. - -![](../../.gitbook/assets/9.png) - -Slack will prompt you that your app is requesting permission to access your workspace of choice. Click Allow. - -![](../../.gitbook/assets/10.png) - -After the app has been successfully installed, you will be navigated to Slack’s dashboard, where you will see the Bot User OAuth Access Token. - -This is the token you will provide back on the Airbyte page, where we dropped off to obtain this token. So make sure to copy it and keep it in a safe place. - -Now that we are done with obtaining a Slack token, let’s go back to the Airbyte page we dropped off and add the token in there. - -We will also need to provide Airbyte with `start_date`. This is the date from which we want Airbyte to start replicating data from the Slack API, and we define that in the format: `YYYY-MM-DDT00:00:00Z`. - -We will specify ours as `2020-09-01T00:00:00Z`. We will also tell Airbyte to exclude archived channels and not include private channels, and also to join public channels, so the latter part of the form should look like this: - -![](../../.gitbook/assets/11.png) - -Finally, click on the **Set up source** button for Airbyte to set the Slack source up. 
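If Airbyte reports that it cannot connect, the token is the usual culprit. As an optional sanity check (not part of the original setup steps), you can verify the Bot User OAuth Access Token directly against Slack's `auth.test` endpoint before pasting it into Airbyte; the token value below is a placeholder.

```bash
# Replace the placeholder with the Bot User OAuth Access Token you copied above.
curl -s -H "Authorization: Bearer xoxb-your-token-here" https://slack.com/api/auth.test
# A valid token returns {"ok":true,...}; an invalid one returns {"ok":false,"error":"invalid_auth"}.
```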
- -If the source was set up correctly, you will be taken to the destination section of Airbyte’s dashboard, where you will tell Airbyte where to store the replicated data. - -### c. Setting Up Airbyte’s Postgres Destination Connector - -For our use case, we will be using PostgreSQL as the destination. - -Click the **add destination** button in the top right corner, then click on **add a new destination**. - -![](../../.gitbook/assets/12.png) - -In the next screen, Airbyte will validate the source, and then present you with a form to give your destination a name. We’ll call this destination slack-destination. Then, we will select the Postgres destination type. Your screen should look like this now: - -![](../../.gitbook/assets/13.png) - -Great! We have a form to enter Postgres connection credentials, but we haven’t set up a Postgres database. Let’s do that! - -Since we already have Docker installed, we can spin off a Postgres container with the following command in our terminal: - -```text -docker run --rm --name slack-db -e POSTGRES_PASSWORD=password -p 2000:5432 -d postgres -``` - -\(Note that the Docker compose file for Superset ships with a Postgres database, as you can see [here](https://github.com/apache/superset/blob/master/docker-compose.yml#L40)\). - -The above command will do the following: - -* create a Postgres container with the name slack-db, -* set the password to password, -* expose the container’s port 5432, as our machine’s port 2000. -* create a database and a user, both called postgres. - -With this, we can go back to the Airbyte screen and supply the information needed. Your form should look like this: - -![](../../.gitbook/assets/14.png) - -Then click on the **Set up destination** button. - -### d. Setting Up the Replication - -You should now see the following screen: - -![](../../.gitbook/assets/15.png) - -Airbyte will then fetch the schema for the data coming from the Slack API for your workspace. You should leave all boxes checked and then choose the sync frequency - this is the interval in which Airbyte will sync the data coming from your workspace. Let’s set the sync interval to every 24 hours. - -Then click on the **Set up connection** button. - -Airbyte will now take you to the destination dashboard, where you will see the destination you just set up. Click on it to see more details about this destination. - -![](../../.gitbook/assets/16.png) - -You will see Airbyte running the very first sync. Depending on the size of the data Airbyte is replicating, it might take a while before syncing is complete. - -![](../../.gitbook/assets/17.png) - -When it’s done, you will see the **Running status** change to **Succeeded**, and the size of the data Airbyte replicated as well as the number of records being stored on the Postgres database. - -![](../../.gitbook/assets/18.png) - -To test if the sync worked, run the following in your terminal: - -```text -docker exec slack-source psql -U postgres -c "SELECT * FROM public.users;" -``` - -This should output the rows in the users’ table. - -To get the count of the users’ table as well, you can also run: - -```text -docker exec slack-db psql -U postgres -c "SELECT count(*) FROM public.users;" -``` - -Now that we have the data from the Slack workspace in our Postgres destination, we will head on to creating the Slack dashboard with Apache Superset. - -## 2. Setting Up Apache Superset for the Dashboards - -### a. 
Installing Apache Superset - -Apache Superset, or simply Superset, is a modern data exploration and visualization platform. To get started using it, we will be cloning the Superset repo. Navigate to a destination in your terminal where you want to clone the Superset repo to and run: - -```text -git clone https://github.com/apache/superset.git -``` - -It’s recommended to check out the latest branch of Superset, so run: - -```text -cd superset -``` - -And then run: - -```text -git checkout latest -``` - -Superset needs you to install and build its frontend dependencies and assets. So, we will start by installing the frontend dependencies: - -```text -npm install -``` - -Note: The above command assumes you have both Node and NPM installed on your machine. - -Finally, for the frontend, we will build the assets by running: - -```text -npm run build -``` - -After that, go back up one directory into the Superset directory by running: - -```text -cd.. -``` - -Then run: - -```text -docker-compose up -``` - -This will download the Docker images Superset needs and build containers and start services Superset needs to run locally on your machine. - -Once that’s done, you should be able to access Superset on your browser by visiting [`http://localhost:8088`](http://localhost:8088), and you should be presented with the Superset login screen. - -Enter username: **admin** and Password: **admin** to be taken to your Superset dashboard. - -Great! You’ve got Superset set up. Now let’s tell Superset about our Postgres Database holding the Slack data from Airbyte. - -### b. Setting Up a Postgres Database in Superset - -To do this, on the top menu in your Superset dashboard, hover on the Data dropdown and click on **Databases**. - -![](../../.gitbook/assets/19.png) - -In the page that opens up, click on the **+ Database** button in the top right corner. - -![](../../.gitbook/assets/20.png) - -Then, you will be presented with a modal to add your Database Name and the connection URI. - -![](../../.gitbook/assets/21.png) - -Let’s call our Database `slack_db`, and then add the following URI as the connection URI: - -```text -postgresql://postgres:password@docker.for.mac.localhost:2000/postgres -``` - -If you are on a Windows Machine, yours will be: - -```text -postgresql://postgres:password@docker.for.win.localhost:2000/postgres -``` - -Note: We are using `docker.for.[mac|win].localhost` in order to access the localhost of your machine, because using just localhost will point to the Docker container network and not your machine’s network. - -Your Superset UI should look like this: - -![](../../.gitbook/assets/22.png) - -We will need to enable some settings on this connection. Click on the **SQL LAB SETTINGS** and check the following boxes: - -![](../../.gitbook/assets/23.png) - -Afterwards, click on the **ADD** button, and you will see your database on the data page of Superset. - -![](../../.gitbook/assets/24.png) - -### c. Importing our dataset - -Now that you’ve added the database, you will need to hover over the data menu again; now click on **Datasets**. - -![](../../.gitbook/assets/25.png) - -Then, you will be taken to the datasets page: - -![](../../.gitbook/assets/26.png) - -We want to only see the datasets that are in our `slack_db` database, so in the Database that is currently showing All, select `slack_db` and you will see that we don’t have any datasets at the moment. 
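Before adding datasets, it can help to see exactly which tables Airbyte created, so you know what to add. This is an optional check that assumes the `slack-db` container started earlier is still running.

```bash
# List the tables Airbyte wrote to the public schema of the destination database.
docker exec slack-db psql -U postgres -c "\dt public.*"
```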
- -![](../../.gitbook/assets/27.png) - -![](../../.gitbook/assets/28.png) - -You can fix this by clicking on the **+ DATASET** button and adding the following datasets. - -Note: Make sure you select the public schema under the Schema dropdown. - -![](../../.gitbook/assets/29.png) - -Now that we have set up Superset and given it our Slack data, let’s proceed to creating the visualizations we need. - -Still remember them? Here they are again: - -* Total number of members of a Slack workspace -* The evolution of the number of Slack workspace members -* Evolution of weekly messages -* Evolution of weekly threads created -* Evolution of messages per channel -* Members per time zone - -## 3. Creating Our Dashboards with Superset - -### a. Total number of members of a Slack workspace - -To get this, we will first click on the users’ dataset of our `slack_db` on the Superset dashboard. - -![](../../.gitbook/assets/30.png) - -Next, change **untitled** at the top to **Number of Members**. - -![](../../.gitbook/assets/31.png) - -Now change the **Visualization Type** to **Big Number,** remove the **Time Range** filter, and add a Subheader named “Slack Members.” So your UI should look like this: - -![](../../.gitbook/assets/32.png) - -Then, click on the **RUN QUERY** button, and you should now see the total number of members. - -Pretty cool, right? Now let’s save this chart by clicking on the **SAVE** button. - -![](../../.gitbook/assets/33.png) - -Then, in the **ADD TO DASHBOARD** section, type in “Slack Dashboard”, click on the “Create Slack Dashboard” button, and then click the **Save** button. - -Great! We have successfully created our first Chart, and we also created the Dashboard. Subsequently, we will be following this flow to add the other charts to the created Slack Dashboard. - -### b. Casting the ts column - -Before we proceed with the rest of the charts for our dashboard, if you inspect the **ts** column on either the **messages** table or the **threads** table, you will see it’s of the type `VARCHAR`. We can’t really use this for our charts, so we have to cast both the **messages** and **threads**’ **ts** column as `TIMESTAMP`. Then, we can create our charts from the results of those queries. Let’s do this. - -First, navigate to the **Data** menu, and click on the **Datasets** link. In the list of datasets, click the **Edit** button for the **messages** table. - -![](../../.gitbook/assets/34.png) - -You’re now in the Edit Dataset view. Click the **Lock** button to enable editing of the dataset. Then, navigate to the **Columns** tab, expand the **ts** dropdown, and then tick the **Is Temporal** box. - -![](../../.gitbook/assets/35.png) - -Persist the changes by clicking the Save button. - -### c. The evolution of the number of Slack workspace members - -In the exploration page, let’s first get the chart showing the evolution of the number of Slack members. To do this, make your settings on this page match the screenshot below: - -![](../../.gitbook/assets/36.png) - -Save this chart onto the Slack Dashboard. - -### d. Evolution of weekly messages posted - -Now, we will look at the evolution of weekly messages posted. Let’s configure the chart settings on the same page as the previous one. - -![](../../.gitbook/assets/37.png) - -Remember, your visualization will differ based on the data you have. - -### e. Evolution of weekly threads created - -Now, we are finished with creating the message chart. Let's go over to the thread chart. 
You will recall that we will need to cast the **ts** column as stated earlier. So, do that and get to the exploration page, and make it match the screenshot below to achieve the required visualization: - -![](../../.gitbook/assets/38.png) - -### f. Evolution of messages per channel - -For this visualization, we will need a more complex SQL query. Here’s the query we used \(as you can see in the screenshot below\): - -```text -SELECT CAST(m.ts as TIMESTAMP), c.name, m.text -FROM public.messages m -INNER JOIN public.channels c -ON m.channel_id = c_id -``` - -![](../../.gitbook/assets/39.png) - -Next, click on **EXPLORE** to be taken to the exploration page; make it match the screenshot below: - -![](../../.gitbook/assets/40.png) - -Save this chart to the dashboard. - -### g. Members per time zone - -Finally, we will be visualizing members per time zone. To do this, instead of casting in the SQL lab as we’ve previously done, we will explore another method to achieve casting by using Superset’s Virtual calculated column feature. This feature allows us to write SQL queries that customize the appearance and behavior of a specific column. - -For our use case, we will need the updated column of the users table to be a `TIMESTAMP`, in order to perform the visualization we need for Members per time zone. Let’s start on clicking the edit icon on the users table in Superset. - -![](../../.gitbook/assets/41.png) - -You will be presented with a modal like so: - -![](../../.gitbook/assets/42.png) - -Click on the **CALCULATED COLUMNS** tab: - -![](../../.gitbook/assets/43.png) - -Then, click on the **+ ADD ITEM** button, and make your settings match the screenshot below. - -![](../../.gitbook/assets/44.png) - -Then, go to the **exploration** page and make it match the settings below: - -![](../../.gitbook/assets/45.png) - -Now save this last chart, and head over to your Slack Dashboard. It should look like this: - -![](../../.gitbook/assets/46.png) - -Of course, you can edit how the dashboard looks to fit what you want on it. - -## Conclusion - -In this article, we looked at using Airbyte’s Slack connector to get the data from a Slack workspace into a Postgres database, and then used Apache Superset to craft a dashboard of visualizations.If you have any questions about Airbyte, don’t hesitate to ask questions on our [Slack](https://slack.airbyte.io)! If you have questions about Superset, you can join the [Superset Community Slack](https://superset.apache.org/community/)! - diff --git a/docs/archive/examples/postgres-replication.md b/docs/archive/examples/postgres-replication.md deleted file mode 100644 index dbb2edf78570..000000000000 --- a/docs/archive/examples/postgres-replication.md +++ /dev/null @@ -1,116 +0,0 @@ ---- -description: Start syncing data in minutes with Airbyte ---- - -# Postgres Replication - -Let's see how you can spin up a local instance of Airbyte and syncing data from one Postgres database to another. - -Here's a 6-minute video showing you how you can do it. - -{% embed url="https://www.youtube.com/watch?v=Rcpt5SVsMpk" caption="" %} - -First of all, make sure you have Docker and Docker Compose installed. If this isn't the case, follow the [guide](../../deploying-airbyte/local-deployment.md) for the recommended approach to install Docker. 
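A quick way to confirm both are available before continuing (the exact versions reported will differ on your machine):

```bash
# Each command should print a version string; an error means the tool is not installed.
docker --version
docker-compose --version
```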
- -Once Docker is installed successfully, run the following commands: - -```text -git clone https://github.com/airbytehq/airbyte.git -cd airbyte -docker-compose up -``` - -Once you see an Airbyte banner, the UI is ready to go at [http://localhost:8000/](http://localhost:8000/). - -## 1. Set up your preferences - -You should see an onboarding page. Enter your email and continue. - -![](../../.gitbook/assets/airbyte_get-started.png) - -## 2. Set up your first connection - -We support a growing [list of source connectors](../../integrations/sources). For now, we will start out with a Postgres source and destination. - -**If you don't have a readily available Postgres database to sync, here are some quick instructions:** -Run the following commands in a new terminal window to start backgrounded source and destination databases: - -```text -docker run --rm --name airbyte-source -e POSTGRES_PASSWORD=password -p 2000:5432 -d postgres -docker run --rm --name airbyte-destination -e POSTGRES_PASSWORD=password -p 3000:5432 -d postgres -``` - -Add a table with a few rows to the source database: - -```text -docker exec -it airbyte-source psql -U postgres -c "CREATE TABLE users(id SERIAL PRIMARY KEY, col1 VARCHAR(200));" -docker exec -it airbyte-source psql -U postgres -c "INSERT INTO public.users(col1) VALUES('record1');" -docker exec -it airbyte-source psql -U postgres -c "INSERT INTO public.users(col1) VALUES('record2');" -docker exec -it airbyte-source psql -U postgres -c "INSERT INTO public.users(col1) VALUES('record3');" -``` - -You now have a Postgres database ready to be replicated! - -### **Connect the Postgres database** - -In the UI, you will see a wizard that allows you choose the data you want to send through Airbyte. - -![](../../.gitbook/assets/02_set-up-sources.png) - -Use the name `airbyte-source` for the name and `Postgres`as the type. If you used our instructions to create a Postgres database, fill in the configuration fields as follows: - -```text -Host: localhost -Port: 2000 -User: postgres -Password: password -DB Name: postgres -``` - -Click on `Set Up Source` and the wizard should move on to allow you to configure a destination. - -We support a growing list of data warehouses, lakes and databases. For now, use the name `airbyte-destination`, and configure the destination Postgres database: - -```text -Host: localhost -Port: 3000 -User: postgres -Password: password -DB Name: postgres -``` - -After adding the destination, you can choose what tables and columns you want to sync. - -![](../../.gitbook/assets/03_set-up-connection.png) - -For this demo, we recommend leaving the defaults and selecting "Every 5 Minutes" as the frequency. Click `Set Up Connection` to finish setting up the sync. - -## 3. Check the logs of your first sync - -You should now see a list of sources with the source you just added. Click on it to find more information about your connection. This is the page where you can update any settings about this source and how it syncs. There should be a `Completed` job under the history section. If you click on that run, it will show logs from that run. - -![](../../.gitbook/assets/04_source-details.png) - -One of biggest problems we've seen in tools like Fivetran is the lack of visibility when debugging. In Airbyte, allowing full log access and the ability to debug and fix connector problems is one of our highest priorities. We'll be working hard to make these logs accessible and understandable. - -## 4. 
Check if the syncing actually worked - -Now let's verify that this worked. Let's output the contents of the destination db: - -```text -docker exec airbyte-destination psql -U postgres -c "SELECT * FROM public.users;" -``` - -:::info - -Don't worry about the awkward `public_users` name for now; we are currently working on an update to allow users to configure their destination table names! - -::: - -You should see the rows from the source database inside the destination database! - -And there you have it. You've taken data from one database and replicated it to another. All of the actual configuration for this replication only took place in the UI. - -That's it! This is just the beginning of Airbyte. If you have any questions at all, please reach out to us on [Slack](https://slack.airbyte.io/). We’re still in alpha, so if you see any rough edges or want to request a connector you need, please create an issue on our [Github](https://github.com/airbytehq/airbyte) or leave a thumbs up on an existing issue. - -Thank you and we hope you enjoy using Airbyte. diff --git a/docs/archive/examples/slack-history.md b/docs/archive/examples/slack-history.md deleted file mode 100644 index ca78ec035533..000000000000 --- a/docs/archive/examples/slack-history.md +++ /dev/null @@ -1,109 +0,0 @@ ---- -description: Using Airbyte and MeiliSearch ---- - -# Save and Search Through Your Slack History on a Free Slack Plan - -![](../../.gitbook/assets/slack-history-ui-title.png) - -The [Slack free tier](https://slack.com/pricing/paid-vs-free) saves only the last 10K messages. For social Slack instances, it may be impractical to upgrade to a paid plan to retain these messages. Similarly, for an open-source project like [Airbyte](../../understanding-airbyte/airbyte-protocol.md#catalog) where we interact with our community through a public Slack instance, the cost of paying for a seat for every Slack member is prohibitive. - -However, searching through old messages can be really helpful. Losing that history feels like some advanced form of memory loss. What was that joke about Java 8 Streams? This contributor question sounds familiar—haven't we seen it before? But you just can't remember! - -This tutorial will show you how you can, for free, use Airbyte to save these messages \(even after Slack removes access to them\). It will also provide you a convenient way to search through them. - -Specifically, we will export messages from your Slack instance into an open-source search engine called [MeiliSearch](https://github.com/meilisearch/meilisearch). We will be focusing on getting this setup running from your local workstation. We will mention at the end how you can set up a more productionized version of this pipeline. - -We want to make this process easy, so while we will link to some external documentation for further exploration, we will provide all the instructions you need here to get this up and running. - -## 1. Set Up MeiliSearch - -First, let's get MeiliSearch running on our workstation. MeiliSearch has extensive docs for [getting started](https://docs.meilisearch.com/reference/features/installation.html#download-and-launch). For this tutorial, however, we will give you all the instructions you need to set up MeiliSearch using Docker. - -```text -docker run -it --rm \ - -p 7700:7700 \ - -v $(pwd)/data.ms:/data.ms \ - getmeili/meilisearch -``` - -That's it! - -:::info - -MeiliSearch stores data in $\(pwd\)/data.ms, so if you prefer to store it somewhere else, just adjust this path. - -::: - -## 2. 
Replicate Your Slack Messages to MeiliSearch - -### a. Set Up Airbyte - -Make sure you have Docker and Docker Compose installed. If you haven’t set Docker up, follow the [instructions here](https://docs.docker.com/desktop/) to set it up on your machine. Then, run the following commands: - -```bash -git clone https://github.com/airbytehq/airbyte.git -cd airbyte -docker-compose up -``` - -If you run into any problems, feel free to check out our more extensive [Getting Started FAQ](https://discuss.airbyte.io/c/faq/15) for help. - -Once you see an Airbyte banner, the UI is ready to go at [http://localhost:8000/](http://localhost:8000/). Once you have set your user preferences, you will be brought to a page that asks you to set up a source. In the next step, we'll go over how to do that. - -### b. Set Up Airbyte’s Slack Source Connector - -In the Airbyte UI, select Slack from the dropdown. We provide step-by-step instructions for setting up the Slack source in Airbyte [here](https://docs.airbyte.io/integrations/sources/slack#setup-guide). These will walk you through how to complete the form on this page. - -![](../../.gitbook/assets/slack-history-setup-wizard.png) - -By the end of these instructions, you should have created a Slack source in the Airbyte UI. For now, just add your Slack app to a single public channel \(you can add it to more channels later\). Only messages from that channel will be replicated. - -The Airbyte app will now prompt you to set up a destination. Next, we will walk through how to set up MeiliSearch. - -### c. Set Up Airbyte’s MeiliSearch Destination Connector - -Head back to the Airbyte UI. It should still be prompting you to set up a destination. Select "MeiliSearch" from the dropdown. For the `host` field, set: `http://localhost:7700`. The `api_key` can be left blank. - -### d. Set Up the Replication - -On the next page, you will be asked to select which streams of data you'd like to replicate. We recommend unchecking "files" and "remote files" since you won't really be able to search them easily in this search engine. - -![](../../.gitbook/assets/airbyte_connection-settings.png) - -For frequency, we recommend every 24 hours. - -## 3. Search MeiliSearch - -After the connection has been saved, Airbyte should start replicating the data immediately. When it completes you should see the following: - -![](../../.gitbook/assets/slack-history-sync.png) - -When the sync is done, you can sanity check that this is all working by making a search request to MeiliSearch. Replication can take several minutes depending on the size of your Slack instance. - -```bash -curl 'http://localhost:7700/indexes/messages/search' --data '{ "q": "" }' -``` - -For example, I have the following message in one of the messages that I replicated: "welcome to airbyte". - -```bash -curl 'http://localhost:7700/indexes/messages/search' --data '{ "q": "welcome to" }' -# => {"hits":[{"_ab_pk":"7ff9a858_6959_45e7_ad6b_16f9e0e91098","channel_id":"C01M2UUP87P","client_msg_id":"77022f01-3846-4b9d-a6d3-120a26b2c2ac","type":"message","text":"welcome to airbyte.","user":"U01AS8LGX41","ts":"2021-02-05T17:26:01.000000Z","team":"T01AB4DDR2N","blocks":[{"type":"rich_text"}],"file_ids":[],"thread_ts":"1612545961.000800"}],"offset":0,"limit":20,"nbHits":2,"exhaustiveNbHits":false,"processingTimeMs":21,"query":"test-72"} -``` - -## 4. 
Search via a UI - -Making curl requests to search your Slack History is a little clunky, so we have modified the example UI that MeiliSearch provides in [their docs](https://docs.meilisearch.com/learn/tutorials/getting_started.html#integrate-with-your-project) to search through the Slack results. - -Download \(or copy and paste\) this [html file](https://github.com/airbytehq/airbyte/blob/master/docs/examples/slack-history/index.html) to your workstation. Then, open it using a browser. You should now be able to write search terms in the search bar and get results instantly! - -![](../../.gitbook/assets/slack-history-ui.png) - -## 5. "Productionizing" Saving Slack History - -You can find instructions for how to host Airbyte on various cloud platforms [here](../../deploying-airbyte/README.md). - -Documentation on how to host MeiliSearch on cloud platforms can be found [here](https://docs.meilisearch.com/running-production/#a-quick-introduction). - -If you want to use the UI mentioned in the section above, we recommend statically hosting it on S3, GCS, or equivalent. diff --git a/docs/archive/examples/slack-history/index.html b/docs/archive/examples/slack-history/index.html deleted file mode 100644 index 0812368137cd..000000000000 --- a/docs/archive/examples/slack-history/index.html +++ /dev/null @@ -1,77 +0,0 @@ - - - - - - - - -
- - - - - - - diff --git a/docs/archive/examples/zoom-activity-dashboard.md b/docs/archive/examples/zoom-activity-dashboard.md deleted file mode 100644 index 379931d8f33a..000000000000 --- a/docs/archive/examples/zoom-activity-dashboard.md +++ /dev/null @@ -1,272 +0,0 @@ ---- -description: Using Airbyte and Tableau ---- - -# Visualizing the Time Spent by Your Team in Zoom Calls - -In this article, we will show you how you can understand how much your team leverages Zoom, or spends time in meetings, in a couple of minutes. We will be using [Airbyte](https://airbyte.io) \(an open-source data integration platform\) and [Tableau](https://www.tableau.com) \(a business intelligence and analytics software\) for this tutorial. - -Here is what we will cover: - -1. Replicating data from Zoom to a PostgreSQL database, using Airbyte -2. Connecting the PostgreSQL database to Tableau -3. Creating charts in Tableau with Zoom data - -We will produce the following charts in Tableau: - -* Meetings per week in a team -* Hours a team spends in meetings per week -* Listing of team members with the number of meetings per week and number of hours spent in meetings, ranked -* Webinars per week in a team -* Hours a team spends in webinars per week -* Participants for all webinars in a team per week -* Listing of team members with the number of webinars per week and number of hours spent in meetings, ranked - -Let’s get started by replicating Zoom data using Airbyte. - -## Step 1: Replicating Zoom data to PostgreSQL - -### Launching Airbyte - -In order to replicate Zoom data, we will need to use [Airbyte’s Zoom connector](https://docs.airbyte.io/integrations/sources/zoom). To do this, you need to start off Airbyte’s web app by opening up your terminal and navigating to Airbyte and running: - -`docker-compose up` - -You can find more details about this in the [Getting Started FAQ](https://discuss.airbyte.io/c/faq/15) on our Discourse Forum. - -This will start up Airbyte on `localhost:8000`; open that address in your browser to access the Airbyte dashboard. - -![](../../.gitbook/assets/01_airbyte-dashboard.png) - -If you haven't gone through the onboarding yet, you will be prompted to connect a source and a destination. Then just follow the instructions. If you've gone through it, then you will see the screenshot above. In the top right corner of the Airbyte dashboard, click on the **+ new source** button to add a new Airbyte source. In the screen to set up the new source, enter the source name \(we will use airbyte-zoom\) and select **Zoom** as source type. - -Choosing Zoom as **source type** will cause Airbyte to display the configuration parameters needed to set up the Zoom source. - -![](../../.gitbook/assets/02_setting-zoom-connector-name.png) - -The Zoom connector for Airbyte requires you to provide it with a Zoom JWT token. Let’s take a detour and look at how to obtain one from Zoom. - -### Obtaining a Zoom JWT Token - -To obtain a Zoom JWT Token, login to your Zoom account and go to the [Zoom Marketplace](https://marketplace.zoom.us/). If this is your first time in the marketplace, you will need to agree to the Zoom’s marketplace terms of use. - -Once you are in, you need to click on the **Develop** dropdown and then click on **Build App.** - -![](../../.gitbook/assets/03_click.png) - -Clicking on **Build App** for the first time will display a modal for you to accept the Zoom’s API license and terms of use. Do accept if you agree and you will be presented with the below screen. 
- -![](../../.gitbook/assets/zoom-marketplace-build-screen%20(3)%20(3).png) - -Select **JWT** as the app you want to build and click on the **Create** button on the card. You will be presented with a modal to enter the app name; type in `airbyte-zoom`. - -![](../../.gitbook/assets/05_app-name-modal.png) - -Next, click on the **Create** button on the modal. - -You will then be taken to the **App Information** page of the app you just created. Fill in the required information. - -![](../../.gitbook/assets/06_app-information.png) - -After filling in the needed information, click on the **Continue** button. You will be taken to the **App Credentials** page. Here, click on the **View JWT Token** dropdown. - -![](../../.gitbook/assets/07_view-jwt-token.png) - -There you can set the expiration time of the token \(we will leave the default 90 minutes\), and then you click on the **Copy** button of the **JWT Token**. - -After copying it, click on the **Continue** button. - -![](../../.gitbook/assets/08_activate-webhook.png) - -You will be taken to a screen to activate **Event Subscriptions**. Just leave it as is, as we won’t be needing Webhooks. Click on **Continue**, and your app should be marked as activated. - -### Connecting Zoom on Airbyte - -So let’s go back to the Airbyte web UI and provide it with the JWT token we copied from our Zoom app. - -Now click on the **Set up source** button. You will see the below success message when the connection is made successfully. - -![](../../.gitbook/assets/setup-successful%20(3)%20(2).png) - -And you will be taken to the page to add your destination. - -### Connecting PostgreSQL on Airbyte - -![](../../.gitbook/assets/10_destination.png) - -For our destination, we will be using a PostgreSQL database, since Tableau supports PostgreSQL as a data source. Click on the **add destination** button, and then in the drop down click on **+ add a new destination**. In the page that presents itself, add the destination name and choose the Postgres destination. - -![](../../.gitbook/assets/11_choose-postgres-destination.png) - -To supply Airbyte with the PostgreSQL configuration parameters needed to make a PostgreSQL destination, we will spin off a PostgreSQL container with Docker using the following command in our terminal. - -`docker run --rm --name airbyte-zoom-db -e POSTGRES_PASSWORD=password -v airbyte_zoom_data:/var/lib/postgresql/data -p 2000:5432 -d postgres` - -This will spin a docker container and persist the data we will be replicating in the PostgreSQL database in a Docker volume `airbyte_zoom_data`. - -Now, let’s supply the above credentials to the Airbyte UI requiring those credentials. - -![](../../.gitbook/assets/postgres_credentials%20(3)%20(3).png) - -Then click on the **Set up destination** button. - -After the connection has been made to your PostgreSQL database successfully, Airbyte will generate the schema of the data to be replicated in your database from the Zoom source. - -Leave all the fields checked. - -![](../../.gitbook/assets/schema%20(3)%20(3).png) - -Select a **Sync frequency** of **manual** and then click on **Set up connection**. - -After successfully making the connection, you will see your PostgreSQL destination. Click on the Launch button to start the data replication. - -![](../../.gitbook/assets/launch%20(3)%20(3).png) - -Then click on the **airbyte-zoom-destination** to see the Sync page. 
- -![](../../.gitbook/assets/sync-screen%20(3)%20(3).png) - -Syncing should take a few minutes or longer depending on the size of the data being replicated. Once Airbyte is done replicating the data, you will get a **succeeded** status. - -Then, you can run the following SQL command on the PostgreSQL container to confirm that the sync was done successfully. - -`docker exec airbyte-zoom-db psql -U postgres -c "SELECT * FROM public.users;"` - -Now that we have our Zoom data replicated successfully via Airbyte, let’s move on and set up Tableau to make the various visualizations and analytics we want. - -## Step 2: Connect the PostgreSQL database to Tableau - -Tableau helps people and organizations to get answers from their data. It’s a visual analytic platform that makes it easy to explore and manage data. - -To get started with Tableau, you can opt in for a [free trial period](https://www.tableau.com/products/trial) by providing your email and clicking the **DOWNLOAD FREE TRIAL** button to download the Tableau desktop app. The download should automatically detect your machine type \(Windows/Mac\). - -Go ahead and install Tableau on your machine. After the installation is complete, you will need to fill in some more details to activate your free trial. - -Once your activation is successful, you will see your Tableau dashboard. - -![](../../.gitbook/assets/tableau-dashboard%20(3)%20(3).png) - -On the sidebar menu under the **To a Server** section, click on the **More…** menu. You will see a list of datasource connectors you can connect Tableau with. - -![](../../.gitbook/assets/datasources%20(4)%20(4).png) - -Select **PostgreSQL** and you will be presented with a connection credentials modal. - -Fill in the same details of the PostgreSQL database we used as the destination in Airbyte. - -![](../../.gitbook/assets/18_fill-in-connection-details.png) - -Next, click on the **Sign In** button. If the connection was made successfully, you will see the Tableau dashboard for the database you just connected. - -_Note: If you are having trouble connecting PostgreSQL with Tableau, it might be because the driver Tableau comes with for PostgreSQL might not work for newer versions of PostgreSQL. You can download the JDBC driver for PostgreSQL_ [_here_](https://www.tableau.com/support/drivers?_ga=2.62351404.1800241672.1616922684-1838321730.1615100968) _and follow the setup instructions._ - -Now that we have replicated our Zoom data into a PostgreSQL database using Airbyte’s Zoom connector, and connected Tableau with our PostgreSQL database containing our Zoom data, let’s proceed to creating the charts we need to visualize the time spent by a team in Zoom calls. - -## Step 3: Create the charts on Tableau with the Zoom data - -### Meetings per week in a team - -To create this chart, we will need to use the count of the meetings and the **createdAt** field of the **meetings** table. Currently, we haven’t selected a table to work on in Tableau. So you will see a prompt to **Drag tables here**. - -![](../../.gitbook/assets/19_tableau-view-with-all-tables.png) - -Drag the **meetings** table from the sidebar onto the space with the prompt. - -Now that we have the meetings table, we can start building out the chart by clicking on **Sheet 1** at the bottom left of Tableau. - -![](../../.gitbook/assets/20_empty-meeting-sheet.png) - -As stated earlier, we need **Created At**, but currently it’s a String data type. Let’s change that by converting it to a data time. 
So right click on **Created At**, then select `ChangeDataType` and choose Date & Time. And that’s it! That field is now of type **Date** & **Time**. - -![](../../.gitbook/assets/21_change-to-date-time.png) - -Next, drag **Created At** to **Columns**. - -![](../../.gitbook/assets/22_drag-created-at.png) - -Currently, we get the Created At in **YEAR**, but per our requirement we want them in Weeks, so right click on the **YEAR\(Created At\)** and choose **Week Number**. - -![](../../.gitbook/assets/change-to-per-week%20(3)%20(3).png) - -Tableau should now look like this: - -![](../../.gitbook/assets/24_meetings-per-week.png) - -Now, to finish up, we need to add the **meetings\(Count\) measure** Tableau already calculated for us in the **Rows** section. So drag **meetings\(Count\)** onto the Columns section to complete the chart. - -![](../../.gitbook/assets/evolution-of-meetings-per-week%20(3)%20(3).png) - -And now we are done with the very first chart. Let's save the sheet and create a new Dashboard that we will add this sheet to as well as the others we will be creating. - -Currently the sheet shows **Sheet 1**; right click on **Sheet 1** at the bottom left and rename it to **Weekly Meetings**. - -To create our Dashboard, we can right click on the sheet we just renamed and choose **new Dashboard**. Rename the Dashboard to Zoom Dashboard and drag the sheet into it to have something like this: - -![](../../.gitbook/assets/26_zoom-dashboard.png) - -Now that we have this first chart out of the way, we just need to replicate most of the process we used for this one to create the other charts. Because the steps are so similar, we will mostly be showing the finished screenshots of the charts except when we need to conform to the chart requirements. - -### Hours a team spends in meetings per week - -For this chart, we need the sum of the duration spent in weekly meetings. We already have a Duration field, which is currently displaying durations in minutes. We can derive a calculated field off this field since we want the duration in hours \(we just need to divide the duration field by 60\). - -To do this, right click on the Duration field and select **create**, then click on **calculatedField**. Change the name to **Duration in Hours**, and then the calculation should be **\[Duration\]/60**. Click ok to create the field. - -So now we can drag the Duration in Hours and Created At fields onto your sheet like so: - -![](../../.gitbook/assets/27_hours-spent-in-weekly-meetings.png) - -Note: We are adding a filter on the Duration to filter out null values. You can do this by right clicking on the **SUM\(Duration\)** pill and clicking filter, then make sure the **include null values** checkbox is unchecked. - -### Participants for all meetings per week - -For this chart, we will need to have a calculated field called **\# of meetings attended**, which will be an aggregate of the counts of rows matching a particular user's email in the `report_meeting_participants` table plotted against the **Created At** field of the **meetings** table. To get this done, right click on the **User Email** field. Select **create** and click on **calculatedField**, then enter the title of the field as **\# of meetings attended**. Next, enter the below formula: - -`COUNT(IF [User Email] == [User Email] THEN [Id (Report Meeting Participants)] END)` - -Then click on apply. 
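If you want to sanity-check what this calculated field counts, you can run a rough equivalent against the PostgreSQL destination we set up earlier. This is only a sketch: the column and join-key names used below (`id`, `meeting_id`, `created_at`) are assumptions about how the Zoom streams land in Postgres, so verify them first with `\d report_meeting_participants` and `\d meetings`.

```
docker exec airbyte-zoom-db psql -U postgres -c "
  -- participants recorded per week, roughly what the Tableau field plots
  SELECT date_trunc('week', m.created_at::timestamptz) AS week,
         count(p.id)                                   AS participants
  FROM public.report_meeting_participants p
  JOIN public.meetings m ON m.id = p.meeting_id
  GROUP BY 1
  ORDER BY 1;"
```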
Finally, drag the **Created At** fields \(make sure it’s on the **Weekly** number\) and the calculated field you just created to match the below screenshot: - -![](../../.gitbook/assets/number_of_participants_per_weekly_meetings.png) - -### Listing of team members with the number of meetings per week and number of hours spent in meetings, ranked. - -To get this chart, we need to create a relationship between the **meetings table** and the `report_meeting_participants` table. You can do this by dragging the `report_meeting_participants` table in as a source alongside the **meetings** table and relate both via the **meeting id**. Then you will be able to create a new worksheet that looks like this: - -![](../../.gitbook/assets/meetings-participant-ranked%20(3)%20(3).png) - -Note: To achieve the ranking, we simply use the sort menu icon on the top menu bar. - -### Webinars per week in a team - -The rest of the charts will be needing the **webinars** and `report_webinar_participants` tables. Similar to the number of meetings per week in a team, we will be plotting the Count of webinars against the **Created At** property. - -![](../../.gitbook/assets/30_weekly-webinars.png) - -### Hours a week spends in webinars per week - -For this chart, as for the meeting’s counterpart, we will get a calculated field off the Duration field to get the **Webinar Duration in Hours**, and then plot **Created At** against the **Sum of Webinar Duration in Hours**, as shown in the screenshot below. Note: Make sure you create a new sheet for each of these graphs. - -### Participants for all webinars per week - -This calculation is the same as the number of participants for all meetings per week, but instead of using the **meetings** and `report_meeting_participants` tables, we will use the webinars and `report_webinar_participants` tables. - -Also, the formula will now be: - -`COUNT(IF [User Email] == [User Email] THEN [Id (Report Webinar Participants)] END)` - -Below is the chart: - -![](../../.gitbook/assets/32_number_of_webinar_attended_per_week.png) - -#### Listing of team members with the number of webinars per week and number of hours spent in meetings, ranked - -Below is the chart with these specs - -![](../../.gitbook/assets/33_number-of-webinars-participants.png) - -## Conclusion - -In this article, we see how we can use Airbyte to get data off the Zoom API onto a PostgreSQL database, and then use that data to create some chart visualizations in Tableau. - -You can leverage Airbyte and Tableau to produce graphs on any collaboration tool. We just used Zoom to illustrate how it can be done. Hope this is helpful! - diff --git a/docs/archive/faq/README.md b/docs/archive/faq/README.md deleted file mode 100644 index daec5e69ead1..000000000000 --- a/docs/archive/faq/README.md +++ /dev/null @@ -1,5 +0,0 @@ -# FAQ - -Our FAQ is now a section on our Discourse forum. Check it out [here](https://discuss.airbyte.io/c/faq/15)! - -If you don't see your question answered, feel free to open up a new topic for it. \ No newline at end of file diff --git a/docs/archive/faq/data-loading.md b/docs/archive/faq/data-loading.md deleted file mode 100644 index 16dd12be3d21..000000000000 --- a/docs/archive/faq/data-loading.md +++ /dev/null @@ -1,124 +0,0 @@ -# Data Loading - -## **Why don’t I see any data in my destination yet?** - -It can take a while for Airbyte to load data into your destination. Some sources have restrictive API limits which constrain how much -data we can sync in a given time. 
Large amounts of data in your source can also make the initial sync take longer. You can check your -sync status in your connection detail page that you can access through the destination detail page or the source one. - -## **Why my final tables are being recreated everytime?** - -Airbyte ingests data into raw tables and applies the process of normalization if you selected it in the connection page. -The normalization runs a full refresh each sync and for some destinations like Snowflake, Redshift, Bigquery this may incur more -resource consumption and more costs. You need to pay attention to the frequency that you're retrieving your data to avoid issues. -For example, if you create a connection to sync every 5 minutes with incremental sync on, it will only retrieve new records into the raw tables but will apply normalization -to *all* the data in every sync! If you have tons of data, this may not be the right sync frequency for you. - -There is a [Github issue](https://github.com/airbytehq/airbyte/issues/4286) to implement normalization using incremental, which will reduce -costs and resources in your destination. - -## **What happens if a sync fails?** - -You won't lose data when a sync fails, however, no data will be added or updated in your destination. - -Airbyte will automatically attempt to replicate data 3 times. You can see and export the logs for those attempts in the connection -detail page. You can access this page through the Source or Destination detail page. - -You can configure a Slack webhook to warn you when a sync fails. - -In the future you will be able to configure other notification method (email, Sentry) and an option to create a -GitHub issue with the logs. We’re still working on it, and the purpose would be to help the community and the Airbyte team to fix the -issue as soon as possible, especially if it is a connector issue. - -Until Airbyte has this system in place, here is what you can do: - -* File a GitHub issue: go [here](https://github.com/airbytehq/airbyte/issues/new?assignees=&labels=type%2Fbug&template=bug-report.md&title=) - and file an issue with the detailed logs copied in the issue’s description. The team will be notified about your issue and will update - it for any progress or comment on it. -* Fix the issue yourself: Airbyte is open source so you don’t need to wait for anybody to fix your issue if it is important to you. - To do so, just fork the [GitHub project](https://github.com/airbytehq/airbyte) and fix the piece of code that need fixing. If you’re okay - with contributing your fix to the community, you can submit a pull request. We will review it ASAP. -* Ask on Slack: don’t hesitate to ping the team on [Slack](https://slack.airbyte.io). - -Once all this is done, Airbyte resumes your sync from where it left off. - -We truly appreciate any contribution you make to help the community. Airbyte will become the open-source standard only if everybody participates. - -## **Can Airbyte support 2-way sync i.e. changes from A go to B and changes from B go to A?** - -Airbyte actually does not support this right now. There are some details around how we handle schema and tables names that isn't going to -work for you in the current iteration. -If you attempt to do a circular dependency between source and destination, you'll end up with the following -A.public.table_foo writes to B.public.public_table_foo to A.public.public_public_table_foo. You won't be writing into your original table, -which I think is your intention. 
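As a side note on the normalization behaviour described a few questions above: with a Postgres destination you can see the raw/normalized split for yourself. The snippet below is only a sketch and assumes the default naming Airbyte uses (`_airbyte_raw_<stream>` tables carrying the JSON records alongside the normalized per-stream tables), plus a placeholder container name and stream name, so adapt it to your own connection:

```
# list the raw tables that incremental syncs append to
docker exec <your-postgres-container> psql -U postgres -c "\dt public._airbyte_raw_*"

# peek at the raw JSON records behind one (placeholder) stream;
# normalization rebuilds the matching final table from these on every run
docker exec <your-postgres-container> psql -U postgres -c \
  "SELECT _airbyte_emitted_at, _airbyte_data FROM public._airbyte_raw_users LIMIT 5;"
```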
- - -## **What happens to data in the pipeline if the destination gets disconnected? Could I lose data, or wind up with duplicate data when the pipeline is reconnected?** - -Airbyte is architected to prevent data loss or duplication. Airbyte will display a failure for the sync, and re-attempt it at the next syncing, -according to the frequency you set. - -## **How frequently can Airbyte sync data?** - -You can adjust the load time to run as frequent as every five minutes and as infrequent as every 24 hours. - -## **Why wouldn’t I choose to load all of my data every five minutes?** - -While frequent data loads will give you more up-to-date data, there are a few reasons you wouldn’t want to load your data every five minutes, including: - -* Higher API usage may cause you to hit a limit that could impact other systems that rely on that API. -* Higher cost of loading data into your warehouse. -* More frequent delays, resulting in increased delay notification emails. For instance, if the data source generally takes several hours to - update but you choose five-minute increments, you may receive a delay notification every sync. - -Generally is recommended setting the incremental loads to every hour to help limit API calls. - -## **Is there a way to know the estimated time to completion for the first historic sync?** - -Unfortunately not yet. - -## **Do you support change data capture \(CDC\) or logical replication for databases?** - -Airbyte currently supports [CDC for Postgres and Mysql](../../understanding-airbyte/cdc.md). Airbyte is adding support for a few other -databases you can check in the roadmap. - -## Using incremental sync, is it possible to add more fields when some new columns are added to a source table, or when a new table is added? - -For the moment, incremental sync doesn't support schema changes, so you would need to perform a full refresh whenever that happens. -Here’s a related [Github issue](https://github.com/airbytehq/airbyte/issues/1601). - -## There is a limit of how many tables one connection can handle? - -Yes, for more than 6000 thousand tables could be a problem to load the information on UI. - -There are two Github issues about this limitation: [Issue #3942](https://github.com/airbytehq/airbyte/issues/3942) -and [Issue #3943](https://github.com/airbytehq/airbyte/issues/3943). - -## Help, Airbyte is hanging/taking a long time to discover my source's schema! - -This usually happens for database sources that contain a lot of tables. This should resolve itself in half an hour or so. - -If the source contains more than 6k tables, see the [above question](#there-is-a-limit-of-how-many-tables-one-connection-can-handle). - -There is a known issue with [Oracle databases](https://github.com/airbytehq/airbyte/issues/4944). - -## **I see you support a lot of connectors – what about connectors Airbyte doesn’t support yet?** - -You can either: - -* Submit a [connector request](https://github.com/airbytehq/airbyte/issues/new?assignees=&labels=area%2Fintegration%2C+new-integration&template=new-integration-request.md&title=) on our Github project, and be notified once we or the community build a connector for it. -* Build a connector yourself by forking our [GitHub project](https://github.com/airbytehq/airbyte) and submitting a pull request. Here - are the [instructions how to build a connector](../../contributing-to-airbyte/README.md). -* Ask on Slack: don’t hesitate to ping the team on [Slack](https://slack.airbyte.io). 
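Related to the CDC question above: for a Postgres source, the database itself needs logical replication enabled before Airbyte can read changes. The commands below are only a sketch of the usual Postgres-side prerequisites — the slot name, publication name, and database are placeholders, and the CDC docs linked above remain the reference for the exact requirements of your Airbyte version:

```
# run against the source database with a sufficiently privileged role
psql -U postgres -d <your_db> -c "ALTER SYSTEM SET wal_level = logical;"   # takes effect after a restart
psql -U postgres -d <your_db> -c "SELECT pg_create_logical_replication_slot('airbyte_slot', 'pgoutput');"
psql -U postgres -d <your_db> -c "CREATE PUBLICATION airbyte_publication FOR ALL TABLES;"
```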
- -## **What kind of notifications do I get?** - -For the moment, the UI will only display one kind of notification: when a sync fails, Airbyte will display the failure at the source/destination -level in the list of sources/destinations, and in the connection detail page along with the logs. - -However, there are other types of notifications: - -* When a connector that you use is no longer up to date -* When your connections fails -* When core isn't up to date - diff --git a/docs/archive/faq/deploying-on-other-os.md b/docs/archive/faq/deploying-on-other-os.md deleted file mode 100644 index 0b493c3db200..000000000000 --- a/docs/archive/faq/deploying-on-other-os.md +++ /dev/null @@ -1,40 +0,0 @@ -# Deploying Airbyte on a Non-Standard Operating System - -## CentOS 8 - -From clean install: - -``` -firewall-cmd --zone=public --add-port=8000/tcp --permanent -firewall-cmd --zone=public --add-port=8001/tcp --permanent -firewall-cmd --zone=public --add-port=7233/tcp --permanent -systemctl restart firewalld -``` -OR... if you prefer iptables: -``` -iptables -A INPUT -p tcp -m tcp --dport 8000 -j ACCEPT -iptables -A INPUT -p tcp -m tcp --dport 8001 -j ACCEPT -iptables -A INPUT -p tcp -m tcp --dport 7233 -j ACCEPT -systemctl restart iptables -``` -Setup the docker repo: -``` -dnf config-manager --add-repo=https://download.docker.com/linux/centos/docker-ce.repo` -dnf install docker-ce --nobest -systemctl enable --now docker -usermod -aG docker $USER -``` -You'll need to get docker-compose separately. -``` -dnf install wget git curl -curl -L https://github.com/docker/compose/releases/download/1.25.0/docker-compose-`uname -s`-`uname -m` -o /usr/local/bin/docker-compose -chmod +x /usr/local/bin/docker-compose -``` -Now we can install Airbyte. In this example, we will install it under `/opt/` -``` -cd /opt -git clone https://github.com/airbytehq/airbyte.git -cd airbyte -docker-compose up -docker-compose ps -``` \ No newline at end of file diff --git a/docs/archive/faq/differences-with/README.md b/docs/archive/faq/differences-with/README.md deleted file mode 100644 index d020cfd1db38..000000000000 --- a/docs/archive/faq/differences-with/README.md +++ /dev/null @@ -1,2 +0,0 @@ -# Differences with - diff --git a/docs/archive/faq/differences-with/fivetran-vs-airbyte.md b/docs/archive/faq/differences-with/fivetran-vs-airbyte.md deleted file mode 100644 index d6f015218e9e..000000000000 --- a/docs/archive/faq/differences-with/fivetran-vs-airbyte.md +++ /dev/null @@ -1,27 +0,0 @@ -# Fivetran vs Airbyte - -We wrote an article, “[Open-source vs. Commercial Software: How to Solve the Data Integration Problem](https://airbyte.io/articles/data-engineering-thoughts/open-source-vs-commercial-software-how-to-better-solve-data-integration/),” in which we describe the pros and cons of Fivetran’s commercial approach and Airbyte’s open-source approach. Don’t hesitate to check it out for more detailed arguments. As a summary, here are the differences: - -![](https://airbyte.io/wp-content/uploads/2021/01/Airbyte-vs-Fivetran.png) - -## **Fivetran:** - -* **Limited high-quality connectors:** after 8 years in business, Fivetran supports 150 connectors. The more connectors, the more difficult it is for Fivetran to keep the same level of maintenance across all connectors. They will always have a ROI consideration to maintaining long-tailed connectors. -* **Pricing indexed on usage:** Fivetran’s pricing is indexed on the number of active rows \(rows added or edited\) per month. 
Teams always need to keep that in mind and are not free to move data without thinking about cost, as the costs can grow fast. -* **Security and privacy compliance:** all companies are subject to privacy compliance laws, such as GDPR, CCPA, HIPAA, etc. As a matter of fact, above a certain stage \(about 100 employees\) in a company, all external products need to go through a security compliance process that can take several months. -* **No moving data between internal databases:** Fivetran sits in the cloud, so if you have to replicate data from an internal database to another, it makes no sense to have the data move through them \(Fivetran\) for privacy and cost reasons. - -## **Airbyte:** - -* **Free, as open source, so no more pricing based on usage**: learn more about our [future business model](https://handbook.airbyte.io/strategy/business-model) \(connectors will always remain open source\). -* **Supporting 60 connectors within 8 months from inception**. Our goal is to reach 200+ connectors by the end of 2021. -* **Building new connectors made trivial, in the language of your choice:** Airbyte makes it a lot easier to create your own connector, vs. building them yourself in-house \(with Airflow or other tools\). Scheduling, orchestration, and monitoring comes out of the box with Airbyte. -* **Addressing the long tail of connectors:** with the help of the community, Airbyte ambitions to support thousands of connectors. -* **Adapt existing connectors to your needs:** you can adapt any existing connector to address your own unique edge case. -* **Using data integration in a workflow:** Airbyte’s API lets engineering teams add data integration jobs into their workflow seamlessly. -* **Integrates with your data stack and your needs:** Airflow, Kubernetes, dbt, etc. Its normalization is optional, it gives you a basic version that works out of the box, but also allows you to use dbt to do more complicated things. -* **Debugging autonomy:** if you experience any connector issue, you won’t need to wait for Fivetran’s customer support team to get back to you, if you can fix the issue fast yourself. -* **No more security and privacy compliance, as self-hosted, source-available and open-sourced \(MIT\)**. Any team can directly address their integration needs. - -Your data stays in your cloud. Have full control over your data, and the costs of your data transfers. - diff --git a/docs/archive/faq/differences-with/meltano-vs-airbyte.md b/docs/archive/faq/differences-with/meltano-vs-airbyte.md deleted file mode 100644 index 0b4aef9033b9..000000000000 --- a/docs/archive/faq/differences-with/meltano-vs-airbyte.md +++ /dev/null @@ -1,28 +0,0 @@ -# Meltano vs Airbyte - -We wrote an article, “[The State of Open-Source Data Integration and ETL](https://airbyte.io/articles/data-engineering-thoughts/the-state-of-open-source-data-integration-and-etl/),” in which we list and compare all ETL-related open-source projects, including Meltano and Airbyte. Don’t hesitate to check it out for more detailed arguments. As a summary, here are the differences: - -## **Meltano:** - -* **Meltano is built on top of the Singer protocol, whereas Airbyte is built on top of the Airbyte protocol**. Having initially created Airbyte on top of Singer, we wrote about why we didn't move forward with it [here](https://airbyte.io/blog/why-you-should-not-build-your-data-pipeline-on-top-of-singer) and [here](https://airbyte.io/blog/airbyte-vs-singer-why-airbyte-is-not-built-on-top-of-singer). 
Summarized, the reasons were: Singer connectors didn't always adhere to the Singer protocol, had poor standardization and visibility in terms of quality, and community governance and support was abandoned by Stitch. By contrast, we aim to make Airbyte a product that ["just works"](https://airbyte.io/blog/our-truth-for-2021-airbyte-just-works) and always plan to maximize engagement within the Airbyte community. -* **CLI-first approach:** Meltano was primarily built with a command line interface in mind. In that sense, they seem to target engineers with a preference for that interface. -* **Integration with Airflow for orchestration:** You can either use Meltano alone for orchestration or with Airflow; Meltano works both ways. -* All connectors must use Python. -* Meltano works with any of Singer's 200+ available connectors. However, in our experience, quality has been hit or miss. - -## **Airbyte:** - -In contrast, Airbyte is a company fully committed to the open-source project and has a [business model](https://handbook.airbyte.io/strategy/business-model) in mind around this project. Our [team](https://airbyte.io/about-us) are data integration experts that have built more than 1,000 integrations collectively at large scale. The team now counts 20 engineers working full-time on Airbyte. - -* **Airbyte supports more than 100 connectors after only 1 year since its inception**, 20% of which were built by the community. Our ambition is to support **200+ connectors by the end of 2021.** -* Airbyte’s connectors are **usable out of the box through a UI and API,** with monitoring, scheduling and orchestration. Airbyte was built on the premise that a user, whatever their background, should be able to move data in 2 minutes. Data engineers might want to use raw data and their own transformation processes, or to use Airbyte’s API to include data integration in their workflows. On the other hand, analysts and data scientists might want to use normalized consolidated data in their database or data warehouses. Airbyte supports all these use cases. -* **One platform, one project with standards:** This will help consolidate the developments behind one single project, some standardization and specific data protocol that can benefit all teams and specific cases. -* **Not limited by Singer’s data protocol:** In contrast to Meltano, Airbyte was not built on top of Singer, but its data protocol is compatible with Singer’s. This means Airbyte can go beyond Singer, but Meltano will remain limited. -* **Connectors can be built in the language of your choice,** as Airbyte runs them as Docker containers. -* **Airbyte integrates with your data stack and your needs:** Airflow, Kubernetes, dbt, etc. Its normalization is optional, it gives you a basic version that works out of the box, but also allows you to use dbt to do more complicated things. - -## **Other noteworthy differences:** - -* In terms of community, Meltano's Slack community got 430 new members in the last 6 months, while Airbyte got 800. -* The difference in velocity in terms of feature progress is easily measurable as both are open-source projects. Meltano closes about 30 issues per month, while Airbyte closes about 120. 
- diff --git a/docs/archive/faq/differences-with/pipelinewise-vs-airbyte.md b/docs/archive/faq/differences-with/pipelinewise-vs-airbyte.md deleted file mode 100644 index adcc9c2bf376..000000000000 --- a/docs/archive/faq/differences-with/pipelinewise-vs-airbyte.md +++ /dev/null @@ -1,25 +0,0 @@ -# Pipelinewise vs Airbyte - -## **PipelineWise:** - -PipelineWise is an open-source project by Transferwise that was built with the primary goal of serving their own needs. There is no business model attached to the project, and no apparent interest in growing the community. - -* **Supports 21 connectors,** and only adds new ones based on the needs of the mother company, Transferwise. -* **No business model attached to the project,** and no apparent interest from the company in growing the community. -* **As close to the original format as possible:** PipelineWise aims to reproduce the data from the source to an Analytics-Data-Store in as close to the original format as possible. Some minor load time transformations are supported, but complex mapping and joins have to be done in the Analytics-Data-Store to extract meaning. -* **Managed Schema Changes:** When source data changes, PipelineWise detects the change and alters the schema in your Analytics-Data-Store automatically. -* **YAML based configuration:** Data pipelines are defined as YAML files, ensuring that the entire configuration is kept under version control. -* **Lightweight:** No daemons or database setup are required. - -## **Airbyte:** - -In contrast, Airbyte is a company fully committed to the open-source project and has a [business model in mind](https://handbook.airbyte.io/) around this project. - -* Our ambition is to support **300+ connectors by the end of 2021.** We already supported about 50 connectors at the end of 2020, just 5 months after its inception. -* Airbyte’s connectors are **usable out of the box through a UI and API,** with monitoring, scheduling and orchestration. Airbyte was built on the premise that a user, whatever their background, should be able to move data in 2 minutes. Data engineers might want to use raw data and their own transformation processes, or to use Airbyte’s API to include data integration in their workflows. On the other hand, analysts and data scientists might want to use normalized consolidated data in their database or data warehouses. Airbyte supports all these use cases. -* **One platform, one project with standards:** This will help consolidate the developments behind one single project, some standardization and specific data protocol that can benefit all teams and specific cases. -* **Connectors can be built in the language of your choice,** as Airbyte runs them as Docker containers. -* **Airbyte integrates with your data stack and your needs:** Airflow, Kubernetes, dbt, etc. Its normalization is optional, it gives you a basic version that works out of the box, but also allows you to use dbt to do more complicated things. - -The data protocols for both projects are compatible with Singer’s. So it is easy to migrate a Singer tap or target onto Airbyte or PipelineWise. 
- diff --git a/docs/archive/faq/differences-with/singer-vs-airbyte.md b/docs/archive/faq/differences-with/singer-vs-airbyte.md deleted file mode 100644 index 42342dcaed92..000000000000 --- a/docs/archive/faq/differences-with/singer-vs-airbyte.md +++ /dev/null @@ -1,28 +0,0 @@ -# Singer vs Airbyte - -If you want to understand the difference between Airbyte and Singer, you might be interested in 2 articles we wrote: - -* “[Airbyte vs. Singer: Why Airbyte is not built on top of Singer](https://airbyte.io/articles/data-engineering-thoughts/airbyte-vs-singer-why-airbyte-is-not-built-on-top-of-singer/).” -* “[The State of Open-Source Data Integration and ETL](https://airbyte.io/articles/data-engineering-thoughts/the-state-of-open-source-data-integration-and-etl/),” in which we list and compare all ETL-related open-source projects, including Singer and Airbyte. As a summary, here are the differences: - -![](https://airbyte.io/wp-content/uploads/2020/10/Landscape-of-open-source-data-integration-platforms-4.png) - -## **Singer:** - -* **Supports 96 connectors after 4 years.** -* **Increasingly outdated connectors:** Talend \(acquirer of StitchData\) seems to have stopped investing in maintaining Singer’s community and connectors. As most connectors see schema changes several times a year, more and more Singer’s taps and targets are not actively maintained and are becoming outdated. -* **Absence of standardization:** each connector is its own open-source project. So you never know the quality of a tap or target until you have actually used it. There is no guarantee whatsoever about what you’ll get. -* **Singer’s connectors are standalone binaries:** you still need to build everything around to make them work \(e.g. UI, configuration validation, state management, normalization, schema migration, monitoring, etc\). -* **No full commitment to open sourcing all connectors,** as some connectors are only offered by StitchData under a paid plan. _\*\*_ - -## **Airbyte:** - -* Our ambition is to support **300+ connectors by the end of 2021.** We already supported about 50 connectors at the end of 2020, just 5 months after its inception. -* Airbyte’s connectors are **usable out of the box through a UI and API**, with monitoring, scheduling and orchestration. Airbyte was built on the premise that a user, whatever their background, should be able to move data in 2 minutes. Data engineers might want to use raw data and their own transformation processes, or to use Airbyte’s API to include data integration in their workflows. On the other hand, analysts and data scientists might want to use normalized consolidated data in their database or data warehouses. Airbyte supports all these use cases. -* **One platform, one project with standards:** This will help consolidate the developments behind one single project, some standardization and specific data protocol that can benefit all teams and specific cases. -* **Connectors can be built in the language of your choice,** as Airbyte runs them as Docker containers. -* **Airbyte integrates with your data stack and your needs:** Airflow, Kubernetes, dbt, etc. Its normalization is optional, it gives you a basic version that works out of the box, but also allows you to use dbt to do more complicated things. -* **A full commitment to the open-source MIT project** with the promise not to hide some connectors behind paid walls. - -Note that Airbyte’s data protocol is compatible with Singer’s. So it is easy to migrate a Singer tap onto Airbyte. 
- diff --git a/docs/archive/faq/differences-with/stitchdata-vs-airbyte.md b/docs/archive/faq/differences-with/stitchdata-vs-airbyte.md deleted file mode 100644 index f5ea358f5851..000000000000 --- a/docs/archive/faq/differences-with/stitchdata-vs-airbyte.md +++ /dev/null @@ -1,29 +0,0 @@ -# StitchData vs Airbyte - -We wrote an article, “[Open-source vs. Commercial Software: How to Solve the Data Integration Problem](https://airbyte.io/articles/data-engineering-thoughts/open-source-vs-commercial-software-how-to-better-solve-data-integration/),” in which we describe the pros and cons of StitchData’s commercial approach and Airbyte’s open-source approach. Don’t hesitate to check it out for more detailed arguments. As a summary, here are the differences: - -![](https://airbyte.io/wp-content/uploads/2020/10/Open-source-vs-commercial-approach-2048x1843.png) - -## StitchData: - -* **Limited deprecating connectors:** Stitch only supports 150 connectors. Talend has stopped investing in StitchData and its connectors. And on Singer, each connector is its own open-source project. So you never know the quality of a tap or target until you have actually used it. There is no guarantee whatsoever about what you’ll get. -* **Pricing indexed on usage:** StitchData’s pricing is indexed on the connectors used and the volume of data transferred. Teams always need to keep that in mind and are not free to move data without thinking about cost. -* **Security and privacy compliance:** all companies are subject to privacy compliance laws, such as GDPR, CCPA, HIPAA, etc. As a matter of fact, above a certain stage \(about 100 employees\) in a company, all external products need to go through a security compliance process that can take several months. -* **No moving data between internal databases:** StitchData sits in the cloud, so if you have to replicate data from an internal database to another, it makes no sense to have the data move through their cloud for privacy and cost reasons. -* **StitchData’s Singer connectors are standalone binaries:** you still need to build everything around to make them work. And it’s hard to update some pre-built connectors, as they are of poor quality. - -## Airbyte: - -* **Free, as open source, so no more pricing based on usage:** learn more about our [future business model](https://handbook.airbyte.io/strategy/business-model) \(connectors will always remain open-source\). -* **Supporting 50+ connectors by the end of 2020** \(so in only 5 months of existence\). Our goal is to reach 300+ connectors by the end of 2021. -* **Building new connectors made trivial, in the language of your choice:** Airbyte makes it a lot easier to create your own connector, vs. building them yourself in-house \(with Airflow or other tools\). Scheduling, orchestration, and monitoring comes out of the box with Airbyte. -* **Maintenance-free connectors you can use in minutes.** Just authenticate your sources and warehouse, and get connectors that adapt to schema and API changes for you. -* **Addressing the long tail of connectors:** with the help of the community, Airbyte ambitions to support thousands of connectors. -* **Adapt existing connectors to your needs:** you can adapt any existing connector to address your own unique edge case. -* **Using data integration in a workflow:** Airbyte’s API lets engineering teams add data integration jobs into their workflow seamlessly. -* **Integrates with your data stack and your needs:** Airflow, Kubernetes, dbt, etc. 
Its normalization is optional, it gives you a basic version that works out of the box, but also allows you to use dbt to do more complicated things. -* **Debugging autonomy:** if you experience any connector issue, you won’t need to wait for Fivetran’s customer support team to get back to you, if you can fix the issue fast yourself. -* **Your data stays in your cloud.** Have full control over your data, and the costs of your data transfers. -* **No more security and privacy compliance, as self-hosted and open-sourced \(MIT\).** Any team can directly address their integration needs. -* **Premium support directly on our Slack for free**. Our time to resolution is about 3-4 hours in average. - diff --git a/docs/archive/faq/getting-started.md b/docs/archive/faq/getting-started.md deleted file mode 100644 index 61d0e4e7bc1d..000000000000 --- a/docs/archive/faq/getting-started.md +++ /dev/null @@ -1,50 +0,0 @@ -# Getting Started - -## **What do I need to get started using Airbyte?** - -You can deploy Airbyte in several ways, as [documented here](../../deploying-airbyte/README.md). Airbyte will then help you replicate data between a source and a destination. Airbyte offers pre-built connectors for both, you can see their list [here](../../project-overview/changelog/connectors.md). If you don’t see the connector you need, you can [build your connector yourself](../../connector-development) and benefit from Airbyte’s optional scheduling, orchestration and monitoring modules. - -## **How long does it take to set up Airbyte?** - -It depends on your source and destination. Check our setup guides to see the tasks for your source and destination. Each source and destination also has a list of prerequisites for setup. To make setup faster, get your prerequisites ready before you start to set up your connector. During the setup process, you may need to contact others \(like a database administrator or AWS account owner\) for help, which might slow you down. But if you have access to the connection information, it can take 2 minutes: see [demo video. ](https://www.youtube.com/watch?v=jWVYpUV9vEg) - -## **What data sources does Airbyte offer connectors for?** - -We already offer 100+ connectors, and will focus all our effort in ramping up the number of connectors and strengthening them. View the [full list here](../../project-overview/changelog/connectors.md). If you don’t see a source you need, you can file a [connector request here](https://github.com/airbytehq/airbyte/issues/new?assignees=&labels=area%2Fintegration%2C+new-integration&template=new-integration-request.md&title=). - -## **Where can I see my data in Airbyte?** - -You can’t see your data in Airbyte, because we don’t store it. The sync loads your data into your destination \(data warehouse, data lake, etc.\). While you can’t see your data directly in Airbyte, you can check your schema and sync status on the source detail page in Airbyte. - -## **Can I add multiple destinations?** - -Sure, you can. Just go to the "Destinations" section and click on the top right "+ new destination" button. You can have multiple destinations for the same source, and multiple sources for the same destination. - -## Am I limited to GUI interaction or is there a way to set up / run / interact with Airbyte programmatically? - -You can use the API to do anything you do today from the UI. Though, word of notice, the API is in alpha and may change. 
You won’t lose any functionality, but you may need to update your code to catch up to any backwards incompatible changes in the API. - -## How does Airbyte handle connecting to databases that are behind a firewall / NAT? - -We don’t. Airbyte is to be self-hosted in your own private cloud. - -## Can I set a start time for my integration? - -[Here](../../understanding-airbyte/connections#sync-schedules) is the link to the docs on scheduling syncs. - -## **Can I disable analytics in Airbyte?** - -Yes, you can control what's sent outside of Airbyte for analytics purposes. - -We added the following telemetry to Airbyte to ensure the best experience for users: - -* Measure usage of features & connectors -* Measure failure rate of connectors to address bugs quickly -* Reach out to our users about Airbyte community updates if they opt-in -* ... - -To disable telemetry, modify the `.env` file and define the two following environment variables: - -```text -TRACKING_STRATEGY=logging -``` diff --git a/docs/archive/faq/security-and-data-audits.md b/docs/archive/faq/security-and-data-audits.md deleted file mode 100644 index e56db4de7ac3..000000000000 --- a/docs/archive/faq/security-and-data-audits.md +++ /dev/null @@ -1,14 +0,0 @@ -# Security & Data Audits - -## **How secure is Airbyte?** - -Airbyte is an open-source self-hosted solution, so let’s say it is as safe as your data infrastructure. _\*\*_ - -## **Is Airbyte GDPR compliant?** - -Airbyte is a self-hosted solution, so it doesn’t bring any security or privacy risk to your infrastructure. We do intend to add data quality and privacy compliance features in the future, in order to give you more visibility on that topic. - -## **How does Airbyte charge?** - -We don’t. All connectors are all under the MIT license. If you are curious about the business model we have in mind, please check our [company handbook](https://handbook.airbyte.io/strategy/business-model). - diff --git a/docs/archive/faq/transformation-and-schemas.md b/docs/archive/faq/transformation-and-schemas.md deleted file mode 100644 index 554b11b558fd..000000000000 --- a/docs/archive/faq/transformation-and-schemas.md +++ /dev/null @@ -1,20 +0,0 @@ -# Transformation and Schemas - -## **Where's the T in Airbyte’s ETL tool?** - -Airbyte is actually an ELT tool, and you have the freedom to use it as an EL-only tool. The transformation part is done by default, but it is optional. You can choose to receive the data in raw \(JSON file for instance\) in your destination. - -We do provide normalization \(if option is still on\) so that data analysts / scientists / any users of the data can use it without much effort. - -We also intend to integrate deeply with dbt to make it easier for your team to continue relying you on them, if this was what you were doing. - -## **How does Airbyte handle replication when a data source changes its schema?** - -Airbyte continues to sync data using the configured schema until that schema is updated. Because Airbyte treats all fields as optional, if a field is renamed or deleted in the source, that field simply will no longer be replicated, but all remaining fields will. The same is true for streams as well. - -For now, the schema can only be updated manually in the UI \(by clicking "Update Schema" in the settings page for the connection\). When a schema is updated Airbyte will re-sync all data for that source using the new schema. 
- -## **How does Airbyte handle namespaces \(or schemas for the DB-inclined\)?** - -Airbyte respects source-defined namespaces when syncing data with a namespace-supported destination. See [this](../../understanding-airbyte/namespaces.md) for more details. - diff --git a/docs/career-and-open-positions/README.md b/docs/career-and-open-positions/README.md deleted file mode 100644 index 6da8437c7ab6..000000000000 --- a/docs/career-and-open-positions/README.md +++ /dev/null @@ -1,67 +0,0 @@ -# Careers & Open Positions - -## **Who we are** - -[Airbyte](http://airbyte.io) is the upcoming open-source standard for EL\(T\). We enable data teams to replicate data from applications, APIs, and databases to data warehouses, lakes, and other destinations. We believe only an open-source approach can solve the problem of data integration, as it enables us to cover the long tail of integrations while enabling teams to adapt prebuilt connectors to their needs. - -Airbyte is remote friendly, with most of the team still based in the Silicon Valley. We’re fully open as a company. Our [**company handbook**](https://handbook.airbyte.io), [**culture & values**](https://handbook.airbyte.io/company/culture-and-values), [**strategy**](https://handbook.airbyte.io/strategy/strategy) and [**roadmap**](../project-overview/roadmap.md) are open to all. - -We're backed by some of the world's [top investors](./#our-investors) and believe in product-led growth, where we build something awesome and let our product bring the users, rather than an outbound sales engine with cold calls. - -## **Our Culture** - -We believe that a company culture needs to be opinionated to be effective and attract the right people. It cannot be right for every person, or it would mean we don’t have a culture. And that’s perfectly fine. Culture is also a living creature that will grow and evolve with the team and company. It is something we deeply care about. - -Here are the values we deeply believe in: - -* [Transparency and Candor](https://handbook.airbyte.io/company/culture-and-values#transparency-and-candor) - * For any collaboration to be fruitful, we need full transparency. - * Any opinion or concern needs to be shared. - * Bad news must travel faster than good news. -* [Humility and Maximizing Growth](https://handbook.airbyte.io/company/culture-and-values#humility-and-maximizing-growth) - * We are a team in search of truth, not in search of winning points. - * We prioritize progress over perfection. We are thankful towards errors of action, provided we learn and get better. - * We should never pull rank. -* [Being Intentional & Owning the Outcome](https://handbook.airbyte.io/company/culture-and-values#being-intentional-and-owning-the-outcome) - * We don’t take things for granted. - * Having a task means you are responsible for anticipating and solving problems. -* [Trust & Caring](https://handbook.airbyte.io/company/culture-and-values#trust-and-caring) - * Constructive feedback should be on a one-on-one basis. - * Assume positive intent. - * Address behavior, not people. - * Don't let others fail, if you can help. We succeed together. - -## **Our Investors** - -We have raised $31M seed and Series-A round with Benchmark, Accel, YCombinator, 8VC, and a few leaders in the data industry \(including the co-founder of Elastic, MongoDB, Segment, Liveramp, the former GM of Cloudera\). - -We have a lot of capital, but [we're a lean, strong team](https://airbyte.io/about-us) - so you've got the opportunity to have a huge impact. 
- -## **Our Recruiting Process** - -We will schedule one first call with our founders or team. It will focus on two key areas: - -* Ensuring you will be a great cultural fit -* Aptitude for the role - -If you succeed with the above, we may do a written interview. Here are [several reasons](https://www.safegraph.com/blog/why-safegraph-does-written-interviews-and-why-your-company-should-do-them-too) why we're big fans of this kind of interview. The main argument is that it removes a lot of the bias people might have during live interviews. Plus, we mostly work asynchronously nowadays, don't we? - -If the written interview is a success, we might set you up with one or 2 additional calls, to test how you actually think and work, but also so you meet the rest of the team. - -Once all of this done, we will discuss the process internally and get back to you very fast \(velocity is everything here\)! So about 2-3 calls and one written interview, that's it! - -## [**Our Benefits**](https://handbook.airbyte.io/people/benefits) - -* **Flexible work environment as fully remote** - we don’t look at when you log in, log out or how much time you work. We trust you, it’s the only way remote can actually work. -* [**Unlimited vacation policy**](https://handbook.airbyte.io/people/time-off) with mandatory minimum time off - so you can fit work around your life. -* [**Co-working space stipend**](https://handbook.airbyte.io/people/expense-policy#work-space) - we provide everyone with $200/month to use on a coworking space of their choice, if any. -* [**Parental leave**](https://handbook.airbyte.io/people/time-off#parental-leave) \(for both parents, after one year spent with the company\) - so those raising families can do so while still working for us. -* **Open book policy** - we reimburse books that employees want to purchase for their professional and career development. -* **Continuous learning / training policy** - we sponsor the conferences and training programs you feel would add to your development in the company. -* **Health insurance** for those from countries that do not provide this freely. Through Savvy in the US, which means you can choose the insurance you want and will receive a stipend from the company. -* **401k** for the US employees. -* **Sponsored visas** for those who need them -* We'll give you a corporate card for expenses. Our philosophy is Freedom & Responsibiility. We trust you, just do what's best for the company. - -An important note: At Airbyte, we don’t just accept difference — we celebrate it, we support it, and we thrive on it for the benefit of our employees, our product, and our community. Airbyte is proud to be an **Equal Opportunity Workplace** and is an **Affirmative Action employer**. - diff --git a/docs/career-and-open-positions/founding-developer-evangelist.md b/docs/career-and-open-positions/founding-developer-evangelist.md deleted file mode 100644 index 63cf610841b7..000000000000 --- a/docs/career-and-open-positions/founding-developer-evangelist.md +++ /dev/null @@ -1,63 +0,0 @@ -# Founding Developer Evangelist - -## **About Airbyte** - -[Airbyte](http://airbyte.io) is the upcoming open-source standard for EL(T). We enable data teams to replicate data from applications, APIs, and databases to data warehouses, lakes, and other destinations. We believe only an open-source approach can solve the problem of data integration, as it enables us to cover the long tail of integrations while enabling teams to adapt prebuilt connectors to their needs. 
- -Airbyte is remote friendly, with most of the team still based in the Silicon Valley. We’re fully open as a company. Our **[company handbook](https://handbook.airbyte.io)**, **[culture & values](https://handbook.airbyte.io/company/culture-and-values)**, **[strategy](https://handbook.airbyte.io/strategy/strategy)** and **[roadmap](../project-overview/roadmap.md)** are open to all. - -We raised a total of $31.2M by some of the world's [top investors](./#our-investors) (Benchmark, Accel, YCombinator, co-founders or CEOs of Segment, Elastic, MongoDB, Cloudera, etc.) and believe in product-led growth, where we build something awesome and let our product bring the users, rather than an outbound sales engine with cold calls. - -## **Description** - -As our first Developer Evangelist, you will have two focuses: - -1. **Building a content hub focused on data engineering** (and not Airbyte per se), featuring a podcast that brings thought leaders and community members into the conversation. This will be supplemented by a rich catalog of written content focused on increasing Airbyte’s share of voice in the data integration space. -2. **Launching a meetup program around data engineering**, leveraging the content hub’s brand - -The goal of the developer evangelism program is to make Airbyte’s content hub the best place to get thoughtful content on data engineering. As the first member of our developer evangelism team, you will work closely with the founders and our growing community team. - -## **Responsibilities** - -* Own the content hub’s content production and guest list -* Create data engineering thought leadership posts -* Produce a podcast with guests/outside contributors -* Collaborate with DevRel to produce high impact content and reach mass developers -* Lead the conversation around the latest technology advancements and best practices in the developer community -* Lead the efforts in creating an international meetup program around the content hub -* Be a force in the community! - -## **Requirements** - -* In-depth industry experience building software and contributing to open source, if possible in the data ecosystem -* At least 3 years of experience giving talks and developing demos, webinars, videos, and other technical content -* Meaningful social presence with engaged followers -* Self-directed and work with minimal supervision. -* Outstanding written and verbal communications skills with the ability to explain and translate complex technology concepts into simple and intuitive communications. -* You share [our values](https://handbook.airbyte.io/company/culture-and-values) - -## **Location** - -Remote but compatible with US timezones. - -## **We provide** - -* **Flexible work environment as fully remote** - we don’t look at when you log in, log out or how much time you work. We trust you, it’s the only way remote can actually work. -* **[Unlimited vacation policy](https://handbook.airbyte.io/people/time-off)** with mandatory minimum time off - so you can fit work around your life. -* **[Co-working space stipend](https://handbook.airbyte.io/people/expense-policy#work-space)** - we provide everyone with $200/month to use on a coworking space of their choice, if any. -* **[Parental leave](https://handbook.airbyte.io/people/time-off#parental-leave)** \(for both parents, after one year spent with the company\) - so those raising families can do so while still working for us. 
-* **Open book policy** - we reimburse books that employees want to purchase for their professional and career development. -* **Continuous learning / training policy** - we sponsor the conferences and training programs you feel would add to your development in the company. -* **Health insurance** for those from countries that do not provide this freely. Through Savvy in the US, which means you can choose the insurance you want and will receive a stipend from the company. -* **401k** for the US employees. -* **Sponsored visas** for those who need them -* We'll give you a corporate card for expenses. Our philosophy is Freedom & Responsibility. We trust you, just do what's best for the company. - -## **Applying** - -Email us at [join-us@airbyte.io](mailto:join-us@airbyte.io) with a link to your LinkedIn / Resume / GitHub \(optional\). - -You don't need to include a cover letter, but just a paragraph about how you found us and what makes you a great person to join our founding team! - -At Airbyte, we don’t just accept difference — we celebrate it and support it. We thrive on it for the benefit of our employees, our product, and our community. Airbyte is proud to be an **Equal Opportunity Workplace** and is an **Affirmative Action employer**. - diff --git a/docs/career-and-open-positions/head-of-growth.md b/docs/career-and-open-positions/head-of-growth.md deleted file mode 100644 index bf9be2f58268..000000000000 --- a/docs/career-and-open-positions/head-of-growth.md +++ /dev/null @@ -1,62 +0,0 @@ -# Head of Growth - -## **About Airbyte** - -[Airbyte](http://airbyte.io) is the upcoming open-source standard for EL(T). We enable data teams to replicate data from applications, APIs, and databases to data warehouses, lakes, and other destinations. We believe only an open-source approach can solve the problem of data integration, as it enables us to cover the long tail of integrations while enabling teams to adapt prebuilt connectors to their needs. - -Airbyte is remote friendly, with most of the team still based in San Francisco. We’re fully open as a company. Our **[company handbook](https://handbook.airbyte.io)**, **[culture & values](https://handbook.airbyte.io/company/culture-and-values)**, **[strategy](https://handbook.airbyte.io/strategy/strategy)** and **[roadmap](../project-overview/roadmap.md)** are open to all. - -We raised a total of $31.2M by some of the world's [top investors](./#our-investors) (Benchmark, Accel, YCombinator, co-founders or CEOs of Segment, Elastic, MongoDB, Cloudera, etc.) and believe in product-led growth, where we build something awesome and let our product bring the users, rather than an outbound sales engine with cold calls. - -## **The Opportunity** - -As the Head of Growth, you will work closely with both co-founders, and will be responsible for the strategy, development, and execution of growth programs to drive awareness and demand for Airbyte Cloud. While the responsibilities below will be the core of your job, we are looking for a high-performing individual who will take initiative, proactively solve new problems, and create their own career path with us. - -## **What You'll Do** - -* Identify common denominators across key user conversion points -* Build our pipeline. You’ll help deliver a robust sales pipeline through organic and paid efforts to fuel our rapid growth. -* Drive key business metrics including leads, PQLs, SQLs, pipeline, and revenue through strategic and integrated programs and campaigns.
-* Act as a strategic partner to product, sales, customer success and engineering teams to prioritize opportunities and increase user retention -* Leverage our open-source user community as a lead channel for our premium offers. -* Optimize our paid digital spend. You’ll lead the team to significantly grow our paid digital leads from key market segments. -* Lead our marketing operations and analytics. You’ll build out our marketing automation systems that we need to support our rapid growth, you’ll lead our marketing operations team for strong execution with our sales and SDR teams, and your team will manage our dashboards, reporting and developing insights to inform our strategy. -* Keep our budget on track. Your team will manage a significant marketing budget, and you’ll help us make the right investment decisions. -* Build, train and nurture your team. Provide mentorship, management and support to your rapidly growing team of marketers. - - -## **What You'll Need** - -* 5+ years of experience in lead generation and prospecting -* Experience in B2B and developer tools for at least 3+ years -* Top-notch verbal and written communication skills -* A proven track record of building and maintaining strong client relationships -* Excellent project management skills -* Thorough knowledge of GTM software (including CRM, etc.) -* You share [our values](https://handbook.airbyte.io/company/culture-and-values) - -## **Location** - -Remote but compatible with US timezones. - -## **We provide** - -* **Flexible work environment as fully remote** - we don’t look at when you log in, log out or how much time you work. We trust you, it’s the only way remote can actually work. -* **[Unlimited vacation policy](https://handbook.airbyte.io/people/time-off)** with mandatory minimum time off - so you can fit work around your life. -* **[Co-working space stipend](https://handbook.airbyte.io/people/expense-policy#work-space)** - we provide everyone with $200/month to use on a coworking space of their choice, if any. -* **[Parental leave](https://handbook.airbyte.io/people/time-off#parental-leave)** \(for both parents, after one year spent with the company\) - so those raising families can do so while still working for us. -* **Open book policy** - we reimburse books that employees want to purchase for their professional and career development. -* **Continuous learning / training policy** - we sponsor the conferences and training programs you feel would add to your development in the company. -* **Health insurance** for those from countries that do not provide this freely. Through Savvy in the US, which means you can choose the insurance you want and will receive a stipend from the company. -* **401k** for the US employees. -* **Sponsored visas** for those who need them -* We'll give you a corporate card for expenses. Our philosophy is Freedom & Responsibiility. We trust you, just do what's best for the company. - -## **Applying** - -Email us at [join-us@airbyte.io](mailto:join-us@airbyte.io) with a link to your LinkedIn / Resume / GitHub \(optional\). - -You don't need to include a cover letter, but just a paragraph how you found us and what makes you a great person to join our founding team! - -At Airbyte, we don’t just accept difference — we celebrate it and support it. We thrive on it for the benefit of our employees, our product, and our community. Airbyte is proud to be an **Equal Opportunity Workplace** and is an **Affirmative Action employer**. 
- diff --git a/docs/career-and-open-positions/revenue-product-manager.md b/docs/career-and-open-positions/revenue-product-manager.md deleted file mode 100644 index b533dc1659f0..000000000000 --- a/docs/career-and-open-positions/revenue-product-manager.md +++ /dev/null @@ -1,59 +0,0 @@ -# Revenue Product Manager - -## **About Airbyte** - -[Airbyte](http://airbyte.io) is the upcoming open-source standard for EL(T). We enable data teams to replicate data from applications, APIs, and databases to data warehouses, lakes, and other destinations. We believe only an open-source approach can solve the problem of data integration, as it enables us to cover the long tail of integrations while enabling teams to adapt prebuilt connectors to their needs. - -Airbyte is remote friendly, with most of the team still based in Silicon Valley. We’re fully open as a company. Our **[company handbook](https://handbook.airbyte.io)**, **[culture & values](https://handbook.airbyte.io/company/culture-and-values)**, **[strategy](https://handbook.airbyte.io/strategy/strategy)** and **[roadmap](../project-overview/roadmap.md)** are open to all. - -We raised a total of $31.2M by some of the world's [top investors](./#our-investors) (Benchmark, Accel, YCombinator, co-founders or CEOs of Segment, Elastic, MongoDB, Cloudera, etc.) and believe in product-led growth, where we build something awesome and let our product bring the users, rather than an outbound sales engine with cold calls. - -## **Description** - -You’ll work closely with our co-founders and our head of lead generation to help us build, execute, and iterate on our go-to-market playbook. You will build customer excitement, close inbound leads, and at the same time identify and document patterns to build the foundational sales enablement resources for the future sales team. You will also work hand in hand with our head of lead generation on outbound strategy, generating pipeline, and use learnings to improve and scale our sales organization. - -You’ll operate as a core member of our revenue team and play an integral role in future hiring, product, and overall strategy, with the opportunity to see significant upward mobility in Airbyte if the fit is strong. We’re looking for someone that’s just as excited about building something as they are about closing big deals. - -## **What you will do here** - -* Identify and develop the playbook to sell to scale-up and mid-market companies who have data engineers in the IT sector at first. You will be selling to people all across the data team, from data engineers to the CTO/CIO. -* Manage the full sales cycle from lead qualification and prospecting to close for target accounts, including sales ops and enablement resources and tools that you will help define. -* Work closely with lead generation to iterate on strategy, content, messaging, potential channels. -* Help define the recruiting strategy, quotas, and sales compensation model for the future sales team.
- - -## **What we’re looking for** - -* 3+ years of go-to-market experience in B2B SaaS with a record of high performance -* Excellent communication skills - you write a great email and give an excellent demo -* Energy, grit, and flexibility needed to thrive in a constantly changing work environment -* An innate ability to self-start, prioritize, and creatively problem-solve -* Experience selling into technical buyers/users and managing multiple deal cycles quickly and efficiently -* No travel required -* You share [our values](https://handbook.airbyte.io/company/culture-and-values). - -## **Location** - -Remote but compatible with US timezones. - -## **We provide** - -* **Flexible work environment as fully remote** - we don’t look at when you log in, log out or how much time you work. We trust you, it’s the only way remote can actually work. -* **[Unlimited vacation policy](https://handbook.airbyte.io/people/time-off)** with mandatory minimum time off - so you can fit work around your life. -* **[Co-working space stipend](https://handbook.airbyte.io/people/expense-policy#work-space)** - we provide everyone with $200/month to use on a coworking space of their choice, if any. -* **[Parental leave](https://handbook.airbyte.io/people/time-off#parental-leave)** \(for both parents, after one year spent with the company\) - so those raising families can do so while still working for us. -* **Open book policy** - we reimburse books that employees want to purchase for their professional and career development. -* **Continuous learning / training policy** - we sponsor the conferences and training programs you feel would add to your development in the company. -* **Health insurance** for those from countries that do not provide this freely. Through Savvy in the US, which means you can choose the insurance you want and will receive a stipend from the company. -* **401k** for the US employees. -* **Sponsored visas** for those who need them -* We'll give you a corporate card for expenses. Our philosophy is Freedom & Responsibiility. We trust you, just do what's best for the company. - -## **Applying** - -Email us at [join-us@airbyte.io](mailto:join-us@airbyte.io) with a link to your LinkedIn / Resume / GitHub \(optional\). - -You don't need to include a cover letter, but just a paragraph how you found us and what makes you a great person to join our founding team! - -At Airbyte, we don’t just accept difference — we celebrate it and support it. We thrive on it for the benefit of our employees, our product, and our community. Airbyte is proud to be an **Equal Opportunity Workplace** and is an **Affirmative Action employer**. - diff --git a/docs/career-and-open-positions/senior-content-editor.md b/docs/career-and-open-positions/senior-content-editor.md deleted file mode 100644 index 715b53e473e9..000000000000 --- a/docs/career-and-open-positions/senior-content-editor.md +++ /dev/null @@ -1,74 +0,0 @@ -# Senior Content Editor / Strategist - -## **About Airbyte** - -[Airbyte](http://airbyte.io) is the upcoming open-source standard for EL(T). We enable data teams to replicate data from applications, APIs, and databases to data warehouses, lakes, and other destinations. We believe only an open-source approach can solve the problem of data integration, as it enables us to cover the long tail of integrations while enabling teams to adapt prebuilt connectors to their needs. - -Airbyte is remote friendly, with most of the team still based in the Silicon Valley. We’re fully open as a company. 
Our **[company handbook](https://handbook.airbyte.io)**, **[culture & values](https://handbook.airbyte.io/company/culture-and-values)**, **[strategy](https://handbook.airbyte.io/strategy/strategy)** and **[roadmap](../project-overview/roadmap.md)** are open to all. - -We raised a total of $31.2M by some of the world's [top investors](./#our-investors) (Benchmark, Accel, YCombinator, co-founders or CEOs of Segment, Elastic, MongoDB, Cloudera, etc.) and believe in product-led growth, where we build something awesome and let our product bring the users, rather than an outbound sales engine with cold calls. - -## **Description** - -As senior content editor / strategist, you will be responsible for: - -* Owning the written content development roadmap for the entire Airbyte experience, to help us take our user experience to the next level with the best content on the market. Content includes thought leadership or technical articles, [recipes](https://airbyte.io/recipes), tutorials, etc. -* Overseeing a team of internal and external content developers (contracting company, but also community contributors with our [Write for the community](https://airbyte.io/write-for-the-community) program. -* Developing and maintaining a clear and concise style guide, SEO optimization and promotion processes for all published content. - -This is a highly visible, internal-facing role that will report directly to the founders. The successful candidate will focus on impact for all content produced, drive optimum operational efficiency, effectiveness, and scale. This role is ideal for an experienced project manager with a passion for open-source and data engineering, and who wants to join a fast-growing startup. Since this role works very closely with the community, you will be directly engaging with authors a lot, who will rely on your rhetorical and grammatical expertise to express their technical findings in the most powerful way possible. Naturally, having technical understanding of the concepts will help you move quickly here. - -## **What you will do here** - -* Own roadmap of all written content for Airbyte across all platforms and delivery methods -* Lead a team of content developers, technical writers, and subject matter experts from our community and contractors -* Develop systems and processes for regularly gathering, analyzing, and implementing feedback based on key quality metrics that relate to the company’s broader strategic goals -* Operationalize and formalize ongoing content development processes, systems and tools, and documentation to ensure high quality content is delivered in the most effective and efficient manner possible -* Find and curate topics based on interest level from our community or SEO potential -* Optimize our publications for visibility and impact. -* Maintain our website’s content and connector catalog. -* Find unique ways to encourage community contribution to our content and documentation. - -## **What we’re looking for** - -* 3+ years of experience working in content development -* Data-driven mindset, strong business judgment, and proven ability to perform relevant data analyses, draw key insights, and use data to form and execute a strategy -* Meticulous attention to detail and process-driven -* Very strong control over the English language, with published examples to back it up. -* Verbal communication that matches your written expertise. -* Experience with community management. -* Strong understanding of ELT/ETL concepts, preferably in an applied role. 
-* Experience with SEO and content promotion. -* You share [our values](https://handbook.airbyte.io/company/culture-and-values). - -## **Bonus points** - -* Strong applied programming experience. -* Technical writing experience specific to the data integration space. -* History of working for or with open-source communities. - -## **Location** - -Remote but compatible with US timezones. - -## **We provide** - -* **Flexible work environment as fully remote** - we don’t look at when you log in, log out or how much time you work. We trust you, it’s the only way remote can actually work. -* **[Unlimited vacation policy](https://handbook.airbyte.io/people/time-off)** with mandatory minimum time off - so you can fit work around your life. -* **[Co-working space stipend](https://handbook.airbyte.io/people/expense-policy#work-space)** - we provide everyone with $200/month to use on a coworking space of their choice, if any. -* **[Parental leave](https://handbook.airbyte.io/people/time-off#parental-leave)** \(for both parents, after one year spent with the company\) - so those raising families can do so while still working for us. -* **Open book policy** - we reimburse books that employees want to purchase for their professional and career development. -* **Continuous learning / training policy** - we sponsor the conferences and training programs you feel would add to your development in the company. -* **Health insurance** for those from countries that do not provide this freely. Through Savvy in the US, which means you can choose the insurance you want and will receive a stipend from the company. -* **401k** for the US employees. -* **Sponsored visas** for those who need them -* We'll give you a corporate card for expenses. Our philosophy is Freedom & Responsibiility. We trust you, just do what's best for the company. - -## **Applying** - -Email us at [join-us@airbyte.io](mailto:join-us@airbyte.io) with a link to your LinkedIn / Resume / GitHub \(optional\). - -You don't need to include a cover letter, but just a paragraph how you found us and what makes you a great person to join our founding team! - -At Airbyte, we don’t just accept difference — we celebrate it and support it. We thrive on it for the benefit of our employees, our product, and our community. Airbyte is proud to be an **Equal Opportunity Workplace** and is an **Affirmative Action employer**. - diff --git a/docs/career-and-open-positions/senior-product-manager.md b/docs/career-and-open-positions/senior-product-manager.md deleted file mode 100644 index 83348c73fd9c..000000000000 --- a/docs/career-and-open-positions/senior-product-manager.md +++ /dev/null @@ -1,58 +0,0 @@ -# Senior Product Manager - -## **About Airbyte** - -[Airbyte](http://airbyte.io) is the upcoming open-source standard for EL(T). We enable data teams to replicate data from applications, APIs, and databases to data warehouses, lakes, and other destinations. We believe only an open-source approach can solve the problem of data integration, as it enables us to cover the long tail of integrations while enabling teams to adapt prebuilt connectors to their needs. - -Airbyte is remote friendly, with most of the team still based in the Silicon Valley. We’re fully open as a company. Our **[company handbook](https://handbook.airbyte.io)**, **[culture & values](https://handbook.airbyte.io/company/culture-and-values)**, **[strategy](https://handbook.airbyte.io/strategy/strategy)** and **[roadmap](../project-overview/roadmap.md)** are open to all. 
- -We raised a total of $31.2M by some of the world's [top investors](./#our-investors) (Benchmark, Accel, YCombinator, co-founders or CEOs of Segment, Elastic, MongoDB, Cloudera, etc.) and believe in product-led growth, where we build something awesome and let our product bring the users, rather than an outbound sales engine with cold calls. - -## **Description** - -As the first product manager, you will work closely with both co-founders to define the product vision and strategy. You will work hand-in-hand with engineering and customer success leaders, in order to ensure the voice of the customer is at the foundation of our product roadmap and prioritization. You will help define our tracking system to support actual business cases, in addition to product and engineering processes. - -## **What you will do here** - -* Help define the product vision, strategy and roadmap, with co-founders, engineering and customer success leaders -* Act as the customer advocate articulating the user’s and customer’s needs -* Develop positioning and messaging for the website -* Help define our product-related metrics, our UI/UX and usability testing process -* Produce a vision of the evolution of the product function within the company, in coordination with co-founders & engineering leaders -* Recommend or contribute information in setting product pricing. - -## **What we’re looking for** - -* 5+ years of experience in product management in startups or scale-ups -* Technical product and data integration knowledge -* Demonstrated success in defining and launching products that meet and exceed business objectives -* Excellent written and verbal communication skills -* Excellent teamwork skills -* Proven ability to influence cross-functional teams without formal authority -* You share [our values](https://handbook.airbyte.io/company/culture-and-values). - -## **Location** - -Remote but compatible with US timezones. - -## **We provide** - -* **Flexible work environment as fully remote** - we don’t look at when you log in, log out or how much time you work. We trust you, it’s the only way remote can actually work. -* **[Unlimited vacation policy](https://handbook.airbyte.io/people/time-off)** with mandatory minimum time off - so you can fit work around your life. -* **[Co-working space stipend](https://handbook.airbyte.io/people/expense-policy#work-space)** - we provide everyone with $200/month to use on a coworking space of their choice, if any. -* **[Parental leave](https://handbook.airbyte.io/people/time-off#parental-leave)** \(for both parents, after one year spent with the company\) - so those raising families can do so while still working for us. -* **Open book policy** - we reimburse books that employees want to purchase for their professional and career development. -* **Continuous learning / training policy** - we sponsor the conferences and training programs you feel would add to your development in the company. -* **Health insurance** for those from countries that do not provide this freely. Through Savvy in the US, which means you can choose the insurance you want and will receive a stipend from the company. -* **401k** for the US employees. -* **Sponsored visas** for those who need them -* We'll give you a corporate card for expenses. Our philosophy is Freedom & Responsibility. We trust you, just do what's best for the company. - -## **Applying** - -Email us at [join-us@airbyte.io](mailto:join-us@airbyte.io) with a link to your LinkedIn / Resume / GitHub \(optional\).
- -You don't need to include a cover letter, but just a paragraph how you found us and what makes you a great person to join our founding team! - -At Airbyte, we don’t just accept difference — we celebrate it and support it. We thrive on it for the benefit of our employees, our product, and our community. Airbyte is proud to be an **Equal Opportunity Workplace** and is an **Affirmative Action employer**. - diff --git a/docs/career-and-open-positions/senior-software-engineer.md b/docs/career-and-open-positions/senior-software-engineer.md deleted file mode 100644 index 5d8f23e4d562..000000000000 --- a/docs/career-and-open-positions/senior-software-engineer.md +++ /dev/null @@ -1,69 +0,0 @@ -# Senior Software Engineer - -Join [Airbyte's](https://airbyte.io/) small, but growing, engineering team. We are backed by the world's [top investors](https://airbyte.io/about-us) (Benchmark, Accel, YCombinator, etc.). - -You will spend your time solving one of the most complex and unsolved problem with data: Access. We're on a mission to allow anyone to access data, wherever it is, and make it available into the system of their choice. - -We are passionate about data, open source, our community and scale! - -We love Java, Python, Node, K8s, Docker... but in the end, it doesn't matter: we just want to build the best product. - -## **About you** - -* Have attention to detail and bias for action. -* Are a prolific communicator, and thrive in uncertainty. -* Are passionate about data, big data, open source and... DATA. -* Want to help & support users solve their problems. -* Are looking to work in a fast-paced environment, solving fundamental industry problem. -* Are comfortable working remotely. - -## **Requirements** - -* 3+ years of backend engineering work experience. -* Self-directed and work with minimal supervision. -* Proficiency writing production-quality code, preferably in Java, Kotlin or Python. -* Familiarity with scalability. -* Excellent communication skills. -* You share [our values](https://handbook.airbyte.io/company/culture-and-values) - -## **Great-to-haves** - -* Experience with AWS / GCP / Azure. -* Experience working with huge data sets. -* Experience with open source development. - -## **Location** - -Wherever you want! - -## **Perks!!!** - -* **Flexible work environment as fully remote** - we don’t look at when you log in, log out or how much time you work. We trust you, it’s the only way remote can actually work. -* [**Unlimited vacation policy**](https://handbook.airbyte.io/people/time-off) with mandatory minimum time off - so you can fit work around your life. -* [**Co-working space stipend**](https://handbook.airbyte.io/people/expense-policy#work-space) - we provide everyone with $200/month to use on a coworking space of their choice, if any. -* [**Parental leave**](https://handbook.airbyte.io/people/time-off#parental-leave) \(for both parents, after one year spent with the company\) - so those raising families can do so while still working for us. -* **Open book policy** - we reimburse books that employees want to purchase for their professional and career development. -* **Continuous learning / training policy** - we sponsor the conferences and training programs you feel would add to your development in the company. -* **Health insurance** for those from countries that do not provide this freely. Through Savvy in the US, which means you can choose the insurance you want and will receive a stipend from the company. -* **401k** for the US employees. 
-* **Sponsored visas** for those who need them -* We'll give you a corporate card for expenses. Our philosophy is Freedom & Responsibiility. We trust you, just do what's best for the company. - -## **This is how we think** - -We are an open company, we are opinionated and public about how we think! - -* Our [company handbook](https://handbook.airbyte.io/), -* Our [culture & values](https://handbook.airbyte.io/company/culture-and-values), -* Our [strategy](https://handbook.airbyte.io/strategy/strategy), -* Our [roadmap](https://handbook.airbyte.com/strategy/roadmap), -* The [future of data integration](https://airbyte.io/articles/data-engineering-thoughts/why-the-future-of-etl-is-not-elt-but-el/) - -## **Applying** - -Email us at [join-us@airbyte.io](mailto:join-us@airbyte.io) with a link to your LinkedIn / Resume / GitHub \(optional\). - -Don't send a cover letter, your resume will speak for you! - -At Airbyte, we don’t just accept difference — we celebrate it and support it. We thrive on it for the benefit of our employees, our product, and our community. Airbyte is proud to be an **Equal Opportunity Workplace** and is an **Affirmative Action employer**. - diff --git a/docs/connector-development/testing-connectors/legacy-standard-source-tests.md b/docs/connector-development/testing-connectors/legacy-standard-source-tests.md deleted file mode 100644 index d60fbd5db8e6..000000000000 --- a/docs/connector-development/testing-connectors/legacy-standard-source-tests.md +++ /dev/null @@ -1,101 +0,0 @@ -# Standard Tests (Legacy) - -## Airbyte's Standard Tests (v1) - -This document describes the old version Standard Tests, please check the latest version [here](../../connector-development/testing-connectors/README.md) -To ensure a minimum quality bar, Airbyte runs all connectors against the same set of integration tests \(sources & destinations have two different test suites\). Those tests ensure that each connector adheres to the [Airbyte Specification](../../understanding-airbyte/airbyte-protocol.md) and responds correctly to Airbyte commands when provided valid \(or invalid\) inputs. - -### Architecture of standard tests - -![Standard test sequence diagram](../../.gitbook/assets/standard_tests_sequence1.png) - -The Standard Test Suite runs its tests against the connector's Docker image. It takes as input the following: - -* **The Connector's Docker image name**, so it can run tests against that image -* **A config file** containing necessary credentials/information to connect to the underlying data source/destination -* **A configured catalog** that will be used when running read/write operations against the connector -* **\(Optional\) A state file** for use in incremental sync test scenarios - -The test suite then runs its test cases, which include: - -* Using the input config file, running the `check` operation should succeed. -* Using a made up/fake config file, running the `check` operation should fail. -* Running a `read` operation should produce at least one record. -* Running two consecutive full refresh reads should produce identical records. - -See all the test cases and their description in [Standard Source Tests](standard-source-tests.md). - -### Setting up standard tests for your connector - -Standard tests are typically run from a docker container. The default standard test runner is the File-based Standard Test suite, which gets its name because its inputs are passed as files via Docker volume mounts. 
This is the simplest way to run the standard test suite: the only requirements are that you place its input files inside your connector's directory, and to pass the paths to those input files as arguments to the Gradle plugin required to invoke it. This is setup by default inside the `build.gradle` file on all connectors generated from templates. - -For reference, to configure the file-based standard test suite the only requirement is to add the following block in your connectors `build.gradle` file: - -```text -apply plugin: 'airbyte-standard-source-test-file' -airbyteStandardSourceTestFile { - // all these paths must be inside your connector's directory - configPath = "/path/to/config" - specPath = "/path/to/spec" - configuredCatalogPath = "/path/to/catalog" -} -``` - -These inputs are all described in the [Airbyte Specification](../../understanding-airbyte/airbyte-protocol.md) and will be used as follows: - -* **Spec file** will be compared to the spec file output by the connector when the `spec` command is called. -* **Config file** is expected to be a valid config file. It's expected that calling `check` with this config will succeed. -* **Configured Catalog** read operations will be performed on all the streams found in this catalog. All sync modes supported for each stream will be tested. If any stream requires a user-defined cursor, this should be defined in this catalog file. \(If this sounds like gibberish, make sure to read about incremental sync\). - -### Dynamically managing inputs & resources used in standard tests - -Since the inputs to standard tests are often static, the file-based runner is sufficient for most connectors. However, in some cases, you may need to run pre or post hooks to dynamically create or destroy resources for use in standard tests. For example, if we need to spin up a Redshift cluster to use in the test then tear it down afterwards, we need the ability to run code before and after the tests, as well as customize the Redshift cluster URL we pass to the standard tests. If you have need for this use case, please reach out to us via [Github](https://github.com/airbytehq/airbyte) or [Slack](https://slack.airbyte.io). We currently support it for Java & Python, and other languages can be made available upon request. - -## Running Integration tests - -The GitHub `master` and branch builds will build the core Airbyte infrastructure \(scheduler, ui, etc\) as well as the images for all connectors. Integration tests \(tests that run a connector's image against an external resource\) can be run one of three ways. - -### 1. Local iteration - -First, you can run the image locally. Connectors should have instructions in the connector's README on how to create or pull credentials necessary for the test. Also, during local development, there is usually a `main` entrypoint for Java integrations or `main_dev.py` for Python integrations that let you run your connector without containerization, which is fastest for iteration. - -### 2. Requesting GitHub PR Integration Test Runs - -:::caution - -This option is not available to PRs from forks, so it is effectively limited to Airbyte employees. - -::: - -If you don't want to handle secrets, you're making a relatively minor change, or you want to ensure the connector's integration test will run remotely, you should request builds on GitHub. You can request an integration test run by creating a comment with a slash command. - -Here are some example commands: - -1. 
`/test connector=all` - Runs integration tests for all connectors in a single GitHub workflow. Some of our integration tests interact with rate-limited resources, so please use this judiciously. -2. `/test connector=source-sendgrid` - Runs integration tests for a single connector on the latest PR commit. -3. `/test connector=connectors/source-sendgrid` - Runs integration tests for a single connector on the latest PR commit. -4. `/test connector=source-sendgrid ref=master` - Runs integration tests for a single connector on a different branch. -5. `/test connector=source-sendgrid ref=d5c53102` - Runs integration tests for a single connector on a specific commit. - -A command dispatcher GitHub workflow will launch on comment submission. This dispatcher will add an :eyes: reaction to the comment when it starts processing. If there is an error dispatching your request, an error will be appended to your comment. If it launches the test run successfully, a :rocket: reaction will appear on your comment. - -Once the integration test workflow launches, it will append a link to the workflow at the end of the comment. A success or failure response will also be added upon workflow completion. - -Integration tests can also be manually requested by clicking "[Run workflow](https://github.com/airbytehq/airbyte/actions?query=workflow%3Aintegration-test)" and specifying the connector and GitHub ref. - -### 3. Requesting GitHub PR publishing Docker Images - -In order for users to reference the new versions of a connector, it needs to be published and available in the [dockerhub](https://hub.docker.com/r/airbyte/source-sendgrid/tags?page=1&ordering=last_updated) with the latest tag updated. - -As seen previously, GitHub workflow can be triggered by comment submission. Publishing docker images to the dockerhub repository can also be submitted likewise: - -Note that integration tests can be triggered with a slightly different syntax for arguments. This second set is required to distinguish between `connectors` and `bases` folders. Thus, it is also easier to switch between the `/test` and `/publish` commands: - -* `/test connector=connectors/source-sendgrid` - Runs integration tests for a single connector on the latest PR commit. -* `/publish connector=connectors/source-sendgrid` - Publish the docker image if it doesn't exist for a single connector on the latest PR commit. - -### 4. Automatically Run From `master` - -Commits to `master` attempt to launch integration tests. Two workflows launch for each commit: one is a launcher for integration tests, the other is the core build \(the same as the default for PR and branch builds\). - -Since some of our connectors use rate-limited external resources, we don't want to overload from multiple commits to master. If a certain threshold of `master` integration tests are running, the integration test launcher passes but does not launch any tests. This can manually be re-run if necessary. The `master` build also runs every few hours automatically, and will launch the integration tests at that time. diff --git a/docs/connector-development/testing-connectors/standard-source-tests.md b/docs/connector-development/testing-connectors/standard-source-tests.md deleted file mode 100644 index 4f9d1b530756..000000000000 --- a/docs/connector-development/testing-connectors/standard-source-tests.md +++ /dev/null @@ -1,4 +0,0 @@ -# Standard Source Test Suite - -Test methods start with `test`. Other methods are internal helpers in the java class implementing the test suite. 
- diff --git a/docs/connector-development/tutorials/adding-incremental-sync.md b/docs/connector-development/tutorials/adding-incremental-sync.md deleted file mode 100644 index cde50aa6123f..000000000000 --- a/docs/connector-development/tutorials/adding-incremental-sync.md +++ /dev/null @@ -1,244 +0,0 @@ -# Adding Incremental Sync to a Source - -## Overview - -This tutorial will assume that you already have a working source. If you do not, feel free to refer to the [Building a Toy Connector](building-a-python-source.md) tutorial. This tutorial will build directly off the example from that article. We will also assume that you have a basic understanding of how Airbyte's Incremental-Append replication strategy works. We have a brief explanation of it [here](../../understanding-airbyte/connections/incremental-append.md). - -## Update Catalog in `discover` - -First, we need to identify a given stream in the Source as supporting incremental. This information is declared in the catalog that the `discover` method returns. You will notice that the stream object contains a field called `supported_sync_modes`. If we are adding incremental to an existing stream, we just need to add `"incremental"` to that array. This tells Airbyte that this stream can be synced in an incremental fashion. In practice, this will mean that in the UI, a user will have the ability to configure this type of sync. - -In the example we used in the Toy Connector tutorial, the `discover` method would now look like this. Note that "incremental" has been added to the `supported_sync_modes` array. We also set `source_defined_cursor` to `True` and `default_cursor_field` to `["date"]` to declare that the Source knows what field to use for the cursor, in this case the date field, and does not require user input. Nothing else has changed. - -```python -def discover(): - catalog = { - "streams": [{ - "name": "stock_prices", - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": True, - "default_cursor_field": ["date"], - "json_schema": { - "properties": { - "date": { - "type": "string" - }, - "price": { - "type": "number" - }, - "stock_ticker": { - "type": "string" - } - } - } - }] - } - airbyte_message = {"type": "CATALOG", "catalog": catalog} - print(json.dumps(airbyte_message)) -``` - -## Update `read` - -Next we will adapt the `read` method that we wrote previously. We need to change three things. - -First, we need to pass it information about what data was replicated in the previous sync. In Airbyte this is called a `state` object. The structure of the state object is determined by the Source. This means that each Source can construct a state object that makes sense to it and does not need to worry about adhering to any other convention. That being said, a pretty typical structure for a state object is a map of stream name to the last value in the cursor field for that stream. - -In this case we might choose something like this: - -```javascript -{ - "stock_prices": { - "date": "2020-02-01" - } -} -``` - -The second change we need to make to the `read` method is to use the state object so that we only emit new records. - -Lastly, we need to emit an updated state object, so that the next time this Source runs we do not resend messages that we have already sent. - -Here's what our updated `read` method would look like.
- -```python -def read(config, catalog, state): - # Assert required configuration was provided - if "api_key" not in config or "stock_ticker" not in config: - log("Input config must contain the properties 'api_key' and 'stock_ticker'") - sys.exit(1) - - # Find the stock_prices stream if it is present in the input catalog - stock_prices_stream = None - for configured_stream in catalog["streams"]: - if configured_stream["stream"]["name"] == "stock_prices": - stock_prices_stream = configured_stream - - if stock_prices_stream is None: - log("No streams selected") - return - - # By default we fetch stock prices for the 7 day period ending with today - today = date.today() - cursor_value = today.strftime("%Y-%m-%d") - from_day = (today - timedelta(days=7)).strftime("%Y-%m-%d") - - # In case of incremental sync, state should contain the last date when we fetched stock prices - if stock_prices_stream["sync_mode"] == "incremental": - if state and "stock_prices" in state and state["stock_prices"].get("date"): - from_date = datetime.strptime(state["stock_prices"].get("date"), "%Y-%m-%d") - from_day = (from_date + timedelta(days=1)).strftime("%Y-%m-%d") - - # If the state indicates that we have already ran the sync up to cursor_value, we can skip the sync - if cursor_value > from_day: - # If we've made it this far, all the configuration is good and we can pull the market data - response = _call_api(ticker=config["stock_ticker"], token = config["api_key"], from_day=from_day, to_day=cursor_value) - if response.status_code != 200: - # In a real scenario we'd handle this error better :) - log("Failure occurred when calling Polygon.io API") - sys.exit(1) - else: - # Stock prices are returned sorted by date in ascending order - # We want to output them one by one as AirbyteMessages - response_json = response.json() - if response_json["resultsCount"] > 0: - results = response_json["results"] - for result in results: - data = {"date": datetime.fromtimestamp(result["t"]/1000, tz=timezone.utc).strftime("%Y-%m-%d"), "stock_ticker": config["stock_ticker"], "price": result["c"]} - record = {"stream": "stock_prices", "data": data, "emitted_at": int(datetime.now().timestamp()) * 1000} - output_message = {"type": "RECORD", "record": record} - print(json.dumps(output_message)) - - # We update the cursor as we print out the data, so that next time sync starts where we stopped printing out results - if stock_prices_stream["sync_mode"] == "incremental": - cursor_value = datetime.fromtimestamp(results[len(results)-1]["t"]/1000, tz=timezone.utc).strftime("%Y-%m-%d") - - # Emit new state message. - if stock_prices_stream["sync_mode"] == "incremental": - output_message = {"type": "STATE", "state": {"data": {"stock_prices": {"date": cursor_value}}}} - print(json.dumps(output_message)) -``` - -We will also need to parse `state` argument in the `run` method. 
In order to do that, we will modify the code that -calls `read` method from `run` method: -```python - elif command == "read": - config = read_json(get_input_file_path(parsed_args.config)) - configured_catalog = read_json(get_input_file_path(parsed_args.catalog)) - state = None - if parsed_args.state: - state = read_json(get_input_file_path(parsed_args.state)) - - read(config, configured_catalog, state) -``` -Finally, we need to pass more arguments to our `_call_api` method in order to fetch only new prices for incremental sync: -```python -def _call_api(ticker, token, from_day, to_day): - return requests.get(f"https://api.polygon.io/v2/aggs/ticker/{ticker}/range/1/day/{from_day}/{to_day}?sort=asc&limit=120&apiKey={token}") -``` - -You will notice that in order to test these changes you need a `state` object. If you run an incremental sync -without passing a state object, the new code will output a state object that you can use with the next sync. If you run this: -```bash -python source.py read --config secrets/valid_config.json --catalog incremental_configured_catalog.json -``` - -The output will look like following: -```bash -{"type": "RECORD", "record": {"stream": "stock_prices", "data": {"date": "2022-03-07", "stock_ticker": "TSLA", "price": 804.58}, "emitted_at": 1647294277000}} -{"type": "RECORD", "record": {"stream": "stock_prices", "data": {"date": "2022-03-08", "stock_ticker": "TSLA", "price": 824.4}, "emitted_at": 1647294277000}} -{"type": "RECORD", "record": {"stream": "stock_prices", "data": {"date": "2022-03-09", "stock_ticker": "TSLA", "price": 858.97}, "emitted_at": 1647294277000}} -{"type": "RECORD", "record": {"stream": "stock_prices", "data": {"date": "2022-03-10", "stock_ticker": "TSLA", "price": 838.3}, "emitted_at": 1647294277000}} -{"type": "RECORD", "record": {"stream": "stock_prices", "data": {"date": "2022-03-11", "stock_ticker": "TSLA", "price": 795.35}, "emitted_at": 1647294277000}} -{"type": "STATE", "state": {"data": {"stock_prices": {"date": "2022-03-11"}}}} -``` - -Notice that the last line of output is the state object. Copy the state object: -```json -{"stock_prices": {"date": "2022-03-11"}} -``` -and paste it into a new file (i.e. `state.json`). Now you can run an incremental sync: -```bash -python source.py read --config secrets/valid_config.json --catalog incremental_configured_catalog.json --state state.json -``` - -## Run the incremental tests - -The [Source Acceptance Test (SAT) suite](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) also includes test cases to ensure that incremental mode is working correctly. 
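Before wiring up those acceptance tests, you can convince yourself that the cursor handling behaves as expected with a quick standalone snippet. This is only an illustrative sketch that mirrors the date arithmetic in the `read` method above; it is not part of the connector or the test suite:

```python
from datetime import datetime, timedelta

# State saved by a previous sync, in the same shape as the STATE message emitted above.
state = {"stock_prices": {"date": "2022-03-11"}}

# Mirror of the cursor logic in read(): resume one day after the saved cursor value.
from_date = datetime.strptime(state["stock_prices"]["date"], "%Y-%m-%d")
from_day = (from_date + timedelta(days=1)).strftime("%Y-%m-%d")

print(from_day)  # 2022-03-12 -> only prices after the last synced date will be requested
```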
- -To enable these tests, modify the existing `acceptance-test-config.yml` by adding the following: - -```yaml - incremental: - - config_path: "secrets/valid_config.json" - configured_catalog_path: "incremental_configured_catalog.json" - future_state_path: "abnormal_state.json" -``` - -Your full `acceptance-test-config.yml` should look something like this: - -```yaml -# See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) -# for more information about how to configure these tests -connector_image: airbyte/source-stock-ticker-api:dev -tests: - spec: - - spec_path: "spec.json" - config_path: "secrets/valid_config.json" - connection: - - config_path: "secrets/valid_config.json" - status: "succeed" - - config_path: "secrets/invalid_config.json" - status: "failed" - discovery: - - config_path: "secrets/valid_config.json" - basic_read: - - config_path: "secrets/valid_config.json" - configured_catalog_path: "fullrefresh_configured_catalog.json" - empty_streams: [] - full_refresh: - - config_path: "secrets/valid_config.json" - configured_catalog_path: "fullrefresh_configured_catalog.json" - incremental: - - config_path: "secrets/valid_config.json" - configured_catalog_path: "incremental_configured_catalog.json" - future_state_path: "abnormal_state.json" -``` - -You will also need to create an `abnormal_state.json` file with a date in the future, which should not produce any records: - -``` -{"stock_prices": {"date": "2121-01-01"}} -``` - -Run the tests once again: - -```bash -./acceptance-test-docker.sh -``` - -And finally, you should see a successful test summary: - -``` -collecting ... - test_core.py ✓✓✓✓✓✓✓✓✓✓✓✓✓✓✓✓✓✓✓ 86% ████████▋ - test_full_refresh.py ✓ 91% █████████▏ - test_incremental.py ✓✓ 100% ██████████ - - -Results (8.90s): - 22 passed -``` - -That's all you need to do to add incremental functionality to the stock ticker Source. - -You can deploy the new version of your connector simply by running: -```bash -./gradlew clean :airbyte-integrations:connectors:source-stock-ticker-api:build -``` - -Bonus points: go to Airbyte UI and reconfigure the connection to use incremental sync. - -Incremental definitely requires more configurability than full refresh, so your implementation may deviate slightly depending on whether your cursor -field is source defined or user-defined. If you think you are running into one of those cases, check out -our [incremental](../../understanding-airbyte/connections/incremental-append.md) documentation for more information on different types of -configuration. 
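The tutorial refers to `incremental_configured_catalog.json` without showing its contents. If you need a starting point, the following sketch writes out one plausible version based on the stream declared in `discover` above; treat the protocol fields that `read` does not use (such as `destination_sync_mode` and `cursor_field`) as assumptions to adjust for your own connector:

```python
import json

# A guess at what incremental_configured_catalog.json could contain for the
# stock_prices stream; read() only relies on streams[n]["stream"]["name"] and
# streams[n]["sync_mode"], and the remaining fields follow the Airbyte protocol.
configured_catalog = {
    "streams": [
        {
            "stream": {
                "name": "stock_prices",
                "supported_sync_modes": ["full_refresh", "incremental"],
                "source_defined_cursor": True,
                "default_cursor_field": ["date"],
                "json_schema": {
                    "properties": {
                        "date": {"type": "string"},
                        "price": {"type": "number"},
                        "stock_ticker": {"type": "string"},
                    }
                },
            },
            "sync_mode": "incremental",
            "destination_sync_mode": "append",
            "cursor_field": ["date"],
        }
    ]
}

with open("incremental_configured_catalog.json", "w") as f:
    json.dump(configured_catalog, f, indent=2)
```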
- diff --git a/docs/connector-development/tutorials/build-a-connector-the-hard-way.md b/docs/connector-development/tutorials/build-a-connector-the-hard-way.md deleted file mode 100644 index 2f12a6b04da3..000000000000 --- a/docs/connector-development/tutorials/build-a-connector-the-hard-way.md +++ /dev/null @@ -1,1168 +0,0 @@ ---- -description: Building a source connector without using any helpers to learn the Airbyte Specification for sources ---- - -# Building a Source Connector: The Hard Way - -This tutorial walks you through building a simple Airbyte source without using any helpers to demonstrate the following concepts in Action: - -* [The Airbyte Specification](../../understanding-airbyte/airbyte-protocol.md) and the interface implemented by a source connector -* [The AirbyteCatalog](../../understanding-airbyte/beginners-guide-to-catalog.md) -* [Packaging your connector](https://docs.airbyte.io/connector-development#1.-implement-and-package-the-connector) -* [Testing your connector](../testing-connectors/source-acceptance-tests-reference.md) - -At the end of this tutorial, you will have a working source that you will be able to use in the Airbyte UI. - -**This tutorial is meant for those interested in learning how the Airbyte Specification works in detail, not for creating production connectors**. We intentionally don't use helper libraries provided by Airbyte so that this tutorial is self-contained. If you were building a "real" source, you'll want to use the helper modules such as the [Connector Development Kit](https://github.com/airbytehq/airbyte/tree/master/airbyte-cdk/python/docs/tutorials). - -This tutorial can be done entirely on your local workstation. - -### Requirements - -To run this tutorial, you'll need: - -* Docker, Python, and Java with the versions listed in the [tech stack section](../../understanding-airbyte/tech-stack.md). -* The `requests` Python package installed via `pip install requests` \(or `pip3` if `pip` is linked to a Python2 installation on your system\) - -**A note on running Python**: all the commands below assume that `python` points to a version of Python 3.9 or greater. Verify this by running - -```bash -$ python --version -Python 3.9.11 -``` - -On some systems, `python` points to a Python2 installation and `python3` points to Python3. If this is the case on your machine, substitute all `python` commands in this guide with `python3` . Otherwise, make sure to install Python 3 before beginning. - -## Our connector: a stock ticker API - -Our connector will output the daily price of a stock since a given date. We'll leverage the free [Polygon.io API](https://polygon.io/pricing) for this. We'll use Python to implement the connector because its syntax is accessible to most programmers, but the process described here can be applied to any language. - -Here's the outline of what we'll do to build our connector: - -1. Use the Airbyte connector template to bootstrap the connector package -2. Implement the methods required by the Airbyte Specification for our connector: - 1. `spec`: declares the user-provided credentials or configuration needed to run the connector - 2. `check`: tests if the connector can connect with the underlying data source with the user-provided configuration - 3. `discover`: declares the different streams of data that this connector can output - 4. `read`: reads data from the underlying data source \(The stock ticker API\) -3. Package the connector in a Docker image -4. Test the connector using Airbyte's Standard Test Suite -5. 
Use the connector to create a new Connection and run a sync in Airbyte UI - -Once we've completed the above steps, we will have built a functioning connector. Then, we'll add some optional functionality: - -* Support [incremental sync](../../understanding-airbyte/connections/incremental-append.md) -* Add custom integration tests - -### 1. Bootstrap the connector package - -We'll start the process from the Airbyte repository root: - -```bash -$ pwd -/Users/sherifnada/code/airbyte -``` - -First, let's create a new branch: - -```bash -$ git checkout -b $(whoami)/source-connector-tutorial -Switched to a new branch 'sherifnada/source-connector-tutorial' -``` - -Airbyte provides a code generator which bootstraps the scaffolding for our connector. Let's use it by running: - -```bash -$ cd airbyte-integrations/connector-templates/generator -$ ./generate.sh -``` - -We'll select the `generic` template and call the connector `stock-ticker-api`: - -![](../../.gitbook/assets/newsourcetutorial_plop.gif) - -Note: The generic template is very bare. If you are planning on developing a Python source, we recommend using the `python` template. It provides some convenience code to help reduce boilerplate. This tutorial uses the bare-bones version because it makes it easier to see how all the pieces of a connector work together. You can find a walk through on how to build a Python connector here \(**coming soon**\). - -Head to the connector directory and we should see the following files have been generated: - -```bash -$ cd ../../connectors/source-stock-ticker-api -$ ls -Dockerfile README.md acceptance-test-config.yml acceptance-test-docker.sh build.gradle -``` - -We'll use each of these files later. But first, let's write some code! - -### 2. Implement the connector in line with the Airbyte Specification - -In the connector package directory, create a single Python file `source.py` that will hold our implementation: - -```bash -touch source.py -``` - -#### Implement the spec operation - -At this stage in the tutorial, we just want to implement the `spec` operation as described in the [Airbyte Protocol](https://docs.airbyte.io/architecture/airbyte-protocol#spec). This involves a couple of steps: - -1. Decide which inputs we need from the user in order to connect to the stock ticker API \(i.e: the connector's specification\) and encode it as a JSON file. -2. Identify when the connector has been invoked with the `spec` operation and return the specification as an `AirbyteMessage` - -To contact the stock ticker API, we need two things: - -1. Which stock ticker we're interested in -2. The API key to use when contacting the API \(you can obtain a free API token from [Polygon.io](https://polygon.io/dashboard/signup) free plan\) - -For reference, the API docs we'll be using [can be found here](https://polygon.io/docs/stocks/get_v2_aggs_ticker__stocksticker__range__multiplier___timespan___from___to). 
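Before writing any connector code, it can help to hit the endpoint once by hand and look at the shape of the response. Here is a rough sketch using the same aggregates endpoint the connector will call later in this tutorial; substitute your own API key and adjust the ticker or date range as you like:

```python
from datetime import date, timedelta

import requests

# One-off manual call to the Polygon.io aggregates endpoint used throughout this tutorial.
ticker = "TSLA"
token = "put_your_key_here"  # replace with your real Polygon.io API key
to_day = date.today().strftime("%Y-%m-%d")
from_day = (date.today() - timedelta(days=7)).strftime("%Y-%m-%d")

response = requests.get(
    f"https://api.polygon.io/v2/aggs/ticker/{ticker}/range/1/day/{from_day}/{to_day}"
    f"?sort=asc&limit=120&apiKey={token}"
)
print(response.status_code)
print(response.json())
```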
- -Let's create a [JSONSchema](http://json-schema.org/) file `spec.json` encoding these two requirements: - -```javascript -{ - "documentationUrl": "https://polygon.io/docs/stocks/get_v2_aggs_ticker__stocksticker__range__multiplier___timespan___from___to", - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "required": ["stock_ticker", "api_key"], - "additionalProperties": false, - "properties": { - "stock_ticker": { - "type": "string", - "title": "Stock Ticker", - "description": "The stock ticker to track", - "examples": ["AAPL", "TSLA", "AMZN"] - }, - "api_key": { - "title": "API Key", - "type": "string", - "description": "The Polygon.io Stocks API key to use to hit the API.", - "airbyte_secret": true - } - } - } -} -``` - -* `documentationUrl` is the URL that will appear in the UI for the user to gain more info about this connector. Typically this points to `docs.airbyte.io/integrations/sources/source-` but to keep things simple we won't show adding documentation -* `title` is the "human readable" title displayed in the UI. Without this field, The Stock Ticker field will have the title `stock_ticker` in the UI -* `description` will be shown in the Airbyte UI under each field to help the user understand it -* `airbyte_secret` used by Airbyte to determine if the field should be displayed as a password \(e.g: `********`\) in the UI and not readable from the API - -We'll save this file in the root directory of our connector. Now we have the following files: - -```bash -$ ls -1 -Dockerfile -README.md -acceptance-test-config.yml -acceptance-test-docker.sh -build.gradle -spec.json -``` - -Now, let's edit `source.py` to detect if the program was invoked with the `spec` argument and if so, output the connector specification: - -```python -# source.py -import argparse # helps parse commandline arguments -import json -import sys -import os - - -def read_json(filepath): - with open(filepath, "r") as f: - return json.loads(f.read()) - - -def log(message): - log_json = {"type": "LOG", "log": message} - print(json.dumps(log_json)) - - -def spec(): - # Read the file named spec.json from the module directory as a JSON file - current_script_directory = os.path.dirname(os.path.realpath(__file__)) - spec_path = os.path.join(current_script_directory, "spec.json") - specification = read_json(spec_path) - - # form an Airbyte Message containing the spec and print it to stdout - airbyte_message = {"type": "SPEC", "spec": specification} - # json.dumps converts the JSON (Python dict) to a string - print(json.dumps(airbyte_message)) - - -def run(args): - parent_parser = argparse.ArgumentParser(add_help=False) - main_parser = argparse.ArgumentParser() - subparsers = main_parser.add_subparsers(title="commands", dest="command") - - # Accept the spec command - subparsers.add_parser("spec", help="outputs the json configuration specification", parents=[parent_parser]) - - parsed_args = main_parser.parse_args(args) - command = parsed_args.command - - if command == "spec": - spec() - else: - # If we don't recognize the command log the problem and exit with an error code greater than 0 to indicate the process - # had a failure - log("Invalid command. Allowable commands: [spec]") - sys.exit(1) - - # A zero exit code means the process successfully completed - sys.exit(0) - - -def main(): - arguments = sys.argv[1:] - run(arguments) - - -if __name__ == "__main__": - main() -``` - -Some notes on the above code: - -1. 
As described in the [specification](https://docs.airbyte.io/architecture/airbyte-protocol#key-takeaways), Airbyte connectors are CLIs which communicate via stdout, so the output of the command is simply a JSON string formatted according to the Airbyte Specification. So to "return" a value we use `print` to output the return value to stdout -2. All Airbyte commands can output log messages that take the form `{"type":"LOG", "log":"message"}`, so we create a helper method `log(message)` to allow logging - -Now if we run `python source.py spec` we should see the specification printed out: - -```bash -python source.py spec -{"type": "SPEC", "spec": {"documentationUrl": "https://polygon.io/docs/stocks/get_v2_aggs_ticker__stocksticker__range__multiplier___timespan___from___to", "connectionSpecification": {"$schema": "http://json-schema.org/draft-07/schema#", "type": "object", "required": ["stock_ticker", "api_key"], "additionalProperties": false, "properties": {"stock_ticker": {"type": "string", "title": "Stock Ticker", "description": "The stock ticker to track", "examples": ["AAPL", "TSLA", "AMZN"]}, "api_key": {"type": "string", "description": "The Polygon.io Stocks API key to use to hit the API.", "airbyte_secret": true}}}}} -``` - -We've implemented the first command! Three more and we'll have a working connector. - -#### Implementing check connection - -The second command to implement is the [check operation](https://docs.airbyte.io/architecture/airbyte-protocol#key-takeaways) `check --config `, which tells the user whether a config file they gave us is correct. In our case, "correct" means they input a valid stock ticker and a correct API key like we declare via the `spec` operation. - -To achieve this, we'll: - -1. Create valid and invalid configuration files to test the success and failure cases with our connector. We'll place config files in the `secrets/` directory which is gitignored everywhere in the Airbyte monorepo by default to avoid accidentally checking in API keys -2. Add a `check` method which calls the Polygon.io API to verify if the provided token & stock ticker are correct and output the correct airbyte message -3. Extend the argument parser to recognize the `check --config ` command and call the `check` method when the `check` command is invoked - -Let's first add the configuration files: - -```bash -$ mkdir secrets -$ echo '{"api_key": "put_your_key_here", "stock_ticker": "TSLA"}' > secrets/valid_config.json -$ echo '{"api_key": "not_a_real_key", "stock_ticker": "TSLA"}' > secrets/invalid_config.json -``` - -Make sure to add your actual API key instead of the placeholder value `` when following the tutorial. 
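If you'd like a quick way to confirm the files are well formed before wiring up `check`, a small throwaway script (not part of the connector) can verify that both parse as JSON and contain the two properties `spec.json` declares as required:

```python
# Optional local check: both config files should be valid JSON and contain the
# properties declared as required in spec.json.
import json

for path in ["secrets/valid_config.json", "secrets/invalid_config.json"]:
    with open(path) as f:
        config = json.load(f)
    missing = {"api_key", "stock_ticker"} - config.keys()
    if missing:
        print(f"{path} is missing required properties: {missing}")
    else:
        print(f"{path}: OK")
```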
Then we'll add the `check` method:

```python
import requests
from datetime import date
from datetime import timedelta


def _call_api(ticker, token):
    today = date.today()
    to_day = today.strftime("%Y-%m-%d")
    from_day = (today - timedelta(days=7)).strftime("%Y-%m-%d")
    return requests.get(f"https://api.polygon.io/v2/aggs/ticker/{ticker}/range/1/day/{from_day}/{to_day}?sort=asc&limit=120&apiKey={token}")


def check(config):
    # Validate input configuration by attempting to get the daily closing prices of the input stock ticker
    response = _call_api(ticker=config["stock_ticker"], token=config["api_key"])
    if response.status_code == 200:
        result = {"status": "SUCCEEDED"}
    elif response.status_code == 403:
        # HTTP code 403 means authorization failed so the API key is incorrect
        result = {"status": "FAILED", "message": "API Key is incorrect."}
    else:
        result = {"status": "FAILED", "message": "Input configuration is incorrect. Please verify the input stock ticker and API key."}

    output_message = {"type": "CONNECTION_STATUS", "connectionStatus": result}
    print(json.dumps(output_message))
```

Lastly, we'll extend the `run` method to accept the `check` command and call the `check` method. First we'll add a helper method for reading input:

```python
def get_input_file_path(path):
    if os.path.isabs(path):
        return path
    else:
        return os.path.join(os.getcwd(), path)
```

In Airbyte, the contract for input files is that they will be available in the current working directory if they are not provided as an absolute path. This method helps us achieve that.

We also need to extend the arguments parser by adding the following two blocks to the `run` method:

```python
    # Accept the check command
    check_parser = subparsers.add_parser("check", help="checks the config used to connect", parents=[parent_parser])
    required_check_parser = check_parser.add_argument_group("required named arguments")
    required_check_parser.add_argument("--config", type=str, required=True, help="path to the json configuration file")
```

and

```python
elif command == "check":
    config_file_path = get_input_file_path(parsed_args.config)
    config = read_json(config_file_path)
    check(config)
```

This results in the following `run` method:

```python
def run(args):
    parent_parser = argparse.ArgumentParser(add_help=False)
    main_parser = argparse.ArgumentParser()
    subparsers = main_parser.add_subparsers(title="commands", dest="command")

    # Accept the spec command
    subparsers.add_parser("spec", help="outputs the json configuration specification", parents=[parent_parser])

    # Accept the check command
    check_parser = subparsers.add_parser("check", help="checks the config used to connect", parents=[parent_parser])
    required_check_parser = check_parser.add_argument_group("required named arguments")
    required_check_parser.add_argument("--config", type=str, required=True, help="path to the json configuration file")

    parsed_args = main_parser.parse_args(args)
    command = parsed_args.command

    if command == "spec":
        spec()
    elif command == "check":
        config_file_path = get_input_file_path(parsed_args.config)
        config = read_json(config_file_path)
        check(config)
    else:
        # If we don't recognize the command log the problem and exit with an error code greater than 0 to indicate the process
        # had a failure
        log("Invalid command. Allowable commands: [spec, check]")
        sys.exit(1)

    # A zero exit code means the process successfully completed
    sys.exit(0)
```

And that should be it.
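If you'd rather not spend real API calls while iterating on `check`, you can exercise both branches with the standard library's `unittest.mock`. The script below is a hypothetical throwaway helper (not part of the connector or of the acceptance tests we'll run later) and assumes it lives next to `source.py`:

```python
# test_check_locally.py: run with `python test_check_locally.py`.
# It stubs out the HTTP call so no real Polygon.io key is needed.
import io
import json
from contextlib import redirect_stdout
from unittest import mock

import source  # the source.py module we are writing in this tutorial


def check_with_status(status_code):
    fake_response = mock.Mock(status_code=status_code)
    buffer = io.StringIO()
    # Replace the helper that performs the HTTP request and capture what check() prints
    with mock.patch("source._call_api", return_value=fake_response), redirect_stdout(buffer):
        source.check({"stock_ticker": "TSLA", "api_key": "fake_key"})
    return json.loads(buffer.getvalue())


print(check_with_status(200)["connectionStatus"]["status"])  # expected: SUCCEEDED
print(check_with_status(403)["connectionStatus"]["status"])  # expected: FAILED
```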
Let's test our new method: - -```bash -$ python source.py check --config secrets/valid_config.json -{'type': 'CONNECTION_STATUS', 'connectionStatus': {'status': 'SUCCEEDED'}} -$ python source.py check --config secrets/invalid_config.json -{'type': 'CONNECTION_STATUS', 'connectionStatus': {'status': 'FAILED', 'message': 'API Key is incorrect.'}} -``` - -Our connector is able to detect valid and invalid configs correctly. Two methods down, two more to go! - -#### Implementing Discover - -The `discover` command outputs a Catalog, a struct that declares the Streams and Fields \(Airbyte's equivalents of tables and columns\) output by the connector. It also includes metadata around which features a connector supports \(e.g. which sync modes\). In other words it describes what data is available in the source. If you'd like to read a bit more about this concept check out our [Beginner's Guide to the Airbyte Catalog](../../understanding-airbyte/beginners-guide-to-catalog.md) or for a more detailed treatment read the [Airbyte Specification](../../understanding-airbyte/airbyte-protocol.md). - -The data output by this connector will be structured in a very simple way. This connector outputs records belonging to exactly one Stream \(table\). Each record contains three Fields \(columns\): `date`, `price`, and `stock_ticker`, corresponding to the price of a stock on a given day. - -To implement `discover`, we'll: - -1. Add a method `discover` in `source.py` which outputs the Catalog. To better understand what a catalog is, check out our [Beginner's Guide to the AirbyteCatalog](../../understanding-airbyte/beginners-guide-to-catalog.md) -2. Extend the arguments parser to use detect the `discover --config ` command and call the `discover` method - -Let's implement `discover` by adding the following in `source.py`: - -```python -def discover(): - catalog = { - "streams": [{ - "name": "stock_prices", - "supported_sync_modes": ["full_refresh"], - "json_schema": { - "properties": { - "date": { - "type": "string" - }, - "price": { - "type": "number" - }, - "stock_ticker": { - "type": "string" - } - } - } - }] - } - airbyte_message = {"type": "CATALOG", "catalog": catalog} - print(json.dumps(airbyte_message)) -``` - -Note that we describe the schema of the output stream using [JSONSchema](http://json-schema.org/). - -Then we'll extend the arguments parser by adding the following blocks to the `run` method: - -```python -# Accept the discover command -discover_parser = subparsers.add_parser("discover", help="outputs a catalog describing the source's schema", parents=[parent_parser]) -required_discover_parser = discover_parser.add_argument_group("required named arguments") -required_discover_parser.add_argument("--config", type=str, required=True, help="path to the json configuration file") -``` - -and - -```python -elif command == "discover": - discover() -``` - -You may be wondering why `config` is a required input to `discover` if it's not used. This is done for consistency: the Airbyte Specification requires `--config` as an input to `discover` because many sources require it \(e.g: to discover the tables available in a Postgres database, you must supply a password\). So instead of guessing whether the flag is required depending on the connector, we always assume it is required, and the connector can choose whether to use it. 
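One practical note: every command we implement emits single-line JSON messages on stdout, which gets hard to read as the catalog grows. If you find yourself squinting at the output, a small optional helper like the following (hypothetical, not part of the connector) pretty-prints whatever you pipe into it:

```python
# pretty_print.py: optional helper for inspecting connector output, e.g.
#   python source.py discover --config secrets/valid_config.json | python pretty_print.py
import json
import sys

for line in sys.stdin:
    line = line.strip()
    if line:
        print(json.dumps(json.loads(line), indent=2))
```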
- -The full run method is now below: - -```python -def run(args): - parent_parser = argparse.ArgumentParser(add_help=False) - main_parser = argparse.ArgumentParser() - subparsers = main_parser.add_subparsers(title="commands", dest="command") - - # Accept the spec command - subparsers.add_parser("spec", help="outputs the json configuration specification", parents=[parent_parser]) - - # Accept the check command - check_parser = subparsers.add_parser("check", help="checks the config used to connect", parents=[parent_parser]) - required_check_parser = check_parser.add_argument_group("required named arguments") - required_check_parser.add_argument("--config", type=str, required=True, help="path to the json configuration file") - - # Accept the discover command - discover_parser = subparsers.add_parser("discover", help="outputs a catalog describing the source's schema", parents=[parent_parser]) - required_discover_parser = discover_parser.add_argument_group("required named arguments") - required_discover_parser.add_argument("--config", type=str, required=True, help="path to the json configuration file") - - parsed_args = main_parser.parse_args(args) - command = parsed_args.command - - if command == "spec": - spec() - elif command == "check": - config_file_path = get_input_file_path(parsed_args.config) - config = read_json(config_file_path) - check(config) - elif command == "discover": - discover() - else: - # If we don't recognize the command log the problem and exit with an error code greater than 0 to indicate the process - # had a failure - log("Invalid command. Allowable commands: [spec, check, discover]") - sys.exit(1) - - # A zero exit code means the process successfully completed - sys.exit(0) -``` - -Let's test our new command: - -```bash -$ python source.py discover --config secrets/valid_config.json -{"type": "CATALOG", "catalog": {"streams": [{"name": "stock_prices", "supported_sync_modes": ["full_refresh"], "json_schema": {"properties": {"date": {"type": "string"}, "price": {"type": "number"}, "stock_ticker": {"type": "string"}}}}]}} -``` - -With that, we're done implementing the `discover` command. - -#### Implementing the read operation - -We've done a lot so far, but a connector ultimately exists to read data! This is where the [`read` command](https://docs.airbyte.io/architecture/airbyte-protocol#read) comes in. The format of the command is: - -```bash -python source.py read --config --catalog [--state ] -``` - -Each of these are described in the Airbyte Specification in detail, but we'll give a quick description of the two options we haven't seen so far: - -* `--catalog` points to a Configured Catalog. The Configured Catalog contains the contents for the Catalog \(remember the Catalog we output from discover?\). It also contains some configuration information that describes how the data will by replicated. For example, we had `supported_sync_modes` in the Catalog. In the Configured Catalog, we select which of the `supported_sync_modes` we want to use by specifying the `sync_mode` field. \(This is the most complicated concept when working Airbyte, so if it is still not making sense that's okay for now. If you're just dying to understand how the Configured Catalog works checkout the [Beginner's Guide to the Airbyte Catalog](../../understanding-airbyte/beginners-guide-to-catalog.md)\) -* `--state` points to a state file. 
The state file is only relevant when some Streams are synced with the sync mode `incremental`, so we'll cover the state file in more detail in the incremental section below - -For our connector, the contents of those two files should be very unsurprising: the connector only supports one Stream, `stock_prices`, so we'd expect the input catalog to contain that stream configured to sync in full refresh. Since our connector doesn't support incremental sync \(yet!\) we'll ignore the state option for now. - -To read data in our connector, we'll: - -1. Create a configured catalog which tells our connector that we want to sync the `stock_prices` stream -2. Implement a method `read` in `source.py`. For now we'll always read the last 7 days of a stock price's data -3. Extend the arguments parser to recognize the `read` command and its arguments - -First, let's create a configured catalog `fullrefresh_configured_catalog.json` to use as test input for the read operation: - -```javascript -{ - "streams": [ - { - "stream": { - "name": "stock_prices", - "supported_sync_modes": [ - "full_refresh" - ], - "json_schema": { - "properties": { - "date": { - "type": "string" - }, - "price": { - "type": "number" - }, - "stock_ticker": { - "type": "string" - } - } - } - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" - } - ] -} - -``` - -Then we'll define the `read` method in `source.py`: - -```python -import datetime -from datetime import date -from datetime import timedelta - -def read(config, catalog): - # Assert required configuration was provided - if "api_key" not in config or "stock_ticker" not in config: - log("Input config must contain the properties 'api_key' and 'stock_ticker'") - sys.exit(1) - - # Find the stock_prices stream if it is present in the input catalog - stock_prices_stream = None - for configured_stream in catalog["streams"]: - if configured_stream["stream"]["name"] == "stock_prices": - stock_prices_stream = configured_stream - - if stock_prices_stream is None: - log("No streams selected") - return - - # We only support full_refresh at the moment, so verify the user didn't ask for another sync mode - if stock_prices_stream["sync_mode"] != "full_refresh": - log("This connector only supports full refresh syncs! (for now)") - sys.exit(1) - - # If we've made it this far, all the configuration is good and we can pull the last 7 days of market data - response = _call_api(ticker=config["stock_ticker"], token = config["api_key"]) - if response.status_code != 200: - # In a real scenario we'd handle this error better :) - log("Failure occurred when calling Polygon.io API") - sys.exit(1) - else: - # Stock prices are returned sorted by date in ascending order - # We want to output them one by one as AirbyteMessages - results = response.json()["results"] - for result in results: - data = {"date": date.fromtimestamp(result["t"]/1000).isoformat(), "stock_ticker": config["stock_ticker"], "price": result["c"]} - record = {"stream": "stock_prices", "data": data, "emitted_at": int(datetime.datetime.now().timestamp()) * 1000} - output_message = {"type": "RECORD", "record": record} - print(json.dumps(output_message)) -``` - -After doing some input validation, the code above calls the API to obtain daily prices for the input stock ticker, then outputs the prices. As always, our output is formatted according to the Airbyte Specification. 
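One detail worth calling out: each element of the API's `results` array carries a millisecond timestamp in `t` and the day's closing price in `c`, and the loop above reshapes that into our stream's schema. A minimal illustration, using a made-up result element:

```python
# Illustration only: how one element of response.json()["results"] (hypothetical values)
# becomes the "data" portion of a RECORD message.
from datetime import date

api_result = {"t": 1608249600000, "c": 633.25}  # made-up millisecond timestamp and closing price
data = {
    "date": date.fromtimestamp(api_result["t"] / 1000).isoformat(),  # ms since epoch -> YYYY-MM-DD
    "stock_ticker": "TSLA",
    "price": api_result["c"],
}
print(data)  # e.g. {'date': '2020-12-18', 'stock_ticker': 'TSLA', 'price': 633.25} (date depends on your timezone)
```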
Let's update our args parser with the following blocks: - -```python -# Accept the read command -read_parser = subparsers.add_parser("read", help="reads the source and outputs messages to STDOUT", parents=[parent_parser]) -read_parser.add_argument("--state", type=str, required=False, help="path to the json-encoded state file") -required_read_parser = read_parser.add_argument_group("required named arguments") -required_read_parser.add_argument("--config", type=str, required=True, help="path to the json configuration file") -required_read_parser.add_argument( - "--catalog", type=str, required=True, help="path to the catalog used to determine which data to read" -) -``` - -and: - -```python -elif command == "read": - config = read_json(get_input_file_path(parsed_args.config)) - configured_catalog = read_json(get_input_file_path(parsed_args.catalog)) - read(config, configured_catalog) -``` - -this yields the following `run` method: - -```python -def run(args): - parent_parser = argparse.ArgumentParser(add_help=False) - main_parser = argparse.ArgumentParser() - subparsers = main_parser.add_subparsers(title="commands", dest="command") - - # Accept the spec command - subparsers.add_parser("spec", help="outputs the json configuration specification", parents=[parent_parser]) - - # Accept the check command - check_parser = subparsers.add_parser("check", help="checks the config used to connect", parents=[parent_parser]) - required_check_parser = check_parser.add_argument_group("required named arguments") - required_check_parser.add_argument("--config", type=str, required=True, help="path to the json configuration file") - - # Accept the discover command - discover_parser = subparsers.add_parser("discover", help="outputs a catalog describing the source's schema", parents=[parent_parser]) - required_discover_parser = discover_parser.add_argument_group("required named arguments") - required_discover_parser.add_argument("--config", type=str, required=True, help="path to the json configuration file") - - # Accept the read command - read_parser = subparsers.add_parser("read", help="reads the source and outputs messages to STDOUT", parents=[parent_parser]) - read_parser.add_argument("--state", type=str, required=False, help="path to the json-encoded state file") - required_read_parser = read_parser.add_argument_group("required named arguments") - required_read_parser.add_argument("--config", type=str, required=True, help="path to the json configuration file") - required_read_parser.add_argument( - "--catalog", type=str, required=True, help="path to the catalog used to determine which data to read" - ) - - parsed_args = main_parser.parse_args(args) - command = parsed_args.command - - if command == "spec": - spec() - elif command == "check": - config_file_path = get_input_file_path(parsed_args.config) - config = read_json(config_file_path) - check(config) - elif command == "discover": - discover() - elif command == "read": - config = read_json(get_input_file_path(parsed_args.config)) - configured_catalog = read_json(get_input_file_path(parsed_args.catalog)) - read(config, configured_catalog) - else: - # If we don't recognize the command log the problem and exit with an error code greater than 0 to indicate the process - # had a failure - log("Invalid command. 
Allowable commands: [spec, check, discover, read]") - sys.exit(1) - - # A zero exit code means the process successfully completed - sys.exit(0) -``` - -Let's test out our new command: - -```bash -$ python source.py read --config secrets/valid_config.json --catalog fullrefresh_configured_catalog.json -{'type': 'RECORD', 'record': {'stream': 'stock_prices', 'data': {'date': '2020-12-15', 'stock_ticker': 'TSLA', 'price': 633.25}, 'emitted_at': 1608626365000}} -{'type': 'RECORD', 'record': {'stream': 'stock_prices', 'data': {'date': '2020-12-16', 'stock_ticker': 'TSLA', 'price': 622.77}, 'emitted_at': 1608626365000}} -{'type': 'RECORD', 'record': {'stream': 'stock_prices', 'data': {'date': '2020-12-17', 'stock_ticker': 'TSLA', 'price': 655.9}, 'emitted_at': 1608626365000}} -{'type': 'RECORD', 'record': {'stream': 'stock_prices', 'data': {'date': '2020-12-18', 'stock_ticker': 'TSLA', 'price': 695}, 'emitted_at': 1608626365000}} -{'type': 'RECORD', 'record': {'stream': 'stock_prices', 'data': {'date': '2020-12-21', 'stock_ticker': 'TSLA', 'price': 649.86}, 'emitted_at': 1608626365000}} -``` - -With this method, we now have a fully functioning connector! Let's pat ourselves on the back for getting there. - -For reference, the full `source.py` file now looks like this: - -```python -# MIT License -# -# Copyright (c) 2020 Airbyte -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. - - -import argparse # helps parse commandline arguments -import json -import sys -import os -import requests -import datetime -from datetime import date -from datetime import timedelta - -def read(config, catalog): - # Assert required configuration was provided - if "api_key" not in config or "stock_ticker" not in config: - log("Input config must contain the properties 'api_key' and 'stock_ticker'") - sys.exit(1) - - # Find the stock_prices stream if it is present in the input catalog - stock_prices_stream = None - for configured_stream in catalog["streams"]: - if configured_stream["stream"]["name"] == "stock_prices": - stock_prices_stream = configured_stream - - if stock_prices_stream is None: - log("No streams selected") - return - - # We only support full_refresh at the moment, so verify the user didn't ask for another sync mode - if stock_prices_stream["sync_mode"] != "full_refresh": - log("This connector only supports full refresh syncs! 
(for now)") - sys.exit(1) - - # If we've made it this far, all the configuration is good and we can pull the last 7 days of market data - response = _call_api(ticker=config["stock_ticker"], token = config["api_key"]) - if response.status_code != 200: - # In a real scenario we'd handle this error better :) - log("Failure occurred when calling Polygon.io API") - sys.exit(1) - else: - # Stock prices are returned sorted by date in ascending order - # We want to output them one by one as AirbyteMessages - results = response.json()["results"] - for result in results: - data = {"date": date.fromtimestamp(result["t"]/1000).isoformat(), "stock_ticker": config["stock_ticker"], "price": result["c"]} - record = {"stream": "stock_prices", "data": data, "emitted_at": int(datetime.datetime.now().timestamp()) * 1000} - output_message = {"type": "RECORD", "record": record} - print(json.dumps(output_message)) - - -def read_json(filepath): - with open(filepath, "r") as f: - return json.loads(f.read()) - - -def _call_api(ticker, token): - today = date.today() - to_day = today.strftime("%Y-%m-%d") - from_day = (today - timedelta(days=7)).strftime("%Y-%m-%d") - return requests.get(f"https://api.polygon.io/v2/aggs/ticker/{ticker}/range/1/day/{from_day}/{to_day}?sort=asc&limit=120&apiKey={token}") - - -def check(config): - # Assert required configuration was provided - if "api_key" not in config or "stock_ticker" not in config: - log("Input config must contain the properties 'api_key' and 'stock_ticker'") - sys.exit(1) - else: - # Validate input configuration by attempting to get the daily closing prices of the input stock ticker - response = _call_api(ticker=config["stock_ticker"], token=config["api_key"]) - if response.status_code == 200: - result = {"status": "SUCCEEDED"} - elif response.status_code == 403: - # HTTP code 403 means authorization failed so the API key is incorrect - result = {"status": "FAILED", "message": "API Key is incorrect."} - else: - # Consider any other code a "generic" failure and tell the user to make sure their config is correct. - result = {"status": "FAILED", "message": "Input configuration is incorrect. 
Please verify the input stock ticker and API key."} - - # Format the result of the check operation according to the Airbyte Specification - output_message = {"type": "CONNECTION_STATUS", "connectionStatus": result} - print(json.dumps(output_message)) - - -def log(message): - log_json = {"type": "LOG", "log": message} - print(json.dumps(log_json)) - - -def discover(): - catalog = { - "streams": [{ - "name": "stock_prices", - "supported_sync_modes": ["full_refresh"], - "json_schema": { - "properties": { - "date": { - "type": "string" - }, - "price": { - "type": "number" - }, - "stock_ticker": { - "type": "string" - } - } - } - }] - } - airbyte_message = {"type": "CATALOG", "catalog": catalog} - print(json.dumps(airbyte_message)) - - -def get_input_file_path(path): - if os.path.isabs(path): - return path - else: - return os.path.join(os.getcwd(), path) - - -def spec(): - # Read the file named spec.json from the module directory as a JSON file - current_script_directory = os.path.dirname(os.path.realpath(__file__)) - spec_path = os.path.join(current_script_directory, "spec.json") - specification = read_json(spec_path) - - # form an Airbyte Message containing the spec and print it to stdout - airbyte_message = {"type": "SPEC", "spec": specification} - # json.dumps converts the JSON (Python dict) to a string - print(json.dumps(airbyte_message)) - - -def run(args): - parent_parser = argparse.ArgumentParser(add_help=False) - main_parser = argparse.ArgumentParser() - subparsers = main_parser.add_subparsers(title="commands", dest="command") - - # Accept the spec command - subparsers.add_parser("spec", help="outputs the json configuration specification", parents=[parent_parser]) - - # Accept the check command - check_parser = subparsers.add_parser("check", help="checks the config used to connect", parents=[parent_parser]) - required_check_parser = check_parser.add_argument_group("required named arguments") - required_check_parser.add_argument("--config", type=str, required=True, help="path to the json configuration file") - - # Accept the discover command - discover_parser = subparsers.add_parser("discover", help="outputs a catalog describing the source's schema", parents=[parent_parser]) - required_discover_parser = discover_parser.add_argument_group("required named arguments") - required_discover_parser.add_argument("--config", type=str, required=True, help="path to the json configuration file") - - # Accept the read command - read_parser = subparsers.add_parser("read", help="reads the source and outputs messages to STDOUT", parents=[parent_parser]) - read_parser.add_argument("--state", type=str, required=False, help="path to the json-encoded state file") - required_read_parser = read_parser.add_argument_group("required named arguments") - required_read_parser.add_argument("--config", type=str, required=True, help="path to the json configuration file") - required_read_parser.add_argument( - "--catalog", type=str, required=True, help="path to the catalog used to determine which data to read" - ) - - parsed_args = main_parser.parse_args(args) - command = parsed_args.command - - if command == "spec": - spec() - elif command == "check": - config_file_path = get_input_file_path(parsed_args.config) - config = read_json(config_file_path) - check(config) - elif command == "discover": - discover() - elif command == "read": - config = read_json(get_input_file_path(parsed_args.config)) - configured_catalog = read_json(get_input_file_path(parsed_args.catalog)) - read(config, configured_catalog) - else: - # If 
we don't recognize the command log the problem and exit with an error code greater than 0 to indicate the process
        # had a failure
        log("Invalid command. Allowable commands: [spec, check, discover, read]")
        sys.exit(1)

    # A zero exit code means the process successfully completed
    sys.exit(0)


def main():
    arguments = sys.argv[1:]
    run(arguments)


if __name__ == "__main__":
    main()
```

A full connector in less than 200 lines of code. Not bad! We're now ready to package and test our connector, then use it in the Airbyte UI.

### 3. Package the connector in a Docker image

Our connector is very lightweight, so the Dockerfile needed to run it is very light as well. We'll edit the autogenerated `Dockerfile` so that its contents are as follows:

```Dockerfile
FROM python:3.9-slim

# We change to a directory unique to us
WORKDIR /airbyte/integration_code
# Install any needed Python dependencies
RUN pip install requests
# Copy source files
COPY source.py .
COPY spec.json .

# When this container is invoked, append the input arguments to `python source.py`
ENTRYPOINT ["python", "/airbyte/integration_code/source.py"]

# Airbyte's build system uses these labels to know what to name and tag the docker images produced by this Dockerfile.
LABEL io.airbyte.name=airbyte/source-stock-ticker-api
LABEL io.airbyte.version=0.1.0

# In order to launch a source on Kubernetes in a pod, we need to be able to wrap the entrypoint.
# The source connector must specify its entrypoint in the AIRBYTE_ENTRYPOINT variable.
ENV AIRBYTE_ENTRYPOINT='python /airbyte/integration_code/source.py'
```

Once we save the `Dockerfile`, we can build the image by running:

```bash
docker build . -t airbyte/source-stock-ticker-api:dev
```

Then we can run the image using:

```bash
docker run airbyte/source-stock-ticker-api:dev
```

To run any of our commands, we'll need to mount all the inputs into the Docker container first, then refer to their _mounted_ paths when invoking the connector.
For example, we'd run `check` or `read` as follows: - -```bash -$ docker run airbyte/source-stock-ticker-api:dev spec -{"type": "SPEC", "spec": {"documentationUrl": "https://polygon.io/docs/stocks/get_v2_aggs_ticker__stocksticker__range__multiplier___timespan___from___to", "connectionSpecification": {"$schema": "http://json-schema.org/draft-07/schema#", "type": "object", "required": ["stock_ticker", "api_key"], "additionalProperties": false, "properties": {"stock_ticker": {"type": "string", "title": "Stock Ticker", "description": "The stock ticker to track", "examples": ["AAPL", "TSLA", "AMZN"]}, "api_key": {"type": "string", "description": "The Polygon.io Stocks API key to use to hit the API.", "airbyte_secret": true}}}}} - -$ docker run -v $(pwd)/secrets/valid_config.json:/data/config.json airbyte/source-stock-ticker-api:dev check --config /data/config.json -{'type': 'CONNECTION_STATUS', 'connectionStatus': {'status': 'SUCCEEDED'}} - -$ docker run -v $(pwd)/secrets/valid_config.json:/data/config.json airbyte/source-stock-ticker-api:dev discover --config /data/config.json -{"type": "CATALOG", "catalog": {"streams": [{"name": "stock_prices", "supported_sync_modes": ["full_refresh"], "json_schema": {"properties": {"date": {"type": "string"}, "price": {"type": "number"}, "stock_ticker": {"type": "string"}}}}]}} - -$ docker run -v $(pwd)/secrets/valid_config.json:/data/config.json -v $(pwd)/fullrefresh_configured_catalog.json:/data/fullrefresh_configured_catalog.json airbyte/source-stock-ticker-api:dev read --config /data/config.json --catalog /data/fullrefresh_configured_catalog.json -{'type': 'RECORD', 'record': {'stream': 'stock_prices', 'data': {'date': '2020-12-15', 'stock_ticker': 'TSLA', 'price': 633.25}, 'emitted_at': 1608628424000}} -{'type': 'RECORD', 'record': {'stream': 'stock_prices', 'data': {'date': '2020-12-16', 'stock_ticker': 'TSLA', 'price': 622.77}, 'emitted_at': 1608628424000}} -{'type': 'RECORD', 'record': {'stream': 'stock_prices', 'data': {'date': '2020-12-17', 'stock_ticker': 'TSLA', 'price': 655.9}, 'emitted_at': 1608628424000}} -{'type': 'RECORD', 'record': {'stream': 'stock_prices', 'data': {'date': '2020-12-18', 'stock_ticker': 'TSLA', 'price': 695}, 'emitted_at': 1608628424000}} -{'type': 'RECORD', 'record': {'stream': 'stock_prices', 'data': {'date': '2020-12-21', 'stock_ticker': 'TSLA', 'price': 649.86}, 'emitted_at': 1608628424000}} -``` - -and with that, we've packaged our connector in a functioning Docker image. The last requirement before calling this connector finished is to pass the [Airbyte Source Acceptance Test suite](../testing-connectors/source-acceptance-tests-reference.md). - -### 4. Test the connector - -The minimum requirement for testing your connector is to pass the [Source Acceptance Test (SAT)](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) suite. SAT is a blackbox test suite containing a number of tests that validate your connector behaves as intended by the Airbyte Specification. You're encouraged to add custom test cases for your connector where it makes sense to do so e.g: to test edge cases that are not covered by the standard suite. But at the very least, you must pass Airbyte's SATs suite. - -The code generator should have already generated a YAML file which configures the test suite. 
In order to run it, modify the `acceptance-test-config.yaml` file to look like this: - - -```yaml -# See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) -# for more information about how to configure these tests -connector_image: airbyte/source-stock-ticker-api:dev -tests: - spec: - - spec_path: "spec.json" - config_path: "secrets/valid_config.json" - connection: - - config_path: "secrets/valid_config.json" - status: "succeed" - - config_path: "secrets/invalid_config.json" - status: "failed" - discovery: - - config_path: "secrets/valid_config.json" - basic_read: - - config_path: "secrets/valid_config.json" - configured_catalog_path: "fullrefresh_configured_catalog.json" - empty_streams: [] - full_refresh: - - config_path: "secrets/valid_config.json" - configured_catalog_path: "fullrefresh_configured_catalog.json" -# incremental: # TODO uncomment this once you implement incremental sync in part 2 of the tutorial -# - config_path: "secrets/config.json" -# configured_catalog_path: "integration_tests/configured_catalog.json" -# future_state_path: "integration_tests/abnormal_state.json" -``` - -Then from the connector module directory run - -```bash -./acceptance-test-docker.sh -``` - -After tests have run, you should see a test summary like: - -```text -collecting ... - test_core.py ✓✓✓✓✓✓✓✓✓✓✓✓✓✓✓✓✓✓✓ 95% █████████▌ - test_full_refresh.py ✓ 100% ██████████ - -================== short test summary info ================== -SKIPPED [1] source_acceptance_test/plugin.py:56: Skipping TestIncremental.test_two_sequential_reads because not found in the config - -Results (8.91s): - 20 passed -``` - -That's it! We've created a fully functioning connector. Now let's get to the exciting part: using it from the Airbyte UI. - -### Use the connector in the Airbyte UI - -Let's recap what we've achieved so far: - -1. Implemented a connector -2. Packaged it in a Docker image -3. Integrated it with the Airbyte Standard Test suite - -To use it from the Airbyte UI, we need to: - -1. Publish our connector's Docker image somewhere accessible by Airbyte Core \(Airbyte's server, scheduler, workers, and webapp infrastructure\) -2. Add the connector via the Airbyte UI and setup a connection from our new connector to a local CSV file for illustration purposes -3. Run a sync and inspect the output - -#### 1. Publish the Docker image - -Since we're running this tutorial locally, Airbyte will have access to any Docker images available to your local `docker` daemon. So all we need to do is build & tag our connector. If you want your connector to be available to everyone using Airbyte, you'll need to publish it to `Dockerhub`. [Open a PR](https://github.com/airbytehq/airbyte) or visit our [Slack](https://slack.airbyte.io) for help with this. - -Airbyte's build system builds and tags your connector's image correctly by default as part of the connector's standard `build` process. **From the Airbyte repo root**, run: - -```bash -./gradlew clean :airbyte-integrations:connectors:source-stock-ticker-api:build -``` - -This is the equivalent of running `docker build . -t airbyte/source-stock-ticker-api:dev` from the connector root, where the tag `airbyte/source-stock-ticker-api` is extracted from the label `LABEL io.airbyte.name` inside your `Dockerfile`. 
- -Verify the image was built by running: - -```bash -$ docker images | head - REPOSITORY TAG IMAGE ID CREATED SIZE - airbyte/source-stock-ticker-api dev 9494ea93b7d0 16 seconds ago 121MB - 8fe5b49f9ae5 3 hours ago 121MB - 4cb00a551b3c 3 hours ago 121MB - 1caf57c72afd 3 hours ago 121MB -``` - -`airbyte/source-stock-ticker-api` was built and tagged with the `dev` tag. Now let's head to the last step. - -#### 2. Add the connector via the Airbyte UI - -If the Airbyte server isn't already running, start it by running **from the Airbyte repository root**: - -```bash -docker-compose up -``` - -When Airbyte server is done starting up, it prints the following banner in the log output \(it can take 10-20 seconds for the server to start\): - -```bash -airbyte-server | 2022-03-11 18:38:33 INFO i.a.s.ServerApp(start):121 - -airbyte-server | ___ _ __ __ -airbyte-server | / | (_)____/ /_ __ __/ /____ -airbyte-server | / /| | / / ___/ __ \/ / / / __/ _ \ -airbyte-server | / ___ |/ / / / /_/ / /_/ / /_/ __/ -airbyte-server | /_/ |_/_/_/ /_.___/\__, /\__/\___/ -airbyte-server | /____/ -airbyte-server | -------------------------------------- -airbyte-server | Now ready at http://localhost:8000/ -airbyte-server | -------------------------------------- -airbyte-server | Version: dev -airbyte-server | -``` - -After you see the above banner printed out in the terminal window where you are running `docker-compose up`, visit [http://localhost:8000](http://localhost:8000) in your browser. - -If this is the first time using the Airbyte UI, then you will be prompted to go through a first-time wizard. To skip it, click the "Skip Onboarding" button. - -In the UI, click the "Settings" button in the left side bar: - -![](../../.gitbook/assets/newsourcetutorial_sidebar_settings.png) - -Then on the Settings page, select Sources - -![](../../.gitbook/assets/newsourcetutorial_settings_page.png) - -Then on the Settings/Sources page, click "+ New Connector" button at the top right: - -![](../../.gitbook/assets/newsourcetutorial_settings_sources_newconnector.png) - -On the modal that pops up, enter the following information then click "Add" - -![](../../.gitbook/assets/newsourcetutorial_new_connector_modal.png) - -After you click "Add", the modal will close and you will be back at the Settings page. -Now click "Sources" in the navigation bar on the left: - -![](../../.gitbook/assets/newsourcetutorial_sources_navbar.png) - -You will be redirected to Sources page, which, if you have not set up any connections, will be empty. -On the Sources page click "+ new source" in the top right corner: - -![](../../.gitbook/assets/newsourcetutorial_sources_page.png) - -A new modal will prompt you for details of the new source. Type "Stock Ticker" in the Name field. -Then, find your connector in the Source type dropdown. We have lots of connectors already, so it might be easier -to find your connector by typing part of its name: - -![](../../.gitbook/assets/newsourcetutorial_find_your_connector.png) - -After you select your connector in the Source type dropdown, the modal will show two more fields: API Key and Stock Ticker. -Remember that `spec.json` file you created at the very beginning of this tutorial? These fields should correspond to the `properties` -section of that file. Copy-paste your Polygon.io API key and a stock ticker into these fields and then click "Set up source" -button at the bottom right of the modal. 
- -![](../../.gitbook/assets/newsourcetutorial_source_config.png) - -Once you click "Set up source", Airbyte will spin up your connector and run "check" method to verify the configuration. -You will see a progress bar briefly and if the configuration is valid, you will see a success message, -the modal will close and you will see your connector on the updated Sources page. - -![](../../.gitbook/assets/newsourcetutorial_sources_stock_ticker.png) - -Next step is to add a destination. On the same page, click "add destination" and then click "+ add a new destination": - -![](../../.gitbook/assets/newsourcetutorial_add_destination_new_destination.png) - -"New destination" wizard will show up. Type a name (e.g. "Local JSON") into the Name field and select "Local JSON" in Destination type drop-down. -After you select the destination type, type `/local/tutorial_json` into Destination path field. -When we run syncs, we'll find the output on our local filesystem in `/tmp/airbyte_local/tutorial_json`. - -Click "Set up destination" at the lower right of the form. - -![](../../.gitbook/assets/newsourcetutorial_add_destination.png) - -After that Airbyte will test the destination and prompt you to configure the connection between Stock Ticker source and Local JSON destination. -Select "Mirror source structure" in the Destination Namespace, check the checkbox next to the stock_prices stream, and click "Set up connection" button at the bottom of the form: - -![](../../.gitbook/assets/newsourcetutorial_configure_connection.png) - -Ta-da! Your connection is now configured to sync once a day. You will see your new connection on the next screen: - -![](../../.gitbook/assets/newsourcetutorial_connection_done.png) - -Airbyte will run the first sync job as soon as your connection is saved. Navigate to "Connections" in the side bar and wait for the first sync to succeed: - -![](../../.gitbook/assets/newsourcetutorial_first_sync.png) - -Let's verify the output. From your shell, run: - -```bash -$ cat /tmp/airbyte_local/tutorial_json/_airbyte_raw_stock_prices.jsonl -{"_airbyte_ab_id":"7383c6c1-783a-4a8a-a39c-3890ab562495","_airbyte_emitted_at":1647026803000,"_airbyte_data":{"date":"2022-03-04","stock_ticker":"TSLA","price":838.29}} -{"_airbyte_ab_id":"cf7dc8d9-1ece-4a40-a7d6-35cae54b94e5","_airbyte_emitted_at":1647026803000,"_airbyte_data":{"date":"2022-03-07","stock_ticker":"TSLA","price":804.58}} -{"_airbyte_ab_id":"da7da131-41d2-4ba7-bba1-1a0a5329a30a","_airbyte_emitted_at":1647026803000,"_airbyte_data":{"date":"2022-03-08","stock_ticker":"TSLA","price":824.4}} -{"_airbyte_ab_id":"20df0d78-5a5e-437b-95d8-aa57cf19fce1","_airbyte_emitted_at":1647026803000,"_airbyte_data":{"date":"2022-03-09","stock_ticker":"TSLA","price":858.97}} -{"_airbyte_ab_id":"0b7a8d33-4500-4a6d-9d74-11716bd22f01","_airbyte_emitted_at":1647026803000,"_airbyte_data":{"date":"2022-03-10","stock_ticker":"TSLA","price":838.3}} -``` - -Congratulations! We've successfully written a fully functioning Airbyte connector. You're an Airbyte contributor now ;\) - -Armed with the knowledge you gained in this guide, here are some places you can go from here: - -1. Implement Incremental Sync for your connector \(described in the sections below\) -2. Implement another connector using the language specific helpers listed below -3. While not required, we love contributions! if you end up creating a new connector, we're here to help you make it available to everyone using Airbyte. 
Remember that you're never expected to maintain a connector by yourself if you merge it to Airbyte -- we're committed to supporting connectors if you can't do it yourself - -## Optional additions - -This section is not yet complete and will be completed soon. Please reach out to us on [Slack](https://slack.airbyte.io) or [Github](https://github.com/airbytehq/airbyte) if you need the information promised by these sections immediately. - -### Incremental sync -Follow the [next tutorial](adding-incremental-sync.md) to implement incremental sync. - -### Connector Development Kit -Like we mention at the beginning of the tutorial, this guide is meant more for understanding than as a blueprint for implementing production connectors. See the [Connector Development Kit](https://github.com/airbytehq/airbyte/tree/master/airbyte-cdk/python/docs/tutorials) for the frameworks you should use to build production-ready connectors. - -### Language specific helpers - * [Building a Python Source](https://docs.airbyte.com/connector-development/tutorials/building-a-python-source) - * [Building a Python Destination](https://docs.airbyte.com/connector-development/tutorials/building-a-python-destination) - * [Building a Java Destination](https://docs.airbyte.com/connector-development/tutorials/building-a-java-destination) diff --git a/docs/connector-development/tutorials/cdk-speedrun-deprecated.md b/docs/connector-development/tutorials/cdk-speedrun-deprecated.md deleted file mode 100644 index 1a86c13c48c8..000000000000 --- a/docs/connector-development/tutorials/cdk-speedrun-deprecated.md +++ /dev/null @@ -1,233 +0,0 @@ -# Python CDK Speedrun: Creating a Source - -## CDK Speedrun \(HTTP API Source Creation [Any%](https://en.wikipedia.org/wiki/Speedrun#:~:text=Any%25%2C%20or%20fastest%20completion%2C,the%20game%20to%20its%20fullest.&text=Specific%20requirements%20for%20a%20100,different%20depending%20on%20the%20game.) Route\) - -This is a blazing fast guide to building an HTTP source connector. Think of it as the TL;DR version of [this tutorial.](cdk-tutorial-python-http/0-getting-started.md) - -## Dependencies - -1. Python >= 3.9 -2. Docker -3. NodeJS - -#### Generate the Template - -```bash -$ cd airbyte-integrations/connector-templates/generator # start from repo root -$ ./generate.sh -``` - -Select the `Python HTTP API Source` and name it `python-http-example`. - -#### Create Dev Environment - -```bash -cd ../../connectors/source-python-http-example -python -m venv .venv # Create a virtual environment in the .venv directory -source .venv/bin/activate -pip install -r requirements.txt -``` - -### Define Connector Inputs - -```bash -cd source_python_http_example -``` - -We're working with the Exchange Rates API, so we need to define our input schema to reflect that. Open the `spec.json` file here and replace it with: - -```javascript -{ - "documentationUrl": "https://docs.airbyte.io/integrations/sources/exchangeratesapi", - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Python Http Example Spec", - "type": "object", - "required": ["start_date", "currency_base"], - "additionalProperties": false, - "properties": { - "start_date": { - "type": "string", - "description": "Start getting data from that date.", - "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}$", - "examples": ["%Y-%m-%d"] - }, - "base": { - "type": "string", - "examples": ["USD", "EUR"], - "description": "ISO reference currency. See here." 
- } - } - } -} -``` - -Ok, let's write a function that checks the inputs we just defined. Nuke the `source.py` file. Now add this code to it. For a crucial time skip, we're going to define all the imports we need in the future here. - -```python -from datetime import datetime, timedelta -from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Tuple - -import requests -from airbyte_cdk.sources import AbstractSource -from airbyte_cdk.sources.streams import Stream -from airbyte_cdk.sources.streams.http import HttpStream -from airbyte_cdk.sources.streams.http.auth import NoAuth - -class SourcePythonHttpExample(AbstractSource): - def check_connection(self, logger, config) -> Tuple[bool, any]: - accepted_currencies = { - "USD", - "JPY", - "BGN", - "CZK", - "DKK", - } # there are more currencies but let's assume these are the only allowed ones - input_currency = config["base"] - if input_currency not in accepted_currencies: - return False, f"Input currency {input_currency} is invalid. Please input one of the following currencies: {accepted_currencies}" - else: - return True, None - - def streams(self, config: Mapping[str, Any]) -> List[Stream]: - # Parse the date from a string into a datetime object. - start_date = datetime.strptime(config["start_date"], "%Y-%m-%d") - - # NoAuth just means there is no authentication required for this API and is included for completeness. - # Skip passing an authenticator if no authentication is required. - # Other authenticators are available for API token-based auth and Oauth2. - auth = NoAuth() - return [ExchangeRates(authenticator=auth, base=config["base"], start_date=start_date)] -``` - -Test it. - -```bash -cd .. -mkdir sample_files -echo '{"start_date": "2021-04-01", "base": "USD"}' > sample_files/config.json -echo '{"start_date": "2021-04-01", "base": "BTC"}' > sample_files/invalid_config.json -python main.py check --config sample_files/config.json -python main.py check --config sample_files/invalid_config.json -``` - -Expected output: - -```text -> python main.py check --config sample_files/config.json -{"type": "CONNECTION_STATUS", "connectionStatus": {"status": "SUCCEEDED"}} - -> python main.py check --config sample_files/invalid_config.json -{"type": "CONNECTION_STATUS", "connectionStatus": {"status": "FAILED", "message": "Input currency BTC is invalid. Please input one of the following currencies: {'DKK', 'USD', 'CZK', 'BGN', 'JPY'}"}} -``` - -### Define your Stream - -In your `source.py` file, add this `ExchangeRates` class. This stream represents an endpoint you want to hit. - -```python -from airbyte_cdk.sources.streams.http import HttpStream - -class ExchangeRates(HttpStream): - url_base = "https://api.exchangeratesapi.io/" - - # Set this as a noop. - primary_key = None - - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - # The API does not offer pagination, so we return None to indicate there are no more pages in the response - return None - - def path( - self, - ) -> str: - return "" # TODO - - def parse_response( - self, - ) -> Iterable[Mapping]: - return None # TODO -``` - -Now download [this file](https://github.com/airbytehq/airbyte/blob/master/airbyte-cdk/python/docs/tutorials/http_api_source_assets/exchange_rates.json). Name it `exchange_rates.json` and place it in `/source_python_http_example/schemas`. It defines your output schema. - -Test your discover function. You should receive a fairly large JSON object in return. 
- -```bash -python main.py discover --config sample_files/config.json -``` - -### Reading Data from the Source - -Update your `ExchangeRates` class to implement the required functions as follows: - -```python -class ExchangeRates(HttpStream): - url_base = "https://api.exchangeratesapi.io/" - - primary_key = None - - def __init__(self, base: str, **kwargs): - super().__init__() - self.base = base - - - def path( - self, - stream_state: Mapping[str, Any] = None, - stream_slice: Mapping[str, Any] = None, - next_page_token: Mapping[str, Any] = None - ) -> str: - # The "/latest" path gives us the latest currency exchange rates - return "latest" - - def request_params( - self, - stream_state: Mapping[str, Any], - stream_slice: Mapping[str, Any] = None, - next_page_token: Mapping[str, Any] = None, - ) -> MutableMapping[str, Any]: - # The api requires that we include the base currency as a query param so we do that in this method - return {'base': self.base} - - def parse_response( - self, - response: requests.Response, - stream_state: Mapping[str, Any], - stream_slice: Mapping[str, Any] = None, - next_page_token: Mapping[str, Any] = None, - ) -> Iterable[Mapping]: - # The response is a simple JSON whose schema matches our stream's schema exactly, - # so we just return a list containing the response - return [response.json()] - - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - # The API does not offer pagination, - # so we return None to indicate there are no more pages in the response - return None -``` - -Update your `streams` method in your `SourcePythonHttpExample` class to use the currency base passed in from the stream above. - -```python -def streams(self, config: Mapping[str, Any]) -> List[Stream]: - auth = NoAuth() - return [ExchangeRates(authenticator=auth, base=config['base'])] -``` - -We now need a catalog that defines all of our streams. We only have one, `ExchangeRates`. Download that file [here](https://github.com/airbytehq/airbyte/blob/master/airbyte-cdk/python/docs/tutorials/http_api_source_assets/configured_catalog.json). Place it in `/sample_files` named as `configured_catalog.json`. - -Let's read some data. - -```bash -python main.py read --config sample_files/config.json --catalog sample_files/configured_catalog.json -``` - -If all goes well, containerize it so you can use it in the UI: - -```bash -docker build . -t airbyte/source-python-http-example:dev -``` - -You're done. 
Stop the clock :\) - diff --git a/docs/deploying-airbyte/README.md b/docs/deploying-airbyte/README.md deleted file mode 100644 index 466ed6ee1b93..000000000000 --- a/docs/deploying-airbyte/README.md +++ /dev/null @@ -1,14 +0,0 @@ -# Deploy Airbyte where you want to - -![not all who wander are lost](https://user-images.githubusercontent.com/2591516/170351002-0d054d06-c901-4794-8719-97569060408f.png) - -- [Local Deployment](local-deployment.md) -- [On Airbyte Cloud](on-cloud.md) -- [On Aws](on-aws-ec2.md) -- [On Azure VM Cloud Shell](on-azure-vm-cloud-shell.md) -- [On Digital Ocean Droplet](on-digitalocean-droplet.md) -- [On GCP.md](on-gcp-compute-engine.md) -- [On Kubernetes](on-kubernetes.md) -- [On OCI VM](on-oci-vm.md) -- [On Plural](on-plural.md) -- [On AWS ECS (spoiler alert: it doesn't work)](on-aws-ecs.md) diff --git a/docs/deploying-airbyte/on-aws-ecs.md b/docs/deploying-airbyte/on-aws-ecs.md deleted file mode 100644 index 8f41dd6fa33c..000000000000 --- a/docs/deploying-airbyte/on-aws-ecs.md +++ /dev/null @@ -1,12 +0,0 @@ -# On AWS ECS (Coming Soon) - -:::info - -We do not currently support deployment on ECS. - -::: - -The current iteration is not compatible with ECS. -Airbyte currently relies on docker containers being able to create other docker containers. -ECS does not permit containers to do this. We will be revising this strategy soon, -so that we can be compatible with ECS and other container services. diff --git a/docs/integrations/README.md b/docs/integrations/README.md deleted file mode 100644 index c4129f79671b..000000000000 --- a/docs/integrations/README.md +++ /dev/null @@ -1,215 +0,0 @@ -# Connector Catalog - -## Connector Release Stages - -Airbyte uses a grading system for connectors to help you understand what to expect from a connector: - -**Generally Available**: A generally available connector has been deemed ready for use in a production environment and is officially supported by Airbyte. Its documentation is considered sufficient to support widespread adoption. - -**Beta**: A beta connector is considered stable and reliable with no backwards incompatible changes but has not been validated by a broader group of users. We expect to find and fix a few issues and bugs in the release before it’s ready for GA. - -**Alpha**: An alpha connector signifies a connector under development and helps Airbyte gather early feedback and issues reported by early adopters. We strongly discourage using alpha releases for production use cases and do not offer Cloud Support SLAs around these products, features, or connectors. - -For more information about the grading system, see [Product Release Stages](https://docs.airbyte.com/project-overview/product-release-stages) - -## Sources - -| Connector | Product Release Stage| Available in Cloud? 
| -|:--------------------------------------------------------------------------------------------| :------------------- | :------------------ | -| [3PL Central](sources/tplcentral.md) | Alpha | No | -| [Airtable](sources/airtable.md) | Alpha | Yes | -| [Amazon Ads](sources/amazon-ads.md) | Beta | Yes | -| [Amazon Seller Partner](sources/amazon-seller-partner.md) | Alpha | Yes | -| [Amazon SQS](sources/amazon-sqs.md) | Alpha | Yes | -| [Amplitude](sources/amplitude.md) | Generally Available | Yes | -| [Apify Dataset](sources/apify-dataset.md) | Alpha | Yes | -| [Appstore](sources/appstore.md) | Alpha | No | -| [Asana](sources/asana.md) | Alpha | No | -| [AWS CloudTrail](sources/aws-cloudtrail.md) | Alpha | Yes | -| [Azure Table Storage](sources/azure-table.md) | Alpha | Yes | -| [BambooHR](sources/bamboo-hr.md) | Alpha | No | -| [Baton](sources/hellobaton.md) | Alpha | No | -| [BigCommerce](sources/bigcommerce.md) | Alpha | Yes | -| [BigQuery](sources/bigquery.md) | Alpha | Yes | -| [Bing Ads](sources/bing-ads.md) | Generally Available | Yes | -| [Braintree](sources/braintree.md) | Alpha | Yes | -| [Cart.com](sources/cart.md) | Alpha | No | -| [Chargebee](sources/chargebee.md) | Alpha | Yes | -| [Chargify](sources/chargify.md) | Alpha | No | -| [Chartmogul](sources/chartmogul.md) | Alpha | Yes | -| [ClickHouse](sources/clickhouse.md) | Alpha | Yes | -| [Close.com](sources/close-com.md) | Alpha | Yes | -| [CockroachDB](sources/cockroachdb.md) | Alpha | No | -| [Commercetools](sources/commercetools.md) | Alpha | No | -| [Confluence](sources/confluence.md) | Alpha | No | -| [Customer.io](sources/customer-io.md) | Alpha | No | -| [Db2](sources/db2.md) | Alpha | No | -| [Delighted](sources/delighted.md) | Alpha | Yes | -| [Dixa](sources/dixa.md) | Alpha | Yes | -| [Dockerhub](sources/dockerhub.md) | Alpha | Yes | -| [Drift](sources/drift.md) | Alpha | No | -| [Drupal](sources/drupal.md) | Alpha | No | -| [End-to-End Testing](sources/e2e-test.md) | Alpha | Yes | -| [Exchange Rates API](sources/exchangeratesapi.md) | Alpha | Yes | -| [Facebook Marketing](sources/facebook-marketing.md) | Generally Available | Yes | -| [Facebook Pages](sources/facebook-pages.md) | Alpha | No | -| [Faker](sources/faker.md) | Alpha | Yes | -| [File](sources/file.md) | Alpha | Yes | -| [Firebolt](sources/firebolt.md) | Alpha | Yes | -| [Flexport](sources/flexport.md) | Alpha | No | -| [Freshdesk](sources/freshdesk.md) | Alpha | Yes | -| [Freshsales](sources/freshsales.md) | Alpha | No | -| [Freshservice](sources/freshservice.md) | Alpha | No | -| [GitHub](sources/github.md) | Generally Available | Yes | -| [GitLab](sources/gitlab.md) | Alpha | Yes | -| [Google Ads](sources/google-ads.md) | Generally Available | Yes | -| [Google Analytics (v4)](sources/google-analytics-v4.md) | Alpha | No | -| [Google Analytics (Universal Analytics)](sources/google-analytics-universal-analytics.md) | Generally Available | Yes | -| [Google Directory](sources/google-directory.md) | Alpha | Yes | -| [Google Search Console](sources/google-search-console.md) | Beta | Yes | -| [Google Sheets](sources/google-sheets.md) | Generally Available | Yes | -| [Google Workspace Admin Reports](sources/google-workspace-admin-reports.md) | Alpha | Yes | -| [Greenhouse](sources/greenhouse.md) | Alpha | Yes | -| [Harness](sources/harness.md) | Alpha | No | -| [Harvest](sources/harvest.md) | Alpha | No | -| [http-request](sources/http-request.md) | Alpha | No | -| [HubSpot](sources/hubspot.md) | Generally Available | Yes | -| 
[Instagram](sources/instagram.md) | Generally Available | Yes | -| [Intercom](sources/intercom.md) | Generally Available | Yes | -| [Iterable](sources/iterable.md) | Alpha | Yes | -| [Jenkins](sources/jenkins.md) | Alpha | No | -| [Jira](sources/jira.md) | Alpha | No | -| [Kafka](sources/kafka.md) | Alpha | No | -| [Klaviyo](sources/klaviyo.md) | Alpha | Yes | -| [Kustomer](sources/kustomer.md) | Alpha | Yes | -| [Lemlist](sources/lemlist.md) | Alpha | Yes | -| [Lever](sources/lever-hiring.md) | Alpha | No | -| [LinkedIn Ads](sources/linkedin-ads.md) | Generally Available | Yes | -| [Linnworks](sources/linnworks.md) | Alpha | Yes | -| [Looker](sources/looker.md) | Alpha | Yes | -| [Magento](sources/magento.md) | Alpha | No | -| [Mailchimp](sources/mailchimp.md) | Generally Available | Yes | -| [Marketo](sources/marketo.md) | Alpha | Yes | -| [Metabase](sources/metabase.md) | Alpha | Yes | -| [Microsoft Dynamics AX](sources/microsoft-dynamics-ax.md) | Alpha | No | -| [Microsoft Dynamics Customer Engagement](sources/microsoft-dynamics-customer-engagement.md) | Alpha | No | -| [Microsoft Dynamics GP](sources/microsoft-dynamics-gp.md) | Alpha | No | -| [Microsoft Dynamics NAV](sources/microsoft-dynamics-nav.md) | Alpha | No | -| [Microsoft SQL Server (MSSQL)](sources/mssql.md) | Alpha | Yes | -| [Microsoft Teams](sources/microsoft-teams.md) | Alpha | Yes | -| [Mixpanel](sources/mixpanel.md) | Beta | Yes | -| [Monday](sources/monday.md) | Alpha | Yes | -| [Mongo DB](sources/mongodb-v2.md) | Alpha | Yes | -| [My Hours](sources/my-hours.md) | Alpha | Yes | -| [MySQL](sources/mysql.md) | Alpha | Yes | -| [Notion](sources/notion.md) | Alpha | No | -| [Okta](sources/okta.md) | Alpha | Yes | -| [OneSignal](sources/onesignal.md) | Alpha | No | -| [OpenWeather](sources/openweather.md) | Alpha | No | -| [Oracle DB](sources/oracle.md) | Alpha | Yes | -| [Oracle PeopleSoft](sources/oracle-peoplesoft.md) | Alpha | No | -| [Oracle Siebel CRM](sources/oracle-siebel-crm.md) | Alpha | No | -| [Orb](sources/orb.md) | Alpha | Yes | -| [Outreach](./sources/outreach.md) | Alpha | No | -| [PagerDuty](sources/pagerduty.md) | Alpha | No | -| [PayPal Transaction](sources/paypal-transaction.md) | Alpha | No | -| [Paystack](sources/paystack.md) | Alpha | No | -| [PersistIq](sources/persistiq.md) | Alpha | Yes | -| [Pinterest](sources/pinterest.md) | Alpha | No | -| [Pipedrive](sources/pipedrive.md) | Alpha | No | -| [Pivotal Tracker](sources/pivotal-tracker.md) | Alpha | No | -| [Plaid](sources/plaid.md) | Alpha | No | -| [PokéAPI](sources/pokeapi.md) | Alpha | Yes | -| [Postgres](sources/postgres.md) | Beta | Yes | -| [PostHog](sources/posthog.md) | Alpha | Yes | -| [PrestaShop](sources/presta-shop.md) | Alpha | Yes | -| [Qualaroo](sources/qualaroo.md) | Alpha | Yes | -| [QuickBooks](sources/quickbooks.md) | Alpha | No | -| [Recharge](sources/recharge.md) | Alpha | Yes | -| [Recurly](sources/recurly.md) | Alpha | Yes | -| [Redshift](sources/redshift.md) | Alpha | Yes | -| [Retently](sources/retently.md) | Alpha | Yes | -| [S3](sources/s3.md) | Beta | Yes | -| [Salesforce](sources/salesforce.md) | Generally Available | Yes | -| [Salesloft](sources/salesloft.md) | Alpha | No | -| [SAP Business One](sources/sap-business-one.md) | Alpha | No | -| [SearchMetrics](./sources/search-metrics.md) | Alpha | No | -| [Sendgrid](sources/sendgrid.md) | Alpha | Yes | -| [Sentry](sources/sentry.md) | Alpha | Yes | -| [SFTP](sources/sftp.md) | Alpha | Yes | -| [Shopify](sources/shopify.md) | Alpha | No | -| 
[Short.io](sources/shortio.md) | Alpha | Yes | -| [Slack](sources/slack.md) | Alpha | No | -| [Smartsheets](sources/smartsheets.md) | Beta | Yes | -| [Snapchat Marketing](sources/snapchat-marketing.md) | Alpha | Yes | -| [Snowflake](sources/snowflake.md) | Alpha | Yes | -| [Spree Commerce](sources/spree-commerce.md) | Alpha | No | -| [Square](sources/square.md) | Alpha | Yes | -| [Strava](sources/strava.md) | Alpha | No | -| [Stripe](sources/stripe.md) | Generally Available | Yes | -| [Sugar CRM](sources/sugar-crm.md) | Alpha | No | -| [SurveyMonkey](sources/surveymonkey.md) | Alpha | No | -| [Tempo](sources/tempo.md) | Alpha | Yes | -| [TiDB](sources/tidb.md) | Alpha | No | -| [TikTok Marketing](./sources/tiktok-marketing.md) | Generally Available | Yes | -| [Trello](sources/trello.md) | Alpha | No | -| [Twilio](sources/twilio.md) | Alpha | Yes | -| [Typeform](sources/typeform.md) | Alpha | Yes | -| [US Census](sources/us-census.md) | Alpha | Yes | -| [VictorOps](sources/victorops.md) | Alpha | No | -| [Webflow](sources/webflow.md ) | Alpha | Yes | -| [WooCommerce](sources/woocommerce.md) | Alpha | No | -| [Wordpress](sources/wordpress.md) | Alpha | No | -| [YouTube Analytics](sources/youtube-analytics.md) | Alpha | No | -| [Zencart](sources/zencart.md) | Alpha | No | -| [Zendesk Chat](sources/zendesk-chat.md) | Alpha | Yes | -| [Zendesk Sunshine](sources/zendesk-sunshine.md) | Alpha | Yes | -| [Zendesk Support](sources/zendesk-support.md) | Generally Available | Yes | -| [Zendesk Talk](sources/zendesk-talk.md) | Alpha | No | -| [Zenloop](sources/zenloop.md) | Alpha | Yes | -| [Zoho CRM](sources/zoho-crm.md) | Alpha | No | -| [Zoom](sources/zoom.md) | Alpha | No | -| [Zuora](sources/zuora.md) | Alpha | Yes | - -## Destinations - -| Connector | Product Release Stage| Available in Cloud? 
| -|:-----------------------------------------------------------| :------------------- | :------------------ | -| [Amazon SQS](destinations/amazon-sqs.md) | Alpha | Yes | -| [Amazon Datalake](destinations/aws-datalake.md) | Alpha | No | -| [AzureBlobStorage](destinations/azureblobstorage.md) | Alpha | Yes | -| [BigQuery](destinations/bigquery.md) | Generally Available | Yes | -| [Cassandra](destinations/cassandra.md) | Alpha | Yes | -| [Chargify (Keen)](destinations/chargify.md) | Alpha | Yes | -| [ClickHouse](destinations/clickhouse.md) | Alpha | Yes | -| [Databricks](destinations/databricks.md) | Alpha | Yes | -| [DynamoDB](destinations/dynamodb.md) | Alpha | Yes | -| [Elasticsearch](destinations/elasticsearch.md) | Alpha | Yes | -| [End-to-End Testing](destinations/e2e-test.md) | Alpha | Yes | -| [Firebolt](destinations/firebolt.md) | Alpha | Yes | -| [Google Cloud Storage (GCS)](destinations/gcs.md) | Beta | Yes | -| [Google Pubsub](destinations/pubsub.md) | Alpha | Yes | -| [Google Sheets](destinations/google-sheets.md) | Alpha | Yes | -| [Kafka](destinations/kafka.md) | Alpha | No | -| [Keen](destinations/keen.md) | Alpha | No | -| [Kinesis](destinations/kinesis.md) | Alpha | No | -| [Local CSV](destinations/local-csv.md) | Alpha | No | -| [Local JSON](destinations/local-json.md) | Alpha | No | -| [MariaDB ColumnStore](destinations/mariadb-columnstore.md) | Alpha | Yes | -| [MeiliSearch](destinations/meilisearch.md) | Alpha | Yes | -| [MongoDB](destinations/mongodb.md) | Alpha | Yes | -| [MQTT](destinations/mqtt.md) | Alpha | Yes | -| [MS SQL Server](destinations/mssql.md) | Alpha | Yes | -| [MySQL](destinations/mysql.md) | Alpha | Yes | -| [Oracle](destinations/oracle.md) | Alpha | Yes | -| [Postgres](destinations/postgres.md) | Alpha | Yes | -| [Pulsar](destinations/pulsar.md) | Alpha | Yes | -| [RabbitMQ](destinations/rabbitmq.md) | Alpha | Yes | -| [Redis](destinations/redis.md) | Alpha | Yes | -| [Redshift](destinations/redshift.md) | Beta | Yes | -| [Rockset](destinations/rockset.md) | Alpha | Yes | -| [S3](destinations/s3.md) | Generally Available | Yes | -| [Scylla](destinations/scylla.md) | Alpha | Yes | -| [SFTP JSON](destinations/sftp-json.md) | Alpha | Yes | -| [Snowflake](destinations/snowflake.md) | Generally Available | Yes | -| [Streamr](destinations/streamr.md) | Alpha | No | diff --git a/docs/integrations/getting-started/destination-redshift.md b/docs/integrations/getting-started/destination-redshift.md deleted file mode 100644 index ae59b0eeff95..000000000000 --- a/docs/integrations/getting-started/destination-redshift.md +++ /dev/null @@ -1,70 +0,0 @@ -# Getting Started: Destination Redshift - -## Requirements - -1. Active Redshift cluster -2. Allow connections from Airbyte to your Redshift cluster \(if they exist in separate VPCs\) -3. A staging S3 bucket with credentials \(for the COPY strategy\). - -## Setup guide - -### 1. Make sure your cluster is active and accessible from the machine running Airbyte - -This is dependent on your networking setup. The easiest way to verify if Airbyte is able to connect to your Redshift cluster is via the check connection tool in the UI. You can check AWS Redshift documentation with a tutorial on how to properly configure your cluster's access [here](https://docs.aws.amazon.com/redshift/latest/gsg/rs-gsg-authorize-cluster-access.html) - -### 2. 
Fill in connection info - -Next, provide the necessary information on how to connect to your cluster, such as the `host`, which is part of the connection string or Endpoint accessible [here](https://docs.aws.amazon.com/redshift/latest/gsg/rs-gsg-connect-to-cluster.html#rs-gsg-how-to-get-connection-string) without the `port` and `database` name \(it typically includes the cluster-id and region and ends with `.redshift.amazonaws.com`\). - -You should have all the requirements needed to configure Redshift as a destination in the UI. You'll need the following information to configure the destination: - -* **Host** -* **Port** -* **Username** -* **Password** -* **Schema** -* **Database** - * This database needs to exist within the cluster provided. - -### 2a. Fill in S3 info \(for COPY strategy\) - -Provide the required S3 info. - -* **S3 Bucket Name** - * See [this](https://docs.aws.amazon.com/AmazonS3/latest/userguide/create-bucket-overview.html) to create an S3 bucket. -* **S3 Bucket Region** - * Place the S3 bucket and the Redshift cluster in the same region to save on networking costs. -* **Access Key Id** - * See [this](https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html#access-keys-and-secret-access-keys) on how to generate an access key. - * We recommend creating an Airbyte-specific user. This user will require [read and write permissions](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_examples_s3_rw-bucket.html) to objects in the staging bucket. -* **Secret Access Key** - * Corresponding key to the above key ID. -* **Part Size** - * Affects the size limit of an individual Redshift table. Optional. Increase this if syncing tables larger than 100GB. Files are streamed to S3 in parts. This determines the size of each part, in MB. As S3 has a limit of 10,000 parts per file, part size affects the table size. This is 10MB by default, resulting in a default table limit of 100GB. Note that a larger part size will result in larger memory requirements. A rule of thumb is to multiply the part size by 10 to get the memory requirement. Modify this with care. - -Optional parameters: -* **Bucket Path** - * The directory within the S3 bucket to place the staging data. For example, if you set this to `yourFavoriteSubdirectory`, staging data will be placed inside `s3://yourBucket/yourFavoriteSubdirectory`. If not provided, defaults to the root directory.
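Before configuring the destination, it can save a failed sync or two to verify the cluster endpoint and the staging bucket from the machine running Airbyte. A minimal sketch, assuming `psql` and the AWS CLI are installed and that every host, credential, and bucket value below is a placeholder to be replaced with your own:

```bash
# Confirm the endpoint, port, database, and user are reachable (psql prompts for the password).
psql "host=examplecluster.abc123xyz789.us-east-1.redshift.amazonaws.com port=5439 dbname=dev user=airbyte_user" -c "SELECT 1;"

# Confirm the access key pair can write to the staging bucket path used by the COPY strategy.
echo "airbyte staging test" > /tmp/staging-test.txt
aws s3 cp /tmp/staging-test.txt s3://yourBucket/yourFavoriteSubdirectory/staging-test.txt

# Rule of thumb from above: memory requirement is roughly part size (MB) x 10, so the 10MB default needs about 100MB.
```

If either command fails, fix the networking or IAM permissions before creating the destination in the UI.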
## Notes about Redshift Naming Conventions - -From [Redshift Names & Identifiers](https://docs.aws.amazon.com/redshift/latest/dg/r_names.html): - -### Standard Identifiers - -* Begin with an ASCII single-byte alphabetic character or underscore character, or a UTF-8 multibyte character two to four bytes long. -* Subsequent characters can be ASCII single-byte alphanumeric characters, underscores, or dollar signs, or UTF-8 multibyte characters two to four bytes long. -* Be between 1 and 127 bytes in length, not including quotation marks for delimited identifiers. -* Contain no quotation marks and no spaces. - -### Delimited Identifiers - -Delimited identifiers \(also known as quoted identifiers\) begin and end with double quotation marks \("\). If you use a delimited identifier, you must use the double quotation marks for every reference to that object. The identifier can contain any standard UTF-8 printable characters other than the double quotation mark itself. Therefore, you can create column or table names that include otherwise illegal characters, such as spaces or the percent symbol. ASCII letters in delimited identifiers are case-insensitive and are folded to lowercase. To use a double quotation mark in a string, you must precede it with another double quotation mark character. - -Therefore, the Airbyte Redshift destination will create tables and schemas using unquoted identifiers when possible, and fall back to quoted identifiers if the names contain special characters. - -## Data Size Limitations - -Redshift specifies a maximum limit of 65535 bytes to store the raw JSON record data. Thus, when a row is too big to fit, the Redshift destination fails to load such data and currently ignores that record. - -For more information, see the [docs here](https://docs.aws.amazon.com/redshift/latest/dg/r_Character_types.html). diff --git a/docs/integrations/getting-started/source-facebook-marketing.md b/docs/integrations/getting-started/source-facebook-marketing.md deleted file mode 100644 index cb0303519372..000000000000 --- a/docs/integrations/getting-started/source-facebook-marketing.md +++ /dev/null @@ -1,42 +0,0 @@ -# Getting Started: Source Facebook Marketing - -## Requirements - -Google Ads Account with an approved Developer Token \(note: In order to get API access to Google Ads, you must have a "manager" account. This must be created separately from your standard account. You can find more information about this distinction in the [Google Ads docs](https://ads.google.com/home/tools/manager-accounts/).\) - -* developer_token -* client_id -* client_secret -* refresh_token -* start_date -* customer_id - -## Setup guide - -This guide will provide information as if starting from scratch. Please skip over any steps you have already completed. - -* Create a Google Ads Account. Here are [Google's instructions](https://support.google.com/google-ads/answer/6366720) on how to create one. -* Create a Google Ads MANAGER Account. Here are [Google's instructions](https://ads.google.com/home/tools/manager-accounts/) on how to create one. -* You should now have two Google Ads accounts: a normal account and a manager account. Link the Manager account to the normal account following [Google's documentation](https://support.google.com/google-ads/answer/7459601). -* Apply for a developer token \(**make sure you follow our** [**instructions**](#how-to-apply-for-the-developer-token)\) on your Manager account. This token allows you to access your data from the Google Ads API. Here are [Google's instructions](https://developers.google.com/google-ads/api/docs/first-call/dev-token). The docs are a little unclear on this point, but you will _not_ be able to access your data via the Google Ads API until this token is approved. You cannot use a test developer token; it has to be at least a basic developer token. It usually takes Google 24 hours to respond to these applications. This developer token is the value you will use in the `developer_token` field. -* Fetch your `client_id`, `client_secret`, and `refresh_token`. Google provides [instructions](https://developers.google.com/google-ads/api/docs/first-call/overview) on how to do this. -* Select your `customer_id`. The `customer_id` refers to the ID of each of your Google Ads accounts. This is the 10-digit number in the top corner of the page when you are in the Google Ads UI. The source will only pull data from the accounts for which you provide an ID. If you are having trouble finding it, check out [Google's instructions](https://support.google.com/google-ads/answer/1704344). - -Wow! That was a lot of steps. We are working on making the OAuth flow for all of our connectors simpler \(allowing you to skip needing to get a `developer_token` and a `refresh_token`, which are the most painful / time-consuming steps in this walkthrough\).
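Once the values above are collected, they map one-to-one onto the source's configuration fields. A minimal sketch of how they fit together, using made-up placeholder values and a hypothetical `google_ads_config.json` file name; only the field names come from the requirements above:

```bash
# Hypothetical placeholder values; substitute the credentials gathered in the setup guide.
cat > google_ads_config.json <<'EOF'
{
  "developer_token": "YOUR_DEVELOPER_TOKEN",
  "client_id": "YOUR_CLIENT_ID.apps.googleusercontent.com",
  "client_secret": "YOUR_CLIENT_SECRET",
  "refresh_token": "YOUR_REFRESH_TOKEN",
  "start_date": "2022-01-01",
  "customer_id": "1234567890"
}
EOF
```

The UI form asks for these same values when you create the source.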
-## How to apply for the developer token - -Google is very picky about which software and which use case can get access to a developer token. The Airbyte team has worked with the Google Ads team to whitelist Airbyte and make sure you can get one \(see [issue 1981](https://github.com/airbytehq/airbyte/issues/1981) for more information\). - -When you apply for a token, you need to mention: - -* Why you need the token \(e.g., you want to run some internal analytics...\) -* That you will be using the Airbyte Open Source project -* That you have full access to the code base \(because we're open source\) -* That you have full access to the server running the code \(because you're self-hosting Airbyte\) - -If for any reason the request gets denied, let us know and we will be able to unblock you. - -## Understanding Google Ads Query Language - -The Google Ads Query Language is used to query the Google Ads API. Check out the [Google Ads Query Language documentation](https://developers.google.com/google-ads/api/docs/query/overview). diff --git a/docs/integrations/getting-started/source-github.md b/docs/integrations/getting-started/source-github.md deleted file mode 100644 index 6ae7f442aade..000000000000 --- a/docs/integrations/getting-started/source-github.md +++ /dev/null @@ -1,12 +0,0 @@ -## Getting Started: Source GitHub - -### Requirements - -* GitHub Account -* GitHub Personal Access Token with the necessary permissions \(described below\) - -### Setup guide - -Log into GitHub and then generate a [personal access token](https://github.com/settings/tokens). - -Your token should have at least the `repo` scope. Depending on which streams you want to sync, the user generating the token needs more permissions \(a quick scope check is sketched below\): diff --git a/docs/integrations/missing-an-integration.md b/docs/integrations/missing-an-integration.md deleted file mode 100644 index e52613182866..000000000000 --- a/docs/integrations/missing-an-integration.md +++ /dev/null @@ -1,14 +0,0 @@ -# Missing an Integration? - -If you'd like to ask for a new connector, or build a new connector and make it part of the pool of pre-built connectors on Airbyte, first a big thank you. We invite you to check our [contributing guide](../contributing-to-airbyte/). - -If you'd like to build new connectors, or update existing ones, for your own usage, without contributing to the Airbyte codebase, read along. - -## Developing your own connectors - -It's easy to code your own integrations on Airbyte. Here are some links with instructions on how to code new sources and destinations. - -* [Building new connectors](../contributing-to-airbyte/README.md) - -While the guides above are specific to the languages used most frequently to write integrations, **Airbyte integrations can be written in any language**. Please reach out to us if you'd like help developing integrations in other languages.
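Returning to the GitHub personal access token mentioned above: before pasting it into Airbyte, it can be worth confirming which scopes the token actually carries. A small sketch, assuming a classic personal access token (GitHub reports the granted scopes of classic tokens in the `X-OAuth-Scopes` response header); the token value is a placeholder:

```bash
# Replace <PERSONAL_ACCESS_TOKEN> with the token generated at https://github.com/settings/tokens.
curl -sI -H "Authorization: token <PERSONAL_ACCESS_TOKEN>" https://api.github.com/user | grep -i "x-oauth-scopes"
```

If `repo` is not listed, regenerate the token with the missing scopes before configuring the source.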
- diff --git a/docs/project-overview/README.md b/docs/project-overview/README.md deleted file mode 100644 index a427d02b0519..000000000000 --- a/docs/project-overview/README.md +++ /dev/null @@ -1,2 +0,0 @@ -# Project Overview - diff --git a/docs/project-overview/roadmap.md b/docs/project-overview/roadmap.md deleted file mode 100644 index a2baa301615e..000000000000 --- a/docs/project-overview/roadmap.md +++ /dev/null @@ -1,66 +0,0 @@ ---- -description: 'Here''s what''s coming in the next few days, weeks, months, and years!' ---- - -# Roadmap - -## Coming within a few days - -Check out our [Roadmap for Core](https://github.com/airbytehq/airbyte/milestones) and our [Roadmap for Connectors](https://github.com/airbytehq/airbyte/projects/1) on GitHub. You'll see the features we're currently working on or about to. You may also give us insights, by adding your own issues and voting for specific features / integrations. - -## Coming within a few weeks / months - -We understand that we're not "production-ready" for a lot of companies yet. In the end, we just got started in July 2020, so we're at the beginning of the journey. Here is a highlight of the main features we are planning on releasing in the next few months: - -**Landing in April or so:** - -* Airbyte Cloud in NA. -* Improve scheduling performance and migrate completely to Temporal. -* Adapt automatically to sources' schema changes. -* Our declarative interface \(CLI\). - -**Coming a bit later:** - -* Airbyte Cloud in EU + Asia -* Support for creating destination connectors with the CDK. -* Credential and secrets vaulting \([\#837](https://github.com/airbytehq/airbyte/issues/837)\). -* Webhook connector. - -Our goal is to become "production-ready" for any company whatever their data stack, infrastructure, architecture, data volume, and connector needs. **If you see anything missing in this list that you would need before deploying us in prod, please talk to us via** [**Slack**](https://slack.airbyte.io) **or** [**email**](mailto:contact@airbyte.io)**!** - -## Coming within a few quarters / years - -We also wanted to share with you how we think about the high-level roadmap over the next few months and years. We foresee several high-level phases that we will try to share here. - -### **1. Parity on data consolidation \(ELT\) in warehouses / databases** - -Our first focus is to support batch-type ELT integrations. We feel that we can provide value right away as soon as we support one of the integrations you need. Batch integrations are also easier to build and sustain. So we would rather start with that. - -Before we move on to the next phase, we want to make sure we are supporting all the major integrations and that we are in a state where we can address the long tail, with the help of the community. - -We also want to fully integrate with the ecosystem, including Airflow, dbt, Kubernetes, GreatExpectations, Prefect, Dagster, etc., so teams have the ability to fully build the data infrastructure they need. - -### **2. Reverse-ETL from warehouses / databases** - -Some integrations we have in mind are batch distribution integrations, from warehouses to third-party tools. For instance, a use case could be if your marketing team wants to send back the data to your ad platforms, so it can better optimize the campaigns. Another use case could be syncing the consolidated data back to your CRM. - -It’s not yet clear in our minds when to prioritize those additional integrations. 
We will have a better idea once we see the feedback we get from the community we build with data consolidation. - -### **3. Parity with enterprise features: data quality, privacy compliance, customer data consolidation features, etc.** - -Hopefully, we will have raised a Series A by then, so we can start focusing on the enterprise edition’s features, in addition to pursuing efforts on addressing the long tail of integrations. - -Those enterprise features comprise: - -* Hosting and management -* User and role access management -* SSO -* Privacy compliance \(GDPR, CCPA, etc.\) -* Customer data consolidation with identity resolution - -### **4. Expand on all data engineering features** - -This is when we will start differentiating ourselves in terms of feature coverage from current cloud-based incumbents. Being open-sourced enables us to go faster, but also deeper. - -We are also thinking about supporting streaming-type integrations, a la Segment. - diff --git a/docs/quickstart/getting-started.md b/docs/quickstart/getting-started.md deleted file mode 100644 index 77107c22c5db..000000000000 --- a/docs/quickstart/getting-started.md +++ /dev/null @@ -1,99 +0,0 @@ -# Getting Started - -## Goal - -During this getting started tutorial, we are going to replicate currencies' closing prices into a JSON file. - -## Start Airbyte - -First of all, make sure you have Docker and Docker Compose installed. Then run the following commands: - -```text -git clone https://github.com/airbytehq/airbyte.git -cd airbyte -docker-compose -f docker-compose.yaml up -``` - -Once you see an Airbyte banner, the UI is ready to go at [http://localhost:8000/](http://localhost:8000/). - -## Set up your preferences - -You should see an onboarding page. Enter your email if you want updates about Airbyte and continue. - -![](../.gitbook/assets/airbyte_get-started.png) - -## Set up your first connection - -### Create a source - -The source we are creating will pull data from an external API. It will replicate the closing price of currencies compared to USD since the specified start date. - -To set it up, just follow the instructions on the screenshot below. - -:::info - -You might have to wait ~30 seconds before the fields show up because it is the first time you're using Airbyte. - -::: - -![](../.gitbook/assets/demo_source.png) - -### Create a destination - -The destination we are creating is a simple JSON line file, meaning that it will contain one JSON object per line. Each object will represent data extracted from the source. - -The resulting files will be located in `/tmp/airbyte_local/json_data`. - -To set it up, just follow the instructions on the screenshot below. - -:::info - -You might have to wait ~30 seconds before the fields show up because it is the first time you're using Airbyte. - -::: - -![](../.gitbook/assets/demo_destination.png) - -### Create a connection - -When we create the connection, we can select which data stream we want to replicate. We can also select whether we want incremental replication. The replication will run at the specified sync frequency. - -To set it up, just follow the instructions on the screenshot below. - -![](../.gitbook/assets/demo_connection.png) - -## Check the logs of your first sync - -After you've completed the onboarding, you will be redirected to the source list and will see the source you just added. Click on it to find more information about it. You will now see all the destinations connected to that source. Click on it and you will see the sync history.
- -From there, you can look at the logs, download them, force a sync and adjust the configuration of your connection. - -![](../.gitbook/assets/demo_history.png) - -## Check the data of your first sync - -Now let's verify that this worked: - -```bash -cat /tmp/airbyte_local/json_data/_airbyte_raw_exchange_rate.jsonl -``` - -You should see one line for each day that was replicated. - -If you have [`jq`](https://stedolan.github.io/jq/) installed, let's look at the evolution of `EUR`. - -```bash -cat /tmp/airbyte_local/test_json/_airbyte_raw_exchange_rate.jsonl | -jq -c '.data | {date: .date, EUR: .EUR }' -``` - -And there you have it. You've pulled data from an API directly into a file and all of the actual configuration for this replication only took place in the UI. - -## That's it! - -This is just the beginning of using Airbyte. We support a large collection of sources and destinations. You can even contribute your own. - -If you have any questions at all, please reach out to us on [Slack](https://slack.airbyte.io/). We’re still in alpha, so if you see any rough edges or want to request a connector you need, please create an issue on our [Github](https://github.com/airbytehq/airbyte) or leave a thumbs up on an existing issue. - -Thank you and we hope you enjoy using Airbyte. - diff --git a/docs/readme.md b/docs/readme.md deleted file mode 100644 index 7a38413f71e0..000000000000 --- a/docs/readme.md +++ /dev/null @@ -1,25 +0,0 @@ -# Welcome to Airbyte Docs - -Whether you are an Airbyte user or contributor, we have docs for you! - -### For Airbyte Cloud users - -Browse the [connector catalog](integrations) to find the connector you want. In case the connector is not yet supported on Airbyte Cloud, consider using [Airbyte Open Source](#for-airbyte-open-source-users). - -Next, check out the [step-by-step tutorial](cloud/getting-started-with-airbyte-cloud) to sign up for Airbyte Cloud, understand Airbyte [concepts](cloud/core-concepts.md), and run your first sync. Then learn how to [manage your Airbyte Cloud account](cloud/managing-airbyte-cloud.md). - -### For Airbyte Open Source users - -Browse the [connector catalog](integrations) to find the connector you want. If the connector is not yet supported on Airbyte Open Source, [build your own connector](connector-development). - -Next, check out the [Airbyte Open Source QuickStart](quickstart/deploy-airbyte.md). Then learn how to [deploy](deploying-airbyte/local-deployment) and [manage](operator-guides/upgrading-airbyte) Airbyte Open Source in your cloud infrastructure. - - -To get help with Airbyte deployments, check out the [Troubleshooting & FAQ](troubleshooting/README.md), chat with Support on [Discourse](https://discuss.airbyte.io/), or join us on [Community Slack](https://slack.airbyte.io/). - -### For Airbyte contributors - -To contribute to Airbyte code, connectors, and documentation, refer to our [Contributing Guide](contributing-to-airbyte). 
- -[![GitHub stars](https://img.shields.io/github/stars/airbytehq/airbyte?style=social&label=Star&maxAge=2592000)](https://GitHub.com/airbytehq/airbyte/stargazers/) [![GitHub Workflow Status](https://img.shields.io/github/workflow/status/airbytehq/airbyte/Airbyte%20CI)](https://github.com/airbytehq/airbyte/actions/workflows/gradle.yml) [![License](https://img.shields.io/static/v1?label=license&message=MIT&color=brightgreen)](https://github.com/airbytehq/airbyte/tree/a9b1c6c0420550ad5069aca66c295223e0d05e27/LICENSE/README.md) [![License](https://img.shields.io/static/v1?label=license&message=ELv2&color=brightgreen)](https://github.com/airbytehq/airbyte/tree/a9b1c6c0420550ad5069aca66c295223e0d05e27/LICENSE/README.md) - diff --git a/docs/reference/README.md b/docs/reference/README.md deleted file mode 100644 index 4a938e09d06b..000000000000 --- a/docs/reference/README.md +++ /dev/null @@ -1 +0,0 @@ -# Reference \ No newline at end of file diff --git a/docs/reference/api/README.md b/docs/reference/api/README.md deleted file mode 100644 index 7caedb572668..000000000000 --- a/docs/reference/api/README.md +++ /dev/null @@ -1,8 +0,0 @@ -# API Documentation Folder - -* `generated-api-html`: Plain HTML file automatically generated from the Airbyte OAS spec as part of the build. -* `api-documentation.md`: Markdown for API documentation Gitbook [page](https://docs.airbyte.io/api-documentation). -* `rapidoc-api-docs.html`: HTML for actual API Spec Documentation and linked to in the above Gitbook page. This is a S3 static website hosted out of - the [`airbyte-public-api-docs bucket`](https://s3.console.aws.amazon.com/s3/buckets/airbyte-public-api-docs?region=us-east-2&tab=objects) with a [Cloudfront Distribution](https://console.aws.amazon.com/cloudfront/home?#distribution-settings:E35VD0IIC8YUEW) - for SSL. This file points to the Airbyte OAS spec on Master and will automatically mirror spec changes. - This file will need to be uploaded to the `airbyte-public-api-docs` bucket for any file changes to propagate. diff --git a/docs/reference/api/generated-api-html/index.html b/docs/reference/api/generated-api-html/index.html deleted file mode 100644 index ae575f1d2a05..000000000000 --- a/docs/reference/api/generated-api-html/index.html +++ /dev/null @@ -1,11892 +0,0 @@ - - - - Airbyte Configuration API - - - -

Airbyte Configuration API

-

Airbyte Configuration API -https://airbyte.io.

-

This API is a collection of HTTP RPC-style methods. While it is not a REST API, those familiar with REST should find the conventions of this API recognizable.

-

Here are some conventions that this API follows:

-
    -
  • All endpoints are http POST methods.
  • -
  • All endpoints accept data via application/json request bodies. The API does not accept any data via query params.
  • -
  • The naming convention for endpoints is: localhost:8000/{VERSION}/{METHOD_FAMILY}/{METHOD_NAME} e.g. localhost:8000/v1/connections/create.
  • -
  • For all update methods, the whole object must be passed in, even the fields that did not change.
  • -
-

Change Management:

-
    -
  • The major version of the API endpoint can be determined / specified in the URL localhost:8080/v1/connections/create
  • -
  • Minor version bumps will be invisible to the end user. The user cannot specify minor versions in requests.
  • -
  • All backwards incompatible changes will happen in major version bumps. We will not make backwards incompatible changes in minor version bumps. Examples of non-breaking changes (includes but not limited to...): -
      -
    • Adding fields to request or response bodies.
    • -
    • Adding new HTTP endpoints.
    • -
    -
  • -
  • All web_backend APIs are not considered public APIs and are not guaranteeing backwards compatibility.
  • -
-
- -
Contact Info: contact@airbyte.io
-
Version: 1.0.0
-
BasePath:/api
-
MIT
-
https://opensource.org/licenses/MIT
-

Access

-
    -
  1. HTTP Basic Authentication
  2. -
- -

Methods

- [ Jump to Models ] - -

Table of Contents

-
-

Connection

- -

DbMigration

- -

Deployment

- -

Destination

- -

DestinationDefinition

- -

DestinationDefinitionSpecification

- -

Health

- -

Jobs

- -

Logs

- -

Notifications

- -

Oauth

- -

Openapi

- -

Operation

- -

Scheduler

- -

Source

- -

SourceDefinition

- -

SourceDefinitionSpecification

- -

WebBackend

- -

Workspace

- - -
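To make the conventions concrete, here is a hedged example call against a local deployment. It assumes Airbyte is reachable on localhost:8000 under the documented /api base path, that the workspace ID shown is replaced with a real one, and that HTTP basic credentials are added if the deployment requires them:

```bash
# Every endpoint is an HTTP POST with an application/json body; this one lists the connections in a workspace.
curl -s -X POST http://localhost:8000/api/v1/connections/list \
  -H "Content-Type: application/json" \
  -d '{"workspaceId": "00000000-0000-0000-0000-000000000000"}'
```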

Connection

-
-
- Up -
post /v1/connections/create
-
Create a connection between a source and a destination (createConnection)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
ConnectionCreate ConnectionCreate (required)
- -
Body Parameter
- -
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "sourceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "sourceCatalogId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "prefix" : "prefix",
-  "destinationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "resourceRequirements" : {
-    "cpu_limit" : "cpu_limit",
-    "memory_request" : "memory_request",
-    "memory_limit" : "memory_limit",
-    "cpu_request" : "cpu_request"
-  },
-  "schedule" : {
-    "units" : 0,
-    "timeUnit" : "minutes"
-  },
-  "name" : "name",
-  "syncCatalog" : {
-    "streams" : [ {
-      "stream" : {
-        "sourceDefinedPrimaryKey" : [ [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ], [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ] ],
-        "supportedSyncModes" : [ null, null ],
-        "sourceDefinedCursor" : true,
-        "name" : "name",
-        "namespace" : "namespace",
-        "defaultCursorField" : [ "defaultCursorField", "defaultCursorField" ]
-      },
-      "config" : {
-        "aliasName" : "aliasName",
-        "cursorField" : [ "cursorField", "cursorField" ],
-        "selected" : true,
-        "primaryKey" : [ [ "primaryKey", "primaryKey" ], [ "primaryKey", "primaryKey" ] ]
-      }
-    }, {
-      "stream" : {
-        "sourceDefinedPrimaryKey" : [ [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ], [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ] ],
-        "supportedSyncModes" : [ null, null ],
-        "sourceDefinedCursor" : true,
-        "name" : "name",
-        "namespace" : "namespace",
-        "defaultCursorField" : [ "defaultCursorField", "defaultCursorField" ]
-      },
-      "config" : {
-        "aliasName" : "aliasName",
-        "cursorField" : [ "cursorField", "cursorField" ],
-        "selected" : true,
-        "primaryKey" : [ [ "primaryKey", "primaryKey" ], [ "primaryKey", "primaryKey" ] ]
-      }
-    } ]
-  },
-  "connectionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "namespaceFormat" : "${SOURCE_NAMESPACE}",
-  "operationIds" : [ null, null ]
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - ConnectionRead -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-
- Up -
post /v1/connections/delete
-
Delete a connection (deleteConnection)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
ConnectionIdRequestBody ConnectionIdRequestBody (required)
- -
Body Parameter
- -
- - - - - - - - -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

204

- The resource was deleted successfully. - -

404

- Object with given id was not found. - NotFoundKnownExceptionInfo -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-
- Up -
post /v1/connections/get
-
Get a connection (getConnection)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
ConnectionIdRequestBody ConnectionIdRequestBody (required)
- -
Body Parameter
- -
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "sourceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "sourceCatalogId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "prefix" : "prefix",
-  "destinationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "resourceRequirements" : {
-    "cpu_limit" : "cpu_limit",
-    "memory_request" : "memory_request",
-    "memory_limit" : "memory_limit",
-    "cpu_request" : "cpu_request"
-  },
-  "schedule" : {
-    "units" : 0,
-    "timeUnit" : "minutes"
-  },
-  "name" : "name",
-  "syncCatalog" : {
-    "streams" : [ {
-      "stream" : {
-        "sourceDefinedPrimaryKey" : [ [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ], [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ] ],
-        "supportedSyncModes" : [ null, null ],
-        "sourceDefinedCursor" : true,
-        "name" : "name",
-        "namespace" : "namespace",
-        "defaultCursorField" : [ "defaultCursorField", "defaultCursorField" ]
-      },
-      "config" : {
-        "aliasName" : "aliasName",
-        "cursorField" : [ "cursorField", "cursorField" ],
-        "selected" : true,
-        "primaryKey" : [ [ "primaryKey", "primaryKey" ], [ "primaryKey", "primaryKey" ] ]
-      }
-    }, {
-      "stream" : {
-        "sourceDefinedPrimaryKey" : [ [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ], [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ] ],
-        "supportedSyncModes" : [ null, null ],
-        "sourceDefinedCursor" : true,
-        "name" : "name",
-        "namespace" : "namespace",
-        "defaultCursorField" : [ "defaultCursorField", "defaultCursorField" ]
-      },
-      "config" : {
-        "aliasName" : "aliasName",
-        "cursorField" : [ "cursorField", "cursorField" ],
-        "selected" : true,
-        "primaryKey" : [ [ "primaryKey", "primaryKey" ], [ "primaryKey", "primaryKey" ] ]
-      }
-    } ]
-  },
-  "connectionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "namespaceFormat" : "${SOURCE_NAMESPACE}",
-  "operationIds" : [ null, null ]
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - ConnectionRead -

404

- Object with given id was not found. - NotFoundKnownExceptionInfo -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-
- Up -
post /v1/state/get
-
Fetch the current state for a connection. (getState)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
ConnectionIdRequestBody ConnectionIdRequestBody (required)
- -
Body Parameter
- -
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "globalState" : {
-    "streamStates" : [ {
-      "streamDescriptor" : {
-        "name" : "name",
-        "namespace" : "namespace"
-      }
-    }, {
-      "streamDescriptor" : {
-        "name" : "name",
-        "namespace" : "namespace"
-      }
-    } ]
-  },
-  "connectionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "streamState" : [ {
-    "streamDescriptor" : {
-      "name" : "name",
-      "namespace" : "namespace"
-    }
-  }, {
-    "streamDescriptor" : {
-      "name" : "name",
-      "namespace" : "namespace"
-    }
-  } ]
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - ConnectionState -

404

- Object with given id was not found. - NotFoundKnownExceptionInfo -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-
- Up -
post /v1/web_backend/state/get_type
-
Fetch the current state type for a connection. (getStateType)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
ConnectionIdRequestBody ConnectionIdRequestBody (required)
- -
Body Parameter
- -
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
null
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - ConnectionStateType -

404

- Object with given id was not found. - NotFoundKnownExceptionInfo -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-
- Up -
post /v1/connections/list_all
-
Returns all connections for a workspace, including deleted connections. (listAllConnectionsForWorkspace)
-
List connections for workspace, including deleted connections.
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
WorkspaceIdRequestBody WorkspaceIdRequestBody (required)
- -
Body Parameter
- -
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "connections" : [ {
-    "sourceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "sourceCatalogId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "prefix" : "prefix",
-    "destinationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "resourceRequirements" : {
-      "cpu_limit" : "cpu_limit",
-      "memory_request" : "memory_request",
-      "memory_limit" : "memory_limit",
-      "cpu_request" : "cpu_request"
-    },
-    "schedule" : {
-      "units" : 0,
-      "timeUnit" : "minutes"
-    },
-    "name" : "name",
-    "syncCatalog" : {
-      "streams" : [ {
-        "stream" : {
-          "sourceDefinedPrimaryKey" : [ [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ], [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ] ],
-          "supportedSyncModes" : [ null, null ],
-          "sourceDefinedCursor" : true,
-          "name" : "name",
-          "namespace" : "namespace",
-          "defaultCursorField" : [ "defaultCursorField", "defaultCursorField" ]
-        },
-        "config" : {
-          "aliasName" : "aliasName",
-          "cursorField" : [ "cursorField", "cursorField" ],
-          "selected" : true,
-          "primaryKey" : [ [ "primaryKey", "primaryKey" ], [ "primaryKey", "primaryKey" ] ]
-        }
-      }, {
-        "stream" : {
-          "sourceDefinedPrimaryKey" : [ [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ], [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ] ],
-          "supportedSyncModes" : [ null, null ],
-          "sourceDefinedCursor" : true,
-          "name" : "name",
-          "namespace" : "namespace",
-          "defaultCursorField" : [ "defaultCursorField", "defaultCursorField" ]
-        },
-        "config" : {
-          "aliasName" : "aliasName",
-          "cursorField" : [ "cursorField", "cursorField" ],
-          "selected" : true,
-          "primaryKey" : [ [ "primaryKey", "primaryKey" ], [ "primaryKey", "primaryKey" ] ]
-        }
-      } ]
-    },
-    "connectionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "namespaceFormat" : "${SOURCE_NAMESPACE}",
-    "operationIds" : [ null, null ]
-  }, {
-    "sourceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "sourceCatalogId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "prefix" : "prefix",
-    "destinationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "resourceRequirements" : {
-      "cpu_limit" : "cpu_limit",
-      "memory_request" : "memory_request",
-      "memory_limit" : "memory_limit",
-      "cpu_request" : "cpu_request"
-    },
-    "schedule" : {
-      "units" : 0,
-      "timeUnit" : "minutes"
-    },
-    "name" : "name",
-    "syncCatalog" : {
-      "streams" : [ {
-        "stream" : {
-          "sourceDefinedPrimaryKey" : [ [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ], [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ] ],
-          "supportedSyncModes" : [ null, null ],
-          "sourceDefinedCursor" : true,
-          "name" : "name",
-          "namespace" : "namespace",
-          "defaultCursorField" : [ "defaultCursorField", "defaultCursorField" ]
-        },
-        "config" : {
-          "aliasName" : "aliasName",
-          "cursorField" : [ "cursorField", "cursorField" ],
-          "selected" : true,
-          "primaryKey" : [ [ "primaryKey", "primaryKey" ], [ "primaryKey", "primaryKey" ] ]
-        }
-      }, {
-        "stream" : {
-          "sourceDefinedPrimaryKey" : [ [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ], [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ] ],
-          "supportedSyncModes" : [ null, null ],
-          "sourceDefinedCursor" : true,
-          "name" : "name",
-          "namespace" : "namespace",
-          "defaultCursorField" : [ "defaultCursorField", "defaultCursorField" ]
-        },
-        "config" : {
-          "aliasName" : "aliasName",
-          "cursorField" : [ "cursorField", "cursorField" ],
-          "selected" : true,
-          "primaryKey" : [ [ "primaryKey", "primaryKey" ], [ "primaryKey", "primaryKey" ] ]
-        }
-      } ]
-    },
-    "connectionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "namespaceFormat" : "${SOURCE_NAMESPACE}",
-    "operationIds" : [ null, null ]
-  } ]
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - ConnectionReadList -

404

- Object with given id was not found. - NotFoundKnownExceptionInfo -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-
- Up -
post /v1/connections/list
-
Returns all connections for a workspace. (listConnectionsForWorkspace)
-
List connections for workspace. Does not return deleted connections.
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
WorkspaceIdRequestBody WorkspaceIdRequestBody (required)
- -
Body Parameter
- -
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "connections" : [ {
-    "sourceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "sourceCatalogId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "prefix" : "prefix",
-    "destinationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "resourceRequirements" : {
-      "cpu_limit" : "cpu_limit",
-      "memory_request" : "memory_request",
-      "memory_limit" : "memory_limit",
-      "cpu_request" : "cpu_request"
-    },
-    "schedule" : {
-      "units" : 0,
-      "timeUnit" : "minutes"
-    },
-    "name" : "name",
-    "syncCatalog" : {
-      "streams" : [ {
-        "stream" : {
-          "sourceDefinedPrimaryKey" : [ [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ], [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ] ],
-          "supportedSyncModes" : [ null, null ],
-          "sourceDefinedCursor" : true,
-          "name" : "name",
-          "namespace" : "namespace",
-          "defaultCursorField" : [ "defaultCursorField", "defaultCursorField" ]
-        },
-        "config" : {
-          "aliasName" : "aliasName",
-          "cursorField" : [ "cursorField", "cursorField" ],
-          "selected" : true,
-          "primaryKey" : [ [ "primaryKey", "primaryKey" ], [ "primaryKey", "primaryKey" ] ]
-        }
-      }, {
-        "stream" : {
-          "sourceDefinedPrimaryKey" : [ [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ], [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ] ],
-          "supportedSyncModes" : [ null, null ],
-          "sourceDefinedCursor" : true,
-          "name" : "name",
-          "namespace" : "namespace",
-          "defaultCursorField" : [ "defaultCursorField", "defaultCursorField" ]
-        },
-        "config" : {
-          "aliasName" : "aliasName",
-          "cursorField" : [ "cursorField", "cursorField" ],
-          "selected" : true,
-          "primaryKey" : [ [ "primaryKey", "primaryKey" ], [ "primaryKey", "primaryKey" ] ]
-        }
-      } ]
-    },
-    "connectionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "namespaceFormat" : "${SOURCE_NAMESPACE}",
-    "operationIds" : [ null, null ]
-  }, {
-    "sourceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "sourceCatalogId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "prefix" : "prefix",
-    "destinationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "resourceRequirements" : {
-      "cpu_limit" : "cpu_limit",
-      "memory_request" : "memory_request",
-      "memory_limit" : "memory_limit",
-      "cpu_request" : "cpu_request"
-    },
-    "schedule" : {
-      "units" : 0,
-      "timeUnit" : "minutes"
-    },
-    "name" : "name",
-    "syncCatalog" : {
-      "streams" : [ {
-        "stream" : {
-          "sourceDefinedPrimaryKey" : [ [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ], [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ] ],
-          "supportedSyncModes" : [ null, null ],
-          "sourceDefinedCursor" : true,
-          "name" : "name",
-          "namespace" : "namespace",
-          "defaultCursorField" : [ "defaultCursorField", "defaultCursorField" ]
-        },
-        "config" : {
-          "aliasName" : "aliasName",
-          "cursorField" : [ "cursorField", "cursorField" ],
-          "selected" : true,
-          "primaryKey" : [ [ "primaryKey", "primaryKey" ], [ "primaryKey", "primaryKey" ] ]
-        }
-      }, {
-        "stream" : {
-          "sourceDefinedPrimaryKey" : [ [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ], [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ] ],
-          "supportedSyncModes" : [ null, null ],
-          "sourceDefinedCursor" : true,
-          "name" : "name",
-          "namespace" : "namespace",
-          "defaultCursorField" : [ "defaultCursorField", "defaultCursorField" ]
-        },
-        "config" : {
-          "aliasName" : "aliasName",
-          "cursorField" : [ "cursorField", "cursorField" ],
-          "selected" : true,
-          "primaryKey" : [ [ "primaryKey", "primaryKey" ], [ "primaryKey", "primaryKey" ] ]
-        }
-      } ]
-    },
-    "connectionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "namespaceFormat" : "${SOURCE_NAMESPACE}",
-    "operationIds" : [ null, null ]
-  } ]
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - ConnectionReadList -

404

- Object with given id was not found. - NotFoundKnownExceptionInfo -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-
- Up -
post /v1/connections/reset
-
Reset the data for the connection. Deletes data generated by the connection in the destination. Resets any cursors back to initial state. (resetConnection)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
ConnectionIdRequestBody ConnectionIdRequestBody (required)
- -
Body Parameter
- -
- - - - -

Return type

-
- JobInfoRead - -
- - - -

Example data

-
Content-Type: application/json
-
{
-  "job" : {
-    "createdAt" : 6,
-    "configId" : "configId",
-    "id" : 0,
-    "resetConfig" : {
-      "streamsToReset" : [ {
-        "name" : "name",
-        "namespace" : "namespace"
-      }, {
-        "name" : "name",
-        "namespace" : "namespace"
-      } ]
-    },
-    "updatedAt" : 1
-  },
-  "attempts" : [ {
-    "attempt" : {
-      "totalStats" : {
-        "stateMessagesEmitted" : 7,
-        "recordsCommitted" : 1,
-        "bytesEmitted" : 4,
-        "recordsEmitted" : 2
-      },
-      "failureSummary" : {
-        "failures" : [ {
-          "retryable" : true,
-          "stacktrace" : "stacktrace",
-          "internalMessage" : "internalMessage",
-          "externalMessage" : "externalMessage",
-          "timestamp" : 1
-        }, {
-          "retryable" : true,
-          "stacktrace" : "stacktrace",
-          "internalMessage" : "internalMessage",
-          "externalMessage" : "externalMessage",
-          "timestamp" : 1
-        } ],
-        "partialSuccess" : true
-      },
-      "createdAt" : 5,
-      "bytesSynced" : 9,
-      "endedAt" : 7,
-      "streamStats" : [ {
-        "stats" : {
-          "stateMessagesEmitted" : 7,
-          "recordsCommitted" : 1,
-          "bytesEmitted" : 4,
-          "recordsEmitted" : 2
-        },
-        "streamName" : "streamName"
-      }, {
-        "stats" : {
-          "stateMessagesEmitted" : 7,
-          "recordsCommitted" : 1,
-          "bytesEmitted" : 4,
-          "recordsEmitted" : 2
-        },
-        "streamName" : "streamName"
-      } ],
-      "id" : 5,
-      "recordsSynced" : 3,
-      "updatedAt" : 2
-    },
-    "logs" : {
-      "logLines" : [ "logLines", "logLines" ]
-    }
-  }, {
-    "attempt" : {
-      "totalStats" : {
-        "stateMessagesEmitted" : 7,
-        "recordsCommitted" : 1,
-        "bytesEmitted" : 4,
-        "recordsEmitted" : 2
-      },
-      "failureSummary" : {
-        "failures" : [ {
-          "retryable" : true,
-          "stacktrace" : "stacktrace",
-          "internalMessage" : "internalMessage",
-          "externalMessage" : "externalMessage",
-          "timestamp" : 1
-        }, {
-          "retryable" : true,
-          "stacktrace" : "stacktrace",
-          "internalMessage" : "internalMessage",
-          "externalMessage" : "externalMessage",
-          "timestamp" : 1
-        } ],
-        "partialSuccess" : true
-      },
-      "createdAt" : 5,
-      "bytesSynced" : 9,
-      "endedAt" : 7,
-      "streamStats" : [ {
-        "stats" : {
-          "stateMessagesEmitted" : 7,
-          "recordsCommitted" : 1,
-          "bytesEmitted" : 4,
-          "recordsEmitted" : 2
-        },
-        "streamName" : "streamName"
-      }, {
-        "stats" : {
-          "stateMessagesEmitted" : 7,
-          "recordsCommitted" : 1,
-          "bytesEmitted" : 4,
-          "recordsEmitted" : 2
-        },
-        "streamName" : "streamName"
-      } ],
-      "id" : 5,
-      "recordsSynced" : 3,
-      "updatedAt" : 2
-    },
-    "logs" : {
-      "logLines" : [ "logLines", "logLines" ]
-    }
-  } ]
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - JobInfoRead -

404

- Object with given id was not found. - NotFoundKnownExceptionInfo -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-
- Up -
post /v1/connections/search
-
Search connections (searchConnections)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
ConnectionSearch ConnectionSearch (required)
- -
Body Parameter
- -
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "connections" : [ {
-    "sourceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "sourceCatalogId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "prefix" : "prefix",
-    "destinationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "resourceRequirements" : {
-      "cpu_limit" : "cpu_limit",
-      "memory_request" : "memory_request",
-      "memory_limit" : "memory_limit",
-      "cpu_request" : "cpu_request"
-    },
-    "schedule" : {
-      "units" : 0,
-      "timeUnit" : "minutes"
-    },
-    "name" : "name",
-    "syncCatalog" : {
-      "streams" : [ {
-        "stream" : {
-          "sourceDefinedPrimaryKey" : [ [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ], [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ] ],
-          "supportedSyncModes" : [ null, null ],
-          "sourceDefinedCursor" : true,
-          "name" : "name",
-          "namespace" : "namespace",
-          "defaultCursorField" : [ "defaultCursorField", "defaultCursorField" ]
-        },
-        "config" : {
-          "aliasName" : "aliasName",
-          "cursorField" : [ "cursorField", "cursorField" ],
-          "selected" : true,
-          "primaryKey" : [ [ "primaryKey", "primaryKey" ], [ "primaryKey", "primaryKey" ] ]
-        }
-      }, {
-        "stream" : {
-          "sourceDefinedPrimaryKey" : [ [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ], [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ] ],
-          "supportedSyncModes" : [ null, null ],
-          "sourceDefinedCursor" : true,
-          "name" : "name",
-          "namespace" : "namespace",
-          "defaultCursorField" : [ "defaultCursorField", "defaultCursorField" ]
-        },
-        "config" : {
-          "aliasName" : "aliasName",
-          "cursorField" : [ "cursorField", "cursorField" ],
-          "selected" : true,
-          "primaryKey" : [ [ "primaryKey", "primaryKey" ], [ "primaryKey", "primaryKey" ] ]
-        }
-      } ]
-    },
-    "connectionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "namespaceFormat" : "${SOURCE_NAMESPACE}",
-    "operationIds" : [ null, null ]
-  }, {
-    "sourceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "sourceCatalogId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "prefix" : "prefix",
-    "destinationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "resourceRequirements" : {
-      "cpu_limit" : "cpu_limit",
-      "memory_request" : "memory_request",
-      "memory_limit" : "memory_limit",
-      "cpu_request" : "cpu_request"
-    },
-    "schedule" : {
-      "units" : 0,
-      "timeUnit" : "minutes"
-    },
-    "name" : "name",
-    "syncCatalog" : {
-      "streams" : [ {
-        "stream" : {
-          "sourceDefinedPrimaryKey" : [ [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ], [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ] ],
-          "supportedSyncModes" : [ null, null ],
-          "sourceDefinedCursor" : true,
-          "name" : "name",
-          "namespace" : "namespace",
-          "defaultCursorField" : [ "defaultCursorField", "defaultCursorField" ]
-        },
-        "config" : {
-          "aliasName" : "aliasName",
-          "cursorField" : [ "cursorField", "cursorField" ],
-          "selected" : true,
-          "primaryKey" : [ [ "primaryKey", "primaryKey" ], [ "primaryKey", "primaryKey" ] ]
-        }
-      }, {
-        "stream" : {
-          "sourceDefinedPrimaryKey" : [ [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ], [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ] ],
-          "supportedSyncModes" : [ null, null ],
-          "sourceDefinedCursor" : true,
-          "name" : "name",
-          "namespace" : "namespace",
-          "defaultCursorField" : [ "defaultCursorField", "defaultCursorField" ]
-        },
-        "config" : {
-          "aliasName" : "aliasName",
-          "cursorField" : [ "cursorField", "cursorField" ],
-          "selected" : true,
-          "primaryKey" : [ [ "primaryKey", "primaryKey" ], [ "primaryKey", "primaryKey" ] ]
-        }
-      } ]
-    },
-    "connectionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "namespaceFormat" : "${SOURCE_NAMESPACE}",
-    "operationIds" : [ null, null ]
-  } ]
-}

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

200: Successful operation (ConnectionReadList)

422: Input failed validation (InvalidInputExceptionInfo)
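
As an illustration, the sketch below calls this endpoint from Python with the requests library. The base URL (a local deployment at localhost:8000) and the single search field used here are assumptions; populate the ConnectionSearch body with whichever fields you actually want to match.

```python
# Hypothetical sketch: search connections by name via the Config API.
# The API host and the search field ("name") are illustrative assumptions.
import requests

API_URL = "http://localhost:8000/api/v1"

resp = requests.post(f"{API_URL}/connections/search", json={"name": "name"})
resp.raise_for_status()

for connection in resp.json()["connections"]:
    print(connection["connectionId"], connection["name"])
```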

post /v1/connections/sync

Trigger a manual sync of the connection (syncConnection)

Consumes

This API call consumes the following media types via the Content-Type request header:

  • application/json

Request body

ConnectionIdRequestBody ConnectionIdRequestBody (required): Body Parameter

Return type

JobInfoRead

Example data

Content-Type: application/json
{
-  "job" : {
-    "createdAt" : 6,
-    "configId" : "configId",
-    "id" : 0,
-    "resetConfig" : {
-      "streamsToReset" : [ {
-        "name" : "name",
-        "namespace" : "namespace"
-      }, {
-        "name" : "name",
-        "namespace" : "namespace"
-      } ]
-    },
-    "updatedAt" : 1
-  },
-  "attempts" : [ {
-    "attempt" : {
-      "totalStats" : {
-        "stateMessagesEmitted" : 7,
-        "recordsCommitted" : 1,
-        "bytesEmitted" : 4,
-        "recordsEmitted" : 2
-      },
-      "failureSummary" : {
-        "failures" : [ {
-          "retryable" : true,
-          "stacktrace" : "stacktrace",
-          "internalMessage" : "internalMessage",
-          "externalMessage" : "externalMessage",
-          "timestamp" : 1
-        }, {
-          "retryable" : true,
-          "stacktrace" : "stacktrace",
-          "internalMessage" : "internalMessage",
-          "externalMessage" : "externalMessage",
-          "timestamp" : 1
-        } ],
-        "partialSuccess" : true
-      },
-      "createdAt" : 5,
-      "bytesSynced" : 9,
-      "endedAt" : 7,
-      "streamStats" : [ {
-        "stats" : {
-          "stateMessagesEmitted" : 7,
-          "recordsCommitted" : 1,
-          "bytesEmitted" : 4,
-          "recordsEmitted" : 2
-        },
-        "streamName" : "streamName"
-      }, {
-        "stats" : {
-          "stateMessagesEmitted" : 7,
-          "recordsCommitted" : 1,
-          "bytesEmitted" : 4,
-          "recordsEmitted" : 2
-        },
-        "streamName" : "streamName"
-      } ],
-      "id" : 5,
-      "recordsSynced" : 3,
-      "updatedAt" : 2
-    },
-    "logs" : {
-      "logLines" : [ "logLines", "logLines" ]
-    }
-  }, {
-    "attempt" : {
-      "totalStats" : {
-        "stateMessagesEmitted" : 7,
-        "recordsCommitted" : 1,
-        "bytesEmitted" : 4,
-        "recordsEmitted" : 2
-      },
-      "failureSummary" : {
-        "failures" : [ {
-          "retryable" : true,
-          "stacktrace" : "stacktrace",
-          "internalMessage" : "internalMessage",
-          "externalMessage" : "externalMessage",
-          "timestamp" : 1
-        }, {
-          "retryable" : true,
-          "stacktrace" : "stacktrace",
-          "internalMessage" : "internalMessage",
-          "externalMessage" : "externalMessage",
-          "timestamp" : 1
-        } ],
-        "partialSuccess" : true
-      },
-      "createdAt" : 5,
-      "bytesSynced" : 9,
-      "endedAt" : 7,
-      "streamStats" : [ {
-        "stats" : {
-          "stateMessagesEmitted" : 7,
-          "recordsCommitted" : 1,
-          "bytesEmitted" : 4,
-          "recordsEmitted" : 2
-        },
-        "streamName" : "streamName"
-      }, {
-        "stats" : {
-          "stateMessagesEmitted" : 7,
-          "recordsCommitted" : 1,
-          "bytesEmitted" : 4,
-          "recordsEmitted" : 2
-        },
-        "streamName" : "streamName"
-      } ],
-      "id" : 5,
-      "recordsSynced" : 3,
-      "updatedAt" : 2
-    },
-    "logs" : {
-      "logLines" : [ "logLines", "logLines" ]
-    }
-  } ]
-}

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

200: Successful operation (JobInfoRead)

404: Object with given id was not found. (NotFoundKnownExceptionInfo)

422: Input failed validation (InvalidInputExceptionInfo)
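
A minimal sketch of triggering a sync from Python, assuming a local deployment at localhost:8000 and a placeholder connection id:

```python
# Hypothetical sketch: trigger a manual sync for one connection.
import requests

API_URL = "http://localhost:8000/api/v1"
CONNECTION_ID = "00000000-0000-0000-0000-000000000000"  # placeholder

resp = requests.post(
    f"{API_URL}/connections/sync",
    json={"connectionId": CONNECTION_ID},
)
resp.raise_for_status()

job_info = resp.json()  # JobInfoRead
print("started job", job_info["job"]["id"])
```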

post /v1/connections/update

Update a connection (updateConnection)

Consumes

This API call consumes the following media types via the Content-Type request header:

  • application/json

Request body

ConnectionUpdate ConnectionUpdate (required): Body Parameter

Return type

ConnectionRead

Example data

Content-Type: application/json
{
-  "sourceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "sourceCatalogId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "prefix" : "prefix",
-  "destinationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "resourceRequirements" : {
-    "cpu_limit" : "cpu_limit",
-    "memory_request" : "memory_request",
-    "memory_limit" : "memory_limit",
-    "cpu_request" : "cpu_request"
-  },
-  "schedule" : {
-    "units" : 0,
-    "timeUnit" : "minutes"
-  },
-  "name" : "name",
-  "syncCatalog" : {
-    "streams" : [ {
-      "stream" : {
-        "sourceDefinedPrimaryKey" : [ [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ], [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ] ],
-        "supportedSyncModes" : [ null, null ],
-        "sourceDefinedCursor" : true,
-        "name" : "name",
-        "namespace" : "namespace",
-        "defaultCursorField" : [ "defaultCursorField", "defaultCursorField" ]
-      },
-      "config" : {
-        "aliasName" : "aliasName",
-        "cursorField" : [ "cursorField", "cursorField" ],
-        "selected" : true,
-        "primaryKey" : [ [ "primaryKey", "primaryKey" ], [ "primaryKey", "primaryKey" ] ]
-      }
-    }, {
-      "stream" : {
-        "sourceDefinedPrimaryKey" : [ [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ], [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ] ],
-        "supportedSyncModes" : [ null, null ],
-        "sourceDefinedCursor" : true,
-        "name" : "name",
-        "namespace" : "namespace",
-        "defaultCursorField" : [ "defaultCursorField", "defaultCursorField" ]
-      },
-      "config" : {
-        "aliasName" : "aliasName",
-        "cursorField" : [ "cursorField", "cursorField" ],
-        "selected" : true,
-        "primaryKey" : [ [ "primaryKey", "primaryKey" ], [ "primaryKey", "primaryKey" ] ]
-      }
-    } ]
-  },
-  "connectionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "namespaceFormat" : "${SOURCE_NAMESPACE}",
-  "operationIds" : [ null, null ]
-}

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

200: Successful operation (ConnectionRead)

422: Input failed validation (InvalidInputExceptionInfo)
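
ConnectionUpdate replaces the mutable parts of a connection, so a common pattern is to read the current configuration first, change only what you need, and send the result back. The sketch below assumes the /v1/connections/get endpoint documented elsewhere in this reference and a local deployment at localhost:8000; which fields ConnectionUpdate actually requires is defined by its schema, so treat the field handling here as illustrative.

```python
# Hypothetical sketch: change a connection's stream prefix.
# Assumes /v1/connections/get (documented elsewhere in this reference);
# the connection id is a placeholder.
import requests

API_URL = "http://localhost:8000/api/v1"
CONNECTION_ID = "00000000-0000-0000-0000-000000000000"  # placeholder

# Read the current ConnectionRead so the update starts from the existing settings.
current = requests.post(f"{API_URL}/connections/get", json={"connectionId": CONNECTION_ID})
current.raise_for_status()
connection = current.json()

# Build the ConnectionUpdate body: keep the existing catalog and schedule,
# change only the prefix. Add or drop fields per the ConnectionUpdate schema.
update_body = {
    "connectionId": connection["connectionId"],
    "syncCatalog": connection["syncCatalog"],
    "schedule": connection.get("schedule"),
    "namespaceFormat": connection.get("namespaceFormat"),
    "operationIds": connection.get("operationIds"),
    "prefix": "staging_",
}

resp = requests.post(f"{API_URL}/connections/update", json=update_body)
resp.raise_for_status()
print(resp.json()["prefix"])
```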

DbMigration

post /v1/db_migrations/migrate

Migrate the database to the latest version (executeMigrations)

Consumes

This API call consumes the following media types via the Content-Type request header:

  • application/json

Request body

DbMigrationRequestBody DbMigrationRequestBody (required): Body Parameter

Return type

DbMigrationExecutionRead

Example data

Content-Type: application/json
{
-  "initialVersion" : "initialVersion",
-  "executedMigrations" : [ {
-    "migrationVersion" : "migrationVersion",
-    "migrationDescription" : "migrationDescription",
-    "migratedAt" : 0,
-    "migrationType" : "migrationType",
-    "migrationScript" : "migrationScript",
-    "migratedBy" : "migratedBy"
-  }, {
-    "migrationVersion" : "migrationVersion",
-    "migrationDescription" : "migrationDescription",
-    "migratedAt" : 0,
-    "migrationType" : "migrationType",
-    "migrationScript" : "migrationScript",
-    "migratedBy" : "migratedBy"
-  } ],
-  "targetVersion" : "targetVersion"
-}

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

200: Successful operation (DbMigrationExecutionRead)

404: Object with given id was not found. (NotFoundKnownExceptionInfo)

422: Input failed validation (InvalidInputExceptionInfo)
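
A sketch of invoking the migration endpoint from Python. The request body shape used here (a single field naming the target database) is an assumption; check the DbMigrationRequestBody schema for the exact fields and accepted values.

```python
# Hypothetical sketch: run pending database migrations.
# The body {"database": "configs"} is an assumed DbMigrationRequestBody shape.
import requests

API_URL = "http://localhost:8000/api/v1"  # local-deployment placeholder

resp = requests.post(f"{API_URL}/db_migrations/migrate", json={"database": "configs"})
resp.raise_for_status()

result = resp.json()  # DbMigrationExecutionRead
print("migrated from", result["initialVersion"], "to", result["targetVersion"])
for migration in result["executedMigrations"]:
    print(" ", migration["migrationVersion"], migration["migrationDescription"])
```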

post /v1/db_migrations/list

List all database migrations (listMigrations)

Consumes

This API call consumes the following media types via the Content-Type request header:

  • application/json

Request body

DbMigrationRequestBody DbMigrationRequestBody (required): Body Parameter

Return type

DbMigrationReadList

Example data

Content-Type: application/json
{
-  "migrations" : [ {
-    "migrationVersion" : "migrationVersion",
-    "migrationDescription" : "migrationDescription",
-    "migratedAt" : 0,
-    "migrationType" : "migrationType",
-    "migrationScript" : "migrationScript",
-    "migratedBy" : "migratedBy"
-  }, {
-    "migrationVersion" : "migrationVersion",
-    "migrationDescription" : "migrationDescription",
-    "migratedAt" : 0,
-    "migrationType" : "migrationType",
-    "migrationScript" : "migrationScript",
-    "migratedBy" : "migratedBy"
-  } ]
-}

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

200: Successful operation (DbMigrationReadList)

404: Object with given id was not found. (NotFoundKnownExceptionInfo)

422: Input failed validation (InvalidInputExceptionInfo)
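
The listing endpoint can be called the same way; the same caveat about the assumed request body shape applies.

```python
# Hypothetical sketch: list applied migrations (assumed DbMigrationRequestBody shape).
import requests

resp = requests.post(
    "http://localhost:8000/api/v1/db_migrations/list",
    json={"database": "configs"},  # assumption; check the schema
)
resp.raise_for_status()
for migration in resp.json()["migrations"]:
    print(migration["migratedAt"], migration["migrationVersion"])
```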

Deployment

post /v1/deployment/export

Export Airbyte Configuration and Data Archive (exportArchive)

Return type

File

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/x-gzip

Responses

200: Successful operation (File)
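
Because the response is a gzip archive rather than JSON, a client should stream the body to disk. A sketch, assuming a local deployment at localhost:8000 and a placeholder output path:

```python
# Hypothetical sketch: download the full configuration/data archive to a file.
import requests

API_URL = "http://localhost:8000/api/v1"

with requests.post(f"{API_URL}/deployment/export", stream=True) as resp:
    resp.raise_for_status()
    with open("airbyte_archive.tar.gz", "wb") as archive:
        for chunk in resp.iter_content(chunk_size=8192):
            archive.write(chunk)
```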

post /v1/deployment/export_workspace

Export Airbyte Workspace Configuration (exportWorkspace)

Consumes

This API call consumes the following media types via the Content-Type request header:

  • application/json

Request body

WorkspaceIdRequestBody WorkspaceIdRequestBody (required): Body Parameter

Return type

File

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/x-gzip

Responses

200: Successful operation (File)

post /v1/deployment/import

Import Airbyte Configuration and Data Archive (importArchive)

Consumes

This API call consumes the following media types via the Content-Type request header:

  • application/x-gzip

Request body

body file (required): Body Parameter

Return type

ImportRead

Example data

Content-Type: application/json
{
-  "reason" : "reason",
-  "status" : "succeeded"
-}

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

200: Successful operation (ImportRead)
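
A sketch of uploading an archive produced by the export endpoint above; the body is sent as raw gzip bytes with the matching Content-Type header. Host and file path are placeholders.

```python
# Hypothetical sketch: import a previously exported archive.
import requests

API_URL = "http://localhost:8000/api/v1"

with open("airbyte_archive.tar.gz", "rb") as archive:
    resp = requests.post(
        f"{API_URL}/deployment/import",
        data=archive,
        headers={"Content-Type": "application/x-gzip"},
    )
resp.raise_for_status()
print(resp.json())  # ImportRead: {"status": ..., "reason": ...}
```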

post /v1/deployment/import_into_workspace

Import Airbyte Configuration into a workspace; this operation might change the ids of imported configurations. Note that you might need to upload a temporary archive resource with 'deployment/upload_archive_resource' first (importIntoWorkspace)

Consumes

This API call consumes the following media types via the Content-Type request header:

  • application/json

Request body

ImportRequestBody ImportRequestBody (required): Body Parameter

Return type

ImportRead

Example data

Content-Type: application/json
{
-  "reason" : "reason",
-  "status" : "succeeded"
-}

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

200: Successful operation (ImportRead)

404: Object with given id was not found. (NotFoundKnownExceptionInfo)

post /v1/deployment/upload_archive_resource

Upload a GZIP archive tarball and stage it in the server's cache as a temporary resource (uploadArchiveResource)

Consumes

This API call consumes the following media types via the Content-Type request header:

  • application/x-gzip

Request body

body file (required): Body Parameter

Return type

UploadRead

Example data

Content-Type: application/json
{
-  "resourceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "status" : "succeeded"
-}

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

200: Successful operation (UploadRead)
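
A sketch that stages an archive and then imports it into a single workspace via the import_into_workspace endpoint above. The ImportRequestBody field names used below are an assumption based on the surrounding endpoint descriptions; ids and paths are placeholders.

```python
# Hypothetical sketch: stage an archive, then import it into one workspace.
import requests

API_URL = "http://localhost:8000/api/v1"
WORKSPACE_ID = "00000000-0000-0000-0000-000000000000"  # placeholder

with open("airbyte_archive.tar.gz", "rb") as archive:
    upload = requests.post(
        f"{API_URL}/deployment/upload_archive_resource",
        data=archive,
        headers={"Content-Type": "application/x-gzip"},
    )
upload.raise_for_status()
resource_id = upload.json()["resourceId"]

imported = requests.post(
    f"{API_URL}/deployment/import_into_workspace",
    json={"resourceId": resource_id, "workspaceId": WORKSPACE_ID},  # assumed fields
)
imported.raise_for_status()
print(imported.json()["status"])
```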

Destination

post /v1/destinations/check_connection

Check connection to the destination (checkConnectionToDestination)

Consumes

This API call consumes the following media types via the Content-Type request header:

  • application/json

Request body

DestinationIdRequestBody DestinationIdRequestBody (required): Body Parameter

Return type

CheckConnectionRead

Example data

Content-Type: application/json
{
-  "message" : "message",
-  "jobInfo" : {
-    "createdAt" : 0,
-    "configId" : "configId",
-    "endedAt" : 6,
-    "id" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "logs" : {
-      "logLines" : [ "logLines", "logLines" ]
-    },
-    "succeeded" : true
-  },
-  "status" : "succeeded"
-}

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

200: Successful operation (CheckConnectionRead)

404: Object with given id was not found. (NotFoundKnownExceptionInfo)

422: Input failed validation (InvalidInputExceptionInfo)
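
A sketch of checking a destination, assuming a local deployment and a placeholder destination id; the returned CheckConnectionRead carries the status and an optional message.

```python
# Hypothetical sketch: verify that a configured destination is reachable.
import requests

API_URL = "http://localhost:8000/api/v1"
DESTINATION_ID = "00000000-0000-0000-0000-000000000000"  # placeholder

resp = requests.post(
    f"{API_URL}/destinations/check_connection",
    json={"destinationId": DESTINATION_ID},
)
resp.raise_for_status()

check = resp.json()  # CheckConnectionRead
print(check["status"], check.get("message", ""))
```
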
- Up -
post /v1/destinations/check_connection_for_update
-
Check connection for a proposed update to a destination (checkConnectionToDestinationForUpdate)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
DestinationUpdate DestinationUpdate (required)
- -
Body Parameter
- -
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "message" : "message",
-  "jobInfo" : {
-    "createdAt" : 0,
-    "configId" : "configId",
-    "endedAt" : 6,
-    "id" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "logs" : {
-      "logLines" : [ "logLines", "logLines" ]
-    },
-    "succeeded" : true
-  },
-  "status" : "succeeded"
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - CheckConnectionRead -

404

- Object with given id was not found. - NotFoundKnownExceptionInfo -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-
- Up -
post /v1/destinations/clone
-
Clone destination (cloneDestination)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
DestinationCloneRequestBody DestinationCloneRequestBody (required)
- -
Body Parameter
- -
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "connectionConfiguration" : {
-    "user" : "charles"
-  },
-  "destinationName" : "destinationName",
-  "name" : "name",
-  "destinationDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "destinationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - DestinationRead -

404

- Object with given id was not found. - NotFoundKnownExceptionInfo -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-

post /v1/destinations/create

Create a destination (createDestination)

Consumes

This API call consumes the following media types via the Content-Type request header:

  • application/json

Request body

DestinationCreate DestinationCreate (required): Body Parameter

Return type

DestinationRead

Example data

Content-Type: application/json
{
-  "connectionConfiguration" : {
-    "user" : "charles"
-  },
-  "destinationName" : "destinationName",
-  "name" : "name",
-  "destinationDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "destinationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-}

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

200: Successful operation (DestinationRead)

422: Input failed validation (InvalidInputExceptionInfo)
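
A sketch of creating a destination. The ids are placeholders, and the connectionConfiguration payload is entirely connector-specific; it has to satisfy the connectionSpecification returned by the DestinationDefinitionSpecification endpoint documented below.

```python
# Hypothetical sketch: create a destination in a workspace.
import requests

API_URL = "http://localhost:8000/api/v1"

body = {
    "workspaceId": "00000000-0000-0000-0000-000000000000",              # placeholder
    "destinationDefinitionId": "00000000-0000-0000-0000-000000000000",  # placeholder
    "name": "my-destination",
    "connectionConfiguration": {"user": "charles"},  # connector-specific fields
}

resp = requests.post(f"{API_URL}/destinations/create", json=body)
resp.raise_for_status()
print(resp.json()["destinationId"])
```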

post /v1/destinations/delete

Delete the destination (deleteDestination)

Consumes

This API call consumes the following media types via the Content-Type request header:

  • application/json

Request body

DestinationIdRequestBody DestinationIdRequestBody (required): Body Parameter

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

204: The resource was deleted successfully.

404: Object with given id was not found. (NotFoundKnownExceptionInfo)

422: Input failed validation (InvalidInputExceptionInfo)

- Up -
post /v1/destinations/get
-
Get configured destination (getDestination)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
DestinationIdRequestBody DestinationIdRequestBody (required)
- -
Body Parameter
- -
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "connectionConfiguration" : {
-    "user" : "charles"
-  },
-  "destinationName" : "destinationName",
-  "name" : "name",
-  "destinationDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "destinationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - DestinationRead -

404

- Object with given id was not found. - NotFoundKnownExceptionInfo -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-

post /v1/destinations/list

List configured destinations for a workspace (listDestinationsForWorkspace)

Consumes

This API call consumes the following media types via the Content-Type request header:

  • application/json

Request body

WorkspaceIdRequestBody WorkspaceIdRequestBody (required): Body Parameter

Return type

DestinationReadList

Example data

Content-Type: application/json
{
-  "destinations" : [ {
-    "connectionConfiguration" : {
-      "user" : "charles"
-    },
-    "destinationName" : "destinationName",
-    "name" : "name",
-    "destinationDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "destinationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-  }, {
-    "connectionConfiguration" : {
-      "user" : "charles"
-    },
-    "destinationName" : "destinationName",
-    "name" : "name",
-    "destinationDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "destinationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-  } ]
-}

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

200: Successful operation (DestinationReadList)

404: Object with given id was not found. (NotFoundKnownExceptionInfo)

422: Input failed validation (InvalidInputExceptionInfo)
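
A sketch of listing the destinations of a workspace, with a placeholder workspace id and local API host:

```python
# Hypothetical sketch: list every configured destination in a workspace.
import requests

resp = requests.post(
    "http://localhost:8000/api/v1/destinations/list",
    json={"workspaceId": "00000000-0000-0000-0000-000000000000"},  # placeholder
)
resp.raise_for_status()
for destination in resp.json()["destinations"]:
    print(destination["destinationId"], destination["name"])
```
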
- Up -
post /v1/destinations/search
-
Search destinations (searchDestinations)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
DestinationSearch DestinationSearch (required)
- -
Body Parameter
- -
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "destinations" : [ {
-    "connectionConfiguration" : {
-      "user" : "charles"
-    },
-    "destinationName" : "destinationName",
-    "name" : "name",
-    "destinationDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "destinationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-  }, {
-    "connectionConfiguration" : {
-      "user" : "charles"
-    },
-    "destinationName" : "destinationName",
-    "name" : "name",
-    "destinationDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "destinationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-  } ]
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - DestinationReadList -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-
- Up -
post /v1/destinations/update
-
Update a destination (updateDestination)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
DestinationUpdate DestinationUpdate (required)
- -
Body Parameter
- -
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "connectionConfiguration" : {
-    "user" : "charles"
-  },
-  "destinationName" : "destinationName",
-  "name" : "name",
-  "destinationDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "destinationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - DestinationRead -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-

DestinationDefinition

-
-
- Up -
post /v1/destination_definitions/create_custom
-
Creates a custom destinationDefinition for the given workspace (createCustomDestinationDefinition)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
CustomDestinationDefinitionCreate CustomDestinationDefinitionCreate (optional)
- -
Body Parameter
- -
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "resourceRequirements" : {
-    "default" : {
-      "cpu_limit" : "cpu_limit",
-      "memory_request" : "memory_request",
-      "memory_limit" : "memory_limit",
-      "cpu_request" : "cpu_request"
-    },
-    "jobSpecific" : [ {
-      "resourceRequirements" : {
-        "cpu_limit" : "cpu_limit",
-        "memory_request" : "memory_request",
-        "memory_limit" : "memory_limit",
-        "cpu_request" : "cpu_request"
-      }
-    }, {
-      "resourceRequirements" : {
-        "cpu_limit" : "cpu_limit",
-        "memory_request" : "memory_request",
-        "memory_limit" : "memory_limit",
-        "cpu_request" : "cpu_request"
-      }
-    } ]
-  },
-  "documentationUrl" : "https://openapi-generator.tech",
-  "dockerImageTag" : "dockerImageTag",
-  "releaseDate" : "2000-01-23",
-  "dockerRepository" : "dockerRepository",
-  "name" : "name",
-  "icon" : "icon",
-  "destinationDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - DestinationDefinitionRead -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-
- Up -
post /v1/destination_definitions/create
-
Creates a destinationsDefinition (createDestinationDefinition)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
DestinationDefinitionCreate DestinationDefinitionCreate (optional)
- -
Body Parameter
- -
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "resourceRequirements" : {
-    "default" : {
-      "cpu_limit" : "cpu_limit",
-      "memory_request" : "memory_request",
-      "memory_limit" : "memory_limit",
-      "cpu_request" : "cpu_request"
-    },
-    "jobSpecific" : [ {
-      "resourceRequirements" : {
-        "cpu_limit" : "cpu_limit",
-        "memory_request" : "memory_request",
-        "memory_limit" : "memory_limit",
-        "cpu_request" : "cpu_request"
-      }
-    }, {
-      "resourceRequirements" : {
-        "cpu_limit" : "cpu_limit",
-        "memory_request" : "memory_request",
-        "memory_limit" : "memory_limit",
-        "cpu_request" : "cpu_request"
-      }
-    } ]
-  },
-  "documentationUrl" : "https://openapi-generator.tech",
-  "dockerImageTag" : "dockerImageTag",
-  "releaseDate" : "2000-01-23",
-  "dockerRepository" : "dockerRepository",
-  "name" : "name",
-  "icon" : "icon",
-  "destinationDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - DestinationDefinitionRead -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-
- Up -
post /v1/destination_definitions/delete_custom
-
Delete a custom destination definition for the given workspace (deleteCustomDestinationDefinition)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
DestinationDefinitionIdWithWorkspaceId DestinationDefinitionIdWithWorkspaceId (required)
- -
Body Parameter
- -
- - - - - - - - -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

204

- The destination was deleted successfully. - -

404

- Object with given id was not found. - NotFoundKnownExceptionInfo -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-
- Up -
post /v1/destination_definitions/delete
-
Delete a destination definition (deleteDestinationDefinition)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
DestinationDefinitionIdRequestBody DestinationDefinitionIdRequestBody (required)
- -
Body Parameter
- -
- - - - - - - - -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

204

- The resource was deleted successfully. - -

404

- Object with given id was not found. - NotFoundKnownExceptionInfo -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-
- Up -
post /v1/destination_definitions/get
-
Get destinationDefinition (getDestinationDefinition)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
DestinationDefinitionIdRequestBody DestinationDefinitionIdRequestBody (required)
- -
Body Parameter
- -
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "resourceRequirements" : {
-    "default" : {
-      "cpu_limit" : "cpu_limit",
-      "memory_request" : "memory_request",
-      "memory_limit" : "memory_limit",
-      "cpu_request" : "cpu_request"
-    },
-    "jobSpecific" : [ {
-      "resourceRequirements" : {
-        "cpu_limit" : "cpu_limit",
-        "memory_request" : "memory_request",
-        "memory_limit" : "memory_limit",
-        "cpu_request" : "cpu_request"
-      }
-    }, {
-      "resourceRequirements" : {
-        "cpu_limit" : "cpu_limit",
-        "memory_request" : "memory_request",
-        "memory_limit" : "memory_limit",
-        "cpu_request" : "cpu_request"
-      }
-    } ]
-  },
-  "documentationUrl" : "https://openapi-generator.tech",
-  "dockerImageTag" : "dockerImageTag",
-  "releaseDate" : "2000-01-23",
-  "dockerRepository" : "dockerRepository",
-  "name" : "name",
-  "icon" : "icon",
-  "destinationDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - DestinationDefinitionRead -

404

- Object with given id was not found. - NotFoundKnownExceptionInfo -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-
- Up -
post /v1/destination_definitions/get_for_workspace
-
Get a destinationDefinition that is configured for the given workspace (getDestinationDefinitionForWorkspace)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
DestinationDefinitionIdWithWorkspaceId DestinationDefinitionIdWithWorkspaceId (required)
- -
Body Parameter
- -
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "resourceRequirements" : {
-    "default" : {
-      "cpu_limit" : "cpu_limit",
-      "memory_request" : "memory_request",
-      "memory_limit" : "memory_limit",
-      "cpu_request" : "cpu_request"
-    },
-    "jobSpecific" : [ {
-      "resourceRequirements" : {
-        "cpu_limit" : "cpu_limit",
-        "memory_request" : "memory_request",
-        "memory_limit" : "memory_limit",
-        "cpu_request" : "cpu_request"
-      }
-    }, {
-      "resourceRequirements" : {
-        "cpu_limit" : "cpu_limit",
-        "memory_request" : "memory_request",
-        "memory_limit" : "memory_limit",
-        "cpu_request" : "cpu_request"
-      }
-    } ]
-  },
-  "documentationUrl" : "https://openapi-generator.tech",
-  "dockerImageTag" : "dockerImageTag",
-  "releaseDate" : "2000-01-23",
-  "dockerRepository" : "dockerRepository",
-  "name" : "name",
-  "icon" : "icon",
-  "destinationDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - DestinationDefinitionRead -

404

- Object with given id was not found. - NotFoundKnownExceptionInfo -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-
- Up -
post /v1/destination_definitions/grant_definition
-
grant a private, non-custom destinationDefinition to a given workspace (grantDestinationDefinitionToWorkspace)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
DestinationDefinitionIdWithWorkspaceId DestinationDefinitionIdWithWorkspaceId (required)
- -
Body Parameter
- -
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "destinationDefinition" : {
-    "resourceRequirements" : {
-      "default" : {
-        "cpu_limit" : "cpu_limit",
-        "memory_request" : "memory_request",
-        "memory_limit" : "memory_limit",
-        "cpu_request" : "cpu_request"
-      },
-      "jobSpecific" : [ {
-        "resourceRequirements" : {
-          "cpu_limit" : "cpu_limit",
-          "memory_request" : "memory_request",
-          "memory_limit" : "memory_limit",
-          "cpu_request" : "cpu_request"
-        }
-      }, {
-        "resourceRequirements" : {
-          "cpu_limit" : "cpu_limit",
-          "memory_request" : "memory_request",
-          "memory_limit" : "memory_limit",
-          "cpu_request" : "cpu_request"
-        }
-      } ]
-    },
-    "documentationUrl" : "https://openapi-generator.tech",
-    "dockerImageTag" : "dockerImageTag",
-    "releaseDate" : "2000-01-23",
-    "dockerRepository" : "dockerRepository",
-    "name" : "name",
-    "icon" : "icon",
-    "destinationDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-  },
-  "granted" : true
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - PrivateDestinationDefinitionRead -

404

- Object with given id was not found. - NotFoundKnownExceptionInfo -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-
- Up -
post /v1/destination_definitions/list
-
List all the destinationDefinitions the current Airbyte deployment is configured to use (listDestinationDefinitions)
-
- - - - - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "destinationDefinitions" : [ {
-    "resourceRequirements" : {
-      "default" : {
-        "cpu_limit" : "cpu_limit",
-        "memory_request" : "memory_request",
-        "memory_limit" : "memory_limit",
-        "cpu_request" : "cpu_request"
-      },
-      "jobSpecific" : [ {
-        "resourceRequirements" : {
-          "cpu_limit" : "cpu_limit",
-          "memory_request" : "memory_request",
-          "memory_limit" : "memory_limit",
-          "cpu_request" : "cpu_request"
-        }
-      }, {
-        "resourceRequirements" : {
-          "cpu_limit" : "cpu_limit",
-          "memory_request" : "memory_request",
-          "memory_limit" : "memory_limit",
-          "cpu_request" : "cpu_request"
-        }
-      } ]
-    },
-    "documentationUrl" : "https://openapi-generator.tech",
-    "dockerImageTag" : "dockerImageTag",
-    "releaseDate" : "2000-01-23",
-    "dockerRepository" : "dockerRepository",
-    "name" : "name",
-    "icon" : "icon",
-    "destinationDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-  }, {
-    "resourceRequirements" : {
-      "default" : {
-        "cpu_limit" : "cpu_limit",
-        "memory_request" : "memory_request",
-        "memory_limit" : "memory_limit",
-        "cpu_request" : "cpu_request"
-      },
-      "jobSpecific" : [ {
-        "resourceRequirements" : {
-          "cpu_limit" : "cpu_limit",
-          "memory_request" : "memory_request",
-          "memory_limit" : "memory_limit",
-          "cpu_request" : "cpu_request"
-        }
-      }, {
-        "resourceRequirements" : {
-          "cpu_limit" : "cpu_limit",
-          "memory_request" : "memory_request",
-          "memory_limit" : "memory_limit",
-          "cpu_request" : "cpu_request"
-        }
-      } ]
-    },
-    "documentationUrl" : "https://openapi-generator.tech",
-    "dockerImageTag" : "dockerImageTag",
-    "releaseDate" : "2000-01-23",
-    "dockerRepository" : "dockerRepository",
-    "name" : "name",
-    "icon" : "icon",
-    "destinationDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-  } ]
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - DestinationDefinitionReadList -
-
-
-
- Up -
post /v1/destination_definitions/list_for_workspace
-
List all the destinationDefinitions the given workspace is configured to use (listDestinationDefinitionsForWorkspace)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
WorkspaceIdRequestBody WorkspaceIdRequestBody (optional)
- -
Body Parameter
- -
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "destinationDefinitions" : [ {
-    "resourceRequirements" : {
-      "default" : {
-        "cpu_limit" : "cpu_limit",
-        "memory_request" : "memory_request",
-        "memory_limit" : "memory_limit",
-        "cpu_request" : "cpu_request"
-      },
-      "jobSpecific" : [ {
-        "resourceRequirements" : {
-          "cpu_limit" : "cpu_limit",
-          "memory_request" : "memory_request",
-          "memory_limit" : "memory_limit",
-          "cpu_request" : "cpu_request"
-        }
-      }, {
-        "resourceRequirements" : {
-          "cpu_limit" : "cpu_limit",
-          "memory_request" : "memory_request",
-          "memory_limit" : "memory_limit",
-          "cpu_request" : "cpu_request"
-        }
-      } ]
-    },
-    "documentationUrl" : "https://openapi-generator.tech",
-    "dockerImageTag" : "dockerImageTag",
-    "releaseDate" : "2000-01-23",
-    "dockerRepository" : "dockerRepository",
-    "name" : "name",
-    "icon" : "icon",
-    "destinationDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-  }, {
-    "resourceRequirements" : {
-      "default" : {
-        "cpu_limit" : "cpu_limit",
-        "memory_request" : "memory_request",
-        "memory_limit" : "memory_limit",
-        "cpu_request" : "cpu_request"
-      },
-      "jobSpecific" : [ {
-        "resourceRequirements" : {
-          "cpu_limit" : "cpu_limit",
-          "memory_request" : "memory_request",
-          "memory_limit" : "memory_limit",
-          "cpu_request" : "cpu_request"
-        }
-      }, {
-        "resourceRequirements" : {
-          "cpu_limit" : "cpu_limit",
-          "memory_request" : "memory_request",
-          "memory_limit" : "memory_limit",
-          "cpu_request" : "cpu_request"
-        }
-      } ]
-    },
-    "documentationUrl" : "https://openapi-generator.tech",
-    "dockerImageTag" : "dockerImageTag",
-    "releaseDate" : "2000-01-23",
-    "dockerRepository" : "dockerRepository",
-    "name" : "name",
-    "icon" : "icon",
-    "destinationDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-  } ]
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - DestinationDefinitionReadList -
-
-
-
- Up -
post /v1/destination_definitions/list_latest
-
List the latest destinationDefinitions Airbyte supports (listLatestDestinationDefinitions)
-
Guaranteed to retrieve the latest information on supported destinations.
- - - - - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "destinationDefinitions" : [ {
-    "resourceRequirements" : {
-      "default" : {
-        "cpu_limit" : "cpu_limit",
-        "memory_request" : "memory_request",
-        "memory_limit" : "memory_limit",
-        "cpu_request" : "cpu_request"
-      },
-      "jobSpecific" : [ {
-        "resourceRequirements" : {
-          "cpu_limit" : "cpu_limit",
-          "memory_request" : "memory_request",
-          "memory_limit" : "memory_limit",
-          "cpu_request" : "cpu_request"
-        }
-      }, {
-        "resourceRequirements" : {
-          "cpu_limit" : "cpu_limit",
-          "memory_request" : "memory_request",
-          "memory_limit" : "memory_limit",
-          "cpu_request" : "cpu_request"
-        }
-      } ]
-    },
-    "documentationUrl" : "https://openapi-generator.tech",
-    "dockerImageTag" : "dockerImageTag",
-    "releaseDate" : "2000-01-23",
-    "dockerRepository" : "dockerRepository",
-    "name" : "name",
-    "icon" : "icon",
-    "destinationDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-  }, {
-    "resourceRequirements" : {
-      "default" : {
-        "cpu_limit" : "cpu_limit",
-        "memory_request" : "memory_request",
-        "memory_limit" : "memory_limit",
-        "cpu_request" : "cpu_request"
-      },
-      "jobSpecific" : [ {
-        "resourceRequirements" : {
-          "cpu_limit" : "cpu_limit",
-          "memory_request" : "memory_request",
-          "memory_limit" : "memory_limit",
-          "cpu_request" : "cpu_request"
-        }
-      }, {
-        "resourceRequirements" : {
-          "cpu_limit" : "cpu_limit",
-          "memory_request" : "memory_request",
-          "memory_limit" : "memory_limit",
-          "cpu_request" : "cpu_request"
-        }
-      } ]
-    },
-    "documentationUrl" : "https://openapi-generator.tech",
-    "dockerImageTag" : "dockerImageTag",
-    "releaseDate" : "2000-01-23",
-    "dockerRepository" : "dockerRepository",
-    "name" : "name",
-    "icon" : "icon",
-    "destinationDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-  } ]
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - DestinationDefinitionReadList -
-
-
-
- Up -
post /v1/destination_definitions/list_private
-
List all private, non-custom destinationDefinitions, and for each indicate whether the given workspace has a grant for using the definition. Used by admins to view and modify a given workspace's grants. (listPrivateDestinationDefinitions)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
WorkspaceIdRequestBody WorkspaceIdRequestBody (optional)
- -
Body Parameter
- -
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "destinationDefinitions" : [ {
-    "destinationDefinition" : {
-      "resourceRequirements" : {
-        "default" : {
-          "cpu_limit" : "cpu_limit",
-          "memory_request" : "memory_request",
-          "memory_limit" : "memory_limit",
-          "cpu_request" : "cpu_request"
-        },
-        "jobSpecific" : [ {
-          "resourceRequirements" : {
-            "cpu_limit" : "cpu_limit",
-            "memory_request" : "memory_request",
-            "memory_limit" : "memory_limit",
-            "cpu_request" : "cpu_request"
-          }
-        }, {
-          "resourceRequirements" : {
-            "cpu_limit" : "cpu_limit",
-            "memory_request" : "memory_request",
-            "memory_limit" : "memory_limit",
-            "cpu_request" : "cpu_request"
-          }
-        } ]
-      },
-      "documentationUrl" : "https://openapi-generator.tech",
-      "dockerImageTag" : "dockerImageTag",
-      "releaseDate" : "2000-01-23",
-      "dockerRepository" : "dockerRepository",
-      "name" : "name",
-      "icon" : "icon",
-      "destinationDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-    },
-    "granted" : true
-  }, {
-    "destinationDefinition" : {
-      "resourceRequirements" : {
-        "default" : {
-          "cpu_limit" : "cpu_limit",
-          "memory_request" : "memory_request",
-          "memory_limit" : "memory_limit",
-          "cpu_request" : "cpu_request"
-        },
-        "jobSpecific" : [ {
-          "resourceRequirements" : {
-            "cpu_limit" : "cpu_limit",
-            "memory_request" : "memory_request",
-            "memory_limit" : "memory_limit",
-            "cpu_request" : "cpu_request"
-          }
-        }, {
-          "resourceRequirements" : {
-            "cpu_limit" : "cpu_limit",
-            "memory_request" : "memory_request",
-            "memory_limit" : "memory_limit",
-            "cpu_request" : "cpu_request"
-          }
-        } ]
-      },
-      "documentationUrl" : "https://openapi-generator.tech",
-      "dockerImageTag" : "dockerImageTag",
-      "releaseDate" : "2000-01-23",
-      "dockerRepository" : "dockerRepository",
-      "name" : "name",
-      "icon" : "icon",
-      "destinationDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-    },
-    "granted" : true
-  } ]
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - PrivateDestinationDefinitionReadList -
-
-
-
- Up -
post /v1/destination_definitions/revoke_definition
-
revoke a grant to a private, non-custom destinationDefinition from a given workspace (revokeDestinationDefinitionFromWorkspace)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
DestinationDefinitionIdWithWorkspaceId DestinationDefinitionIdWithWorkspaceId (required)
- -
Body Parameter
- -
- - - - - - - - -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

204

- The resource was deleted successfully. - -

404

- Object with given id was not found. - NotFoundKnownExceptionInfo -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-
- Up -
post /v1/destination_definitions/update_custom
-
Update a custom destinationDefinition for the given workspace (updateCustomDestinationDefinition)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
CustomDestinationDefinitionUpdate CustomDestinationDefinitionUpdate (optional)
- -
Body Parameter
- -
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "resourceRequirements" : {
-    "default" : {
-      "cpu_limit" : "cpu_limit",
-      "memory_request" : "memory_request",
-      "memory_limit" : "memory_limit",
-      "cpu_request" : "cpu_request"
-    },
-    "jobSpecific" : [ {
-      "resourceRequirements" : {
-        "cpu_limit" : "cpu_limit",
-        "memory_request" : "memory_request",
-        "memory_limit" : "memory_limit",
-        "cpu_request" : "cpu_request"
-      }
-    }, {
-      "resourceRequirements" : {
-        "cpu_limit" : "cpu_limit",
-        "memory_request" : "memory_request",
-        "memory_limit" : "memory_limit",
-        "cpu_request" : "cpu_request"
-      }
-    } ]
-  },
-  "documentationUrl" : "https://openapi-generator.tech",
-  "dockerImageTag" : "dockerImageTag",
-  "releaseDate" : "2000-01-23",
-  "dockerRepository" : "dockerRepository",
-  "name" : "name",
-  "icon" : "icon",
-  "destinationDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - DestinationDefinitionRead -

404

- Object with given id was not found. - NotFoundKnownExceptionInfo -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-
- Up -
post /v1/destination_definitions/update
-
Update destinationDefinition (updateDestinationDefinition)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
DestinationDefinitionUpdate DestinationDefinitionUpdate (required)
- -
Body Parameter
- -
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "resourceRequirements" : {
-    "default" : {
-      "cpu_limit" : "cpu_limit",
-      "memory_request" : "memory_request",
-      "memory_limit" : "memory_limit",
-      "cpu_request" : "cpu_request"
-    },
-    "jobSpecific" : [ {
-      "resourceRequirements" : {
-        "cpu_limit" : "cpu_limit",
-        "memory_request" : "memory_request",
-        "memory_limit" : "memory_limit",
-        "cpu_request" : "cpu_request"
-      }
-    }, {
-      "resourceRequirements" : {
-        "cpu_limit" : "cpu_limit",
-        "memory_request" : "memory_request",
-        "memory_limit" : "memory_limit",
-        "cpu_request" : "cpu_request"
-      }
-    } ]
-  },
-  "documentationUrl" : "https://openapi-generator.tech",
-  "dockerImageTag" : "dockerImageTag",
-  "releaseDate" : "2000-01-23",
-  "dockerRepository" : "dockerRepository",
-  "name" : "name",
-  "icon" : "icon",
-  "destinationDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - DestinationDefinitionRead -

404

- Object with given id was not found. - NotFoundKnownExceptionInfo -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-

DestinationDefinitionSpecification

post /v1/destination_definition_specifications/get

Get specification for a destinationDefinition (getDestinationDefinitionSpecification)

Consumes

This API call consumes the following media types via the Content-Type request header:

  • application/json

Request body

DestinationDefinitionIdWithWorkspaceId DestinationDefinitionIdWithWorkspaceId (required): Body Parameter

Return type

DestinationDefinitionSpecificationRead

Example data

Content-Type: application/json
{
-  "documentationUrl" : "documentationUrl",
-  "supportsNormalization" : true,
-  "connectionSpecification" : {
-    "user" : {
-      "type" : "string"
-    }
-  },
-  "supportedDestinationSyncModes" : [ null, null ],
-  "supportsDbt" : true,
-  "destinationDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "advancedAuth" : {
-    "predicateValue" : "predicateValue",
-    "oauthConfigSpecification" : { },
-    "predicateKey" : [ "predicateKey", "predicateKey" ],
-    "authFlowType" : "oauth2.0"
-  },
-  "authSpecification" : {
-    "auth_type" : "oauth2.0",
-    "oauth2Specification" : {
-      "oauthFlowOutputParameters" : [ [ "oauthFlowOutputParameters", "oauthFlowOutputParameters" ], [ "oauthFlowOutputParameters", "oauthFlowOutputParameters" ] ],
-      "rootObject" : [ "path", 1 ],
-      "oauthFlowInitParameters" : [ [ "oauthFlowInitParameters", "oauthFlowInitParameters" ], [ "oauthFlowInitParameters", "oauthFlowInitParameters" ] ]
-    }
-  },
-  "jobInfo" : {
-    "createdAt" : 0,
-    "configId" : "configId",
-    "endedAt" : 6,
-    "id" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "logs" : {
-      "logLines" : [ "logLines", "logLines" ]
-    },
-    "succeeded" : true
-  }
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - DestinationDefinitionSpecificationRead -

404

- Object with given id was not found. - NotFoundKnownExceptionInfo -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
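A minimal sketch of fetching a connector's connection specification, assuming a placeholder base URL and placeholder IDs:

```python
import requests

API = "http://localhost:8000/api"  # placeholder; adjust to your deployment

resp = requests.post(
    f"{API}/v1/destination_definition_specifications/get",
    json={
        # Both IDs are placeholders.
        "destinationDefinitionId": "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
        "workspaceId": "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
    },
)
resp.raise_for_status()
spec = resp.json()["connectionSpecification"]  # JSON Schema for the connector config
print(sorted(spec.get("properties", {})))
```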

Health

-
-
- Up -
get /v1/health
-
Health Check (getHealthCheck)
-
Return type

HealthCheckRead

Example data

Content-Type: application/json

{
  "available" : true
}

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

200

Successful operation - HealthCheckRead
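A quick way to probe the endpoint, assuming the server is reachable at a placeholder base URL; the health check takes no request body:

```python
import requests

API = "http://localhost:8000/api"  # placeholder; adjust to your deployment

resp = requests.get(f"{API}/v1/health")
resp.raise_for_status()
print(resp.json())  # e.g. {"available": true}
```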

Jobs

-
-
- Up -
post /v1/jobs/cancel
-
Cancels a job (cancelJob)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
JobIdRequestBody JobIdRequestBody (required)
- -
Body Parameter
- -
- - - - -

Return type

-
- JobInfoRead - -
- - - -

Example data

-
Content-Type: application/json
-
{
-  "job" : {
-    "createdAt" : 6,
-    "configId" : "configId",
-    "id" : 0,
-    "resetConfig" : {
-      "streamsToReset" : [ {
-        "name" : "name",
-        "namespace" : "namespace"
-      }, {
-        "name" : "name",
-        "namespace" : "namespace"
-      } ]
-    },
-    "updatedAt" : 1
-  },
-  "attempts" : [ {
-    "attempt" : {
-      "totalStats" : {
-        "stateMessagesEmitted" : 7,
-        "recordsCommitted" : 1,
-        "bytesEmitted" : 4,
-        "recordsEmitted" : 2
-      },
-      "failureSummary" : {
-        "failures" : [ {
-          "retryable" : true,
-          "stacktrace" : "stacktrace",
-          "internalMessage" : "internalMessage",
-          "externalMessage" : "externalMessage",
-          "timestamp" : 1
-        }, {
-          "retryable" : true,
-          "stacktrace" : "stacktrace",
-          "internalMessage" : "internalMessage",
-          "externalMessage" : "externalMessage",
-          "timestamp" : 1
-        } ],
-        "partialSuccess" : true
-      },
-      "createdAt" : 5,
-      "bytesSynced" : 9,
-      "endedAt" : 7,
-      "streamStats" : [ {
-        "stats" : {
-          "stateMessagesEmitted" : 7,
-          "recordsCommitted" : 1,
-          "bytesEmitted" : 4,
-          "recordsEmitted" : 2
-        },
-        "streamName" : "streamName"
-      }, {
-        "stats" : {
-          "stateMessagesEmitted" : 7,
-          "recordsCommitted" : 1,
-          "bytesEmitted" : 4,
-          "recordsEmitted" : 2
-        },
-        "streamName" : "streamName"
-      } ],
-      "id" : 5,
-      "recordsSynced" : 3,
-      "updatedAt" : 2
-    },
-    "logs" : {
-      "logLines" : [ "logLines", "logLines" ]
-    }
-  }, {
-    "attempt" : {
-      "totalStats" : {
-        "stateMessagesEmitted" : 7,
-        "recordsCommitted" : 1,
-        "bytesEmitted" : 4,
-        "recordsEmitted" : 2
-      },
-      "failureSummary" : {
-        "failures" : [ {
-          "retryable" : true,
-          "stacktrace" : "stacktrace",
-          "internalMessage" : "internalMessage",
-          "externalMessage" : "externalMessage",
-          "timestamp" : 1
-        }, {
-          "retryable" : true,
-          "stacktrace" : "stacktrace",
-          "internalMessage" : "internalMessage",
-          "externalMessage" : "externalMessage",
-          "timestamp" : 1
-        } ],
-        "partialSuccess" : true
-      },
-      "createdAt" : 5,
-      "bytesSynced" : 9,
-      "endedAt" : 7,
-      "streamStats" : [ {
-        "stats" : {
-          "stateMessagesEmitted" : 7,
-          "recordsCommitted" : 1,
-          "bytesEmitted" : 4,
-          "recordsEmitted" : 2
-        },
-        "streamName" : "streamName"
-      }, {
-        "stats" : {
-          "stateMessagesEmitted" : 7,
-          "recordsCommitted" : 1,
-          "bytesEmitted" : 4,
-          "recordsEmitted" : 2
-        },
-        "streamName" : "streamName"
-      } ],
-      "id" : 5,
-      "recordsSynced" : 3,
-      "updatedAt" : 2
-    },
-    "logs" : {
-      "logLines" : [ "logLines", "logLines" ]
-    }
-  } ]
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - JobInfoRead -

404

- Object with given id was not found. - NotFoundKnownExceptionInfo -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-
- Up -
post /v1/jobs/get_debug_info
-
Gets all information needed to debug this job (getJobDebugInfo)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
JobIdRequestBody JobIdRequestBody (required)
- -
Body Parameter
- -
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "job" : {
-    "configId" : "configId",
-    "sourceDefinition" : {
-      "resourceRequirements" : {
-        "default" : {
-          "cpu_limit" : "cpu_limit",
-          "memory_request" : "memory_request",
-          "memory_limit" : "memory_limit",
-          "cpu_request" : "cpu_request"
-        },
-        "jobSpecific" : [ {
-          "resourceRequirements" : {
-            "cpu_limit" : "cpu_limit",
-            "memory_request" : "memory_request",
-            "memory_limit" : "memory_limit",
-            "cpu_request" : "cpu_request"
-          }
-        }, {
-          "resourceRequirements" : {
-            "cpu_limit" : "cpu_limit",
-            "memory_request" : "memory_request",
-            "memory_limit" : "memory_limit",
-            "cpu_request" : "cpu_request"
-          }
-        } ]
-      },
-      "documentationUrl" : "https://openapi-generator.tech",
-      "dockerImageTag" : "dockerImageTag",
-      "releaseDate" : "2000-01-23",
-      "dockerRepository" : "dockerRepository",
-      "name" : "name",
-      "icon" : "icon",
-      "sourceDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-    },
-    "airbyteVersion" : "airbyteVersion",
-    "id" : 0,
-    "destinationDefinition" : {
-      "resourceRequirements" : {
-        "default" : {
-          "cpu_limit" : "cpu_limit",
-          "memory_request" : "memory_request",
-          "memory_limit" : "memory_limit",
-          "cpu_request" : "cpu_request"
-        },
-        "jobSpecific" : [ {
-          "resourceRequirements" : {
-            "cpu_limit" : "cpu_limit",
-            "memory_request" : "memory_request",
-            "memory_limit" : "memory_limit",
-            "cpu_request" : "cpu_request"
-          }
-        }, {
-          "resourceRequirements" : {
-            "cpu_limit" : "cpu_limit",
-            "memory_request" : "memory_request",
-            "memory_limit" : "memory_limit",
-            "cpu_request" : "cpu_request"
-          }
-        } ]
-      },
-      "documentationUrl" : "https://openapi-generator.tech",
-      "dockerImageTag" : "dockerImageTag",
-      "releaseDate" : "2000-01-23",
-      "dockerRepository" : "dockerRepository",
-      "name" : "name",
-      "icon" : "icon",
-      "destinationDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-    }
-  },
-  "attempts" : [ {
-    "attempt" : {
-      "totalStats" : {
-        "stateMessagesEmitted" : 7,
-        "recordsCommitted" : 1,
-        "bytesEmitted" : 4,
-        "recordsEmitted" : 2
-      },
-      "failureSummary" : {
-        "failures" : [ {
-          "retryable" : true,
-          "stacktrace" : "stacktrace",
-          "internalMessage" : "internalMessage",
-          "externalMessage" : "externalMessage",
-          "timestamp" : 1
-        }, {
-          "retryable" : true,
-          "stacktrace" : "stacktrace",
-          "internalMessage" : "internalMessage",
-          "externalMessage" : "externalMessage",
-          "timestamp" : 1
-        } ],
-        "partialSuccess" : true
-      },
-      "createdAt" : 5,
-      "bytesSynced" : 9,
-      "endedAt" : 7,
-      "streamStats" : [ {
-        "stats" : {
-          "stateMessagesEmitted" : 7,
-          "recordsCommitted" : 1,
-          "bytesEmitted" : 4,
-          "recordsEmitted" : 2
-        },
-        "streamName" : "streamName"
-      }, {
-        "stats" : {
-          "stateMessagesEmitted" : 7,
-          "recordsCommitted" : 1,
-          "bytesEmitted" : 4,
-          "recordsEmitted" : 2
-        },
-        "streamName" : "streamName"
-      } ],
-      "id" : 5,
-      "recordsSynced" : 3,
-      "updatedAt" : 2
-    },
-    "logs" : {
-      "logLines" : [ "logLines", "logLines" ]
-    }
-  }, {
-    "attempt" : {
-      "totalStats" : {
-        "stateMessagesEmitted" : 7,
-        "recordsCommitted" : 1,
-        "bytesEmitted" : 4,
-        "recordsEmitted" : 2
-      },
-      "failureSummary" : {
-        "failures" : [ {
-          "retryable" : true,
-          "stacktrace" : "stacktrace",
-          "internalMessage" : "internalMessage",
-          "externalMessage" : "externalMessage",
-          "timestamp" : 1
-        }, {
-          "retryable" : true,
-          "stacktrace" : "stacktrace",
-          "internalMessage" : "internalMessage",
-          "externalMessage" : "externalMessage",
-          "timestamp" : 1
-        } ],
-        "partialSuccess" : true
-      },
-      "createdAt" : 5,
-      "bytesSynced" : 9,
-      "endedAt" : 7,
-      "streamStats" : [ {
-        "stats" : {
-          "stateMessagesEmitted" : 7,
-          "recordsCommitted" : 1,
-          "bytesEmitted" : 4,
-          "recordsEmitted" : 2
-        },
-        "streamName" : "streamName"
-      }, {
-        "stats" : {
-          "stateMessagesEmitted" : 7,
-          "recordsCommitted" : 1,
-          "bytesEmitted" : 4,
-          "recordsEmitted" : 2
-        },
-        "streamName" : "streamName"
-      } ],
-      "id" : 5,
-      "recordsSynced" : 3,
-      "updatedAt" : 2
-    },
-    "logs" : {
-      "logLines" : [ "logLines", "logLines" ]
-    }
-  } ]
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - JobDebugInfoRead -

404

- Object with given id was not found. - NotFoundKnownExceptionInfo -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-
- Up -
post /v1/jobs/get
-
Get information about a job (getJobInfo)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
JobIdRequestBody JobIdRequestBody (required)
- -
Body Parameter
- -
- - - - -

Return type

-
- JobInfoRead - -
- - - -

Example data

-
Content-Type: application/json
-
{
-  "job" : {
-    "createdAt" : 6,
-    "configId" : "configId",
-    "id" : 0,
-    "resetConfig" : {
-      "streamsToReset" : [ {
-        "name" : "name",
-        "namespace" : "namespace"
-      }, {
-        "name" : "name",
-        "namespace" : "namespace"
-      } ]
-    },
-    "updatedAt" : 1
-  },
-  "attempts" : [ {
-    "attempt" : {
-      "totalStats" : {
-        "stateMessagesEmitted" : 7,
-        "recordsCommitted" : 1,
-        "bytesEmitted" : 4,
-        "recordsEmitted" : 2
-      },
-      "failureSummary" : {
-        "failures" : [ {
-          "retryable" : true,
-          "stacktrace" : "stacktrace",
-          "internalMessage" : "internalMessage",
-          "externalMessage" : "externalMessage",
-          "timestamp" : 1
-        }, {
-          "retryable" : true,
-          "stacktrace" : "stacktrace",
-          "internalMessage" : "internalMessage",
-          "externalMessage" : "externalMessage",
-          "timestamp" : 1
-        } ],
-        "partialSuccess" : true
-      },
-      "createdAt" : 5,
-      "bytesSynced" : 9,
-      "endedAt" : 7,
-      "streamStats" : [ {
-        "stats" : {
-          "stateMessagesEmitted" : 7,
-          "recordsCommitted" : 1,
-          "bytesEmitted" : 4,
-          "recordsEmitted" : 2
-        },
-        "streamName" : "streamName"
-      }, {
-        "stats" : {
-          "stateMessagesEmitted" : 7,
-          "recordsCommitted" : 1,
-          "bytesEmitted" : 4,
-          "recordsEmitted" : 2
-        },
-        "streamName" : "streamName"
-      } ],
-      "id" : 5,
-      "recordsSynced" : 3,
-      "updatedAt" : 2
-    },
-    "logs" : {
-      "logLines" : [ "logLines", "logLines" ]
-    }
-  }, {
-    "attempt" : {
-      "totalStats" : {
-        "stateMessagesEmitted" : 7,
-        "recordsCommitted" : 1,
-        "bytesEmitted" : 4,
-        "recordsEmitted" : 2
-      },
-      "failureSummary" : {
-        "failures" : [ {
-          "retryable" : true,
-          "stacktrace" : "stacktrace",
-          "internalMessage" : "internalMessage",
-          "externalMessage" : "externalMessage",
-          "timestamp" : 1
-        }, {
-          "retryable" : true,
-          "stacktrace" : "stacktrace",
-          "internalMessage" : "internalMessage",
-          "externalMessage" : "externalMessage",
-          "timestamp" : 1
-        } ],
-        "partialSuccess" : true
-      },
-      "createdAt" : 5,
-      "bytesSynced" : 9,
-      "endedAt" : 7,
-      "streamStats" : [ {
-        "stats" : {
-          "stateMessagesEmitted" : 7,
-          "recordsCommitted" : 1,
-          "bytesEmitted" : 4,
-          "recordsEmitted" : 2
-        },
-        "streamName" : "streamName"
-      }, {
-        "stats" : {
-          "stateMessagesEmitted" : 7,
-          "recordsCommitted" : 1,
-          "bytesEmitted" : 4,
-          "recordsEmitted" : 2
-        },
-        "streamName" : "streamName"
-      } ],
-      "id" : 5,
-      "recordsSynced" : 3,
-      "updatedAt" : 2
-    },
-    "logs" : {
-      "logLines" : [ "logLines", "logLines" ]
-    }
-  } ]
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - JobInfoRead -

404

- Object with given id was not found. - NotFoundKnownExceptionInfo -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-
- Up -
post /v1/jobs/list
-
Returns recent jobs for a connection. Jobs are returned in descending order by createdAt. (listJobsFor)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
JobListRequestBody JobListRequestBody (required)
- -
Body Parameter
- -
- - - - -

Return type

-
- JobReadList - -
- - - -

Example data

-
Content-Type: application/json
-
{
-  "jobs" : [ {
-    "job" : {
-      "createdAt" : 6,
-      "configId" : "configId",
-      "id" : 0,
-      "resetConfig" : {
-        "streamsToReset" : [ {
-          "name" : "name",
-          "namespace" : "namespace"
-        }, {
-          "name" : "name",
-          "namespace" : "namespace"
-        } ]
-      },
-      "updatedAt" : 1
-    },
-    "attempts" : [ {
-      "totalStats" : {
-        "stateMessagesEmitted" : 7,
-        "recordsCommitted" : 1,
-        "bytesEmitted" : 4,
-        "recordsEmitted" : 2
-      },
-      "failureSummary" : {
-        "failures" : [ {
-          "retryable" : true,
-          "stacktrace" : "stacktrace",
-          "internalMessage" : "internalMessage",
-          "externalMessage" : "externalMessage",
-          "timestamp" : 1
-        }, {
-          "retryable" : true,
-          "stacktrace" : "stacktrace",
-          "internalMessage" : "internalMessage",
-          "externalMessage" : "externalMessage",
-          "timestamp" : 1
-        } ],
-        "partialSuccess" : true
-      },
-      "createdAt" : 5,
-      "bytesSynced" : 9,
-      "endedAt" : 7,
-      "streamStats" : [ {
-        "stats" : {
-          "stateMessagesEmitted" : 7,
-          "recordsCommitted" : 1,
-          "bytesEmitted" : 4,
-          "recordsEmitted" : 2
-        },
-        "streamName" : "streamName"
-      }, {
-        "stats" : {
-          "stateMessagesEmitted" : 7,
-          "recordsCommitted" : 1,
-          "bytesEmitted" : 4,
-          "recordsEmitted" : 2
-        },
-        "streamName" : "streamName"
-      } ],
-      "id" : 5,
-      "recordsSynced" : 3,
-      "updatedAt" : 2
-    }, {
-      "totalStats" : {
-        "stateMessagesEmitted" : 7,
-        "recordsCommitted" : 1,
-        "bytesEmitted" : 4,
-        "recordsEmitted" : 2
-      },
-      "failureSummary" : {
-        "failures" : [ {
-          "retryable" : true,
-          "stacktrace" : "stacktrace",
-          "internalMessage" : "internalMessage",
-          "externalMessage" : "externalMessage",
-          "timestamp" : 1
-        }, {
-          "retryable" : true,
-          "stacktrace" : "stacktrace",
-          "internalMessage" : "internalMessage",
-          "externalMessage" : "externalMessage",
-          "timestamp" : 1
-        } ],
-        "partialSuccess" : true
-      },
-      "createdAt" : 5,
-      "bytesSynced" : 9,
-      "endedAt" : 7,
-      "streamStats" : [ {
-        "stats" : {
-          "stateMessagesEmitted" : 7,
-          "recordsCommitted" : 1,
-          "bytesEmitted" : 4,
-          "recordsEmitted" : 2
-        },
-        "streamName" : "streamName"
-      }, {
-        "stats" : {
-          "stateMessagesEmitted" : 7,
-          "recordsCommitted" : 1,
-          "bytesEmitted" : 4,
-          "recordsEmitted" : 2
-        },
-        "streamName" : "streamName"
-      } ],
-      "id" : 5,
-      "recordsSynced" : 3,
-      "updatedAt" : 2
-    } ]
-  }, {
-    "job" : {
-      "createdAt" : 6,
-      "configId" : "configId",
-      "id" : 0,
-      "resetConfig" : {
-        "streamsToReset" : [ {
-          "name" : "name",
-          "namespace" : "namespace"
-        }, {
-          "name" : "name",
-          "namespace" : "namespace"
-        } ]
-      },
-      "updatedAt" : 1
-    },
-    "attempts" : [ {
-      "totalStats" : {
-        "stateMessagesEmitted" : 7,
-        "recordsCommitted" : 1,
-        "bytesEmitted" : 4,
-        "recordsEmitted" : 2
-      },
-      "failureSummary" : {
-        "failures" : [ {
-          "retryable" : true,
-          "stacktrace" : "stacktrace",
-          "internalMessage" : "internalMessage",
-          "externalMessage" : "externalMessage",
-          "timestamp" : 1
-        }, {
-          "retryable" : true,
-          "stacktrace" : "stacktrace",
-          "internalMessage" : "internalMessage",
-          "externalMessage" : "externalMessage",
-          "timestamp" : 1
-        } ],
-        "partialSuccess" : true
-      },
-      "createdAt" : 5,
-      "bytesSynced" : 9,
-      "endedAt" : 7,
-      "streamStats" : [ {
-        "stats" : {
-          "stateMessagesEmitted" : 7,
-          "recordsCommitted" : 1,
-          "bytesEmitted" : 4,
-          "recordsEmitted" : 2
-        },
-        "streamName" : "streamName"
-      }, {
-        "stats" : {
-          "stateMessagesEmitted" : 7,
-          "recordsCommitted" : 1,
-          "bytesEmitted" : 4,
-          "recordsEmitted" : 2
-        },
-        "streamName" : "streamName"
-      } ],
-      "id" : 5,
-      "recordsSynced" : 3,
-      "updatedAt" : 2
-    }, {
-      "totalStats" : {
-        "stateMessagesEmitted" : 7,
-        "recordsCommitted" : 1,
-        "bytesEmitted" : 4,
-        "recordsEmitted" : 2
-      },
-      "failureSummary" : {
-        "failures" : [ {
-          "retryable" : true,
-          "stacktrace" : "stacktrace",
-          "internalMessage" : "internalMessage",
-          "externalMessage" : "externalMessage",
-          "timestamp" : 1
-        }, {
-          "retryable" : true,
-          "stacktrace" : "stacktrace",
-          "internalMessage" : "internalMessage",
-          "externalMessage" : "externalMessage",
-          "timestamp" : 1
-        } ],
-        "partialSuccess" : true
-      },
-      "createdAt" : 5,
-      "bytesSynced" : 9,
-      "endedAt" : 7,
-      "streamStats" : [ {
-        "stats" : {
-          "stateMessagesEmitted" : 7,
-          "recordsCommitted" : 1,
-          "bytesEmitted" : 4,
-          "recordsEmitted" : 2
-        },
-        "streamName" : "streamName"
-      }, {
-        "stats" : {
-          "stateMessagesEmitted" : 7,
-          "recordsCommitted" : 1,
-          "bytesEmitted" : 4,
-          "recordsEmitted" : 2
-        },
-        "streamName" : "streamName"
-      } ],
-      "id" : 5,
-      "recordsSynced" : 3,
-      "updatedAt" : 2
-    } ]
-  } ]
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - JobReadList -

404

- Object with given id was not found. - NotFoundKnownExceptionInfo -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
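The sketch below lists recent sync jobs for one connection. The base URL and connection ID are placeholders, and the `configTypes`/`configId` field names are an assumption about the JobListRequestBody schema:

```python
import requests

API = "http://localhost:8000/api"  # placeholder; adjust to your deployment

resp = requests.post(
    f"{API}/v1/jobs/list",
    json={
        "configTypes": ["sync"],  # assumed job-type filter
        "configId": "046b6c7f-0b8a-43b9-b35d-6489e6daee91",  # placeholder connection id
    },
)
resp.raise_for_status()
for entry in resp.json()["jobs"]:
    job = entry["job"]
    print(job["id"], job["createdAt"])  # newest jobs come first
```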

Logs

-
-
- Up -
post /v1/logs/get
-
Get logs (getLogs)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
LogsRequestBody LogsRequestBody (required)
- -
Body Parameter
- -
- - - - -

Return type

-
- - File -
- - - - -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • text/plain
  • -
  • application/json
  • -
- -

Responses

-

200

- Returns the log file - File -

404

- Object with given id was not found. - NotFoundKnownExceptionInfo -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
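A minimal sketch for pulling server logs, assuming a placeholder base URL and that LogsRequestBody takes a `logType` of either `server` or `scheduler` (an assumption):

```python
import requests

API = "http://localhost:8000/api"  # placeholder; adjust to your deployment

# "server" is assumed to be a valid logType; the response is a plain-text file.
resp = requests.post(f"{API}/v1/logs/get", json={"logType": "server"})
resp.raise_for_status()
print(resp.text[:2000])  # first part of the raw log file
```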

Notifications

-
-
- Up -
post /v1/notifications/try
-
Try sending a notification (tryNotificationConfig)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
Notification Notification (required)
- -
Body Parameter
- -
- - - - -

Return type

NotificationRead

Example data

Content-Type: application/json

{
  "message" : "message",
  "status" : "succeeded"
}

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

200

Successful operation - NotificationRead

404

Object with given id was not found. - NotFoundKnownExceptionInfo

422

Input failed validation - InvalidInputExceptionInfo
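A hedged example of testing a Slack notification configuration; the base URL, webhook, and the Notification field names (`notificationType`, `sendOnSuccess`, `sendOnFailure`, `slackConfiguration`) are assumptions to adapt to your setup:

```python
import requests

API = "http://localhost:8000/api"  # placeholder; adjust to your deployment

notification = {
    "notificationType": "slack",        # assumed field names for the Notification body
    "sendOnSuccess": False,
    "sendOnFailure": True,
    "slackConfiguration": {"webhook": "https://hooks.slack.com/services/PLACEHOLDER"},
}

resp = requests.post(f"{API}/v1/notifications/try", json=notification)
resp.raise_for_status()
print(resp.json())  # NotificationRead, e.g. {"status": "succeeded", "message": "..."}
```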

Oauth

-
-
- Up -
post /v1/destination_oauths/complete_oauth
-
Given a destination definition ID, generate an access/refresh token, etc. (completeDestinationOAuth)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
CompleteDestinationOAuthRequest CompleteDestinationOAuthRequest (required)
- -
Body Parameter
- -
- - - - -

Return type

-
- - map[String, Object] -
- - - - -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - map[String, Object] -

404

- Object with given id was not found. - NotFoundKnownExceptionInfo -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-
- Up -
post /v1/source_oauths/complete_oauth
-
Given a source definition ID, generate an access/refresh token, etc. (completeSourceOAuth)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
CompleteSourceOauthRequest CompleteSourceOauthRequest (required)
- -
Body Parameter
- -
- - - - -

Return type

-
- - map[String, Object] -
- - - - -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - map[String, Object] -

404

- Object with given id was not found. - NotFoundKnownExceptionInfo -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-
- Up -
post /v1/destination_oauths/get_consent_url
-
Given a destination connector definition ID, return the URL of the consent screen to which the user should be redirected. (getDestinationOAuthConsent)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
DestinationOauthConsentRequest DestinationOauthConsentRequest (required)
- -
Body Parameter
- -
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "consentUrl" : "consentUrl"
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - OAuthConsentRead -

404

- Object with given id was not found. - NotFoundKnownExceptionInfo -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-
- Up -
post /v1/source_oauths/get_consent_url
-
Given a source connector definition ID, return the URL of the consent screen to which the user should be redirected. (getSourceOAuthConsent)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
SourceOauthConsentRequest SourceOauthConsentRequest (required)
- -
Body Parameter
- -
- - - - -

Return type

OAuthConsentRead

Example data

Content-Type: application/json

{
  "consentUrl" : "consentUrl"
}

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

200

Successful operation - OAuthConsentRead

404

Object with given id was not found. - NotFoundKnownExceptionInfo

422

Input failed validation - InvalidInputExceptionInfo
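A sketch of the first step of the OAuth flow: request a consent URL and send the user there. The base URL, IDs, and `redirectUrl` value are placeholders:

```python
import requests

API = "http://localhost:8000/api"  # placeholder; adjust to your deployment

resp = requests.post(
    f"{API}/v1/source_oauths/get_consent_url",
    json={
        "sourceDefinitionId": "046b6c7f-0b8a-43b9-b35d-6489e6daee91",  # placeholder
        "workspaceId": "046b6c7f-0b8a-43b9-b35d-6489e6daee91",         # placeholder
        "redirectUrl": "https://example.com/oauth_callback",           # where the provider sends the user back
    },
)
resp.raise_for_status()
print(resp.json()["consentUrl"])  # open this URL in a browser to grant access
```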
- Up -
post /v1/destination_oauths/oauth_params/create
-
Sets instance-wide variables to be used for the OAuth flow when creating this destination. When set, these variables will be injected into a connector's configuration before any interaction with the connector image itself. This enables running OAuth flows with consistent variables, e.g. the company's Google Ads developer_token, client_id, and client_secret, without the user having to know about these variables. (setInstancewideDestinationOauthParams)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
SetInstancewideDestinationOauthParamsRequestBody SetInstancewideDestinationOauthParamsRequestBody (required)
- -
Body Parameter
- -
- - - - - - - - -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful - -

400

- Exception occurred; see message for details. - KnownExceptionInfo -

404

- Object with given id was not found. - NotFoundKnownExceptionInfo -
-
-
-
- Up -
post /v1/source_oauths/oauth_params/create
-
Sets instance-wide variables to be used for the OAuth flow when creating this source. When set, these variables will be injected into a connector's configuration before any interaction with the connector image itself. This enables running OAuth flows with consistent variables, e.g. the company's Google Ads developer_token, client_id, and client_secret, without the user having to know about these variables. (setInstancewideSourceOauthParams)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
SetInstancewideSourceOauthParamsRequestBody SetInstancewideSourceOauthParamsRequestBody (required)
- -
Body Parameter
- -
- - - - - - - - -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful - -

400

- Exception occurred; see message for details. - KnownExceptionInfo -

404

- Object with given id was not found. - NotFoundKnownExceptionInfo -
-
-

Openapi

-
-
- Up -
get /v1/openapi
-
Returns the OpenAPI specification (getOpenApiSpec)
-
Return type

File

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • text/plain

Responses

200

Returns the OpenAPI specification file - File

Operation

-
-
- Up -
post /v1/operations/check
-
Check if an operation to be created is valid (checkOperation)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
OperatorConfiguration OperatorConfiguration (required)
- -
Body Parameter
- -
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "message" : "message",
-  "status" : "succeeded"
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - CheckOperationRead -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-
- Up -
post /v1/operations/create
-
Create an operation to be applied as part of a connection pipeline (createOperation)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
OperationCreate OperationCreate (required)
- -
Body Parameter
- -
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "name" : "name",
-  "operationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "operatorConfiguration" : {
-    "normalization" : {
-      "option" : "basic"
-    },
-    "dbt" : {
-      "gitRepoBranch" : "gitRepoBranch",
-      "dockerImage" : "dockerImage",
-      "dbtArguments" : "dbtArguments",
-      "gitRepoUrl" : "gitRepoUrl"
-    }
-  },
-  "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - OperationRead -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-
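For example, creating a basic-normalization operation might look like the sketch below; the base URL and `workspaceId` are placeholders, and the `operatorType` field is an assumption based on the OperatorConfiguration schema:

```python
import requests

API = "http://localhost:8000/api"  # placeholder; adjust to your deployment

operation = {
    "workspaceId": "046b6c7f-0b8a-43b9-b35d-6489e6daee91",  # placeholder
    "name": "Basic normalization",
    "operatorConfiguration": {
        "operatorType": "normalization",  # assumed discriminator field
        "normalization": {"option": "basic"},
    },
}

resp = requests.post(f"{API}/v1/operations/create", json=operation)
resp.raise_for_status()
print(resp.json()["operationId"])  # id of the newly created operation
```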
- Up -
post /v1/operations/delete
-
Delete an operation (deleteOperation)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
OperationIdRequestBody OperationIdRequestBody (required)
- -
Body Parameter
- -
- - - - - - - - -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

204

- The resource was deleted successfully. - -

404

- Object with given id was not found. - NotFoundKnownExceptionInfo -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-
- Up -
post /v1/operations/get
-
Returns an operation (getOperation)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
OperationIdRequestBody OperationIdRequestBody (required)
- -
Body Parameter
- -
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "name" : "name",
-  "operationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "operatorConfiguration" : {
-    "normalization" : {
-      "option" : "basic"
-    },
-    "dbt" : {
-      "gitRepoBranch" : "gitRepoBranch",
-      "dockerImage" : "dockerImage",
-      "dbtArguments" : "dbtArguments",
-      "gitRepoUrl" : "gitRepoUrl"
-    }
-  },
-  "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - OperationRead -

404

- Object with given id was not found. - NotFoundKnownExceptionInfo -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-
- Up -
post /v1/operations/list
-
Returns all operations for a connection. (listOperationsForConnection)
-
List operations for connection.
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
ConnectionIdRequestBody ConnectionIdRequestBody (required)
- -
Body Parameter
- -
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "operations" : [ {
-    "name" : "name",
-    "operationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "operatorConfiguration" : {
-      "normalization" : {
-        "option" : "basic"
-      },
-      "dbt" : {
-        "gitRepoBranch" : "gitRepoBranch",
-        "dockerImage" : "dockerImage",
-        "dbtArguments" : "dbtArguments",
-        "gitRepoUrl" : "gitRepoUrl"
-      }
-    },
-    "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-  }, {
-    "name" : "name",
-    "operationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "operatorConfiguration" : {
-      "normalization" : {
-        "option" : "basic"
-      },
-      "dbt" : {
-        "gitRepoBranch" : "gitRepoBranch",
-        "dockerImage" : "dockerImage",
-        "dbtArguments" : "dbtArguments",
-        "gitRepoUrl" : "gitRepoUrl"
-      }
-    },
-    "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-  } ]
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - OperationReadList -

404

- Object with given id was not found. - NotFoundKnownExceptionInfo -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-
- Up -
post /v1/operations/update
-
Update an operation (updateOperation)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
OperationUpdate OperationUpdate (required)
- -
Body Parameter
- -
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "name" : "name",
-  "operationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "operatorConfiguration" : {
-    "normalization" : {
-      "option" : "basic"
-    },
-    "dbt" : {
-      "gitRepoBranch" : "gitRepoBranch",
-      "dockerImage" : "dockerImage",
-      "dbtArguments" : "dbtArguments",
-      "gitRepoUrl" : "gitRepoUrl"
-    }
-  },
-  "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - OperationRead -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-

Scheduler

-
-
- Up -
post /v1/scheduler/destinations/check_connection
-
Run check connection for a given destination configuration (executeDestinationCheckConnection)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
DestinationCoreConfig DestinationCoreConfig (required)
- -
Body Parameter
- -
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "message" : "message",
-  "jobInfo" : {
-    "createdAt" : 0,
-    "configId" : "configId",
-    "endedAt" : 6,
-    "id" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "logs" : {
-      "logLines" : [ "logLines", "logLines" ]
-    },
-    "succeeded" : true
-  },
-  "status" : "succeeded"
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - CheckConnectionRead -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-
- Up -
post /v1/scheduler/sources/check_connection
-
Run check connection for a given source configuration (executeSourceCheckConnection)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
SourceCoreConfig SourceCoreConfig (required)
- -
Body Parameter
- -
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "message" : "message",
-  "jobInfo" : {
-    "createdAt" : 0,
-    "configId" : "configId",
-    "endedAt" : 6,
-    "id" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "logs" : {
-      "logLines" : [ "logLines", "logLines" ]
-    },
-    "succeeded" : true
-  },
-  "status" : "succeeded"
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - CheckConnectionRead -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-
- Up -
post /v1/scheduler/sources/discover_schema
-
Run discover schema for a given source configuration (executeSourceDiscoverSchema)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
SourceCoreConfig SourceCoreConfig (required)
- -
Body Parameter
- -
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "catalogId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "catalog" : {
-    "streams" : [ {
-      "stream" : {
-        "sourceDefinedPrimaryKey" : [ [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ], [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ] ],
-        "supportedSyncModes" : [ null, null ],
-        "sourceDefinedCursor" : true,
-        "name" : "name",
-        "namespace" : "namespace",
-        "defaultCursorField" : [ "defaultCursorField", "defaultCursorField" ]
-      },
-      "config" : {
-        "aliasName" : "aliasName",
-        "cursorField" : [ "cursorField", "cursorField" ],
-        "selected" : true,
-        "primaryKey" : [ [ "primaryKey", "primaryKey" ], [ "primaryKey", "primaryKey" ] ]
-      }
-    }, {
-      "stream" : {
-        "sourceDefinedPrimaryKey" : [ [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ], [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ] ],
-        "supportedSyncModes" : [ null, null ],
-        "sourceDefinedCursor" : true,
-        "name" : "name",
-        "namespace" : "namespace",
-        "defaultCursorField" : [ "defaultCursorField", "defaultCursorField" ]
-      },
-      "config" : {
-        "aliasName" : "aliasName",
-        "cursorField" : [ "cursorField", "cursorField" ],
-        "selected" : true,
-        "primaryKey" : [ [ "primaryKey", "primaryKey" ], [ "primaryKey", "primaryKey" ] ]
-      }
-    } ]
-  },
-  "jobInfo" : {
-    "createdAt" : 0,
-    "configId" : "configId",
-    "endedAt" : 6,
-    "id" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "logs" : {
-      "logLines" : [ "logLines", "logLines" ]
-    },
-    "succeeded" : true
-  }
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - SourceDiscoverSchemaRead -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
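A sketch of running discovery against an ad-hoc source configuration (one that has not been saved as a source yet); the base URL, `sourceDefinitionId`, and the contents of `connectionConfiguration` are placeholders:

```python
import requests

API = "http://localhost:8000/api"  # placeholder; adjust to your deployment

resp = requests.post(
    f"{API}/v1/scheduler/sources/discover_schema",
    json={
        "sourceDefinitionId": "046b6c7f-0b8a-43b9-b35d-6489e6daee91",  # placeholder
        # Connector-specific configuration; the shape depends on the connector's spec.
        "connectionConfiguration": {"user": "charles"},
    },
)
resp.raise_for_status()
for entry in resp.json()["catalog"]["streams"]:
    print(entry["stream"]["name"], entry["stream"]["supportedSyncModes"])
```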

Source

-
-
- Up -
post /v1/sources/check_connection
-
Check connection to the source (checkConnectionToSource)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
SourceIdRequestBody SourceIdRequestBody (required)
- -
Body Parameter
- -
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "message" : "message",
-  "jobInfo" : {
-    "createdAt" : 0,
-    "configId" : "configId",
-    "endedAt" : 6,
-    "id" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "logs" : {
-      "logLines" : [ "logLines", "logLines" ]
-    },
-    "succeeded" : true
-  },
-  "status" : "succeeded"
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - CheckConnectionRead -

404

- Object with given id was not found. - NotFoundKnownExceptionInfo -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-
- Up -
post /v1/sources/check_connection_for_update
-
Check connection for a proposed update to a source (checkConnectionToSourceForUpdate)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
SourceUpdate SourceUpdate (required)
- -
Body Parameter
- -
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "message" : "message",
-  "jobInfo" : {
-    "createdAt" : 0,
-    "configId" : "configId",
-    "endedAt" : 6,
-    "id" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "logs" : {
-      "logLines" : [ "logLines", "logLines" ]
-    },
-    "succeeded" : true
-  },
-  "status" : "succeeded"
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - CheckConnectionRead -

404

- Object with given id was not found. - NotFoundKnownExceptionInfo -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-
- Up -
post /v1/sources/clone
-
Clone source (cloneSource)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
SourceCloneRequestBody SourceCloneRequestBody (required)
- -
Body Parameter
- -
- - - - -

Return type

-
- SourceRead - -
- - - -

Example data

-
Content-Type: application/json
-
{
-  "sourceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "connectionConfiguration" : {
-    "user" : "charles"
-  },
-  "name" : "name",
-  "sourceDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "sourceName" : "sourceName",
-  "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - SourceRead -

404

- Object with given id was not found. - NotFoundKnownExceptionInfo -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-
- Up -
post /v1/sources/create
-
Create a source (createSource)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
SourceCreate SourceCreate (required)
- -
Body Parameter
- -
- - - - -

Return type

-
- SourceRead - -
- - - -

Example data

-
Content-Type: application/json
-
{
-  "sourceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "connectionConfiguration" : {
-    "user" : "charles"
-  },
-  "name" : "name",
-  "sourceDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "sourceName" : "sourceName",
-  "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - SourceRead -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-
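A minimal sketch of creating a source; the base URL, IDs, and `connectionConfiguration` are placeholders and must match the connector's connectionSpecification:

```python
import requests

API = "http://localhost:8000/api"  # placeholder; adjust to your deployment

source = {
    "workspaceId": "046b6c7f-0b8a-43b9-b35d-6489e6daee91",         # placeholder
    "sourceDefinitionId": "046b6c7f-0b8a-43b9-b35d-6489e6daee91",  # placeholder
    "name": "My Source",
    # Connector-specific configuration, validated against the connectionSpecification.
    "connectionConfiguration": {"user": "charles"},
}

resp = requests.post(f"{API}/v1/sources/create", json=source)
resp.raise_for_status()
print(resp.json()["sourceId"])  # id of the newly created source
```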
- Up -
post /v1/sources/delete
-
Delete a source (deleteSource)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
SourceIdRequestBody SourceIdRequestBody (required)
- -
Body Parameter
- -
- - - - - - - - -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

204

- The resource was deleted successfully. - -

404

- Object with given id was not found. - NotFoundKnownExceptionInfo -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-
- Up -
post /v1/sources/discover_schema
-
Discover the schema catalog of the source (discoverSchemaForSource)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
SourceDiscoverSchemaRequestBody SourceDiscoverSchemaRequestBody (required)
- -
Body Parameter
- -
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "catalogId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "catalog" : {
-    "streams" : [ {
-      "stream" : {
-        "sourceDefinedPrimaryKey" : [ [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ], [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ] ],
-        "supportedSyncModes" : [ null, null ],
-        "sourceDefinedCursor" : true,
-        "name" : "name",
-        "namespace" : "namespace",
-        "defaultCursorField" : [ "defaultCursorField", "defaultCursorField" ]
-      },
-      "config" : {
-        "aliasName" : "aliasName",
-        "cursorField" : [ "cursorField", "cursorField" ],
-        "selected" : true,
-        "primaryKey" : [ [ "primaryKey", "primaryKey" ], [ "primaryKey", "primaryKey" ] ]
-      }
-    }, {
-      "stream" : {
-        "sourceDefinedPrimaryKey" : [ [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ], [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ] ],
-        "supportedSyncModes" : [ null, null ],
-        "sourceDefinedCursor" : true,
-        "name" : "name",
-        "namespace" : "namespace",
-        "defaultCursorField" : [ "defaultCursorField", "defaultCursorField" ]
-      },
-      "config" : {
-        "aliasName" : "aliasName",
-        "cursorField" : [ "cursorField", "cursorField" ],
-        "selected" : true,
-        "primaryKey" : [ [ "primaryKey", "primaryKey" ], [ "primaryKey", "primaryKey" ] ]
-      }
-    } ]
-  },
-  "jobInfo" : {
-    "createdAt" : 0,
-    "configId" : "configId",
-    "endedAt" : 6,
-    "id" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "logs" : {
-      "logLines" : [ "logLines", "logLines" ]
-    },
-    "succeeded" : true
-  }
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - SourceDiscoverSchemaRead -

404

- Object with given id was not found. - NotFoundKnownExceptionInfo -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-
- Up -
post /v1/sources/get
-
Get source (getSource)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
SourceIdRequestBody SourceIdRequestBody (required)
- -
Body Parameter
- -
- - - - -

Return type

-
- SourceRead - -
- - - -

Example data

-
Content-Type: application/json
-
{
-  "sourceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "connectionConfiguration" : {
-    "user" : "charles"
-  },
-  "name" : "name",
-  "sourceDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "sourceName" : "sourceName",
-  "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - SourceRead -

404

- Object with given id was not found. - NotFoundKnownExceptionInfo -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-
- Up -
post /v1/sources/list
-
List sources for workspace (listSourcesForWorkspace)
-
List sources for workspace. Does not return deleted sources.
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
WorkspaceIdRequestBody WorkspaceIdRequestBody (required)
- -
Body Parameter
- -
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "sources" : [ {
-    "sourceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "connectionConfiguration" : {
-      "user" : "charles"
-    },
-    "name" : "name",
-    "sourceDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "sourceName" : "sourceName",
-    "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-  }, {
-    "sourceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "connectionConfiguration" : {
-      "user" : "charles"
-    },
-    "name" : "name",
-    "sourceDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "sourceName" : "sourceName",
-    "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-  } ]
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - SourceReadList -

404

- Object with given id was not found. - NotFoundKnownExceptionInfo -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-
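A sketch listing the sources of one workspace, with a placeholder base URL and `workspaceId`:

```python
import requests

API = "http://localhost:8000/api"  # placeholder; adjust to your deployment

resp = requests.post(
    f"{API}/v1/sources/list",
    json={"workspaceId": "046b6c7f-0b8a-43b9-b35d-6489e6daee91"},  # placeholder
)
resp.raise_for_status()
for source in resp.json()["sources"]:
    print(source["sourceId"], source["name"])  # deleted sources are not returned
```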
- Up -
post /v1/sources/search
-
Search sources (searchSources)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
SourceSearch SourceSearch (required)
- -
Body Parameter
- -
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "sources" : [ {
-    "sourceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "connectionConfiguration" : {
-      "user" : "charles"
-    },
-    "name" : "name",
-    "sourceDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "sourceName" : "sourceName",
-    "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-  }, {
-    "sourceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "connectionConfiguration" : {
-      "user" : "charles"
-    },
-    "name" : "name",
-    "sourceDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "sourceName" : "sourceName",
-    "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-  } ]
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - SourceReadList -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-
- Up -
post /v1/sources/update
-
Update a source (updateSource)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
SourceUpdate SourceUpdate (required)
- -
Body Parameter
- -
- - - - -

Return type

-
- SourceRead - -
- - - -

Example data

-
Content-Type: application/json
-
{
-  "sourceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "connectionConfiguration" : {
-    "user" : "charles"
-  },
-  "name" : "name",
-  "sourceDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "sourceName" : "sourceName",
-  "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - SourceRead -

404

- Object with given id was not found. - NotFoundKnownExceptionInfo -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-

SourceDefinition

-
-
- Up -
post /v1/source_definitions/create_custom
-
Creates a custom sourceDefinition for the given workspace (createCustomSourceDefinition)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
CustomSourceDefinitionCreate CustomSourceDefinitionCreate (optional)
- -
Body Parameter
- -
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "resourceRequirements" : {
-    "default" : {
-      "cpu_limit" : "cpu_limit",
-      "memory_request" : "memory_request",
-      "memory_limit" : "memory_limit",
-      "cpu_request" : "cpu_request"
-    },
-    "jobSpecific" : [ {
-      "resourceRequirements" : {
-        "cpu_limit" : "cpu_limit",
-        "memory_request" : "memory_request",
-        "memory_limit" : "memory_limit",
-        "cpu_request" : "cpu_request"
-      }
-    }, {
-      "resourceRequirements" : {
-        "cpu_limit" : "cpu_limit",
-        "memory_request" : "memory_request",
-        "memory_limit" : "memory_limit",
-        "cpu_request" : "cpu_request"
-      }
-    } ]
-  },
-  "documentationUrl" : "https://openapi-generator.tech",
-  "dockerImageTag" : "dockerImageTag",
-  "releaseDate" : "2000-01-23",
-  "dockerRepository" : "dockerRepository",
-  "name" : "name",
-  "icon" : "icon",
-  "sourceDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-}
- -

Produces

- This API call produces the following media types according to the Accept request header; - the media type will be conveyed by the Content-Type response header. -
    -
  • application/json
  • -
- -

Responses

-

200

- Successful operation - SourceDefinitionRead -

422

- Input failed validation - InvalidInputExceptionInfo -
-
-
-
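A hedged sketch of registering a custom connector image in a workspace; the base URL, IDs, and Docker coordinates are placeholders, and wrapping the definition under a `sourceDefinition` key is an assumption about the CustomSourceDefinitionCreate body:

```python
import requests

API = "http://localhost:8000/api"  # placeholder; adjust to your deployment

resp = requests.post(
    f"{API}/v1/source_definitions/create_custom",
    json={
        "workspaceId": "046b6c7f-0b8a-43b9-b35d-6489e6daee91",  # placeholder
        "sourceDefinition": {                                   # assumed wrapper key
            "name": "My Custom Source",
            "dockerRepository": "myorg/source-custom",          # placeholder image
            "dockerImageTag": "0.1.0",
            "documentationUrl": "https://example.com/docs",
        },
    },
)
resp.raise_for_status()
print(resp.json()["sourceDefinitionId"])
```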
- Up -
post /v1/source_definitions/create
-
Creates a sourceDefinition (createSourceDefinition)
-
- - -

Consumes

- This API call consumes the following media types via the Content-Type request header: -
    -
  • application/json
  • -
- -

Request body

-
-
SourceDefinitionCreate SourceDefinitionCreate (optional)
- -
Body Parameter
- -
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "resourceRequirements" : {
-    "default" : {
-      "cpu_limit" : "cpu_limit",
-      "memory_request" : "memory_request",
-      "memory_limit" : "memory_limit",
-      "cpu_request" : "cpu_request"
-    },
-    "jobSpecific" : [ {
-      "resourceRequirements" : {
-        "cpu_limit" : "cpu_limit",
-        "memory_request" : "memory_request",
-        "memory_limit" : "memory_limit",
-        "cpu_request" : "cpu_request"
-      }
-    }, {
-      "resourceRequirements" : {
-        "cpu_limit" : "cpu_limit",
-        "memory_request" : "memory_request",
-        "memory_limit" : "memory_limit",
-        "cpu_request" : "cpu_request"
-      }
-    } ]
-  },
-  "documentationUrl" : "https://openapi-generator.tech",
-  "dockerImageTag" : "dockerImageTag",
-  "releaseDate" : "2000-01-23",
-  "dockerRepository" : "dockerRepository",
-  "name" : "name",
-  "icon" : "icon",
-  "sourceDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-}

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

200
Successful operation - SourceDefinitionRead

422
Input failed validation - InvalidInputExceptionInfo

post /v1/source_definitions/delete_custom

Delete a custom source definition for the given workspace (deleteCustomSourceDefinition)

Consumes

This API call consumes the following media types via the Content-Type request header:

  • application/json

Request body

SourceDefinitionIdWithWorkspaceId SourceDefinitionIdWithWorkspaceId (required)
Body Parameter

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

204
The resource was deleted successfully.

404
Object with given id was not found. - NotFoundKnownExceptionInfo

422
Input failed validation - InvalidInputExceptionInfo
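
Since this call returns no body on success, a sketch only needs to check for the 204 status. The payload below assumes SourceDefinitionIdWithWorkspaceId pairs the definition id with the owning workspace id.

```python
import requests

API = "http://localhost:8000/api"  # assumed local deployment

resp = requests.post(
    f"{API}/v1/source_definitions/delete_custom",
    json={
        "sourceDefinitionId": "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
        "workspaceId": "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
    },
)
assert resp.status_code == 204, resp.text  # 404/422 carry an exception body instead
```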

post /v1/source_definitions/delete

Delete a source definition (deleteSourceDefinition)

Consumes

This API call consumes the following media types via the Content-Type request header:

  • application/json

Request body

SourceDefinitionIdRequestBody SourceDefinitionIdRequestBody (required)
Body Parameter

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

204
The resource was deleted successfully.

404
Object with given id was not found. - NotFoundKnownExceptionInfo

422
Input failed validation - InvalidInputExceptionInfo

post /v1/source_definitions/get

Get source (getSourceDefinition)

Consumes

This API call consumes the following media types via the Content-Type request header:

  • application/json

Request body

SourceDefinitionIdRequestBody SourceDefinitionIdRequestBody (required)
Body Parameter

Return type

SourceDefinitionRead

Example data

Content-Type: application/json

{
-  "resourceRequirements" : {
-    "default" : {
-      "cpu_limit" : "cpu_limit",
-      "memory_request" : "memory_request",
-      "memory_limit" : "memory_limit",
-      "cpu_request" : "cpu_request"
-    },
-    "jobSpecific" : [ {
-      "resourceRequirements" : {
-        "cpu_limit" : "cpu_limit",
-        "memory_request" : "memory_request",
-        "memory_limit" : "memory_limit",
-        "cpu_request" : "cpu_request"
-      }
-    }, {
-      "resourceRequirements" : {
-        "cpu_limit" : "cpu_limit",
-        "memory_request" : "memory_request",
-        "memory_limit" : "memory_limit",
-        "cpu_request" : "cpu_request"
-      }
-    } ]
-  },
-  "documentationUrl" : "https://openapi-generator.tech",
-  "dockerImageTag" : "dockerImageTag",
-  "releaseDate" : "2000-01-23",
-  "dockerRepository" : "dockerRepository",
-  "name" : "name",
-  "icon" : "icon",
-  "sourceDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-}

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

200
Successful operation - SourceDefinitionRead

404
Object with given id was not found. - NotFoundKnownExceptionInfo

422
Input failed validation - InvalidInputExceptionInfo
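
A minimal lookup sketch; SourceDefinitionIdRequestBody carries only the definition id.

```python
import requests

API = "http://localhost:8000/api"  # assumed local deployment

resp = requests.post(
    f"{API}/v1/source_definitions/get",
    json={"sourceDefinitionId": "046b6c7f-0b8a-43b9-b35d-6489e6daee91"},
)
resp.raise_for_status()
definition = resp.json()  # SourceDefinitionRead
print(definition["dockerRepository"], definition["dockerImageTag"])
```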

post /v1/source_definitions/get_for_workspace

Get a sourceDefinition that is configured for the given workspace (getSourceDefinitionForWorkspace)

Consumes

This API call consumes the following media types via the Content-Type request header:

  • application/json

Request body

SourceDefinitionIdWithWorkspaceId SourceDefinitionIdWithWorkspaceId (required)
Body Parameter

Return type

SourceDefinitionRead

Example data

Content-Type: application/json

{
-  "resourceRequirements" : {
-    "default" : {
-      "cpu_limit" : "cpu_limit",
-      "memory_request" : "memory_request",
-      "memory_limit" : "memory_limit",
-      "cpu_request" : "cpu_request"
-    },
-    "jobSpecific" : [ {
-      "resourceRequirements" : {
-        "cpu_limit" : "cpu_limit",
-        "memory_request" : "memory_request",
-        "memory_limit" : "memory_limit",
-        "cpu_request" : "cpu_request"
-      }
-    }, {
-      "resourceRequirements" : {
-        "cpu_limit" : "cpu_limit",
-        "memory_request" : "memory_request",
-        "memory_limit" : "memory_limit",
-        "cpu_request" : "cpu_request"
-      }
-    } ]
-  },
-  "documentationUrl" : "https://openapi-generator.tech",
-  "dockerImageTag" : "dockerImageTag",
-  "releaseDate" : "2000-01-23",
-  "dockerRepository" : "dockerRepository",
-  "name" : "name",
-  "icon" : "icon",
-  "sourceDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-}

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

200
Successful operation - SourceDefinitionRead

404
Object with given id was not found. - NotFoundKnownExceptionInfo

422
Input failed validation - InvalidInputExceptionInfo

post /v1/source_definitions/grant_definition

Grant a private, non-custom sourceDefinition to a given workspace (grantSourceDefinitionToWorkspace)

Consumes

This API call consumes the following media types via the Content-Type request header:

  • application/json

Request body

SourceDefinitionIdWithWorkspaceId SourceDefinitionIdWithWorkspaceId (required)
Body Parameter

Return type

PrivateSourceDefinitionRead

Example data

Content-Type: application/json

{
-  "sourceDefinition" : {
-    "resourceRequirements" : {
-      "default" : {
-        "cpu_limit" : "cpu_limit",
-        "memory_request" : "memory_request",
-        "memory_limit" : "memory_limit",
-        "cpu_request" : "cpu_request"
-      },
-      "jobSpecific" : [ {
-        "resourceRequirements" : {
-          "cpu_limit" : "cpu_limit",
-          "memory_request" : "memory_request",
-          "memory_limit" : "memory_limit",
-          "cpu_request" : "cpu_request"
-        }
-      }, {
-        "resourceRequirements" : {
-          "cpu_limit" : "cpu_limit",
-          "memory_request" : "memory_request",
-          "memory_limit" : "memory_limit",
-          "cpu_request" : "cpu_request"
-        }
-      } ]
-    },
-    "documentationUrl" : "https://openapi-generator.tech",
-    "dockerImageTag" : "dockerImageTag",
-    "releaseDate" : "2000-01-23",
-    "dockerRepository" : "dockerRepository",
-    "name" : "name",
-    "icon" : "icon",
-    "sourceDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-  },
-  "granted" : true
-}

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

200
Successful operation - PrivateSourceDefinitionRead

404
Object with given id was not found. - NotFoundKnownExceptionInfo

422
Input failed validation - InvalidInputExceptionInfo
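
Granting uses the same request shape as the delete/revoke calls, but returns a PrivateSourceDefinitionRead whose `granted` flag confirms the result. A sketch, under the same assumptions as above:

```python
import requests

API = "http://localhost:8000/api"  # assumed local deployment

resp = requests.post(
    f"{API}/v1/source_definitions/grant_definition",
    json={
        "sourceDefinitionId": "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
        "workspaceId": "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
    },
)
resp.raise_for_status()
print(resp.json()["granted"])  # True once the workspace holds the grant
```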

post /v1/source_definitions/list_latest

List the latest sourceDefinitions Airbyte supports (listLatestSourceDefinitions)

Guaranteed to retrieve the latest information on supported sources.

Return type

SourceDefinitionReadList

Example data

Content-Type: application/json

{
-  "sourceDefinitions" : [ {
-    "resourceRequirements" : {
-      "default" : {
-        "cpu_limit" : "cpu_limit",
-        "memory_request" : "memory_request",
-        "memory_limit" : "memory_limit",
-        "cpu_request" : "cpu_request"
-      },
-      "jobSpecific" : [ {
-        "resourceRequirements" : {
-          "cpu_limit" : "cpu_limit",
-          "memory_request" : "memory_request",
-          "memory_limit" : "memory_limit",
-          "cpu_request" : "cpu_request"
-        }
-      }, {
-        "resourceRequirements" : {
-          "cpu_limit" : "cpu_limit",
-          "memory_request" : "memory_request",
-          "memory_limit" : "memory_limit",
-          "cpu_request" : "cpu_request"
-        }
-      } ]
-    },
-    "documentationUrl" : "https://openapi-generator.tech",
-    "dockerImageTag" : "dockerImageTag",
-    "releaseDate" : "2000-01-23",
-    "dockerRepository" : "dockerRepository",
-    "name" : "name",
-    "icon" : "icon",
-    "sourceDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-  }, {
-    "resourceRequirements" : {
-      "default" : {
-        "cpu_limit" : "cpu_limit",
-        "memory_request" : "memory_request",
-        "memory_limit" : "memory_limit",
-        "cpu_request" : "cpu_request"
-      },
-      "jobSpecific" : [ {
-        "resourceRequirements" : {
-          "cpu_limit" : "cpu_limit",
-          "memory_request" : "memory_request",
-          "memory_limit" : "memory_limit",
-          "cpu_request" : "cpu_request"
-        }
-      }, {
-        "resourceRequirements" : {
-          "cpu_limit" : "cpu_limit",
-          "memory_request" : "memory_request",
-          "memory_limit" : "memory_limit",
-          "cpu_request" : "cpu_request"
-        }
-      } ]
-    },
-    "documentationUrl" : "https://openapi-generator.tech",
-    "dockerImageTag" : "dockerImageTag",
-    "releaseDate" : "2000-01-23",
-    "dockerRepository" : "dockerRepository",
-    "name" : "name",
-    "icon" : "icon",
-    "sourceDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-  } ]
-}

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

200
Successful operation - SourceDefinitionReadList
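
This endpoint takes no request body, so a sketch is just a bare POST (same assumed base URL as the earlier examples):

```python
import requests

API = "http://localhost:8000/api"  # assumed local deployment

resp = requests.post(f"{API}/v1/source_definitions/list_latest")
resp.raise_for_status()
for definition in resp.json()["sourceDefinitions"]:
    print(definition["name"], definition["dockerImageTag"])
```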

post /v1/source_definitions/list_private

List all private, non-custom sourceDefinitions, and for each indicate whether the given workspace has a grant for using the definition. Used by admins to view and modify a given workspace's grants. (listPrivateSourceDefinitions)

Consumes

This API call consumes the following media types via the Content-Type request header:

  • application/json

Request body

WorkspaceIdRequestBody WorkspaceIdRequestBody (optional)
Body Parameter

Return type

PrivateSourceDefinitionReadList

Example data

Content-Type: application/json

{
-  "sourceDefinitions" : [ {
-    "sourceDefinition" : {
-      "resourceRequirements" : {
-        "default" : {
-          "cpu_limit" : "cpu_limit",
-          "memory_request" : "memory_request",
-          "memory_limit" : "memory_limit",
-          "cpu_request" : "cpu_request"
-        },
-        "jobSpecific" : [ {
-          "resourceRequirements" : {
-            "cpu_limit" : "cpu_limit",
-            "memory_request" : "memory_request",
-            "memory_limit" : "memory_limit",
-            "cpu_request" : "cpu_request"
-          }
-        }, {
-          "resourceRequirements" : {
-            "cpu_limit" : "cpu_limit",
-            "memory_request" : "memory_request",
-            "memory_limit" : "memory_limit",
-            "cpu_request" : "cpu_request"
-          }
-        } ]
-      },
-      "documentationUrl" : "https://openapi-generator.tech",
-      "dockerImageTag" : "dockerImageTag",
-      "releaseDate" : "2000-01-23",
-      "dockerRepository" : "dockerRepository",
-      "name" : "name",
-      "icon" : "icon",
-      "sourceDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-    },
-    "granted" : true
-  }, {
-    "sourceDefinition" : {
-      "resourceRequirements" : {
-        "default" : {
-          "cpu_limit" : "cpu_limit",
-          "memory_request" : "memory_request",
-          "memory_limit" : "memory_limit",
-          "cpu_request" : "cpu_request"
-        },
-        "jobSpecific" : [ {
-          "resourceRequirements" : {
-            "cpu_limit" : "cpu_limit",
-            "memory_request" : "memory_request",
-            "memory_limit" : "memory_limit",
-            "cpu_request" : "cpu_request"
-          }
-        }, {
-          "resourceRequirements" : {
-            "cpu_limit" : "cpu_limit",
-            "memory_request" : "memory_request",
-            "memory_limit" : "memory_limit",
-            "cpu_request" : "cpu_request"
-          }
-        } ]
-      },
-      "documentationUrl" : "https://openapi-generator.tech",
-      "dockerImageTag" : "dockerImageTag",
-      "releaseDate" : "2000-01-23",
-      "dockerRepository" : "dockerRepository",
-      "name" : "name",
-      "icon" : "icon",
-      "sourceDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-    },
-    "granted" : true
-  } ]
-}

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

200
Successful operation - PrivateSourceDefinitionReadList

post /v1/source_definitions/list

List all the sourceDefinitions the current Airbyte deployment is configured to use (listSourceDefinitions)

Return type

SourceDefinitionReadList

Example data

Content-Type: application/json

{
-  "sourceDefinitions" : [ {
-    "resourceRequirements" : {
-      "default" : {
-        "cpu_limit" : "cpu_limit",
-        "memory_request" : "memory_request",
-        "memory_limit" : "memory_limit",
-        "cpu_request" : "cpu_request"
-      },
-      "jobSpecific" : [ {
-        "resourceRequirements" : {
-          "cpu_limit" : "cpu_limit",
-          "memory_request" : "memory_request",
-          "memory_limit" : "memory_limit",
-          "cpu_request" : "cpu_request"
-        }
-      }, {
-        "resourceRequirements" : {
-          "cpu_limit" : "cpu_limit",
-          "memory_request" : "memory_request",
-          "memory_limit" : "memory_limit",
-          "cpu_request" : "cpu_request"
-        }
-      } ]
-    },
-    "documentationUrl" : "https://openapi-generator.tech",
-    "dockerImageTag" : "dockerImageTag",
-    "releaseDate" : "2000-01-23",
-    "dockerRepository" : "dockerRepository",
-    "name" : "name",
-    "icon" : "icon",
-    "sourceDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-  }, {
-    "resourceRequirements" : {
-      "default" : {
-        "cpu_limit" : "cpu_limit",
-        "memory_request" : "memory_request",
-        "memory_limit" : "memory_limit",
-        "cpu_request" : "cpu_request"
-      },
-      "jobSpecific" : [ {
-        "resourceRequirements" : {
-          "cpu_limit" : "cpu_limit",
-          "memory_request" : "memory_request",
-          "memory_limit" : "memory_limit",
-          "cpu_request" : "cpu_request"
-        }
-      }, {
-        "resourceRequirements" : {
-          "cpu_limit" : "cpu_limit",
-          "memory_request" : "memory_request",
-          "memory_limit" : "memory_limit",
-          "cpu_request" : "cpu_request"
-        }
-      } ]
-    },
-    "documentationUrl" : "https://openapi-generator.tech",
-    "dockerImageTag" : "dockerImageTag",
-    "releaseDate" : "2000-01-23",
-    "dockerRepository" : "dockerRepository",
-    "name" : "name",
-    "icon" : "icon",
-    "sourceDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-  } ]
-}

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

200
Successful operation - SourceDefinitionReadList

post /v1/source_definitions/list_for_workspace

List all the sourceDefinitions the given workspace is configured to use (listSourceDefinitionsForWorkspace)

Consumes

This API call consumes the following media types via the Content-Type request header:

  • application/json

Request body

WorkspaceIdRequestBody WorkspaceIdRequestBody (optional)
Body Parameter

Return type

SourceDefinitionReadList

Example data

Content-Type: application/json

{
-  "sourceDefinitions" : [ {
-    "resourceRequirements" : {
-      "default" : {
-        "cpu_limit" : "cpu_limit",
-        "memory_request" : "memory_request",
-        "memory_limit" : "memory_limit",
-        "cpu_request" : "cpu_request"
-      },
-      "jobSpecific" : [ {
-        "resourceRequirements" : {
-          "cpu_limit" : "cpu_limit",
-          "memory_request" : "memory_request",
-          "memory_limit" : "memory_limit",
-          "cpu_request" : "cpu_request"
-        }
-      }, {
-        "resourceRequirements" : {
-          "cpu_limit" : "cpu_limit",
-          "memory_request" : "memory_request",
-          "memory_limit" : "memory_limit",
-          "cpu_request" : "cpu_request"
-        }
-      } ]
-    },
-    "documentationUrl" : "https://openapi-generator.tech",
-    "dockerImageTag" : "dockerImageTag",
-    "releaseDate" : "2000-01-23",
-    "dockerRepository" : "dockerRepository",
-    "name" : "name",
-    "icon" : "icon",
-    "sourceDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-  }, {
-    "resourceRequirements" : {
-      "default" : {
-        "cpu_limit" : "cpu_limit",
-        "memory_request" : "memory_request",
-        "memory_limit" : "memory_limit",
-        "cpu_request" : "cpu_request"
-      },
-      "jobSpecific" : [ {
-        "resourceRequirements" : {
-          "cpu_limit" : "cpu_limit",
-          "memory_request" : "memory_request",
-          "memory_limit" : "memory_limit",
-          "cpu_request" : "cpu_request"
-        }
-      }, {
-        "resourceRequirements" : {
-          "cpu_limit" : "cpu_limit",
-          "memory_request" : "memory_request",
-          "memory_limit" : "memory_limit",
-          "cpu_request" : "cpu_request"
-        }
-      } ]
-    },
-    "documentationUrl" : "https://openapi-generator.tech",
-    "dockerImageTag" : "dockerImageTag",
-    "releaseDate" : "2000-01-23",
-    "dockerRepository" : "dockerRepository",
-    "name" : "name",
-    "icon" : "icon",
-    "sourceDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-  } ]
-}

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

200
Successful operation - SourceDefinitionReadList
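
A sketch of the workspace-scoped listing; the only input is the WorkspaceIdRequestBody.

```python
import requests

API = "http://localhost:8000/api"  # assumed local deployment

resp = requests.post(
    f"{API}/v1/source_definitions/list_for_workspace",
    json={"workspaceId": "046b6c7f-0b8a-43b9-b35d-6489e6daee91"},
)
resp.raise_for_status()
names = [d["name"] for d in resp.json()["sourceDefinitions"]]
print(sorted(names))
```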

post /v1/source_definitions/revoke_definition

Revoke a grant to a private, non-custom sourceDefinition from a given workspace (revokeSourceDefinitionFromWorkspace)

Consumes

This API call consumes the following media types via the Content-Type request header:

  • application/json

Request body

SourceDefinitionIdWithWorkspaceId SourceDefinitionIdWithWorkspaceId (required)
Body Parameter

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

204
The resource was deleted successfully.

404
Object with given id was not found. - NotFoundKnownExceptionInfo

422
Input failed validation - InvalidInputExceptionInfo
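
Revoking mirrors the grant call and, like the delete endpoints, answers with an empty 204 on success:

```python
import requests

API = "http://localhost:8000/api"  # assumed local deployment

resp = requests.post(
    f"{API}/v1/source_definitions/revoke_definition",
    json={
        "sourceDefinitionId": "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
        "workspaceId": "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
    },
)
assert resp.status_code == 204, resp.text
```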

post /v1/source_definitions/update_custom

Update a custom sourceDefinition for the given workspace (updateCustomSourceDefinition)

Consumes

This API call consumes the following media types via the Content-Type request header:

  • application/json

Request body

CustomSourceDefinitionUpdate CustomSourceDefinitionUpdate (optional)
Body Parameter

Return type

SourceDefinitionRead

Example data

Content-Type: application/json

{
-  "resourceRequirements" : {
-    "default" : {
-      "cpu_limit" : "cpu_limit",
-      "memory_request" : "memory_request",
-      "memory_limit" : "memory_limit",
-      "cpu_request" : "cpu_request"
-    },
-    "jobSpecific" : [ {
-      "resourceRequirements" : {
-        "cpu_limit" : "cpu_limit",
-        "memory_request" : "memory_request",
-        "memory_limit" : "memory_limit",
-        "cpu_request" : "cpu_request"
-      }
-    }, {
-      "resourceRequirements" : {
-        "cpu_limit" : "cpu_limit",
-        "memory_request" : "memory_request",
-        "memory_limit" : "memory_limit",
-        "cpu_request" : "cpu_request"
-      }
-    } ]
-  },
-  "documentationUrl" : "https://openapi-generator.tech",
-  "dockerImageTag" : "dockerImageTag",
-  "releaseDate" : "2000-01-23",
-  "dockerRepository" : "dockerRepository",
-  "name" : "name",
-  "icon" : "icon",
-  "sourceDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-}

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

200
Successful operation - SourceDefinitionRead

404
Object with given id was not found. - NotFoundKnownExceptionInfo

422
Input failed validation - InvalidInputExceptionInfo

post /v1/source_definitions/update

Update a sourceDefinition (updateSourceDefinition)

Consumes

This API call consumes the following media types via the Content-Type request header:

  • application/json

Request body

SourceDefinitionUpdate SourceDefinitionUpdate (optional)
Body Parameter

Return type

SourceDefinitionRead

Example data

Content-Type: application/json

{
-  "resourceRequirements" : {
-    "default" : {
-      "cpu_limit" : "cpu_limit",
-      "memory_request" : "memory_request",
-      "memory_limit" : "memory_limit",
-      "cpu_request" : "cpu_request"
-    },
-    "jobSpecific" : [ {
-      "resourceRequirements" : {
-        "cpu_limit" : "cpu_limit",
-        "memory_request" : "memory_request",
-        "memory_limit" : "memory_limit",
-        "cpu_request" : "cpu_request"
-      }
-    }, {
-      "resourceRequirements" : {
-        "cpu_limit" : "cpu_limit",
-        "memory_request" : "memory_request",
-        "memory_limit" : "memory_limit",
-        "cpu_request" : "cpu_request"
-      }
-    } ]
-  },
-  "documentationUrl" : "https://openapi-generator.tech",
-  "dockerImageTag" : "dockerImageTag",
-  "releaseDate" : "2000-01-23",
-  "dockerRepository" : "dockerRepository",
-  "name" : "name",
-  "icon" : "icon",
-  "sourceDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-}

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

200
Successful operation - SourceDefinitionRead

404
Object with given id was not found. - NotFoundKnownExceptionInfo

422
Input failed validation - InvalidInputExceptionInfo
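
The typical use of this endpoint is pinning a definition to a new connector image tag. A hedged sketch; the exact SourceDefinitionUpdate fields are given by its schema, and `dockerImageTag` here follows the example data:

```python
import requests

API = "http://localhost:8000/api"  # assumed local deployment

resp = requests.post(
    f"{API}/v1/source_definitions/update",
    json={
        "sourceDefinitionId": "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
        "dockerImageTag": "0.2.0",  # the connector version to pin
    },
)
resp.raise_for_status()
print(resp.json()["dockerImageTag"])  # the returned SourceDefinitionRead reflects the update
```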

SourceDefinitionSpecification

post /v1/source_definition_specifications/get

Get specification for a SourceDefinition. (getSourceDefinitionSpecification)

Consumes

This API call consumes the following media types via the Content-Type request header:

  • application/json

Request body

SourceDefinitionIdWithWorkspaceId SourceDefinitionIdWithWorkspaceId (required)
Body Parameter

Return type

SourceDefinitionSpecificationRead

Example data

Content-Type: application/json

{
  "documentationUrl" : "documentationUrl",
  "connectionSpecification" : {
    "user" : {
      "type" : "string"
    }
  },
  "sourceDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
  "advancedAuth" : {
    "predicateValue" : "predicateValue",
    "oauthConfigSpecification" : { },
    "predicateKey" : [ "predicateKey", "predicateKey" ],
    "authFlowType" : "oauth2.0"
  },
  "authSpecification" : {
    "auth_type" : "oauth2.0",
    "oauth2Specification" : {
      "oauthFlowOutputParameters" : [ [ "oauthFlowOutputParameters", "oauthFlowOutputParameters" ], [ "oauthFlowOutputParameters", "oauthFlowOutputParameters" ] ],
      "rootObject" : [ "path", 1 ],
      "oauthFlowInitParameters" : [ [ "oauthFlowInitParameters", "oauthFlowInitParameters" ], [ "oauthFlowInitParameters", "oauthFlowInitParameters" ] ]
    }
  },
  "jobInfo" : {
    "createdAt" : 0,
    "configId" : "configId",
    "endedAt" : 6,
    "id" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
    "logs" : {
      "logLines" : [ "logLines", "logLines" ]
    },
    "succeeded" : true
  }
}

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

200
Successful operation - SourceDefinitionSpecificationRead

404
Object with given id was not found. - NotFoundKnownExceptionInfo

422
Input failed validation - InvalidInputExceptionInfo
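
The returned `connectionSpecification` is the JSON Schema a UI or script can use to build a valid `connectionConfiguration` for that connector. A sketch, under the same assumptions as the earlier examples:

```python
import requests

API = "http://localhost:8000/api"  # assumed local deployment

resp = requests.post(
    f"{API}/v1/source_definition_specifications/get",
    json={
        "sourceDefinitionId": "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
        "workspaceId": "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
    },
)
resp.raise_for_status()
spec = resp.json()["connectionSpecification"]  # JSON Schema for the connector's config
print(list(spec.get("properties", {})))        # e.g. the configurable field names
```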

WebBackend

post /v1/web_backend/connections/create

Create a connection (webBackendCreateConnection)

Consumes

This API call consumes the following media types via the Content-Type request header:

  • application/json

Request body

WebBackendConnectionCreate WebBackendConnectionCreate (required)
Body Parameter

Return type

WebBackendConnectionRead

Example data

Content-Type: application/json

{
-  "sourceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "latestSyncJobCreatedAt" : 0,
-  "prefix" : "prefix",
-  "destination" : {
-    "connectionConfiguration" : {
-      "user" : "charles"
-    },
-    "destinationName" : "destinationName",
-    "name" : "name",
-    "destinationDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "destinationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-  },
-  "isSyncing" : true,
-  "source" : {
-    "sourceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "connectionConfiguration" : {
-      "user" : "charles"
-    },
-    "name" : "name",
-    "sourceDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "sourceName" : "sourceName",
-    "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-  },
-  "destinationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "catalogDiff" : {
-    "transforms" : [ {
-      "streamDescriptor" : {
-        "name" : "name",
-        "namespace" : "namespace"
-      },
-      "transformType" : "add_stream",
-      "updateStream" : [ {
-        "updateFieldSchema" : { },
-        "fieldName" : [ "fieldName", "fieldName" ],
-        "addField" : { },
-        "transformType" : "add_field",
-        "removeField" : { }
-      }, {
-        "updateFieldSchema" : { },
-        "fieldName" : [ "fieldName", "fieldName" ],
-        "addField" : { },
-        "transformType" : "add_field",
-        "removeField" : { }
-      } ]
-    }, {
-      "streamDescriptor" : {
-        "name" : "name",
-        "namespace" : "namespace"
-      },
-      "transformType" : "add_stream",
-      "updateStream" : [ {
-        "updateFieldSchema" : { },
-        "fieldName" : [ "fieldName", "fieldName" ],
-        "addField" : { },
-        "transformType" : "add_field",
-        "removeField" : { }
-      }, {
-        "updateFieldSchema" : { },
-        "fieldName" : [ "fieldName", "fieldName" ],
-        "addField" : { },
-        "transformType" : "add_field",
-        "removeField" : { }
-      } ]
-    } ]
-  },
-  "resourceRequirements" : {
-    "cpu_limit" : "cpu_limit",
-    "memory_request" : "memory_request",
-    "memory_limit" : "memory_limit",
-    "cpu_request" : "cpu_request"
-  },
-  "schedule" : {
-    "units" : 0,
-    "timeUnit" : "minutes"
-  },
-  "operations" : [ {
-    "name" : "name",
-    "operationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "operatorConfiguration" : {
-      "normalization" : {
-        "option" : "basic"
-      },
-      "dbt" : {
-        "gitRepoBranch" : "gitRepoBranch",
-        "dockerImage" : "dockerImage",
-        "dbtArguments" : "dbtArguments",
-        "gitRepoUrl" : "gitRepoUrl"
-      }
-    },
-    "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-  }, {
-    "name" : "name",
-    "operationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "operatorConfiguration" : {
-      "normalization" : {
-        "option" : "basic"
-      },
-      "dbt" : {
-        "gitRepoBranch" : "gitRepoBranch",
-        "dockerImage" : "dockerImage",
-        "dbtArguments" : "dbtArguments",
-        "gitRepoUrl" : "gitRepoUrl"
-      }
-    },
-    "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-  } ],
-  "catalogId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "name" : "name",
-  "syncCatalog" : {
-    "streams" : [ {
-      "stream" : {
-        "sourceDefinedPrimaryKey" : [ [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ], [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ] ],
-        "supportedSyncModes" : [ null, null ],
-        "sourceDefinedCursor" : true,
-        "name" : "name",
-        "namespace" : "namespace",
-        "defaultCursorField" : [ "defaultCursorField", "defaultCursorField" ]
-      },
-      "config" : {
-        "aliasName" : "aliasName",
-        "cursorField" : [ "cursorField", "cursorField" ],
-        "selected" : true,
-        "primaryKey" : [ [ "primaryKey", "primaryKey" ], [ "primaryKey", "primaryKey" ] ]
-      }
-    }, {
-      "stream" : {
-        "sourceDefinedPrimaryKey" : [ [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ], [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ] ],
-        "supportedSyncModes" : [ null, null ],
-        "sourceDefinedCursor" : true,
-        "name" : "name",
-        "namespace" : "namespace",
-        "defaultCursorField" : [ "defaultCursorField", "defaultCursorField" ]
-      },
-      "config" : {
-        "aliasName" : "aliasName",
-        "cursorField" : [ "cursorField", "cursorField" ],
-        "selected" : true,
-        "primaryKey" : [ [ "primaryKey", "primaryKey" ], [ "primaryKey", "primaryKey" ] ]
-      }
-    } ]
-  },
-  "connectionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "namespaceFormat" : "${SOURCE_NAMESPACE}",
-  "operationIds" : [ null, null ]
-}

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

200
Successful operation - WebBackendConnectionRead

422
Input failed validation - InvalidInputExceptionInfo

post /v1/web_backend/connections/get

Get a connection (webBackendGetConnection)

Consumes

This API call consumes the following media types via the Content-Type request header:

  • application/json

Request body

WebBackendConnectionRequestBody WebBackendConnectionRequestBody (required)
Body Parameter

Return type

WebBackendConnectionRead

Example data

Content-Type: application/json

{
-  "sourceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "latestSyncJobCreatedAt" : 0,
-  "prefix" : "prefix",
-  "destination" : {
-    "connectionConfiguration" : {
-      "user" : "charles"
-    },
-    "destinationName" : "destinationName",
-    "name" : "name",
-    "destinationDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "destinationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-  },
-  "isSyncing" : true,
-  "source" : {
-    "sourceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "connectionConfiguration" : {
-      "user" : "charles"
-    },
-    "name" : "name",
-    "sourceDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "sourceName" : "sourceName",
-    "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-  },
-  "destinationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "catalogDiff" : {
-    "transforms" : [ {
-      "streamDescriptor" : {
-        "name" : "name",
-        "namespace" : "namespace"
-      },
-      "transformType" : "add_stream",
-      "updateStream" : [ {
-        "updateFieldSchema" : { },
-        "fieldName" : [ "fieldName", "fieldName" ],
-        "addField" : { },
-        "transformType" : "add_field",
-        "removeField" : { }
-      }, {
-        "updateFieldSchema" : { },
-        "fieldName" : [ "fieldName", "fieldName" ],
-        "addField" : { },
-        "transformType" : "add_field",
-        "removeField" : { }
-      } ]
-    }, {
-      "streamDescriptor" : {
-        "name" : "name",
-        "namespace" : "namespace"
-      },
-      "transformType" : "add_stream",
-      "updateStream" : [ {
-        "updateFieldSchema" : { },
-        "fieldName" : [ "fieldName", "fieldName" ],
-        "addField" : { },
-        "transformType" : "add_field",
-        "removeField" : { }
-      }, {
-        "updateFieldSchema" : { },
-        "fieldName" : [ "fieldName", "fieldName" ],
-        "addField" : { },
-        "transformType" : "add_field",
-        "removeField" : { }
-      } ]
-    } ]
-  },
-  "resourceRequirements" : {
-    "cpu_limit" : "cpu_limit",
-    "memory_request" : "memory_request",
-    "memory_limit" : "memory_limit",
-    "cpu_request" : "cpu_request"
-  },
-  "schedule" : {
-    "units" : 0,
-    "timeUnit" : "minutes"
-  },
-  "operations" : [ {
-    "name" : "name",
-    "operationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "operatorConfiguration" : {
-      "normalization" : {
-        "option" : "basic"
-      },
-      "dbt" : {
-        "gitRepoBranch" : "gitRepoBranch",
-        "dockerImage" : "dockerImage",
-        "dbtArguments" : "dbtArguments",
-        "gitRepoUrl" : "gitRepoUrl"
-      }
-    },
-    "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-  }, {
-    "name" : "name",
-    "operationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "operatorConfiguration" : {
-      "normalization" : {
-        "option" : "basic"
-      },
-      "dbt" : {
-        "gitRepoBranch" : "gitRepoBranch",
-        "dockerImage" : "dockerImage",
-        "dbtArguments" : "dbtArguments",
-        "gitRepoUrl" : "gitRepoUrl"
-      }
-    },
-    "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-  } ],
-  "catalogId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "name" : "name",
-  "syncCatalog" : {
-    "streams" : [ {
-      "stream" : {
-        "sourceDefinedPrimaryKey" : [ [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ], [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ] ],
-        "supportedSyncModes" : [ null, null ],
-        "sourceDefinedCursor" : true,
-        "name" : "name",
-        "namespace" : "namespace",
-        "defaultCursorField" : [ "defaultCursorField", "defaultCursorField" ]
-      },
-      "config" : {
-        "aliasName" : "aliasName",
-        "cursorField" : [ "cursorField", "cursorField" ],
-        "selected" : true,
-        "primaryKey" : [ [ "primaryKey", "primaryKey" ], [ "primaryKey", "primaryKey" ] ]
-      }
-    }, {
-      "stream" : {
-        "sourceDefinedPrimaryKey" : [ [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ], [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ] ],
-        "supportedSyncModes" : [ null, null ],
-        "sourceDefinedCursor" : true,
-        "name" : "name",
-        "namespace" : "namespace",
-        "defaultCursorField" : [ "defaultCursorField", "defaultCursorField" ]
-      },
-      "config" : {
-        "aliasName" : "aliasName",
-        "cursorField" : [ "cursorField", "cursorField" ],
-        "selected" : true,
-        "primaryKey" : [ [ "primaryKey", "primaryKey" ], [ "primaryKey", "primaryKey" ] ]
-      }
-    } ]
-  },
-  "connectionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "namespaceFormat" : "${SOURCE_NAMESPACE}",
-  "operationIds" : [ null, null ]
-}

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

200
Successful operation - WebBackendConnectionRead

404
Object with given id was not found. - NotFoundKnownExceptionInfo

422
Input failed validation - InvalidInputExceptionInfo
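
A sketch of fetching the web-backend view of a connection, assuming WebBackendConnectionRequestBody carries the connection id. The `withRefreshedCatalog` flag shown here is an assumption (it asks the server to re-discover the source schema) and can be dropped if your version does not accept it.

```python
import requests

API = "http://localhost:8000/api"  # assumed local deployment

resp = requests.post(
    f"{API}/v1/web_backend/connections/get",
    json={
        "connectionId": "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
        "withRefreshedCatalog": False,  # assumed optional flag; omit if unsupported
    },
)
resp.raise_for_status()
connection = resp.json()  # WebBackendConnectionRead
print(connection["name"], connection["isSyncing"])
```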

post /v1/web_backend/workspace/state

Returns the current state of a workspace (webBackendGetWorkspaceState)

Consumes

This API call consumes the following media types via the Content-Type request header:

  • application/json

Request body

WebBackendWorkspaceState WebBackendWorkspaceState (optional)
Body Parameter

Return type

WebBackendWorkspaceStateResult

Example data

Content-Type: application/json

{
  "hasDestinations" : true,
  "hasConnections" : true,
  "hasSources" : true
}

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

200
Successful operation - WebBackendWorkspaceStateResult

404
Object with given id was not found. - NotFoundKnownExceptionInfo

422
Input failed validation - InvalidInputExceptionInfo
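
A sketch of the workspace-state check, useful for onboarding flows that need to know whether anything has been configured yet (the `workspaceId` field of WebBackendWorkspaceState is assumed from the type name):

```python
import requests

API = "http://localhost:8000/api"  # assumed local deployment

resp = requests.post(
    f"{API}/v1/web_backend/workspace/state",
    json={"workspaceId": "046b6c7f-0b8a-43b9-b35d-6489e6daee91"},
)
resp.raise_for_status()
state = resp.json()  # WebBackendWorkspaceStateResult
print(state["hasSources"], state["hasDestinations"], state["hasConnections"])
```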

post /v1/web_backend/connections/list_all

Returns all connections for a workspace. (webBackendListAllConnectionsForWorkspace)

Consumes

This API call consumes the following media types via the Content-Type request header:

  • application/json

Request body

WorkspaceIdRequestBody WorkspaceIdRequestBody (required)
Body Parameter

Return type

WebBackendConnectionReadList

Example data

Content-Type: application/json

{
-  "connections" : [ {
-    "sourceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "latestSyncJobCreatedAt" : 0,
-    "prefix" : "prefix",
-    "destination" : {
-      "connectionConfiguration" : {
-        "user" : "charles"
-      },
-      "destinationName" : "destinationName",
-      "name" : "name",
-      "destinationDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-      "destinationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-      "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-    },
-    "isSyncing" : true,
-    "source" : {
-      "sourceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-      "connectionConfiguration" : {
-        "user" : "charles"
-      },
-      "name" : "name",
-      "sourceDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-      "sourceName" : "sourceName",
-      "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-    },
-    "destinationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "catalogDiff" : {
-      "transforms" : [ {
-        "streamDescriptor" : {
-          "name" : "name",
-          "namespace" : "namespace"
-        },
-        "transformType" : "add_stream",
-        "updateStream" : [ {
-          "updateFieldSchema" : { },
-          "fieldName" : [ "fieldName", "fieldName" ],
-          "addField" : { },
-          "transformType" : "add_field",
-          "removeField" : { }
-        }, {
-          "updateFieldSchema" : { },
-          "fieldName" : [ "fieldName", "fieldName" ],
-          "addField" : { },
-          "transformType" : "add_field",
-          "removeField" : { }
-        } ]
-      }, {
-        "streamDescriptor" : {
-          "name" : "name",
-          "namespace" : "namespace"
-        },
-        "transformType" : "add_stream",
-        "updateStream" : [ {
-          "updateFieldSchema" : { },
-          "fieldName" : [ "fieldName", "fieldName" ],
-          "addField" : { },
-          "transformType" : "add_field",
-          "removeField" : { }
-        }, {
-          "updateFieldSchema" : { },
-          "fieldName" : [ "fieldName", "fieldName" ],
-          "addField" : { },
-          "transformType" : "add_field",
-          "removeField" : { }
-        } ]
-      } ]
-    },
-    "resourceRequirements" : {
-      "cpu_limit" : "cpu_limit",
-      "memory_request" : "memory_request",
-      "memory_limit" : "memory_limit",
-      "cpu_request" : "cpu_request"
-    },
-    "schedule" : {
-      "units" : 0,
-      "timeUnit" : "minutes"
-    },
-    "operations" : [ {
-      "name" : "name",
-      "operationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-      "operatorConfiguration" : {
-        "normalization" : {
-          "option" : "basic"
-        },
-        "dbt" : {
-          "gitRepoBranch" : "gitRepoBranch",
-          "dockerImage" : "dockerImage",
-          "dbtArguments" : "dbtArguments",
-          "gitRepoUrl" : "gitRepoUrl"
-        }
-      },
-      "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-    }, {
-      "name" : "name",
-      "operationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-      "operatorConfiguration" : {
-        "normalization" : {
-          "option" : "basic"
-        },
-        "dbt" : {
-          "gitRepoBranch" : "gitRepoBranch",
-          "dockerImage" : "dockerImage",
-          "dbtArguments" : "dbtArguments",
-          "gitRepoUrl" : "gitRepoUrl"
-        }
-      },
-      "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-    } ],
-    "catalogId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "name" : "name",
-    "syncCatalog" : {
-      "streams" : [ {
-        "stream" : {
-          "sourceDefinedPrimaryKey" : [ [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ], [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ] ],
-          "supportedSyncModes" : [ null, null ],
-          "sourceDefinedCursor" : true,
-          "name" : "name",
-          "namespace" : "namespace",
-          "defaultCursorField" : [ "defaultCursorField", "defaultCursorField" ]
-        },
-        "config" : {
-          "aliasName" : "aliasName",
-          "cursorField" : [ "cursorField", "cursorField" ],
-          "selected" : true,
-          "primaryKey" : [ [ "primaryKey", "primaryKey" ], [ "primaryKey", "primaryKey" ] ]
-        }
-      }, {
-        "stream" : {
-          "sourceDefinedPrimaryKey" : [ [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ], [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ] ],
-          "supportedSyncModes" : [ null, null ],
-          "sourceDefinedCursor" : true,
-          "name" : "name",
-          "namespace" : "namespace",
-          "defaultCursorField" : [ "defaultCursorField", "defaultCursorField" ]
-        },
-        "config" : {
-          "aliasName" : "aliasName",
-          "cursorField" : [ "cursorField", "cursorField" ],
-          "selected" : true,
-          "primaryKey" : [ [ "primaryKey", "primaryKey" ], [ "primaryKey", "primaryKey" ] ]
-        }
-      } ]
-    },
-    "connectionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "namespaceFormat" : "${SOURCE_NAMESPACE}",
-    "operationIds" : [ null, null ]
-  }, {
-    "sourceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "latestSyncJobCreatedAt" : 0,
-    "prefix" : "prefix",
-    "destination" : {
-      "connectionConfiguration" : {
-        "user" : "charles"
-      },
-      "destinationName" : "destinationName",
-      "name" : "name",
-      "destinationDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-      "destinationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-      "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-    },
-    "isSyncing" : true,
-    "source" : {
-      "sourceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-      "connectionConfiguration" : {
-        "user" : "charles"
-      },
-      "name" : "name",
-      "sourceDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-      "sourceName" : "sourceName",
-      "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-    },
-    "destinationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "catalogDiff" : {
-      "transforms" : [ {
-        "streamDescriptor" : {
-          "name" : "name",
-          "namespace" : "namespace"
-        },
-        "transformType" : "add_stream",
-        "updateStream" : [ {
-          "updateFieldSchema" : { },
-          "fieldName" : [ "fieldName", "fieldName" ],
-          "addField" : { },
-          "transformType" : "add_field",
-          "removeField" : { }
-        }, {
-          "updateFieldSchema" : { },
-          "fieldName" : [ "fieldName", "fieldName" ],
-          "addField" : { },
-          "transformType" : "add_field",
-          "removeField" : { }
-        } ]
-      }, {
-        "streamDescriptor" : {
-          "name" : "name",
-          "namespace" : "namespace"
-        },
-        "transformType" : "add_stream",
-        "updateStream" : [ {
-          "updateFieldSchema" : { },
-          "fieldName" : [ "fieldName", "fieldName" ],
-          "addField" : { },
-          "transformType" : "add_field",
-          "removeField" : { }
-        }, {
-          "updateFieldSchema" : { },
-          "fieldName" : [ "fieldName", "fieldName" ],
-          "addField" : { },
-          "transformType" : "add_field",
-          "removeField" : { }
-        } ]
-      } ]
-    },
-    "resourceRequirements" : {
-      "cpu_limit" : "cpu_limit",
-      "memory_request" : "memory_request",
-      "memory_limit" : "memory_limit",
-      "cpu_request" : "cpu_request"
-    },
-    "schedule" : {
-      "units" : 0,
-      "timeUnit" : "minutes"
-    },
-    "operations" : [ {
-      "name" : "name",
-      "operationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-      "operatorConfiguration" : {
-        "normalization" : {
-          "option" : "basic"
-        },
-        "dbt" : {
-          "gitRepoBranch" : "gitRepoBranch",
-          "dockerImage" : "dockerImage",
-          "dbtArguments" : "dbtArguments",
-          "gitRepoUrl" : "gitRepoUrl"
-        }
-      },
-      "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-    }, {
-      "name" : "name",
-      "operationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-      "operatorConfiguration" : {
-        "normalization" : {
-          "option" : "basic"
-        },
-        "dbt" : {
-          "gitRepoBranch" : "gitRepoBranch",
-          "dockerImage" : "dockerImage",
-          "dbtArguments" : "dbtArguments",
-          "gitRepoUrl" : "gitRepoUrl"
-        }
-      },
-      "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-    } ],
-    "catalogId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "name" : "name",
-    "syncCatalog" : {
-      "streams" : [ {
-        "stream" : {
-          "sourceDefinedPrimaryKey" : [ [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ], [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ] ],
-          "supportedSyncModes" : [ null, null ],
-          "sourceDefinedCursor" : true,
-          "name" : "name",
-          "namespace" : "namespace",
-          "defaultCursorField" : [ "defaultCursorField", "defaultCursorField" ]
-        },
-        "config" : {
-          "aliasName" : "aliasName",
-          "cursorField" : [ "cursorField", "cursorField" ],
-          "selected" : true,
-          "primaryKey" : [ [ "primaryKey", "primaryKey" ], [ "primaryKey", "primaryKey" ] ]
-        }
-      }, {
-        "stream" : {
-          "sourceDefinedPrimaryKey" : [ [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ], [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ] ],
-          "supportedSyncModes" : [ null, null ],
-          "sourceDefinedCursor" : true,
-          "name" : "name",
-          "namespace" : "namespace",
-          "defaultCursorField" : [ "defaultCursorField", "defaultCursorField" ]
-        },
-        "config" : {
-          "aliasName" : "aliasName",
-          "cursorField" : [ "cursorField", "cursorField" ],
-          "selected" : true,
-          "primaryKey" : [ [ "primaryKey", "primaryKey" ], [ "primaryKey", "primaryKey" ] ]
-        }
-      } ]
-    },
-    "connectionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "namespaceFormat" : "${SOURCE_NAMESPACE}",
-    "operationIds" : [ null, null ]
-  } ]
-}

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

200
Successful operation - WebBackendConnectionReadList

404
Object with given id was not found. - NotFoundKnownExceptionInfo

422
Input failed validation - InvalidInputExceptionInfo

post /v1/web_backend/connections/list

Returns all non-deleted connections for a workspace. (webBackendListConnectionsForWorkspace)

Consumes

This API call consumes the following media types via the Content-Type request header:

  • application/json

Request body

WorkspaceIdRequestBody WorkspaceIdRequestBody (required)
Body Parameter

Return type

WebBackendConnectionReadList

Example data

Content-Type: application/json

{
-  "connections" : [ {
-    "sourceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "latestSyncJobCreatedAt" : 0,
-    "prefix" : "prefix",
-    "destination" : {
-      "connectionConfiguration" : {
-        "user" : "charles"
-      },
-      "destinationName" : "destinationName",
-      "name" : "name",
-      "destinationDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-      "destinationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-      "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-    },
-    "isSyncing" : true,
-    "source" : {
-      "sourceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-      "connectionConfiguration" : {
-        "user" : "charles"
-      },
-      "name" : "name",
-      "sourceDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-      "sourceName" : "sourceName",
-      "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-    },
-    "destinationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "catalogDiff" : {
-      "transforms" : [ {
-        "streamDescriptor" : {
-          "name" : "name",
-          "namespace" : "namespace"
-        },
-        "transformType" : "add_stream",
-        "updateStream" : [ {
-          "updateFieldSchema" : { },
-          "fieldName" : [ "fieldName", "fieldName" ],
-          "addField" : { },
-          "transformType" : "add_field",
-          "removeField" : { }
-        }, {
-          "updateFieldSchema" : { },
-          "fieldName" : [ "fieldName", "fieldName" ],
-          "addField" : { },
-          "transformType" : "add_field",
-          "removeField" : { }
-        } ]
-      }, {
-        "streamDescriptor" : {
-          "name" : "name",
-          "namespace" : "namespace"
-        },
-        "transformType" : "add_stream",
-        "updateStream" : [ {
-          "updateFieldSchema" : { },
-          "fieldName" : [ "fieldName", "fieldName" ],
-          "addField" : { },
-          "transformType" : "add_field",
-          "removeField" : { }
-        }, {
-          "updateFieldSchema" : { },
-          "fieldName" : [ "fieldName", "fieldName" ],
-          "addField" : { },
-          "transformType" : "add_field",
-          "removeField" : { }
-        } ]
-      } ]
-    },
-    "resourceRequirements" : {
-      "cpu_limit" : "cpu_limit",
-      "memory_request" : "memory_request",
-      "memory_limit" : "memory_limit",
-      "cpu_request" : "cpu_request"
-    },
-    "schedule" : {
-      "units" : 0,
-      "timeUnit" : "minutes"
-    },
-    "operations" : [ {
-      "name" : "name",
-      "operationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-      "operatorConfiguration" : {
-        "normalization" : {
-          "option" : "basic"
-        },
-        "dbt" : {
-          "gitRepoBranch" : "gitRepoBranch",
-          "dockerImage" : "dockerImage",
-          "dbtArguments" : "dbtArguments",
-          "gitRepoUrl" : "gitRepoUrl"
-        }
-      },
-      "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-    }, {
-      "name" : "name",
-      "operationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-      "operatorConfiguration" : {
-        "normalization" : {
-          "option" : "basic"
-        },
-        "dbt" : {
-          "gitRepoBranch" : "gitRepoBranch",
-          "dockerImage" : "dockerImage",
-          "dbtArguments" : "dbtArguments",
-          "gitRepoUrl" : "gitRepoUrl"
-        }
-      },
-      "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-    } ],
-    "catalogId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "name" : "name",
-    "syncCatalog" : {
-      "streams" : [ {
-        "stream" : {
-          "sourceDefinedPrimaryKey" : [ [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ], [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ] ],
-          "supportedSyncModes" : [ null, null ],
-          "sourceDefinedCursor" : true,
-          "name" : "name",
-          "namespace" : "namespace",
-          "defaultCursorField" : [ "defaultCursorField", "defaultCursorField" ]
-        },
-        "config" : {
-          "aliasName" : "aliasName",
-          "cursorField" : [ "cursorField", "cursorField" ],
-          "selected" : true,
-          "primaryKey" : [ [ "primaryKey", "primaryKey" ], [ "primaryKey", "primaryKey" ] ]
-        }
-      }, {
-        "stream" : {
-          "sourceDefinedPrimaryKey" : [ [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ], [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ] ],
-          "supportedSyncModes" : [ null, null ],
-          "sourceDefinedCursor" : true,
-          "name" : "name",
-          "namespace" : "namespace",
-          "defaultCursorField" : [ "defaultCursorField", "defaultCursorField" ]
-        },
-        "config" : {
-          "aliasName" : "aliasName",
-          "cursorField" : [ "cursorField", "cursorField" ],
-          "selected" : true,
-          "primaryKey" : [ [ "primaryKey", "primaryKey" ], [ "primaryKey", "primaryKey" ] ]
-        }
-      } ]
-    },
-    "connectionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "namespaceFormat" : "${SOURCE_NAMESPACE}",
-    "operationIds" : [ null, null ]
-  }, {
-    "sourceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "latestSyncJobCreatedAt" : 0,
-    "prefix" : "prefix",
-    "destination" : {
-      "connectionConfiguration" : {
-        "user" : "charles"
-      },
-      "destinationName" : "destinationName",
-      "name" : "name",
-      "destinationDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-      "destinationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-      "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-    },
-    "isSyncing" : true,
-    "source" : {
-      "sourceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-      "connectionConfiguration" : {
-        "user" : "charles"
-      },
-      "name" : "name",
-      "sourceDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-      "sourceName" : "sourceName",
-      "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-    },
-    "destinationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "catalogDiff" : {
-      "transforms" : [ {
-        "streamDescriptor" : {
-          "name" : "name",
-          "namespace" : "namespace"
-        },
-        "transformType" : "add_stream",
-        "updateStream" : [ {
-          "updateFieldSchema" : { },
-          "fieldName" : [ "fieldName", "fieldName" ],
-          "addField" : { },
-          "transformType" : "add_field",
-          "removeField" : { }
-        }, {
-          "updateFieldSchema" : { },
-          "fieldName" : [ "fieldName", "fieldName" ],
-          "addField" : { },
-          "transformType" : "add_field",
-          "removeField" : { }
-        } ]
-      }, {
-        "streamDescriptor" : {
-          "name" : "name",
-          "namespace" : "namespace"
-        },
-        "transformType" : "add_stream",
-        "updateStream" : [ {
-          "updateFieldSchema" : { },
-          "fieldName" : [ "fieldName", "fieldName" ],
-          "addField" : { },
-          "transformType" : "add_field",
-          "removeField" : { }
-        }, {
-          "updateFieldSchema" : { },
-          "fieldName" : [ "fieldName", "fieldName" ],
-          "addField" : { },
-          "transformType" : "add_field",
-          "removeField" : { }
-        } ]
-      } ]
-    },
-    "resourceRequirements" : {
-      "cpu_limit" : "cpu_limit",
-      "memory_request" : "memory_request",
-      "memory_limit" : "memory_limit",
-      "cpu_request" : "cpu_request"
-    },
-    "schedule" : {
-      "units" : 0,
-      "timeUnit" : "minutes"
-    },
-    "operations" : [ {
-      "name" : "name",
-      "operationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-      "operatorConfiguration" : {
-        "normalization" : {
-          "option" : "basic"
-        },
-        "dbt" : {
-          "gitRepoBranch" : "gitRepoBranch",
-          "dockerImage" : "dockerImage",
-          "dbtArguments" : "dbtArguments",
-          "gitRepoUrl" : "gitRepoUrl"
-        }
-      },
-      "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-    }, {
-      "name" : "name",
-      "operationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-      "operatorConfiguration" : {
-        "normalization" : {
-          "option" : "basic"
-        },
-        "dbt" : {
-          "gitRepoBranch" : "gitRepoBranch",
-          "dockerImage" : "dockerImage",
-          "dbtArguments" : "dbtArguments",
-          "gitRepoUrl" : "gitRepoUrl"
-        }
-      },
-      "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-    } ],
-    "catalogId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "name" : "name",
-    "syncCatalog" : {
-      "streams" : [ {
-        "stream" : {
-          "sourceDefinedPrimaryKey" : [ [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ], [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ] ],
-          "supportedSyncModes" : [ null, null ],
-          "sourceDefinedCursor" : true,
-          "name" : "name",
-          "namespace" : "namespace",
-          "defaultCursorField" : [ "defaultCursorField", "defaultCursorField" ]
-        },
-        "config" : {
-          "aliasName" : "aliasName",
-          "cursorField" : [ "cursorField", "cursorField" ],
-          "selected" : true,
-          "primaryKey" : [ [ "primaryKey", "primaryKey" ], [ "primaryKey", "primaryKey" ] ]
-        }
-      }, {
-        "stream" : {
-          "sourceDefinedPrimaryKey" : [ [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ], [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ] ],
-          "supportedSyncModes" : [ null, null ],
-          "sourceDefinedCursor" : true,
-          "name" : "name",
-          "namespace" : "namespace",
-          "defaultCursorField" : [ "defaultCursorField", "defaultCursorField" ]
-        },
-        "config" : {
-          "aliasName" : "aliasName",
-          "cursorField" : [ "cursorField", "cursorField" ],
-          "selected" : true,
-          "primaryKey" : [ [ "primaryKey", "primaryKey" ], [ "primaryKey", "primaryKey" ] ]
-        }
-      } ]
-    },
-    "connectionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "namespaceFormat" : "${SOURCE_NAMESPACE}",
-    "operationIds" : [ null, null ]
-  } ]
-}
- -

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

-

200

Successful operation (WebBackendConnectionReadList)

404

Object with given id was not found (NotFoundKnownExceptionInfo)

422

Input failed validation (InvalidInputExceptionInfo)
-
-
-
- Up -
post /v1/web_backend/connections/search
-
Search connections (webBackendSearchConnections)
-
- - -

Consumes

This API call consumes the following media types via the Content-Type request header:

  • application/json

Request body

-
-
WebBackendConnectionSearch (required)

Body Parameter
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "connections" : [ {
-    "sourceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "latestSyncJobCreatedAt" : 0,
-    "prefix" : "prefix",
-    "destination" : {
-      "connectionConfiguration" : {
-        "user" : "charles"
-      },
-      "destinationName" : "destinationName",
-      "name" : "name",
-      "destinationDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-      "destinationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-      "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-    },
-    "isSyncing" : true,
-    "source" : {
-      "sourceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-      "connectionConfiguration" : {
-        "user" : "charles"
-      },
-      "name" : "name",
-      "sourceDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-      "sourceName" : "sourceName",
-      "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-    },
-    "destinationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "catalogDiff" : {
-      "transforms" : [ {
-        "streamDescriptor" : {
-          "name" : "name",
-          "namespace" : "namespace"
-        },
-        "transformType" : "add_stream",
-        "updateStream" : [ {
-          "updateFieldSchema" : { },
-          "fieldName" : [ "fieldName", "fieldName" ],
-          "addField" : { },
-          "transformType" : "add_field",
-          "removeField" : { }
-        }, {
-          "updateFieldSchema" : { },
-          "fieldName" : [ "fieldName", "fieldName" ],
-          "addField" : { },
-          "transformType" : "add_field",
-          "removeField" : { }
-        } ]
-      }, {
-        "streamDescriptor" : {
-          "name" : "name",
-          "namespace" : "namespace"
-        },
-        "transformType" : "add_stream",
-        "updateStream" : [ {
-          "updateFieldSchema" : { },
-          "fieldName" : [ "fieldName", "fieldName" ],
-          "addField" : { },
-          "transformType" : "add_field",
-          "removeField" : { }
-        }, {
-          "updateFieldSchema" : { },
-          "fieldName" : [ "fieldName", "fieldName" ],
-          "addField" : { },
-          "transformType" : "add_field",
-          "removeField" : { }
-        } ]
-      } ]
-    },
-    "resourceRequirements" : {
-      "cpu_limit" : "cpu_limit",
-      "memory_request" : "memory_request",
-      "memory_limit" : "memory_limit",
-      "cpu_request" : "cpu_request"
-    },
-    "schedule" : {
-      "units" : 0,
-      "timeUnit" : "minutes"
-    },
-    "operations" : [ {
-      "name" : "name",
-      "operationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-      "operatorConfiguration" : {
-        "normalization" : {
-          "option" : "basic"
-        },
-        "dbt" : {
-          "gitRepoBranch" : "gitRepoBranch",
-          "dockerImage" : "dockerImage",
-          "dbtArguments" : "dbtArguments",
-          "gitRepoUrl" : "gitRepoUrl"
-        }
-      },
-      "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-    }, {
-      "name" : "name",
-      "operationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-      "operatorConfiguration" : {
-        "normalization" : {
-          "option" : "basic"
-        },
-        "dbt" : {
-          "gitRepoBranch" : "gitRepoBranch",
-          "dockerImage" : "dockerImage",
-          "dbtArguments" : "dbtArguments",
-          "gitRepoUrl" : "gitRepoUrl"
-        }
-      },
-      "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-    } ],
-    "catalogId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "name" : "name",
-    "syncCatalog" : {
-      "streams" : [ {
-        "stream" : {
-          "sourceDefinedPrimaryKey" : [ [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ], [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ] ],
-          "supportedSyncModes" : [ null, null ],
-          "sourceDefinedCursor" : true,
-          "name" : "name",
-          "namespace" : "namespace",
-          "defaultCursorField" : [ "defaultCursorField", "defaultCursorField" ]
-        },
-        "config" : {
-          "aliasName" : "aliasName",
-          "cursorField" : [ "cursorField", "cursorField" ],
-          "selected" : true,
-          "primaryKey" : [ [ "primaryKey", "primaryKey" ], [ "primaryKey", "primaryKey" ] ]
-        }
-      }, {
-        "stream" : {
-          "sourceDefinedPrimaryKey" : [ [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ], [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ] ],
-          "supportedSyncModes" : [ null, null ],
-          "sourceDefinedCursor" : true,
-          "name" : "name",
-          "namespace" : "namespace",
-          "defaultCursorField" : [ "defaultCursorField", "defaultCursorField" ]
-        },
-        "config" : {
-          "aliasName" : "aliasName",
-          "cursorField" : [ "cursorField", "cursorField" ],
-          "selected" : true,
-          "primaryKey" : [ [ "primaryKey", "primaryKey" ], [ "primaryKey", "primaryKey" ] ]
-        }
-      } ]
-    },
-    "connectionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "namespaceFormat" : "${SOURCE_NAMESPACE}",
-    "operationIds" : [ null, null ]
-  }, {
-    "sourceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "latestSyncJobCreatedAt" : 0,
-    "prefix" : "prefix",
-    "destination" : {
-      "connectionConfiguration" : {
-        "user" : "charles"
-      },
-      "destinationName" : "destinationName",
-      "name" : "name",
-      "destinationDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-      "destinationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-      "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-    },
-    "isSyncing" : true,
-    "source" : {
-      "sourceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-      "connectionConfiguration" : {
-        "user" : "charles"
-      },
-      "name" : "name",
-      "sourceDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-      "sourceName" : "sourceName",
-      "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-    },
-    "destinationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "catalogDiff" : {
-      "transforms" : [ {
-        "streamDescriptor" : {
-          "name" : "name",
-          "namespace" : "namespace"
-        },
-        "transformType" : "add_stream",
-        "updateStream" : [ {
-          "updateFieldSchema" : { },
-          "fieldName" : [ "fieldName", "fieldName" ],
-          "addField" : { },
-          "transformType" : "add_field",
-          "removeField" : { }
-        }, {
-          "updateFieldSchema" : { },
-          "fieldName" : [ "fieldName", "fieldName" ],
-          "addField" : { },
-          "transformType" : "add_field",
-          "removeField" : { }
-        } ]
-      }, {
-        "streamDescriptor" : {
-          "name" : "name",
-          "namespace" : "namespace"
-        },
-        "transformType" : "add_stream",
-        "updateStream" : [ {
-          "updateFieldSchema" : { },
-          "fieldName" : [ "fieldName", "fieldName" ],
-          "addField" : { },
-          "transformType" : "add_field",
-          "removeField" : { }
-        }, {
-          "updateFieldSchema" : { },
-          "fieldName" : [ "fieldName", "fieldName" ],
-          "addField" : { },
-          "transformType" : "add_field",
-          "removeField" : { }
-        } ]
-      } ]
-    },
-    "resourceRequirements" : {
-      "cpu_limit" : "cpu_limit",
-      "memory_request" : "memory_request",
-      "memory_limit" : "memory_limit",
-      "cpu_request" : "cpu_request"
-    },
-    "schedule" : {
-      "units" : 0,
-      "timeUnit" : "minutes"
-    },
-    "operations" : [ {
-      "name" : "name",
-      "operationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-      "operatorConfiguration" : {
-        "normalization" : {
-          "option" : "basic"
-        },
-        "dbt" : {
-          "gitRepoBranch" : "gitRepoBranch",
-          "dockerImage" : "dockerImage",
-          "dbtArguments" : "dbtArguments",
-          "gitRepoUrl" : "gitRepoUrl"
-        }
-      },
-      "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-    }, {
-      "name" : "name",
-      "operationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-      "operatorConfiguration" : {
-        "normalization" : {
-          "option" : "basic"
-        },
-        "dbt" : {
-          "gitRepoBranch" : "gitRepoBranch",
-          "dockerImage" : "dockerImage",
-          "dbtArguments" : "dbtArguments",
-          "gitRepoUrl" : "gitRepoUrl"
-        }
-      },
-      "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-    } ],
-    "catalogId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "name" : "name",
-    "syncCatalog" : {
-      "streams" : [ {
-        "stream" : {
-          "sourceDefinedPrimaryKey" : [ [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ], [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ] ],
-          "supportedSyncModes" : [ null, null ],
-          "sourceDefinedCursor" : true,
-          "name" : "name",
-          "namespace" : "namespace",
-          "defaultCursorField" : [ "defaultCursorField", "defaultCursorField" ]
-        },
-        "config" : {
-          "aliasName" : "aliasName",
-          "cursorField" : [ "cursorField", "cursorField" ],
-          "selected" : true,
-          "primaryKey" : [ [ "primaryKey", "primaryKey" ], [ "primaryKey", "primaryKey" ] ]
-        }
-      }, {
-        "stream" : {
-          "sourceDefinedPrimaryKey" : [ [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ], [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ] ],
-          "supportedSyncModes" : [ null, null ],
-          "sourceDefinedCursor" : true,
-          "name" : "name",
-          "namespace" : "namespace",
-          "defaultCursorField" : [ "defaultCursorField", "defaultCursorField" ]
-        },
-        "config" : {
-          "aliasName" : "aliasName",
-          "cursorField" : [ "cursorField", "cursorField" ],
-          "selected" : true,
-          "primaryKey" : [ [ "primaryKey", "primaryKey" ], [ "primaryKey", "primaryKey" ] ]
-        }
-      } ]
-    },
-    "connectionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "namespaceFormat" : "${SOURCE_NAMESPACE}",
-    "operationIds" : [ null, null ]
-  } ]
-}
- -

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

-

200

Successful operation (WebBackendConnectionReadList)

422

Input failed validation (InvalidInputExceptionInfo)
-
-
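
For illustration only, the sketch below calls the search endpoint with the Python requests library. The base URL (http://localhost:8000/api), the absence of authentication, and the filter values are assumptions about a local Airbyte Open Source deployment, not part of this specification.

```python
import requests

API_ROOT = "http://localhost:8000/api"  # assumed local deployment

# Any subset of WebBackendConnectionSearch fields may be supplied;
# these filter values are placeholders.
search_body = {"status": "active", "prefix": "prod_"}

resp = requests.post(f"{API_ROOT}/v1/web_backend/connections/search", json=search_body)
resp.raise_for_status()
for connection in resp.json()["connections"]:
    print(connection["connectionId"], connection["name"])
```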
-
- Up -
post /v1/web_backend/connections/update
-
Update a connection (webBackendUpdateConnection)
-
- - -

Consumes

This API call consumes the following media types via the Content-Type request header:

  • application/json

Request body

-
-
WebBackendConnectionUpdate (required)

Body Parameter
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "sourceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "latestSyncJobCreatedAt" : 0,
-  "prefix" : "prefix",
-  "destination" : {
-    "connectionConfiguration" : {
-      "user" : "charles"
-    },
-    "destinationName" : "destinationName",
-    "name" : "name",
-    "destinationDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "destinationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-  },
-  "isSyncing" : true,
-  "source" : {
-    "sourceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "connectionConfiguration" : {
-      "user" : "charles"
-    },
-    "name" : "name",
-    "sourceDefinitionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "sourceName" : "sourceName",
-    "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-  },
-  "destinationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "catalogDiff" : {
-    "transforms" : [ {
-      "streamDescriptor" : {
-        "name" : "name",
-        "namespace" : "namespace"
-      },
-      "transformType" : "add_stream",
-      "updateStream" : [ {
-        "updateFieldSchema" : { },
-        "fieldName" : [ "fieldName", "fieldName" ],
-        "addField" : { },
-        "transformType" : "add_field",
-        "removeField" : { }
-      }, {
-        "updateFieldSchema" : { },
-        "fieldName" : [ "fieldName", "fieldName" ],
-        "addField" : { },
-        "transformType" : "add_field",
-        "removeField" : { }
-      } ]
-    }, {
-      "streamDescriptor" : {
-        "name" : "name",
-        "namespace" : "namespace"
-      },
-      "transformType" : "add_stream",
-      "updateStream" : [ {
-        "updateFieldSchema" : { },
-        "fieldName" : [ "fieldName", "fieldName" ],
-        "addField" : { },
-        "transformType" : "add_field",
-        "removeField" : { }
-      }, {
-        "updateFieldSchema" : { },
-        "fieldName" : [ "fieldName", "fieldName" ],
-        "addField" : { },
-        "transformType" : "add_field",
-        "removeField" : { }
-      } ]
-    } ]
-  },
-  "resourceRequirements" : {
-    "cpu_limit" : "cpu_limit",
-    "memory_request" : "memory_request",
-    "memory_limit" : "memory_limit",
-    "cpu_request" : "cpu_request"
-  },
-  "schedule" : {
-    "units" : 0,
-    "timeUnit" : "minutes"
-  },
-  "operations" : [ {
-    "name" : "name",
-    "operationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "operatorConfiguration" : {
-      "normalization" : {
-        "option" : "basic"
-      },
-      "dbt" : {
-        "gitRepoBranch" : "gitRepoBranch",
-        "dockerImage" : "dockerImage",
-        "dbtArguments" : "dbtArguments",
-        "gitRepoUrl" : "gitRepoUrl"
-      }
-    },
-    "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-  }, {
-    "name" : "name",
-    "operationId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "operatorConfiguration" : {
-      "normalization" : {
-        "option" : "basic"
-      },
-      "dbt" : {
-        "gitRepoBranch" : "gitRepoBranch",
-        "dockerImage" : "dockerImage",
-        "dbtArguments" : "dbtArguments",
-        "gitRepoUrl" : "gitRepoUrl"
-      }
-    },
-    "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-  } ],
-  "catalogId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "name" : "name",
-  "syncCatalog" : {
-    "streams" : [ {
-      "stream" : {
-        "sourceDefinedPrimaryKey" : [ [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ], [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ] ],
-        "supportedSyncModes" : [ null, null ],
-        "sourceDefinedCursor" : true,
-        "name" : "name",
-        "namespace" : "namespace",
-        "defaultCursorField" : [ "defaultCursorField", "defaultCursorField" ]
-      },
-      "config" : {
-        "aliasName" : "aliasName",
-        "cursorField" : [ "cursorField", "cursorField" ],
-        "selected" : true,
-        "primaryKey" : [ [ "primaryKey", "primaryKey" ], [ "primaryKey", "primaryKey" ] ]
-      }
-    }, {
-      "stream" : {
-        "sourceDefinedPrimaryKey" : [ [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ], [ "sourceDefinedPrimaryKey", "sourceDefinedPrimaryKey" ] ],
-        "supportedSyncModes" : [ null, null ],
-        "sourceDefinedCursor" : true,
-        "name" : "name",
-        "namespace" : "namespace",
-        "defaultCursorField" : [ "defaultCursorField", "defaultCursorField" ]
-      },
-      "config" : {
-        "aliasName" : "aliasName",
-        "cursorField" : [ "cursorField", "cursorField" ],
-        "selected" : true,
-        "primaryKey" : [ [ "primaryKey", "primaryKey" ], [ "primaryKey", "primaryKey" ] ]
-      }
-    } ]
-  },
-  "connectionId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "namespaceFormat" : "${SOURCE_NAMESPACE}",
-  "operationIds" : [ null, null ]
-}
- -

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

-

200

Successful operation (WebBackendConnectionRead)

422

Input failed validation (InvalidInputExceptionInfo)
-
-
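
The sketch below shows one possible way to rename a connection through this endpoint. It assumes `current` is a WebBackendConnectionRead object fetched earlier from the API, that the update must echo back the connectionId, syncCatalog and status fields, and that the base URL and lack of authentication match a local deployment; none of these assumptions are guaranteed by this reference.

```python
import requests

API_ROOT = "http://localhost:8000/api"  # assumed local deployment


def rename_connection(current: dict, new_name: str) -> dict:
    """Re-submit a previously fetched connection with only the name changed."""
    body = {
        "connectionId": current["connectionId"],
        "syncCatalog": current["syncCatalog"],
        "status": current.get("status", "active"),  # assumed required field
        "name": new_name,
    }
    resp = requests.post(f"{API_ROOT}/v1/web_backend/connections/update", json=body)
    resp.raise_for_status()
    return resp.json()
```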

Workspace

-
-
- Up -
post /v1/workspaces/create
-
Creates a workspace (createWorkspace)
-
- - -

Consumes

This API call consumes the following media types via the Content-Type request header:

  • application/json

Request body

-
-
WorkspaceCreate (required)

Body Parameter
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "news" : true,
-  "displaySetupWizard" : true,
-  "initialSetupComplete" : true,
-  "anonymousDataCollection" : true,
-  "customerId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "name" : "name",
-  "firstCompletedSync" : true,
-  "feedbackDone" : true,
-  "email" : "email",
-  "slug" : "slug",
-  "securityUpdates" : true,
-  "notifications" : [ {
-    "slackConfiguration" : {
-      "webhook" : "webhook"
-    },
-    "sendOnSuccess" : false,
-    "sendOnFailure" : true,
-    "customerioConfiguration" : "{}"
-  }, {
-    "slackConfiguration" : {
-      "webhook" : "webhook"
-    },
-    "sendOnSuccess" : false,
-    "sendOnFailure" : true,
-    "customerioConfiguration" : "{}"
-  } ],
-  "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-}
- -

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

-

200

Successful operation (WorkspaceRead)

422

Input failed validation (InvalidInputExceptionInfo)
-
-
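
A minimal sketch of the create call, assuming a local deployment reachable at http://localhost:8000/api with no authentication; the workspace name and email are placeholder values.

```python
import requests

API_ROOT = "http://localhost:8000/api"  # assumed local deployment

resp = requests.post(
    f"{API_ROOT}/v1/workspaces/create",
    json={"name": "analytics", "email": "data-team@example.com"},  # placeholder values
)
resp.raise_for_status()
workspace_id = resp.json()["workspaceId"]
print("created workspace", workspace_id)
```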
-
- Up -
post /v1/workspaces/delete
-
Deletes a workspace (deleteWorkspace)
-
- - -

Consumes

This API call consumes the following media types via the Content-Type request header:

  • application/json

Request body

-
-
WorkspaceIdRequestBody (required)

Body Parameter
- - - - - - - - -

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

-

204

The resource was deleted successfully.

404

Object with given id was not found (NotFoundKnownExceptionInfo)

422

Input failed validation (InvalidInputExceptionInfo)
-
-
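
A hedged sketch of the delete call; the workspace id is a placeholder UUID and the base URL is an assumption about a local deployment.

```python
import requests

API_ROOT = "http://localhost:8000/api"                  # assumed local deployment
WORKSPACE_ID = "00000000-0000-0000-0000-000000000000"   # placeholder UUID

resp = requests.post(f"{API_ROOT}/v1/workspaces/delete", json={"workspaceId": WORKSPACE_ID})
resp.raise_for_status()  # a 204 with an empty body indicates success
```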
-
- Up -
post /v1/workspaces/get
-
Find workspace by ID (getWorkspace)
-
- - -

Consumes

This API call consumes the following media types via the Content-Type request header:

  • application/json

Request body

-
-
WorkspaceIdRequestBody (required)

Body Parameter
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "news" : true,
-  "displaySetupWizard" : true,
-  "initialSetupComplete" : true,
-  "anonymousDataCollection" : true,
-  "customerId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "name" : "name",
-  "firstCompletedSync" : true,
-  "feedbackDone" : true,
-  "email" : "email",
-  "slug" : "slug",
-  "securityUpdates" : true,
-  "notifications" : [ {
-    "slackConfiguration" : {
-      "webhook" : "webhook"
-    },
-    "sendOnSuccess" : false,
-    "sendOnFailure" : true,
-    "customerioConfiguration" : "{}"
-  }, {
-    "slackConfiguration" : {
-      "webhook" : "webhook"
-    },
-    "sendOnSuccess" : false,
-    "sendOnFailure" : true,
-    "customerioConfiguration" : "{}"
-  } ],
-  "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-}
- -

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

-

200

Successful operation (WorkspaceRead)

404

Object with given id was not found (NotFoundKnownExceptionInfo)

422

Input failed validation (InvalidInputExceptionInfo)
-
-
-
- Up -
post /v1/workspaces/get_by_slug
-
Find workspace by slug (getWorkspaceBySlug)
-
- - -

Consumes

This API call consumes the following media types via the Content-Type request header:

  • application/json

Request body

-
-
SlugRequestBody (required)

Body Parameter
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "news" : true,
-  "displaySetupWizard" : true,
-  "initialSetupComplete" : true,
-  "anonymousDataCollection" : true,
-  "customerId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "name" : "name",
-  "firstCompletedSync" : true,
-  "feedbackDone" : true,
-  "email" : "email",
-  "slug" : "slug",
-  "securityUpdates" : true,
-  "notifications" : [ {
-    "slackConfiguration" : {
-      "webhook" : "webhook"
-    },
-    "sendOnSuccess" : false,
-    "sendOnFailure" : true,
-    "customerioConfiguration" : "{}"
-  }, {
-    "slackConfiguration" : {
-      "webhook" : "webhook"
-    },
-    "sendOnSuccess" : false,
-    "sendOnFailure" : true,
-    "customerioConfiguration" : "{}"
-  } ],
-  "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-}
- -

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

-

200

Successful operation (WorkspaceRead)

404

Object with given id was not found (NotFoundKnownExceptionInfo)

422

Input failed validation (InvalidInputExceptionInfo)
-
-
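
A short sketch of the lookup by slug, under the same local-deployment assumptions as above; the slug value is a placeholder.

```python
import requests

API_ROOT = "http://localhost:8000/api"  # assumed local deployment

resp = requests.post(f"{API_ROOT}/v1/workspaces/get_by_slug", json={"slug": "default"})  # placeholder slug
resp.raise_for_status()
workspace = resp.json()
print(workspace["workspaceId"], workspace["name"])
```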
-
- Up -
post /v1/workspaces/list
-
List all workspaces registered in the current Airbyte deployment (listWorkspaces)
-
- - - - - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "workspaces" : [ {
-    "news" : true,
-    "displaySetupWizard" : true,
-    "initialSetupComplete" : true,
-    "anonymousDataCollection" : true,
-    "customerId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "name" : "name",
-    "firstCompletedSync" : true,
-    "feedbackDone" : true,
-    "email" : "email",
-    "slug" : "slug",
-    "securityUpdates" : true,
-    "notifications" : [ {
-      "slackConfiguration" : {
-        "webhook" : "webhook"
-      },
-      "sendOnSuccess" : false,
-      "sendOnFailure" : true,
-      "customerioConfiguration" : "{}"
-    }, {
-      "slackConfiguration" : {
-        "webhook" : "webhook"
-      },
-      "sendOnSuccess" : false,
-      "sendOnFailure" : true,
-      "customerioConfiguration" : "{}"
-    } ],
-    "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-  }, {
-    "news" : true,
-    "displaySetupWizard" : true,
-    "initialSetupComplete" : true,
-    "anonymousDataCollection" : true,
-    "customerId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-    "name" : "name",
-    "firstCompletedSync" : true,
-    "feedbackDone" : true,
-    "email" : "email",
-    "slug" : "slug",
-    "securityUpdates" : true,
-    "notifications" : [ {
-      "slackConfiguration" : {
-        "webhook" : "webhook"
-      },
-      "sendOnSuccess" : false,
-      "sendOnFailure" : true,
-      "customerioConfiguration" : "{}"
-    }, {
-      "slackConfiguration" : {
-        "webhook" : "webhook"
-      },
-      "sendOnSuccess" : false,
-      "sendOnFailure" : true,
-      "customerioConfiguration" : "{}"
-    } ],
-    "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-  } ]
-}
- -

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

-

200

Successful operation (WorkspaceReadList)
-
-
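
A sketch of listing workspaces; no request body is documented for this endpoint, so the POST is sent without one. The base URL is assumed, as above.

```python
import requests

API_ROOT = "http://localhost:8000/api"  # assumed local deployment

# No request body is documented for this endpoint.
resp = requests.post(f"{API_ROOT}/v1/workspaces/list")
resp.raise_for_status()
for workspace in resp.json()["workspaces"]:
    print(workspace["workspaceId"], workspace["slug"], workspace["name"])
```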
-
- Up -
post /v1/workspaces/update
-
Update workspace state (updateWorkspace)
-
- - -

Consumes

This API call consumes the following media types via the Content-Type request header:

  • application/json

Request body

-
-
WorkspaceUpdate (required)

Body Parameter
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "news" : true,
-  "displaySetupWizard" : true,
-  "initialSetupComplete" : true,
-  "anonymousDataCollection" : true,
-  "customerId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "name" : "name",
-  "firstCompletedSync" : true,
-  "feedbackDone" : true,
-  "email" : "email",
-  "slug" : "slug",
-  "securityUpdates" : true,
-  "notifications" : [ {
-    "slackConfiguration" : {
-      "webhook" : "webhook"
-    },
-    "sendOnSuccess" : false,
-    "sendOnFailure" : true,
-    "customerioConfiguration" : "{}"
-  }, {
-    "slackConfiguration" : {
-      "webhook" : "webhook"
-    },
-    "sendOnSuccess" : false,
-    "sendOnFailure" : true,
-    "customerioConfiguration" : "{}"
-  } ],
-  "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-}
- -

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

-

200

Successful operation (WorkspaceRead)

404

Object with given id was not found (NotFoundKnownExceptionInfo)

422

Input failed validation (InvalidInputExceptionInfo)
-
-
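
A hedged sketch of a workspace update. Which WorkspaceUpdate fields are required is defined by the WorkspaceUpdate model later in this document; the preference flags, placeholder UUID, and webhook URL below are illustrative only, and the base URL is an assumption about a local deployment.

```python
import requests

API_ROOT = "http://localhost:8000/api"                  # assumed local deployment
WORKSPACE_ID = "00000000-0000-0000-0000-000000000000"   # placeholder UUID

update_body = {
    "workspaceId": WORKSPACE_ID,
    # Preference flags shown here are illustrative values.
    "initialSetupComplete": True,
    "anonymousDataCollection": False,
    "news": False,
    "securityUpdates": True,
    "notifications": [
        {
            "notificationType": "slack",
            "sendOnSuccess": False,
            "sendOnFailure": True,
            "slackConfiguration": {"webhook": "https://hooks.slack.com/services/..."},
        }
    ],
}

resp = requests.post(f"{API_ROOT}/v1/workspaces/update", json=update_body)
resp.raise_for_status()
```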
-
- Up -
post /v1/workspaces/tag_feedback_status_as_done
-
Update workspace feedback state (updateWorkspaceFeedback)
-
- - -

Consumes

This API call consumes the following media types via the Content-Type request header:

  • application/json

Request body

-
-
WorkspaceGiveFeedback (required)

Body Parameter
- - - - - - - - -

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

-

204

The feedback state has been properly updated.

404

Object with given id was not found (NotFoundKnownExceptionInfo)
-
-
-
- Up -
post /v1/workspaces/update_name
-
Update workspace name (updateWorkspaceName)
-
- - -

Consumes

This API call consumes the following media types via the Content-Type request header:

  • application/json

Request body

-
-
WorkspaceUpdateName (required)

Body Parameter
- - - - -

Return type

- - - - -

Example data

-
Content-Type: application/json
-
{
-  "news" : true,
-  "displaySetupWizard" : true,
-  "initialSetupComplete" : true,
-  "anonymousDataCollection" : true,
-  "customerId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91",
-  "name" : "name",
-  "firstCompletedSync" : true,
-  "feedbackDone" : true,
-  "email" : "email",
-  "slug" : "slug",
-  "securityUpdates" : true,
-  "notifications" : [ {
-    "slackConfiguration" : {
-      "webhook" : "webhook"
-    },
-    "sendOnSuccess" : false,
-    "sendOnFailure" : true,
-    "customerioConfiguration" : "{}"
-  }, {
-    "slackConfiguration" : {
-      "webhook" : "webhook"
-    },
-    "sendOnSuccess" : false,
-    "sendOnFailure" : true,
-    "customerioConfiguration" : "{}"
-  } ],
-  "workspaceId" : "046b6c7f-0b8a-43b9-b35d-6489e6daee91"
-}
- -

Produces

This API call produces the following media types according to the Accept request header; the media type will be conveyed by the Content-Type response header.

  • application/json

Responses

-

200

Successful operation (WorkspaceRead)

404

Object with given id was not found (NotFoundKnownExceptionInfo)

422

Input failed validation (InvalidInputExceptionInfo)
-
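
A minimal sketch of the rename call; the workspace id and new name are placeholders, and the base URL is assumed as above.

```python
import requests

API_ROOT = "http://localhost:8000/api"  # assumed local deployment

resp = requests.post(
    f"{API_ROOT}/v1/workspaces/update_name",
    json={
        "workspaceId": "00000000-0000-0000-0000-000000000000",  # placeholder UUID
        "name": "marketing",                                     # placeholder name
    },
)
resp.raise_for_status()
```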
- -

Models

[ Jump to Methods ]

Table of Contents

  1. ActorDefinitionResourceRequirements
  2. AdvancedAuth
  3. AirbyteCatalog
  4. AirbyteStream
  5. AirbyteStreamAndConfiguration
  6. AirbyteStreamConfiguration
  7. AttemptFailureOrigin
  8. AttemptFailureReason
  9. AttemptFailureSummary
  10. AttemptFailureType
  11. AttemptInfoRead
  12. AttemptRead
  13. AttemptStats
  14. AttemptStatus
  15. AttemptStreamStats
  16. AuthSpecification
  17. CatalogDiff
  18. CheckConnectionRead
  19. CheckOperationRead
  20. CompleteDestinationOAuthRequest
  21. CompleteSourceOauthRequest
  22. ConnectionCreate
  23. ConnectionIdRequestBody
  24. ConnectionRead
  25. ConnectionReadList
  26. ConnectionSchedule
  27. ConnectionSearch
  28. ConnectionState
  29. ConnectionStateType
  30. ConnectionStatus
  31. ConnectionUpdate
  32. CustomDestinationDefinitionCreate
  33. CustomDestinationDefinitionUpdate
  34. CustomSourceDefinitionCreate
  35. CustomSourceDefinitionUpdate
  36. DataType
  37. DbMigrationExecutionRead
  38. DbMigrationRead
  39. DbMigrationReadList
  40. DbMigrationRequestBody
  41. DbMigrationState
  42. DestinationCloneConfiguration
  43. DestinationCloneRequestBody
  44. DestinationCoreConfig
  45. DestinationCreate
  46. DestinationDefinitionCreate
  47. DestinationDefinitionIdRequestBody
  48. DestinationDefinitionIdWithWorkspaceId
  49. DestinationDefinitionRead
  50. DestinationDefinitionReadList
  51. DestinationDefinitionSpecificationRead
  52. DestinationDefinitionUpdate
  53. DestinationIdRequestBody
  54. DestinationOauthConsentRequest
  55. DestinationRead
  56. DestinationReadList
  57. DestinationSearch
  58. DestinationSyncMode
  59. DestinationUpdate
  60. FieldAdd
  61. FieldRemove
  62. FieldSchemaUpdate
  63. FieldTransform
  64. GlobalState
  65. HealthCheckRead
  66. ImportRead
  67. ImportRequestBody
  68. InvalidInputExceptionInfo
  69. InvalidInputProperty
  70. JobConfigType
  71. JobDebugInfoRead
  72. JobDebugRead
  73. JobIdRequestBody
  74. JobInfoRead
  75. JobListRequestBody
  76. JobRead
  77. JobReadList
  78. JobStatus
  79. JobType
  80. JobTypeResourceLimit
  81. JobWithAttemptsRead
  82. KnownExceptionInfo
  83. LogRead
  84. LogType
  85. LogsRequestBody
  86. NamespaceDefinitionType
  87. NotFoundKnownExceptionInfo
  88. Notification
  89. NotificationRead
  90. NotificationType
  91. OAuth2Specification
  92. OAuthConfigSpecification
  93. OAuthConsentRead
  94. OperationCreate
  95. OperationIdRequestBody
  96. OperationRead
  97. OperationReadList
  98. OperationUpdate
  99. OperatorConfiguration
  100. OperatorDbt
  101. OperatorNormalization
  102. OperatorType
  103. Pagination
  104. PrivateDestinationDefinitionRead
  105. PrivateDestinationDefinitionReadList
  106. PrivateSourceDefinitionRead
  107. PrivateSourceDefinitionReadList
  108. ReleaseStage
  109. ResetConfig
  110. ResourceRequirements
  111. SetInstancewideDestinationOauthParamsRequestBody
  112. SetInstancewideSourceOauthParamsRequestBody
  113. SlackNotificationConfiguration
  114. SlugRequestBody
  115. SourceCloneConfiguration
  116. SourceCloneRequestBody
  117. SourceCoreConfig
  118. SourceCreate
  119. SourceDefinitionCreate
  120. SourceDefinitionIdRequestBody
  121. SourceDefinitionIdWithWorkspaceId
  122. SourceDefinitionRead
  123. SourceDefinitionReadList
  124. SourceDefinitionSpecificationRead
  125. SourceDefinitionUpdate
  126. SourceDiscoverSchemaRead
  127. SourceDiscoverSchemaRequestBody
  128. SourceIdRequestBody
  129. SourceOauthConsentRequest
  130. SourceRead
  131. SourceReadList
  132. SourceSearch
  133. SourceUpdate
  134. StreamDescriptor
  135. StreamState
  136. StreamTransform
  137. SyncMode
  138. SynchronousJobRead
  139. UploadRead
  140. WebBackendConnectionCreate
  141. WebBackendConnectionRead
  142. WebBackendConnectionReadList
  143. WebBackendConnectionRequestBody
  144. WebBackendConnectionSearch
  145. WebBackendConnectionUpdate
  146. WebBackendOperationCreateOrUpdate
  147. WebBackendWorkspaceState
  148. WebBackendWorkspaceStateResult
  149. WorkspaceCreate
  150. WorkspaceGiveFeedback
  151. WorkspaceIdRequestBody
  152. WorkspaceRead
  153. WorkspaceReadList
  154. WorkspaceUpdate
  155. WorkspaceUpdateName
-

ActorDefinitionResourceRequirements - Up

-
Actor-definition-specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition; they are overridden by the job-type-specific configurations. If not set, the platform will use defaults. These values are in turn overridden by configuration at the connection level.
-
-
default (optional)
-
jobSpecific (optional)
-
-
-
-

AdvancedAuth - Up

-
-
-
authFlowType (optional)
-
Enum:
-
oauth2.0
oauth1.0
-
predicateKey (optional)
array[String] JSON Path to a field in the connectorSpecification that should exist for the advanced auth to be applicable.
-
predicateValue (optional)
String Value of the predicate_key fields for the advanced auth to be applicable.
-
oauthConfigSpecification (optional)
-
-
-
-

AirbyteCatalog - Up

-
describes the available schema (catalog).
- -
-
-

AirbyteStream - Up

-
the immutable schema defined by the source
-
-
name
String Stream's name.
-
jsonSchema (optional)
-
supportedSyncModes (optional)
-
sourceDefinedCursor (optional)
Boolean If the source defines the cursor field, then any other cursor field inputs will be ignored. If it does not, either the user_provided one is used, or the default one is used as a backup.
-
defaultCursorField (optional)
array[String] Path to the field that will be used to determine if a record is new or modified since the last sync. If not provided by the source, the end user will have to specify the cursor field themselves.
-
sourceDefinedPrimaryKey (optional)
array[array[String]] If the source defines the primary key, paths to the fields that will be used as a primary key. If not provided by the source, the end user will have to specify the primary key themselves.
-
namespace (optional)
String Optional Source-defined namespace. Airbyte streams from the same sources should have the same namespace. Currently only used by JDBC destinations to determine what schema to write to.
-
-
-
-

AirbyteStreamAndConfiguration - Up

-
Each stream is split into two parts: the immutable schema from the source and the mutable configuration for the destination.
-
-
stream (optional)
-
config (optional)
-
-
-
-

AirbyteStreamConfiguration - Up

-
The mutable part of the stream, used to configure the destination; see the example after the field list below.
-
-
syncMode
-
cursorField (optional)
array[String] Path to the field that will be used to determine if a record is new or modified since the last sync. This field is REQUIRED if sync_mode is incremental. Otherwise it is ignored.
-
destinationSyncMode
-
primaryKey (optional)
array[array[String]] Paths to the fields that will be used as primary key. This field is REQUIRED if destination_sync_mode is *_dedup. Otherwise it is ignored.
-
aliasName (optional)
String Alias name to the stream to be used in the destination
-
selected (optional)
-
-
-
-
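
For orientation, here is an illustrative configuration object (all values are placeholders): an incremental sync mode requires cursorField, and a *_dedup destination sync mode requires primaryKey, as described above.

```python
# Illustrative stream configuration (all values are placeholders).
stream_config = {
    "syncMode": "incremental",               # incremental requires cursorField
    "cursorField": ["updated_at"],
    "destinationSyncMode": "append_dedup",   # *_dedup requires primaryKey
    "primaryKey": [["id"]],
    "aliasName": "orders",
    "selected": True,
}
```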

AttemptFailureOrigin - Up

-
Indicates where the error originated. If not set, the origin of error is not well known.
-
-
-
-
-

AttemptFailureReason - Up

-
-
-
failureOrigin (optional)
-
failureType (optional)
-
externalMessage (optional)
-
internalMessage (optional)
-
stacktrace (optional)
-
retryable (optional)
Boolean True if it is known that retrying may succeed, e.g. for a transient failure. False if it is known that a retry will not succeed, e.g. for a configuration issue. If not set, retryable status is not well known.
-
timestamp
Long format: int64
-
-
-
-

AttemptFailureSummary - Up

-
-
-
failures
-
partialSuccess (optional)
Boolean True if the number of committed records for this attempt was greater than 0. False if 0 records were committed. If not set, the number of committed records is unknown.
-
-
-
-

AttemptFailureType - Up

-
Categorizes well known errors into types for programmatic handling. If not set, the type of error is not well known.
-
-
-
-
-

AttemptInfoRead - Up

-
-
-
attempt
-
logs
-
-
-
-

AttemptRead - Up

-
-
-
id
Long format: int64
-
status
-
createdAt
Long format: int64
-
updatedAt
Long format: int64
-
endedAt (optional)
Long format: int64
-
bytesSynced (optional)
Long format: int64
-
recordsSynced (optional)
Long format: int64
-
totalStats (optional)
-
streamStats (optional)
-
failureSummary (optional)
-
-
-
-

AttemptStats - Up

-
-
-
recordsEmitted (optional)
Long format: int64
-
bytesEmitted (optional)
Long format: int64
-
stateMessagesEmitted (optional)
Long format: int64
-
recordsCommitted (optional)
Long format: int64
-
-
-
-

AttemptStatus - Up

-
-
-
-
-
-

AttemptStreamStats - Up

-
-
-
streamName
-
stats
-
-
-
-

AuthSpecification - Up

-
-
-
auth_type (optional)
-
Enum:
-
oauth2.0
-
oauth2Specification (optional)
-
-
-
-

CatalogDiff - Up

-
Describes the difference between two Airbyte catalogs.
-
-
transforms
array[StreamTransform] list of stream transformations. order does not matter.
-
-
-
-

CheckConnectionRead - Up

-
-
-
status
-
Enum:
-
succeeded
failed
-
message (optional)
-
jobInfo
-
-
-
-

CheckOperationRead - Up

-
-
-
status
-
Enum:
-
succeeded
failed
-
message (optional)
-
-
-
-

CompleteDestinationOAuthRequest - Up

-
-
-
destinationDefinitionId
UUID format: uuid
-
workspaceId
UUID format: uuid
-
redirectUrl (optional)
String When completing OAuth flow to gain an access token, some API sometimes requires to verify that the app re-send the redirectUrl that was used when consent was given.
-
queryParams (optional)
map[String, Object] The query parameters present in the redirect URL after a user granted consent e.g auth code
-
oAuthInputConfiguration (optional)
-
-
-
-

CompleteSourceOauthRequest - Up

-
-
-
sourceDefinitionId
UUID format: uuid
-
workspaceId
UUID format: uuid
-
redirectUrl (optional)
String When completing OAuth flow to gain an access token, some API sometimes requires to verify that the app re-send the redirectUrl that was used when consent was given.
-
queryParams (optional)
map[String, Object] The query parameters present in the redirect URL after a user granted consent e.g auth code
-
oAuthInputConfiguration (optional)
-
-
-
-

ConnectionCreate - Up

-
-
-
name (optional)
String Optional name of the connection
-
namespaceDefinition (optional)
-
namespaceFormat (optional)
String Used when namespaceDefinition is 'customformat'. If blank then behaves like namespaceDefinition = 'destination'. If "${SOURCE_NAMESPACE}" then behaves like namespaceDefinition = 'source'.
-
prefix (optional)
String Prefix that will be prepended to the name of each stream when it is written to the destination.
-
sourceId
UUID format: uuid
-
destinationId
UUID format: uuid
-
operationIds (optional)
array[UUID] format: uuid
-
syncCatalog (optional)
-
schedule (optional)
-
status
-
resourceRequirements (optional)
-
sourceCatalogId (optional)
UUID format: uuid
-
-
-
-

ConnectionIdRequestBody - Up

-
-
-
connectionId
UUID format: uuid
-
-
-
-

ConnectionRead - Up

-
-
-
connectionId
UUID format: uuid
-
name
-
namespaceDefinition (optional)
-
namespaceFormat (optional)
String Used when namespaceDefinition is 'customformat'. If blank then behaves like namespaceDefinition = 'destination'. If "${SOURCE_NAMESPACE}" then behaves like namespaceDefinition = 'source'.
-
prefix (optional)
String Prefix that will be prepended to the name of each stream when it is written to the destination.
-
sourceId
UUID format: uuid
-
destinationId
UUID format: uuid
-
operationIds (optional)
array[UUID] format: uuid
-
syncCatalog
-
schedule (optional)
-
status
-
resourceRequirements (optional)
-
sourceCatalogId (optional)
UUID format: uuid
-
-
- -
-

ConnectionSchedule - Up

-
If null, then no schedule is set. See the example after the field list below.
-
-
units
Long format: int64
-
timeUnit
-
Enum:
-
minutes
hours
days
weeks
months
-
-
-
-
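
An illustrative schedule that runs the connection every 6 hours; as noted above, a null schedule means no schedule is set.

```python
# Illustrative schedule: run the connection every 6 hours.
schedule = {"units": 6, "timeUnit": "hours"}
```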

ConnectionSearch - Up

-
-
-
connectionId (optional)
UUID format: uuid
-
name (optional)
-
namespaceDefinition (optional)
-
namespaceFormat (optional)
String Used when namespaceDefinition is 'customformat'. If blank then behaves like namespaceDefinition = 'destination'. If "${SOURCE_NAMESPACE}" then behaves like namespaceDefinition = 'source'.
-
prefix (optional)
String Prefix that will be prepended to the name of each stream when it is written to the destination.
-
sourceId (optional)
UUID format: uuid
-
destinationId (optional)
UUID format: uuid
-
schedule (optional)
-
status (optional)
-
source (optional)
-
destination (optional)
-
-
-
-

ConnectionState - Up

-
Contains the state for a connection. The stateType field identifies what type of state it is. Only the field corresponding to that type will be set, the rest will be null. If stateType=not_set, then none of the fields will be set.
-
-
stateType
-
connectionId
UUID format: uuid
-
state (optional)
-
streamState (optional)
-
globalState (optional)
-
-
- -
-

ConnectionStatus - Up

-
Active means that data is flowing through the connection. Inactive means it is not. Deprecated means the connection is off and cannot be re-activated. The schema field describes the elements of the schema that will be synced.
-
-
-
-
-

ConnectionUpdate - Up

-
-
-
connectionId
UUID format: uuid
-
namespaceDefinition (optional)
-
namespaceFormat (optional)
String Used when namespaceDefinition is 'customformat'. If blank then behaves like namespaceDefinition = 'destination'. If "${SOURCE_NAMESPACE}" then behaves like namespaceDefinition = 'source'.
-
name (optional)
String Name that will be set to this connection
-
prefix (optional)
String Prefix that will be prepended to the name of each stream when it is written to the destination.
-
operationIds (optional)
array[UUID] format: uuid
-
syncCatalog
-
schedule (optional)
-
status
-
resourceRequirements (optional)
-
sourceCatalogId (optional)
UUID format: uuid
-
-
-
-

CustomDestinationDefinitionCreate - Up

-
-
-
workspaceId
UUID format: uuid
-
destinationDefinition
-
-
-
-

CustomDestinationDefinitionUpdate - Up

-
-
-
workspaceId
UUID format: uuid
-
destinationDefinition
-
-
-
-

CustomSourceDefinitionCreate - Up

-
-
-
workspaceId
UUID format: uuid
-
sourceDefinition
-
-
-
-

CustomSourceDefinitionUpdate - Up

-
-
-
workspaceId
UUID format: uuid
-
sourceDefinition
-
-
-
-

DataType - Up

-
-
-
-
-
-

DbMigrationExecutionRead - Up

-
-
-
initialVersion (optional)
-
targetVersion (optional)
-
executedMigrations (optional)
-
-
-
-

DbMigrationRead - Up

-
-
-
migrationType
-
migrationVersion
-
migrationDescription
-
migrationState (optional)
-
migratedBy (optional)
-
migratedAt (optional)
Long format: int64
-
migrationScript (optional)
-
-
-
-

DbMigrationReadList - Up

-
-
-
migrations (optional)
-
-
-
-

DbMigrationRequestBody - Up

-
-
-
database
-
-
- -
-

DestinationCloneConfiguration - Up

-
-
-
connectionConfiguration (optional)
-
name (optional)
-
-
-
-

DestinationCloneRequestBody - Up

-
The values required to configure the destination. The request should contain the id of the existing destination along with the configuration you want to change, if any.
-
-
destinationCloneId
UUID format: uuid
-
destinationConfiguration (optional)
-
-
-
-

DestinationCoreConfig - Up

-
-
-
destinationDefinitionId
UUID format: uuid
-
connectionConfiguration
-
-
-
-

DestinationCreate - Up

-
-
-
workspaceId
UUID format: uuid
-
name
-
destinationDefinitionId
UUID format: uuid
-
connectionConfiguration
-
-
-
-

DestinationDefinitionCreate - Up

-
-
-
name
-
dockerRepository
-
dockerImageTag
-
documentationUrl
URI format: uri
-
icon (optional)
-
resourceRequirements (optional)
-
-
-
-

DestinationDefinitionIdRequestBody - Up

-
-
-
destinationDefinitionId
UUID format: uuid
-
-
-
-

DestinationDefinitionIdWithWorkspaceId - Up

-
-
-
destinationDefinitionId
UUID format: uuid
-
workspaceId
UUID format: uuid
-
-
-
-

DestinationDefinitionRead - Up

-
-
-
destinationDefinitionId
UUID format: uuid
-
name
-
dockerRepository
-
dockerImageTag
-
documentationUrl
URI format: uri
-
icon (optional)
-
releaseStage (optional)
-
releaseDate (optional)
date The date when this connector was first released, in yyyy-mm-dd format. format: date
-
resourceRequirements (optional)
-
-
- -
-

DestinationDefinitionSpecificationRead - Up

-
-
-
destinationDefinitionId
UUID format: uuid
-
documentationUrl (optional)
-
connectionSpecification (optional)
-
authSpecification (optional)
-
advancedAuth (optional)
-
jobInfo
-
supportedDestinationSyncModes (optional)
-
supportsDbt (optional)
-
supportsNormalization (optional)
-
-
-
-

DestinationDefinitionUpdate - Up

-
-
-
destinationDefinitionId
UUID format: uuid
-
dockerImageTag (optional)
-
resourceRequirements (optional)
-
-
-
-

DestinationIdRequestBody - Up

-
-
-
destinationId
UUID format: uuid
-
-
-
-

DestinationOauthConsentRequest - Up

-
-
-
destinationDefinitionId
UUID format: uuid
-
workspaceId
UUID format: uuid
-
redirectUrl
String The url to redirect to after getting the user consent
-
oAuthInputConfiguration (optional)
-
-
-
-

DestinationRead - Up

-
-
-
destinationDefinitionId
UUID format: uuid
-
destinationId
UUID format: uuid
-
workspaceId
UUID format: uuid
-
connectionConfiguration
-
name
-
destinationName
-
-
- -
-

DestinationSearch - Up

-
-
-
destinationDefinitionId (optional)
UUID format: uuid
-
destinationId (optional)
UUID format: uuid
-
workspaceId (optional)
UUID format: uuid
-
connectionConfiguration (optional)
-
name (optional)
-
destinationName (optional)
-
-
- -
-

DestinationUpdate - Up

-
-
-
destinationId
UUID format: uuid
-
connectionConfiguration
-
name
-
-
-
-

FieldAdd - Up

-
-
-
schema (optional)
-
-
-
-

FieldRemove - Up

-
-
-
schema (optional)
-
-
-
-

FieldSchemaUpdate - Up

-
-
-
oldSchema
-
newSchema
-
-
-
-

FieldTransform - Up

-
Describes the difference between two Streams.
-
-
transformType
-
Enum:
-
add_field
remove_field
update_field_schema
-
fieldName
array[String] A field name is a list of strings that form the path to the field.
-
addField (optional)
-
removeField (optional)
-
updateFieldSchema (optional)
-
-
-
-

GlobalState - Up

-
-
-
shared_state (optional)
-
streamStates
-
-
-
-

HealthCheckRead - Up

-
-
-
available
-
-
-
-

ImportRead - Up

-
-
-
status
-
Enum:
-
succeeded
failed
-
reason (optional)
-
-
-
-

ImportRequestBody - Up

-
-
-
resourceId
UUID format: uuid
-
workspaceId
UUID format: uuid
-
-
-
-

InvalidInputExceptionInfo - Up

-
-
-
message
-
exceptionClassName (optional)
-
exceptionStack (optional)
-
validationErrors
-
-
-
-

InvalidInputProperty - Up

-
-
-
propertyPath
-
invalidValue (optional)
-
message (optional)
-
-
-
-

JobConfigType - Up

-
-
-
-
- -
-

JobDebugRead - Up

-
-
-
id
Long format: int64
-
configType
-
configId
-
status
-
airbyteVersion
-
sourceDefinition
-
destinationDefinition
-
-
-
-

JobIdRequestBody - Up

-
-
-
id
Long format: int64
-
-
-
-

JobInfoRead - Up

-
-
-
job
-
attempts
-
-
-
-

JobListRequestBody - Up

-
-
-
configTypes
-
configId
-
pagination (optional)
-
-
-
-

JobRead - Up

-
-
-
id
Long format: int64
-
configType
-
configId
-
createdAt
Long format: int64
-
updatedAt
Long format: int64
-
status
-
resetConfig (optional)
-
-
- -
-

JobStatus - Up

-
-
-
-
-
-

JobType - Up

-
Enum that describes the different types of jobs that the platform runs.
-
-
-
-
-

JobTypeResourceLimit - Up

-
Sets resource requirements for a specific job type for an actor definition. These values override the default, if both are set.
-
-
jobType
-
resourceRequirements
-
-
-
-

JobWithAttemptsRead - Up

-
-
-
job (optional)
-
attempts (optional)
-
-
-
-

KnownExceptionInfo - Up

-
-
-
message
-
exceptionClassName (optional)
-
exceptionStack (optional)
-
rootCauseExceptionClassName (optional)
-
rootCauseExceptionStack (optional)
-
-
-
-

LogRead - Up

-
-
-
logLines
-
-
-
-

LogType - Up

-
type/source of logs produced
-
-
-
-
-

LogsRequestBody - Up

-
-
-
logType
-
-
-
-

NamespaceDefinitionType - Up

-
Method used for computing final namespace in destination
-
-
-
-
-

NotFoundKnownExceptionInfo - Up

-
-
-
id (optional)
-
message
-
exceptionClassName (optional)
-
exceptionStack (optional)
-
rootCauseExceptionClassName (optional)
-
rootCauseExceptionStack (optional)
-
-
-
-

Notification - Up

-
-
-
notificationType
-
sendOnSuccess
-
sendOnFailure
-
slackConfiguration (optional)
-
customerioConfiguration (optional)
-
-
-
-

NotificationRead - Up

-
-
-
status
-
Enum:
-
succeeded
failed
-
message (optional)
-
-
- -
-

OAuth2Specification - Up

-
An object containing any metadata needed to describe this connector's Oauth flow
-
-
rootObject
array[oas_any_type_not_mapped] A list of strings representing a pointer to the root object which contains any oauth parameters in the ConnectorSpecification. Examples: if oauth parameters were contained at the top level, rootObject=[]; if they were nested inside another object {'credentials': {'app_id' etc...}}, rootObject=['credentials']; if they were inside a oneOf {'switch': {oneOf: [{client_id...}, {non_oauth_param}]}}, rootObject=['switch', 0].
-
oauthFlowInitParameters
array[array[String]] Pointers to the fields in the rootObject needed to obtain the initial refresh/access tokens for the OAuth flow. Each inner array represents the path in the rootObject of the referenced field. For example. Assume the rootObject contains params 'app_secret', 'app_id' which are needed to get the initial refresh token. If they are not nested in the rootObject, then the array would look like this [['app_secret'], ['app_id']] If they are nested inside an object called 'auth_params' then this array would be [['auth_params', 'app_secret'], ['auth_params', 'app_id']]
-
oauthFlowOutputParameters
array[array[String]] Pointers to the fields in the rootObject which can be populated from successfully completing the oauth flow using the init parameters. This is typically a refresh/access token. Each inner array represents the path in the rootObject of the referenced field.
-
-
-
-

OAuthConfigSpecification

- oauthUserInputFromConnectorConfigSpecification (optional)
- completeOAuthOutputSpecification (optional)
- completeOAuthServerInputSpecification (optional)
- completeOAuthServerOutputSpecification (optional)

OAuthConsentRead

- consentUrl

OperationCreate

- workspaceId (UUID, format: uuid)
- name
- operatorConfiguration

OperationIdRequestBody

- operationId (UUID, format: uuid)

OperationRead

- workspaceId (UUID, format: uuid)
- operationId (UUID, format: uuid)
- name
- operatorConfiguration

OperationUpdate

- operationId (UUID, format: uuid)
- name
- operatorConfiguration

OperatorConfiguration

- operatorType
- normalization (optional)
- dbt (optional)

OperatorDbt

- gitRepoUrl
- gitRepoBranch (optional)
- dockerImage (optional)
- dbtArguments (optional)
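
As a sketch, an OperatorConfiguration carrying a custom dbt transformation could be shaped like this; the operator type value, repository URL, branch, and dbt arguments are placeholders:

```python
import json

# Illustrative OperatorConfiguration using the dbt operator described above.
# The "dbt" operatorType value and all field values are assumptions.
dbt_operator = {
    "operatorType": "dbt",
    "dbt": {
        "gitRepoUrl": "https://github.com/example-org/dbt-project.git",
        "gitRepoBranch": "main",
        "dbtArguments": "run --models my_model",
    },
}

print(json.dumps(dbt_operator, indent=2))
```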

OperatorNormalization

- option (optional, Enum: basic)

OperatorType

Pagination

- pageSize (optional)
- rowOffset (optional)

PrivateDestinationDefinitionRead

- destinationDefinition
- granted

PrivateSourceDefinitionRead

- sourceDefinition
- granted

ReleaseStage

ResetConfig

Contains information about how a reset was configured. Only populated if the job was a reset.

- streamsToReset (optional)

ResourceRequirements

Optional resource requirements to run workers (blank for unbounded allocations).

- cpu_request (optional)
- cpu_limit (optional)
- memory_request (optional)
- memory_limit (optional)
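
The sketch below shows a default ResourceRequirements object next to a JobTypeResourceLimit that overrides it for one job type, per the override behavior described earlier; the job type value and the request/limit values are illustrative assumptions:

```python
import json

# Default worker requirements plus a job-type-specific override.
# The "sync" JobType value and the Kubernetes-style quantities are placeholders.
default_requirements = {
    "cpu_request": "0.5",
    "cpu_limit": "1",
    "memory_request": "512Mi",
    "memory_limit": "1Gi",
}

sync_override = {
    "jobType": "sync",
    "resourceRequirements": {
        "cpu_limit": "2",       # overrides the default cpu_limit when both are set
        "memory_limit": "2Gi",  # overrides the default memory_limit when both are set
    },
}

print(json.dumps({"default": default_requirements, "jobSpecific": [sync_override]}, indent=2))
```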

SetInstancewideDestinationOauthParamsRequestBody

- destinationDefinitionId (UUID, format: uuid)
- params

SetInstancewideSourceOauthParamsRequestBody

- sourceDefinitionId (UUID, format: uuid)
- params

SlugRequestBody

- slug

SourceCloneConfiguration

- connectionConfiguration (optional)
- name (optional)

SourceCloneRequestBody

The values required to configure the source. The schema for this should include the id of the existing source along with the configuration you want to change.

- sourceCloneId (UUID, format: uuid)
- sourceConfiguration (optional)

SourceCoreConfig

- sourceDefinitionId (UUID, format: uuid)
- connectionConfiguration

SourceCreate

- sourceDefinitionId (UUID, format: uuid)
- connectionConfiguration
- workspaceId (UUID, format: uuid)
- name
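
A rough sketch of submitting a SourceCreate payload to the configuration API follows; the host, endpoint path, ids, and the connectionConfiguration fields are assumptions for illustration, not values taken from this reference:

```python
import requests

# Illustrative SourceCreate request. Host, path, ids, and configuration
# fields are placeholders; the connector-specific connectionConfiguration
# depends on the source definition being used.
payload = {
    "sourceDefinitionId": "00000000-0000-0000-0000-000000000001",
    "workspaceId": "00000000-0000-0000-0000-000000000002",
    "name": "My Postgres Source",
    "connectionConfiguration": {
        "host": "db.example.com",
        "port": 5432,
        "database": "analytics",
        "username": "airbyte",
        "password": "********",
    },
}

response = requests.post("http://localhost:8000/api/v1/sources/create", json=payload)
response.raise_for_status()
print(response.json().get("sourceId"))  # a SourceRead-shaped body is expected on success
```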

SourceDefinitionCreate

- name
- dockerRepository
- dockerImageTag
- documentationUrl (URI, format: uri)
- icon (optional)
- resourceRequirements (optional)

SourceDefinitionIdRequestBody

- sourceDefinitionId (UUID, format: uuid)

SourceDefinitionIdWithWorkspaceId

- sourceDefinitionId (UUID, format: uuid)
- workspaceId (UUID, format: uuid)

SourceDefinitionRead

- sourceDefinitionId (UUID, format: uuid)
- name
- dockerRepository
- dockerImageTag
- documentationUrl (optional, URI, format: uri)
- icon (optional)
- releaseStage (optional)
- releaseDate (optional, date, format: date): The date when this connector was first released, in yyyy-mm-dd format.
- resourceRequirements (optional)

SourceDefinitionSpecificationRead

- sourceDefinitionId (UUID, format: uuid)
- documentationUrl (optional)
- connectionSpecification (optional)
- authSpecification (optional)
- advancedAuth (optional)
- jobInfo

SourceDefinitionUpdate

Update the SourceDefinition. Currently, the only allowed attribute to update is the default docker image version.

- sourceDefinitionId (UUID, format: uuid)
- dockerImageTag
- resourceRequirements (optional)
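
For instance, bumping the default docker image version of one source definition reduces to a payload like the sketch below; the definition id and image tag are placeholders:

```python
import json

# Illustrative SourceDefinitionUpdate: change only the default docker image tag.
source_definition_update = {
    "sourceDefinitionId": "00000000-0000-0000-0000-000000000003",  # placeholder id
    "dockerImageTag": "1.2.3",                                     # placeholder version
}

print(json.dumps(source_definition_update, indent=2))
```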

SourceDiscoverSchemaRead

Returns the results of a discover catalog job. If the job was not successful, the catalog field will not be present. jobInfo will always be present, and its status can be used to determine whether the job was successful.

- catalog (optional)
- jobInfo
- catalogId (optional, UUID, format: uuid)

SourceDiscoverSchemaRequestBody

- sourceId (UUID, format: uuid)
- disable_cache (optional)
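
These two models describe the request and response of schema discovery. A hedged sketch of that round trip, where the host, endpoint path, and the internal structure of the returned catalog streams are assumptions:

```python
import requests

# Sketch of a schema-discovery round trip. Host and path are assumptions;
# the response handling follows the SourceDiscoverSchemaRead description above.
request_body = {
    "sourceId": "00000000-0000-0000-0000-000000000004",  # placeholder id
    "disable_cache": True,                               # ask for a fresh discovery
}

resp = requests.post("http://localhost:8000/api/v1/sources/discover_schema", json=request_body)
resp.raise_for_status()
result = resp.json()

# Per the description above, check jobInfo before relying on the catalog field.
if result["jobInfo"].get("succeeded"):
    streams = result.get("catalog", {}).get("streams", [])  # assumed catalog layout
    print([s["stream"]["name"] for s in streams])
else:
    print("Discovery failed; inspect jobInfo logs for details.")
```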

SourceIdRequestBody

- sourceId (UUID, format: uuid)

SourceOauthConsentRequest

- sourceDefinitionId (UUID, format: uuid)
- workspaceId (UUID, format: uuid)
- redirectUrl (String): The URL to redirect to after the user grants consent.
- oAuthInputConfiguration (optional)

SourceRead

- sourceDefinitionId (UUID, format: uuid)
- sourceId (UUID, format: uuid)
- workspaceId (UUID, format: uuid)
- connectionConfiguration
- name
- sourceName

SourceSearch

- sourceDefinitionId (optional, UUID, format: uuid)
- sourceId (optional, UUID, format: uuid)
- workspaceId (optional, UUID, format: uuid)
- connectionConfiguration (optional)
- name (optional)
- sourceName (optional)

SourceUpdate

- sourceId (UUID, format: uuid)
- connectionConfiguration
- name

StreamDescriptor

- name
- namespace (optional)

StreamState

- streamDescriptor
- streamState (optional)

StreamTransform

- transformType (Enum: add_stream, remove_stream, update_stream)
- streamDescriptor
- updateStream (optional, array[FieldTransform]): List of field transformations. Order does not matter.
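
To make the transform types concrete, here is a sketch of StreamTransform entries as they might appear in a catalog diff; the stream names, namespaces, and the (empty) field transform list are placeholders:

```python
# Illustrative StreamTransform entries. Names and namespaces are placeholders.
stream_transforms = [
    {
        "transformType": "add_stream",
        "streamDescriptor": {"name": "orders", "namespace": "public"},
    },
    {
        "transformType": "update_stream",
        "streamDescriptor": {"name": "customers", "namespace": "public"},
        "updateStream": [],  # would hold FieldTransform objects describing field-level changes
    },
]

for transform in stream_transforms:
    print(transform["transformType"], transform["streamDescriptor"]["name"])
```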

SyncMode

SynchronousJobRead

- id (UUID, format: uuid)
- configType
- configId (optional, String): Only present if a config id was provided.
- createdAt (Long, format: int64)
- endedAt (Long, format: int64)
- succeeded
- logs (optional)

UploadRead

- status (Enum: succeeded, failed)
- resourceId (optional, UUID, format: uuid)

WebBackendConnectionCreate

- name (optional, String): Optional name of the connection.
- namespaceDefinition (optional)
- namespaceFormat (optional, String): Used when namespaceDefinition is 'customformat'. If blank then behaves like namespaceDefinition = 'destination'. If "${SOURCE_NAMESPACE}" then behaves like namespaceDefinition = 'source'.
- prefix (optional, String): Prefix that will be prepended to the name of each stream when it is written to the destination.
- sourceId (UUID, format: uuid)
- destinationId (UUID, format: uuid)
- operationIds (optional, array[UUID], format: uuid)
- syncCatalog (optional)
- schedule (optional)
- status
- resourceRequirements (optional)
- operations (optional)
- sourceCatalogId (optional, UUID, format: uuid)
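
The sketch below shows a WebBackendConnectionCreate payload focused on how namespaceFormat and prefix relate to the field descriptions above; the ids, the namespaceDefinition and status enum values, and the format string are illustrative assumptions:

```python
import json

# Illustrative connection-create payload. Ids and enum values are placeholders.
connection_create = {
    "name": "Postgres to BigQuery",
    "sourceId": "00000000-0000-0000-0000-000000000005",
    "destinationId": "00000000-0000-0000-0000-000000000006",
    "namespaceDefinition": "customformat",         # assumed enum value
    "namespaceFormat": "${SOURCE_NAMESPACE}_raw",  # custom namespace built from the source namespace
    "prefix": "airbyte_",                          # streams are written as airbyte_<stream name>
    "status": "active",                            # assumed connection status value
}

print(json.dumps(connection_create, indent=2))
```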

WebBackendConnectionRead

- connectionId (UUID, format: uuid)
- name
- namespaceDefinition (optional)
- namespaceFormat (optional, String): Used when namespaceDefinition is 'customformat'. If blank then behaves like namespaceDefinition = 'destination'. If "${SOURCE_NAMESPACE}" then behaves like namespaceDefinition = 'source'.
- prefix (optional, String): Prefix that will be prepended to the name of each stream when it is written to the destination.
- sourceId (UUID, format: uuid)
- destinationId (UUID, format: uuid)
- syncCatalog
- schedule (optional)
- status
- operationIds (optional, array[UUID], format: uuid)
- source
- destination
- operations (optional)
- latestSyncJobCreatedAt (optional, Long, format: int64): Epoch time of the latest sync job. Null if no sync job has taken place.
- latestSyncJobStatus (optional)
- isSyncing
- resourceRequirements (optional)
- catalogId (optional, UUID, format: uuid)
- catalogDiff (optional)

WebBackendConnectionRequestBody

- withRefreshedCatalog (optional)
- connectionId (UUID, format: uuid)

WebBackendConnectionSearch

- connectionId (optional, UUID, format: uuid)
- name (optional)
- namespaceDefinition (optional)
- namespaceFormat (optional, String): Used when namespaceDefinition is 'customformat'. If blank then behaves like namespaceDefinition = 'destination'. If "${SOURCE_NAMESPACE}" then behaves like namespaceDefinition = 'source'.
- prefix (optional, String): Prefix that will be prepended to the name of each stream when it is written to the destination.
- sourceId (optional, UUID, format: uuid)
- destinationId (optional, UUID, format: uuid)
- schedule (optional)
- status (optional)
- source (optional)
- destination (optional)

WebBackendConnectionUpdate

- name (optional, String): Name that will be set on the connection.
- connectionId (UUID, format: uuid)
- namespaceDefinition (optional)
- namespaceFormat (optional, String): Used when namespaceDefinition is 'customformat'. If blank then behaves like namespaceDefinition = 'destination'. If "${SOURCE_NAMESPACE}" then behaves like namespaceDefinition = 'source'.
- prefix (optional, String): Prefix that will be prepended to the name of each stream when it is written to the destination.
- operationIds (optional, array[UUID], format: uuid)
- syncCatalog
- schedule (optional)
- status
- resourceRequirements (optional)
- withRefreshedCatalog (optional)
- operations (optional)
- sourceCatalogId (optional, UUID, format: uuid)

WebBackendOperationCreateOrUpdate

- operationId (optional, UUID, format: uuid)
- workspaceId (UUID, format: uuid)
- name
- operatorConfiguration

WebBackendWorkspaceState

- workspaceId (UUID, format: uuid)

WebBackendWorkspaceStateResult

- hasConnections
- hasSources
- hasDestinations

WorkspaceCreate

- email (optional, String, format: email)
- anonymousDataCollection (optional)
- name
- news (optional)
- securityUpdates (optional)
- notifications (optional)
- displaySetupWizard (optional)

WorkspaceGiveFeedback

- workspaceId (UUID, format: uuid)

WorkspaceIdRequestBody

- workspaceId (UUID, format: uuid)

WorkspaceRead

- workspaceId (UUID, format: uuid)
- customerId (UUID, format: uuid)
- email (optional, String, format: email)
- name
- slug
- initialSetupComplete
- displaySetupWizard (optional)
- anonymousDataCollection (optional)
- news (optional)
- securityUpdates (optional)
- notifications (optional)
- firstCompletedSync (optional)
- feedbackDone (optional)

WorkspaceUpdate

- workspaceId (UUID, format: uuid)
- email (optional, String, format: email)
- initialSetupComplete
- displaySetupWizard (optional)
- anonymousDataCollection
- news
- securityUpdates
- notifications (optional)
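
As a final sketch, a WorkspaceUpdate payload that turns off optional emails while keeping security updates could look like this; the workspace id and email address are placeholders:

```python
import json

# Illustrative WorkspaceUpdate payload. Id and email are placeholders.
workspace_update = {
    "workspaceId": "00000000-0000-0000-0000-000000000007",
    "email": "data-team@example.com",
    "initialSetupComplete": True,
    "anonymousDataCollection": False,
    "news": False,
    "securityUpdates": True,
}

print(json.dumps(workspace_update, indent=2))
```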

WorkspaceUpdateName

- workspaceId (UUID, format: uuid)
- name
- - diff --git a/docs/reference/api/rapidoc-api-docs.html b/docs/reference/api/rapidoc-api-docs.html deleted file mode 100644 index 95dbd9b4eb47..000000000000 --- a/docs/reference/api/rapidoc-api-docs.html +++ /dev/null @@ -1,12 +0,0 @@ - - - - - - - - - - diff --git a/docs/troubleshooting/on-upgrading.md b/docs/troubleshooting/on-upgrading.md deleted file mode 100644 index b1ba4dea74c4..000000000000 --- a/docs/troubleshooting/on-upgrading.md +++ /dev/null @@ -1,2 +0,0 @@ -# On Upgrading - diff --git a/docs/understanding-airbyte/README.md b/docs/understanding-airbyte/README.md deleted file mode 100644 index 19657b56c7ee..000000000000 --- a/docs/understanding-airbyte/README.md +++ /dev/null @@ -1,2 +0,0 @@ -# Understanding Airbyte - diff --git a/docusaurus/sidebars.js b/docusaurus/sidebars.js index 70724d75076e..fd342f2cdb2b 100644 --- a/docusaurus/sidebars.js +++ b/docusaurus/sidebars.js @@ -1,330 +1,31 @@ -const fs = require("fs"); -const path = require("path") +/** + * Creating a sidebar enables you to: + - create an ordered group of docs + - render a sidebar for each doc of that group + - provide next/previous navigation -const connectorsDocsRoot = "../docs/integrations"; -const sourcesDocs = `${connectorsDocsRoot}/sources`; -const destinationDocs = `${connectorsDocsRoot}/destinations`; + The sidebars can be generated from the filesystem, or explicitly defined here. -function getFilenamesInDir(prefix, dir, excludes) { - return fs.readdirSync(dir) - .map(fileName => fileName.replace(".md", "")) - .filter(fileName => excludes.indexOf(fileName.toLowerCase()) === -1) - .map(filename => { - return {type: 'doc', id: path.join(prefix, filename)} - }); -} + Create as many sidebars as you want. + */ -function getSourceConnectors() { - return getFilenamesInDir("integrations/sources/", sourcesDocs, ["readme"]); -} +// @ts-check -function getDestinationConnectors() { - return getFilenamesInDir("integrations/destinations/", destinationDocs, ["readme"]); -} -module.exports = { - mySidebar: [ - { - type: 'doc', - label: 'Start here', - id: "readme", - }, - { - type: 'category', - label: 'Connector Catalog', - link: { - type: 'doc', - id: 'integrations/README', - }, - items: [ - { - type: 'category', - label: 'Sources', - items: getSourceConnectors() - }, - { - type: 'category', - label: 'Destinations', - items: getDestinationConnectors() - }, - { - type: 'doc', - id: "integrations/custom-connectors", - }, - ] +/** @type {import('@docusaurus/plugin-content-docs').SidebarsConfig} */ +const sidebars = { + // By default, Docusaurus generates a sidebar from the docs folder structure + tutorialSidebar: [{type: 'autogenerated', dirName: '.'}], - }, - { - type: 'category', - label: 'Airbyte Cloud', - link: { - type: 'doc', - id: 'cloud/getting-started-with-airbyte-cloud', - }, - items: [ - 'cloud/core-concepts', - 'cloud/managing-airbyte-cloud', - - ], - }, - { - type: 'category', - label: 'Airbyte Open Source QuickStart', - link: { - type: 'generated-index', - }, - items: [ - 'quickstart/deploy-airbyte', - 'quickstart/add-a-source', - 'quickstart/add-a-destination', - 'quickstart/set-up-a-connection', - ], - }, - { - type: 'category', - label: 'Deploy Airbyte Open Source', - link: { - type: 'generated-index', - }, - items: [ - 'deploying-airbyte/local-deployment', - 'deploying-airbyte/on-aws-ec2', - 'deploying-airbyte/on-azure-vm-cloud-shell', - 'deploying-airbyte/on-gcp-compute-engine', - 'deploying-airbyte/on-kubernetes', - 'deploying-airbyte/on-plural', - 'deploying-airbyte/on-oci-vm', - 
'deploying-airbyte/on-digitalocean-droplet', - ], - }, - { - type: 'category', - label: 'Manage Airbyte Open Source', - link: { - type: 'generated-index', - }, - items: [ - 'operator-guides/upgrading-airbyte', - 'operator-guides/reset', - 'operator-guides/configuring-airbyte-db', - 'operator-guides/browsing-output-logs', - 'operator-guides/using-the-airflow-airbyte-operator', - 'operator-guides/using-prefect-task', - 'operator-guides/using-dagster-integration', - 'operator-guides/locating-files-local-destination', - { - type: 'category', - label: 'Transformations and Normalization', - items: [ - 'operator-guides/transformation-and-normalization/transformations-with-sql', - 'operator-guides/transformation-and-normalization/transformations-with-dbt', - 'operator-guides/transformation-and-normalization/transformations-with-airbyte', - ] - }, - { - type: 'category', - label: 'Configuring Airbyte', - link: { - type: 'doc', - id: 'operator-guides/configuring-airbyte', - }, - items: [ - 'operator-guides/sentry-integration', - ] - }, - 'operator-guides/using-custom-connectors', - 'operator-guides/scaling-airbyte', - 'operator-guides/securing-airbyte', - ], - }, + // But you can create a sidebar manually + /* + tutorialSidebar: [ { type: 'category', - label: 'Troubleshoot Airbyte', - link: { - type: 'doc', - id: 'troubleshooting/README', - }, - items: [ - 'troubleshooting/on-deploying', - 'troubleshooting/new-connection', - 'troubleshooting/running-sync', - ], - }, - { - type: 'category', - label: 'Build a connector', - link: { - type: 'doc', - id: 'connector-development/README', - }, - items: [ - 'connector-development/tutorials/cdk-speedrun', - { - type: 'category', - label: 'Python CDK: Creating a HTTP API Source', - items: [ - 'connector-development/tutorials/cdk-tutorial-python-http/getting-started', - 'connector-development/tutorials/cdk-tutorial-python-http/creating-the-source', - 'connector-development/tutorials/cdk-tutorial-python-http/install-dependencies', - 'connector-development/tutorials/cdk-tutorial-python-http/define-inputs', - 'connector-development/tutorials/cdk-tutorial-python-http/connection-checking', - 'connector-development/tutorials/cdk-tutorial-python-http/declare-schema', - 'connector-development/tutorials/cdk-tutorial-python-http/read-data', - 'connector-development/tutorials/cdk-tutorial-python-http/use-connector-in-airbyte', - 'connector-development/tutorials/cdk-tutorial-python-http/test-your-connector', - ] - }, - 'connector-development/tutorials/building-a-python-source', - 'connector-development/tutorials/building-a-python-destination', - 'connector-development/tutorials/building-a-java-destination', - 'connector-development/tutorials/profile-java-connector-memory', - { - type: 'category', - label: 'Connector Development Kit (Python)', - link: { - type: 'doc', - id: 'connector-development/cdk-python/README', - }, - items: [ - 'connector-development/cdk-python/basic-concepts', - 'connector-development/cdk-python/schemas', - 'connector-development/cdk-python/full-refresh-stream', - 'connector-development/cdk-python/incremental-stream', - 'connector-development/cdk-python/http-streams', - 'connector-development/cdk-python/python-concepts', - 'connector-development/cdk-python/stream-slices', - ] - }, - 'connector-development/cdk-faros-js', - 'connector-development/airbyte101', - 'connector-development/testing-connectors/README', - 'connector-development/testing-connectors/source-acceptance-tests-reference', - 'connector-development/connector-specification-reference', - 
'connector-development/best-practices', - 'connector-development/ux-handbook', - ] - }, - { - type: 'category', - label: 'Contribute to Airbyte', - link: { - type: 'doc', - id: 'contributing-to-airbyte/README', - }, - items: [ - 'contributing-to-airbyte/code-of-conduct', - 'contributing-to-airbyte/maintainer-code-of-conduct', - 'contributing-to-airbyte/developing-locally', - 'contributing-to-airbyte/developing-on-docker', - 'contributing-to-airbyte/developing-on-kubernetes', - 'contributing-to-airbyte/monorepo-python-development', - 'contributing-to-airbyte/code-style', - 'contributing-to-airbyte/gradle-cheatsheet', - 'contributing-to-airbyte/gradle-dependency-update', - { - type: 'link', - label: 'Connector template', - href: 'https://hackmd.io/Bz75cgATSbm7DjrAqgl4rw', - }, - { - type: 'category', - label: 'Updating documentation', - link: { - type: 'doc', - id: 'contributing-to-airbyte/updating-documentation', - }, - items: [ - 'docusaurus/contributing_to_docs', - 'docusaurus/making_a_redirect', - 'docusaurus/deploying_and_reverting_docs', - 'docusaurus/locally_testing_docusaurus', - 'docusaurus/readme', - ] - }, - ] - }, - { - type: 'category', - label: 'Understand Airbyte', - items: [ - 'understanding-airbyte/beginners-guide-to-catalog', - 'understanding-airbyte/airbyte-protocol', - 'understanding-airbyte/airbyte-protocol-docker', - 'understanding-airbyte/basic-normalization', - { - type: 'category', - label: 'Connections and Sync Modes', - link: { - type: 'doc', - id: 'understanding-airbyte/connections/README', - }, - items: [ - 'understanding-airbyte/connections/full-refresh-overwrite', - 'understanding-airbyte/connections/full-refresh-append', - 'understanding-airbyte/connections/incremental-append', - 'understanding-airbyte/connections/incremental-deduped-history', - ] - }, - 'understanding-airbyte/operations', - 'understanding-airbyte/high-level-view', - 'understanding-airbyte/jobs', - 'understanding-airbyte/tech-stack', - 'understanding-airbyte/cdc', - 'understanding-airbyte/namespaces', - 'understanding-airbyte/supported-data-types', - 'understanding-airbyte/json-avro-conversion', - 'understanding-airbyte/glossary', - ] - }, - { - type: 'doc', - id: "api-documentation", - }, - { - type: 'link', - label: 'CLI documentation', - href: 'https://github.com/airbytehq/airbyte/blob/master/octavia-cli/README.md', - }, - { - type: 'category', - label: 'Project Overview', - items: [ - { - type: 'link', - label: 'Roadmap', - href: 'https://app.harvestr.io/roadmap/view/pQU6gdCyc/airbyte-roadmap', - }, - 'project-overview/product-release-stages', - { - type: 'category', - label: 'Changelog', - items: [ - 'project-overview/changelog/README', - 'project-overview/changelog/platform', - 'project-overview/changelog/connectors', - ] - }, - 'project-overview/slack-code-of-conduct', - 'project-overview/security', - { - type: 'link', - label: 'Airbyte Repository', - href: 'https://github.com/airbytehq/airbyte', - }, - { - type: 'category', - label: 'Licenses', - link: { - type: 'doc', - id: 'project-overview/licenses/README', - }, - items: [ - 'project-overview/licenses/license-faq', - 'project-overview/licenses/elv2-license', - 'project-overview/licenses/mit-license', - 'project-overview/licenses/examples', - ] - }, - ], + label: 'Tutorial', + items: ['hello'], }, ], -} + */ +}; + +module.exports = sidebars;