diff --git a/README.md b/README.md
index 6affde47..765fd7fa 100755
--- a/README.md
+++ b/README.md
@@ -59,7 +59,7 @@ req = shared.ConnectionCreateRequest(
'placeat',
],
],
- sync_mode=shared.ConnectionSyncModeEnumEnum.INCREMENTAL_APPEND,
+ sync_mode=shared.ConnectionSyncModeEnum.INCREMENTAL_APPEND,
),
shared.StreamConfiguration(
cursor_field=[
@@ -73,7 +73,7 @@ req = shared.ConnectionCreateRequest(
'sapiente',
],
],
- sync_mode=shared.ConnectionSyncModeEnumEnum.INCREMENTAL_DEDUPED_HISTORY,
+ sync_mode=shared.ConnectionSyncModeEnum.INCREMENTAL_DEDUPED_HISTORY,
),
shared.StreamConfiguration(
cursor_field=[
@@ -92,20 +92,20 @@ req = shared.ConnectionCreateRequest(
'fugit',
],
],
- sync_mode=shared.ConnectionSyncModeEnumEnum.INCREMENTAL_APPEND,
+ sync_mode=shared.ConnectionSyncModeEnum.INCREMENTAL_APPEND,
),
],
),
- data_residency=shared.GeographyEnumEnum.EU,
+ data_residency=shared.GeographyEnum.EU,
destination_id='c816742c-b739-4205-9293-96fea7596eb1',
name='Lela Orn',
- namespace_definition=shared.NamespaceDefinitionEnumEnum.SOURCE,
+ namespace_definition=shared.NamespaceDefinitionEnum.SOURCE,
namespace_format='${SOURCE_NAMESPACE}',
- non_breaking_schema_updates_behavior=shared.NonBreakingSchemaUpdatesBehaviorEnumEnum.IGNORE,
+ non_breaking_schema_updates_behavior=shared.NonBreakingSchemaUpdatesBehaviorEnum.IGNORE,
prefix='corporis',
schedule=shared.ConnectionScheduleCreate(
cron_expression='explicabo',
- schedule_type=shared.ScheduleTypeEnumEnum.CRON,
+ schedule_type=shared.ScheduleTypeEnum.CRON,
),
source_id='5955907a-ff1a-43a2-ba94-67739251aa52',
)
diff --git a/RELEASES.md b/RELEASES.md
index f3c03908..200b9853 100644
--- a/RELEASES.md
+++ b/RELEASES.md
@@ -126,4 +126,28 @@ Based on:
- OpenAPI Doc 1.0.0
- Speakeasy CLI 1.33.2 (2.29.0) https://github.com/speakeasy-api/speakeasy
### Releases
-- [PyPI v0.7.0] https://pypi.org/project/airbyte-api/0.7.0 - .
\ No newline at end of file
+- [PyPI v0.7.0] https://pypi.org/project/airbyte-api/0.7.0 - .
+
+## 2023-05-18 00:13:10
+### Changes
+Based on:
+- OpenAPI Doc 1.0.0
+- Speakeasy CLI 1.34.0 (2.30.0) https://github.com/speakeasy-api/speakeasy
+### Releases
+- [PyPI v0.8.0] https://pypi.org/project/airbyte-api/0.8.0 - .
+
+## 2023-05-18 20:19:16
+### Changes
+Based on:
+- OpenAPI Doc 1.0.0
+- Speakeasy CLI 1.35.0 (2.31.0) https://github.com/speakeasy-api/speakeasy
+### Releases
+- [PyPI v0.9.0] https://pypi.org/project/airbyte-api/0.9.0 - .
+
+## 2023-05-23 00:13:59
+### Changes
+Based on:
+- OpenAPI Doc 1.0.0
+- Speakeasy CLI 1.37.5 (2.32.2) https://github.com/speakeasy-api/speakeasy
+### Releases
+- [PyPI v0.10.0] https://pypi.org/project/airbyte-api/0.10.0 - .
\ No newline at end of file
diff --git a/USAGE.md b/USAGE.md
index 8f6375a5..a0f69461 100755
--- a/USAGE.md
+++ b/USAGE.md
@@ -35,7 +35,7 @@ req = shared.ConnectionCreateRequest(
'placeat',
],
],
- sync_mode=shared.ConnectionSyncModeEnumEnum.INCREMENTAL_APPEND,
+ sync_mode=shared.ConnectionSyncModeEnum.INCREMENTAL_APPEND,
),
shared.StreamConfiguration(
cursor_field=[
@@ -49,7 +49,7 @@ req = shared.ConnectionCreateRequest(
'sapiente',
],
],
- sync_mode=shared.ConnectionSyncModeEnumEnum.INCREMENTAL_DEDUPED_HISTORY,
+ sync_mode=shared.ConnectionSyncModeEnum.INCREMENTAL_DEDUPED_HISTORY,
),
shared.StreamConfiguration(
cursor_field=[
@@ -68,20 +68,20 @@ req = shared.ConnectionCreateRequest(
'fugit',
],
],
- sync_mode=shared.ConnectionSyncModeEnumEnum.INCREMENTAL_APPEND,
+ sync_mode=shared.ConnectionSyncModeEnum.INCREMENTAL_APPEND,
),
],
),
- data_residency=shared.GeographyEnumEnum.EU,
+ data_residency=shared.GeographyEnum.EU,
destination_id='c816742c-b739-4205-9293-96fea7596eb1',
name='Lela Orn',
- namespace_definition=shared.NamespaceDefinitionEnumEnum.SOURCE,
+ namespace_definition=shared.NamespaceDefinitionEnum.SOURCE,
namespace_format='${SOURCE_NAMESPACE}',
- non_breaking_schema_updates_behavior=shared.NonBreakingSchemaUpdatesBehaviorEnumEnum.IGNORE,
+ non_breaking_schema_updates_behavior=shared.NonBreakingSchemaUpdatesBehaviorEnum.IGNORE,
prefix='corporis',
schedule=shared.ConnectionScheduleCreate(
cron_expression='explicabo',
- schedule_type=shared.ScheduleTypeEnumEnum.CRON,
+ schedule_type=shared.ScheduleTypeEnum.CRON,
),
source_id='5955907a-ff1a-43a2-ba94-67739251aa52',
)
diff --git a/docs/connections/README.md b/docs/connections/README.md
index 63f42288..837ce034 100755
--- a/docs/connections/README.md
+++ b/docs/connections/README.md
@@ -52,7 +52,7 @@ req = shared.ConnectionCreateRequest(
'reprehenderit',
],
],
- sync_mode=shared.ConnectionSyncModeEnumEnum.FULL_REFRESH_APPEND,
+ sync_mode=shared.ConnectionSyncModeEnum.FULL_REFRESH_APPEND,
),
shared.StreamConfiguration(
cursor_field=[
@@ -82,7 +82,7 @@ req = shared.ConnectionCreateRequest(
'veritatis',
],
],
- sync_mode=shared.ConnectionSyncModeEnumEnum.INCREMENTAL_DEDUPED_HISTORY,
+ sync_mode=shared.ConnectionSyncModeEnum.INCREMENTAL_DEDUPED_HISTORY,
),
shared.StreamConfiguration(
cursor_field=[
@@ -109,7 +109,7 @@ req = shared.ConnectionCreateRequest(
'dolorum',
],
],
- sync_mode=shared.ConnectionSyncModeEnumEnum.INCREMENTAL_APPEND,
+ sync_mode=shared.ConnectionSyncModeEnum.INCREMENTAL_APPEND,
),
shared.StreamConfiguration(
cursor_field=[
@@ -134,20 +134,20 @@ req = shared.ConnectionCreateRequest(
'illum',
],
],
- sync_mode=shared.ConnectionSyncModeEnumEnum.INCREMENTAL_DEDUPED_HISTORY,
+ sync_mode=shared.ConnectionSyncModeEnum.INCREMENTAL_DEDUPED_HISTORY,
),
],
),
- data_residency=shared.GeographyEnumEnum.EU,
+ data_residency=shared.GeographyEnum.EU,
destination_id='14cd66ae-395e-4fb9-ba88-f3a66997074b',
name='Leroy Greenfelder',
- namespace_definition=shared.NamespaceDefinitionEnumEnum.CUSTOM_FORMAT,
+ namespace_definition=shared.NamespaceDefinitionEnum.CUSTOM_FORMAT,
namespace_format='${SOURCE_NAMESPACE}',
- non_breaking_schema_updates_behavior=shared.NonBreakingSchemaUpdatesBehaviorEnumEnum.IGNORE,
+ non_breaking_schema_updates_behavior=shared.NonBreakingSchemaUpdatesBehaviorEnum.IGNORE,
prefix='vero',
schedule=shared.ConnectionScheduleCreate(
cron_expression='aspernatur',
- schedule_type=shared.ScheduleTypeEnumEnum.MANUAL,
+ schedule_type=shared.ScheduleTypeEnum.MANUAL,
),
source_id='41959890-afa5-463e-a516-fe4c8b711e5b',
)
diff --git a/docs/destinations/README.md b/docs/destinations/README.md
index df663a0f..d42c3876 100755
--- a/docs/destinations/README.md
+++ b/docs/destinations/README.md
@@ -27,14 +27,14 @@ req = shared.DestinationCreateRequest(
configuration=shared.DestinationDatabricks(
accept_terms=False,
data_source=shared.DestinationDatabricksDataSourceRecommendedManagedTables(
- data_source_type=shared.DestinationDatabricksDataSourceRecommendedManagedTablesDataSourceTypeEnum.MANAGED_TABLES_STORAGE,
+ data_source_type=shared.DestinationDatabricksDataSourceRecommendedManagedTablesDataSourceType.MANAGED_TABLES_STORAGE,
),
database='perferendis',
databricks_http_path='amet',
databricks_personal_access_token='optio',
databricks_port='accusamus',
databricks_server_hostname='ad',
- destination_type=shared.DestinationDatabricksDatabricksEnum.DATABRICKS,
+ destination_type=shared.DestinationDatabricksDatabricks.DATABRICKS,
purge_staging_data=False,
schema='saepe',
),
diff --git a/docs/jobs/README.md b/docs/jobs/README.md
index a444f97c..07095988 100755
--- a/docs/jobs/README.md
+++ b/docs/jobs/README.md
@@ -51,7 +51,7 @@ s = airbyte.Airbyte(
req = shared.JobCreateRequest(
connection_id='odio',
- job_type=shared.JobTypeEnumEnum.RESET,
+ job_type=shared.JobTypeEnum.RESET,
)
res = s.jobs.create_job(req)
@@ -104,7 +104,7 @@ s = airbyte.Airbyte(
req = operations.ListJobsRequest(
connection_id='quisquam',
- job_type=shared.JobTypeEnumEnum.RESET,
+ job_type=shared.JobTypeEnum.RESET,
limit=606476,
offset=338159,
)
diff --git a/docs/sources/README.md b/docs/sources/README.md
index 045a720c..ac0fed26 100755
--- a/docs/sources/README.md
+++ b/docs/sources/README.md
@@ -32,7 +32,7 @@ req = shared.SourceCreateRequest(
records_per_slice=231701,
records_per_sync=878870,
seed=949319,
- source_type=shared.SourceFakerFakerEnum.FAKER,
+ source_type=shared.SourceFakerFaker.FAKER,
),
name='Darla Rau',
secret_id='similique',
diff --git a/docs/workspaces/README.md b/docs/workspaces/README.md
index f9d52b1e..2ed834a6 100755
--- a/docs/workspaces/README.md
+++ b/docs/workspaces/README.md
@@ -28,7 +28,7 @@ s = airbyte.Airbyte(
req = operations.CreateOrUpdateWorkspaceOAuthCredentialsRequest(
workspace_o_auth_credentials_request=shared.WorkspaceOAuthCredentialsRequest(
- actor_type=shared.ActorTypeEnumEnum.SOURCE,
+ actor_type=shared.ActorTypeEnum.SOURCE,
configuration={
"error": 'sint',
"pariatur": 'possimus',
diff --git a/files.gen b/files.gen
index 00e45875..2a699d3c 100755
--- a/files.gen
+++ b/files.gen
@@ -38,18 +38,18 @@ src/airbyte/models/operations/listworkspaces.py
src/airbyte/models/operations/updateworkspace.py
src/airbyte/models/operations/__init__.py
src/airbyte/models/shared/connectionresponse.py
-src/airbyte/models/shared/connectionstatusenum_enum.py
+src/airbyte/models/shared/connectionstatusenum.py
src/airbyte/models/shared/connectionscheduleresponse.py
-src/airbyte/models/shared/scheduletypewithbasicenum_enum.py
-src/airbyte/models/shared/nonbreakingschemaupdatesbehaviorenum_enum.py
-src/airbyte/models/shared/namespacedefinitionenum_enum.py
-src/airbyte/models/shared/geographyenum_enum.py
+src/airbyte/models/shared/scheduletypewithbasicenum.py
+src/airbyte/models/shared/nonbreakingschemaupdatesbehaviorenum.py
+src/airbyte/models/shared/namespacedefinitionenum.py
+src/airbyte/models/shared/geographyenum.py
src/airbyte/models/shared/connectioncreaterequest.py
src/airbyte/models/shared/connectionschedulecreate.py
-src/airbyte/models/shared/scheduletypeenum_enum.py
+src/airbyte/models/shared/scheduletypeenum.py
src/airbyte/models/shared/streamconfigurations.py
src/airbyte/models/shared/streamconfiguration.py
-src/airbyte/models/shared/connectionsyncmodeenum_enum.py
+src/airbyte/models/shared/connectionsyncmodeenum.py
src/airbyte/models/shared/connectionsresponse.py
src/airbyte/models/shared/destinationresponse.py
src/airbyte/models/shared/destinationcreaterequest.py
@@ -64,6 +64,7 @@ src/airbyte/models/shared/destination_convex.py
src/airbyte/models/shared/destination_cumulio.py
src/airbyte/models/shared/destination_databend.py
src/airbyte/models/shared/destination_databricks.py
+src/airbyte/models/shared/destination_dev_null.py
src/airbyte/models/shared/destination_dynamodb.py
src/airbyte/models/shared/destination_elasticsearch.py
src/airbyte/models/shared/destination_firebolt.py
@@ -93,8 +94,8 @@ src/airbyte/models/shared/destination_snowflake.py
src/airbyte/models/shared/destination_typesense.py
src/airbyte/models/shared/destinationsresponse.py
src/airbyte/models/shared/jobresponse.py
-src/airbyte/models/shared/jobstatusenum_enum.py
-src/airbyte/models/shared/jobtypeenum_enum.py
+src/airbyte/models/shared/jobstatusenum.py
+src/airbyte/models/shared/jobtypeenum.py
src/airbyte/models/shared/jobcreaterequest.py
src/airbyte/models/shared/jobsresponse.py
src/airbyte/models/shared/sourceresponse.py
@@ -294,7 +295,7 @@ src/airbyte/models/shared/initiateoauthrequest.py
src/airbyte/models/shared/sourcesresponse.py
src/airbyte/models/shared/streamproperties.py
src/airbyte/models/shared/workspaceoauthcredentialsrequest.py
-src/airbyte/models/shared/actortypeenum_enum.py
+src/airbyte/models/shared/actortypeenum.py
src/airbyte/models/shared/workspaceresponse.py
src/airbyte/models/shared/workspacecreaterequest.py
src/airbyte/models/shared/workspacesresponse.py
diff --git a/gen.yaml b/gen.yaml
index 885547a6..d39bfb2f 100644
--- a/gen.yaml
+++ b/gen.yaml
@@ -1,15 +1,15 @@
configVersion: 1.0.0
management:
- docChecksum: a486e9e3eacd68c92aee50f313722c18
+ docChecksum: e82f3e11abf47c2509f115468e75bb06
docVersion: 1.0.0
- speakeasyVersion: 1.33.2
- generationVersion: 2.29.0
+ speakeasyVersion: 1.37.5
+ generationVersion: 2.32.2
generation:
sdkClassName: airbyte
singleTagPerOp: false
telemetryEnabled: true
python:
- version: 0.7.0
+ version: 0.10.0
author: Airbyte
description: Python Client SDK for Airbyte API
maxMethodParams: 0
diff --git a/setup.py b/setup.py
index dfc7dc7d..7245a367 100755
--- a/setup.py
+++ b/setup.py
@@ -10,7 +10,7 @@
setuptools.setup(
name="airbyte-api",
- version="0.7.0",
+ version="0.10.0",
author="Airbyte",
description="Python Client SDK for Airbyte API",
long_description=long_description,
diff --git a/src/airbyte/connections.py b/src/airbyte/connections.py
index f2efd28b..204ea684 100755
--- a/src/airbyte/connections.py
+++ b/src/airbyte/connections.py
@@ -27,13 +27,13 @@ def create_connection(self, request: shared.ConnectionCreateRequest) -> operatio
base_url = self._server_url
url = base_url.removesuffix('/') + '/connections'
-
headers = {}
req_content_type, data, form = utils.serialize_request_body(request, "request", 'json')
if req_content_type not in ('multipart/form-data', 'multipart/mixed'):
headers['content-type'] = req_content_type
if data is None and form is None:
raise Exception('request body is required')
+ headers['Accept'] = 'application/json'
headers['user-agent'] = f'speakeasy-sdk/{self._language} {self._sdk_version} {self._gen_version}'
client = self._security_client
@@ -58,8 +58,8 @@ def delete_connection(self, request: operations.DeleteConnectionRequest) -> oper
base_url = self._server_url
url = utils.generate_url(operations.DeleteConnectionRequest, base_url, '/connections/{connectionId}', request)
-
headers = {}
+ headers['Accept'] = '*/*'
headers['user-agent'] = f'speakeasy-sdk/{self._language} {self._sdk_version} {self._gen_version}'
client = self._security_client
@@ -78,8 +78,8 @@ def get_connection(self, request: operations.GetConnectionRequest) -> operations
base_url = self._server_url
url = utils.generate_url(operations.GetConnectionRequest, base_url, '/connections/{connectionId}', request)
-
headers = {}
+ headers['Accept'] = 'application/json'
headers['user-agent'] = f'speakeasy-sdk/{self._language} {self._sdk_version} {self._gen_version}'
client = self._security_client
@@ -104,9 +104,9 @@ def list_connections(self, request: operations.ListConnectionsRequest) -> operat
base_url = self._server_url
url = base_url.removesuffix('/') + '/connections'
-
headers = {}
query_params = utils.get_query_params(operations.ListConnectionsRequest, request)
+ headers['Accept'] = 'application/json'
headers['user-agent'] = f'speakeasy-sdk/{self._language} {self._sdk_version} {self._gen_version}'
client = self._security_client
diff --git a/src/airbyte/destinations.py b/src/airbyte/destinations.py
index 6dbc88a1..8f28ff09 100755
--- a/src/airbyte/destinations.py
+++ b/src/airbyte/destinations.py
@@ -29,11 +29,11 @@ def create_destination(self, request: shared.DestinationCreateRequest) -> operat
base_url = self._server_url
url = base_url.removesuffix('/') + '/destinations'
-
headers = {}
req_content_type, data, form = utils.serialize_request_body(request, "request", 'json')
if req_content_type not in ('multipart/form-data', 'multipart/mixed'):
headers['content-type'] = req_content_type
+ headers['Accept'] = 'application/json'
headers['user-agent'] = f'speakeasy-sdk/{self._language} {self._sdk_version} {self._gen_version}'
client = self._security_client
@@ -58,8 +58,8 @@ def delete_destination(self, request: operations.DeleteDestinationRequest) -> op
base_url = self._server_url
url = utils.generate_url(operations.DeleteDestinationRequest, base_url, '/destinations/{destinationId}', request)
-
headers = {}
+ headers['Accept'] = '*/*'
headers['user-agent'] = f'speakeasy-sdk/{self._language} {self._sdk_version} {self._gen_version}'
client = self._security_client
@@ -78,8 +78,8 @@ def get_destination(self, request: operations.GetDestinationRequest) -> operatio
base_url = self._server_url
url = utils.generate_url(operations.GetDestinationRequest, base_url, '/destinations/{destinationId}', request)
-
headers = {}
+ headers['Accept'] = 'application/json'
headers['user-agent'] = f'speakeasy-sdk/{self._language} {self._sdk_version} {self._gen_version}'
client = self._security_client
@@ -104,9 +104,9 @@ def list_destinations(self, request: operations.ListDestinationsRequest) -> oper
base_url = self._server_url
url = base_url.removesuffix('/') + '/destinations'
-
headers = {}
query_params = utils.get_query_params(operations.ListDestinationsRequest, request)
+ headers['Accept'] = 'application/json'
headers['user-agent'] = f'speakeasy-sdk/{self._language} {self._sdk_version} {self._gen_version}'
client = self._security_client
diff --git a/src/airbyte/jobs.py b/src/airbyte/jobs.py
index b0263d07..0d13fc01 100755
--- a/src/airbyte/jobs.py
+++ b/src/airbyte/jobs.py
@@ -27,8 +27,8 @@ def cancel_job(self, request: operations.CancelJobRequest) -> operations.CancelJ
base_url = self._server_url
url = utils.generate_url(operations.CancelJobRequest, base_url, '/jobs/{jobId}', request)
-
headers = {}
+ headers['Accept'] = 'application/json'
headers['user-agent'] = f'speakeasy-sdk/{self._language} {self._sdk_version} {self._gen_version}'
client = self._security_client
@@ -53,13 +53,13 @@ def create_job(self, request: shared.JobCreateRequest) -> operations.CreateJobRe
base_url = self._server_url
url = base_url.removesuffix('/') + '/jobs'
-
headers = {}
req_content_type, data, form = utils.serialize_request_body(request, "request", 'json')
if req_content_type not in ('multipart/form-data', 'multipart/mixed'):
headers['content-type'] = req_content_type
if data is None and form is None:
raise Exception('request body is required')
+ headers['Accept'] = 'application/json'
headers['user-agent'] = f'speakeasy-sdk/{self._language} {self._sdk_version} {self._gen_version}'
client = self._security_client
@@ -84,8 +84,8 @@ def get_job(self, request: operations.GetJobRequest) -> operations.GetJobRespons
base_url = self._server_url
url = utils.generate_url(operations.GetJobRequest, base_url, '/jobs/{jobId}', request)
-
headers = {}
+ headers['Accept'] = 'application/json'
headers['user-agent'] = f'speakeasy-sdk/{self._language} {self._sdk_version} {self._gen_version}'
client = self._security_client
@@ -110,9 +110,9 @@ def list_jobs(self, request: operations.ListJobsRequest) -> operations.ListJobsR
base_url = self._server_url
url = base_url.removesuffix('/') + '/jobs'
-
headers = {}
query_params = utils.get_query_params(operations.ListJobsRequest, request)
+ headers['Accept'] = 'application/json'
headers['user-agent'] = f'speakeasy-sdk/{self._language} {self._sdk_version} {self._gen_version}'
client = self._security_client
diff --git a/src/airbyte/models/operations/listjobs.py b/src/airbyte/models/operations/listjobs.py
index 11c74436..4695104d 100755
--- a/src/airbyte/models/operations/listjobs.py
+++ b/src/airbyte/models/operations/listjobs.py
@@ -4,7 +4,7 @@
import dataclasses
import requests as requests_http
from ..shared import jobsresponse as shared_jobsresponse
-from ..shared import jobtypeenum_enum as shared_jobtypeenum_enum
+from ..shared import jobtypeenum as shared_jobtypeenum
from typing import Optional
@@ -13,7 +13,7 @@ class ListJobsRequest:
connection_id: str = dataclasses.field(metadata={'query_param': { 'field_name': 'connectionId', 'style': 'form', 'explode': True }})
r"""Filter the Jobs by connectionId."""
- job_type: Optional[shared_jobtypeenum_enum.JobTypeEnumEnum] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'jobType', 'style': 'form', 'explode': True }})
+ job_type: Optional[shared_jobtypeenum.JobTypeEnum] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'jobType', 'style': 'form', 'explode': True }})
r"""Filter the Jobs by jobType."""
limit: Optional[int] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'limit', 'style': 'form', 'explode': True }})
r"""Set the limit on the number of Jobs returned. The default is 20 Jobs."""
diff --git a/src/airbyte/models/shared/__init__.py b/src/airbyte/models/shared/__init__.py
index e01c8e31..c4520f14 100755
--- a/src/airbyte/models/shared/__init__.py
+++ b/src/airbyte/models/shared/__init__.py
@@ -1,13 +1,13 @@
"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT."""
-from .actortypeenum_enum import *
+from .actortypeenum import *
from .connectioncreaterequest import *
from .connectionresponse import *
from .connectionschedulecreate import *
from .connectionscheduleresponse import *
from .connectionsresponse import *
-from .connectionstatusenum_enum import *
-from .connectionsyncmodeenum_enum import *
+from .connectionstatusenum import *
+from .connectionsyncmodeenum import *
from .destination_amazon_sqs import *
from .destination_aws_datalake import *
from .destination_azure_blob_storage import *
@@ -19,6 +19,7 @@
from .destination_cumulio import *
from .destination_databend import *
from .destination_databricks import *
+from .destination_dev_null import *
from .destination_dynamodb import *
from .destination_elasticsearch import *
from .destination_firebolt import *
@@ -49,17 +50,17 @@
from .destinationcreaterequest import *
from .destinationresponse import *
from .destinationsresponse import *
-from .geographyenum_enum import *
+from .geographyenum import *
from .initiateoauthrequest import *
from .jobcreaterequest import *
from .jobresponse import *
from .jobsresponse import *
-from .jobstatusenum_enum import *
-from .jobtypeenum_enum import *
-from .namespacedefinitionenum_enum import *
-from .nonbreakingschemaupdatesbehaviorenum_enum import *
-from .scheduletypeenum_enum import *
-from .scheduletypewithbasicenum_enum import *
+from .jobstatusenum import *
+from .jobtypeenum import *
+from .namespacedefinitionenum import *
+from .nonbreakingschemaupdatesbehaviorenum import *
+from .scheduletypeenum import *
+from .scheduletypewithbasicenum import *
from .security import *
from .source_aircall import *
from .source_airtable import *
@@ -264,4 +265,4 @@
from .workspacesresponse import *
from .workspaceupdaterequest import *
-__all__ = ["ActorTypeEnumEnum","ConnectionCreateRequest","ConnectionResponse","ConnectionScheduleCreate","ConnectionScheduleResponse","ConnectionStatusEnumEnum","ConnectionSyncModeEnumEnum","ConnectionsResponse","DestinationAmazonSqs","DestinationAmazonSqsAWSRegionEnum","DestinationAmazonSqsAmazonSqsEnum","DestinationAwsDatalake","DestinationAwsDatalakeAwsDatalakeEnum","DestinationAwsDatalakeChooseHowToPartitionDataEnum","DestinationAwsDatalakeCredentialsIAMRole","DestinationAwsDatalakeCredentialsIAMRoleCredentialsTitleEnum","DestinationAwsDatalakeCredentialsIAMUser","DestinationAwsDatalakeCredentialsIAMUserCredentialsTitleEnum","DestinationAwsDatalakeFormatJSONLinesNewlineDelimitedJSON","DestinationAwsDatalakeFormatJSONLinesNewlineDelimitedJSONCompressionCodecOptionalEnum","DestinationAwsDatalakeFormatJSONLinesNewlineDelimitedJSONFormatTypeWildcardEnum","DestinationAwsDatalakeFormatParquetColumnarStorage","DestinationAwsDatalakeFormatParquetColumnarStorageCompressionCodecOptionalEnum","DestinationAwsDatalakeFormatParquetColumnarStorageFormatTypeWildcardEnum","DestinationAwsDatalakeS3BucketRegionEnum","DestinationAzureBlobStorage","DestinationAzureBlobStorageAzureBlobStorageEnum","DestinationAzureBlobStorageFormatCSVCommaSeparatedValues","DestinationAzureBlobStorageFormatCSVCommaSeparatedValuesFormatTypeEnum","DestinationAzureBlobStorageFormatCSVCommaSeparatedValuesNormalizationFlatteningEnum","DestinationAzureBlobStorageFormatJSONLinesNewlineDelimitedJSON","DestinationAzureBlobStorageFormatJSONLinesNewlineDelimitedJSONFormatTypeEnum","DestinationBigquery","DestinationBigqueryBigqueryEnum","DestinationBigqueryDatasetLocationEnum","DestinationBigqueryDenormalized","DestinationBigqueryDenormalizedBigqueryDenormalizedEnum","DestinationBigqueryDenormalizedDatasetLocationEnum","DestinationBigqueryDenormalizedLoadingMethodGCSStaging","DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKey","DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKeyCredentialTypeEnum","DestinationBigqueryDenormalizedLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessingEnum","DestinationBigqueryDenormalizedLoadingMethodGCSStagingMethodEnum","DestinationBigqueryDenormalizedLoadingMethodStandardInserts","DestinationBigqueryDenormalizedLoadingMethodStandardInsertsMethodEnum","DestinationBigqueryLoadingMethodGCSStaging","DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKey","DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKeyCredentialTypeEnum","DestinationBigqueryLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessingEnum","DestinationBigqueryLoadingMethodGCSStagingMethodEnum","DestinationBigqueryLoadingMethodStandardInserts","DestinationBigqueryLoadingMethodStandardInsertsMethodEnum","DestinationBigqueryTransformationQueryRunTypeEnum","DestinationCassandra","DestinationCassandraCassandraEnum","DestinationClickhouse","DestinationClickhouseClickhouseEnum","DestinationClickhouseTunnelMethodNoTunnel","DestinationClickhouseTunnelMethodNoTunnelTunnelMethodEnum","DestinationClickhouseTunnelMethodPasswordAuthentication","DestinationClickhouseTunnelMethodPasswordAuthenticationTunnelMethodEnum","DestinationClickhouseTunnelMethodSSHKeyAuthentication","DestinationClickhouseTunnelMethodSSHKeyAuthenticationTunnelMethodEnum","DestinationConvex","DestinationConvexConvexEnum","DestinationCreateRequest","DestinationCumulio","DestinationCumulioCumulioEnum","DestinationDatabend","DestinationDatabendDatabendEnum","DestinationDatabricks","DestinationDatabricksDataSourceAmazonS3","DestinationData
bricksDataSourceAmazonS3DataSourceTypeEnum","DestinationDatabricksDataSourceAmazonS3S3BucketRegionEnum","DestinationDatabricksDataSourceAzureBlobStorage","DestinationDatabricksDataSourceAzureBlobStorageDataSourceTypeEnum","DestinationDatabricksDataSourceRecommendedManagedTables","DestinationDatabricksDataSourceRecommendedManagedTablesDataSourceTypeEnum","DestinationDatabricksDatabricksEnum","DestinationDynamodb","DestinationDynamodbDynamoDBRegionEnum","DestinationDynamodbDynamodbEnum","DestinationElasticsearch","DestinationElasticsearchAuthenticationMethodAPIKeySecret","DestinationElasticsearchAuthenticationMethodAPIKeySecretMethodEnum","DestinationElasticsearchAuthenticationMethodUsernamePassword","DestinationElasticsearchAuthenticationMethodUsernamePasswordMethodEnum","DestinationElasticsearchElasticsearchEnum","DestinationFirebolt","DestinationFireboltFireboltEnum","DestinationFireboltLoadingMethodExternalTableViaS3","DestinationFireboltLoadingMethodExternalTableViaS3MethodEnum","DestinationFireboltLoadingMethodSQLInserts","DestinationFireboltLoadingMethodSQLInsertsMethodEnum","DestinationFirestore","DestinationFirestoreFirestoreEnum","DestinationGCSGCSBucketRegionEnum","DestinationGcs","DestinationGcsCredentialHMACKey","DestinationGcsCredentialHMACKeyCredentialTypeEnum","DestinationGcsFormatAvroApacheAvro","DestinationGcsFormatAvroApacheAvroCompressionCodecBzip2","DestinationGcsFormatAvroApacheAvroCompressionCodecBzip2CodecEnum","DestinationGcsFormatAvroApacheAvroCompressionCodecDeflate","DestinationGcsFormatAvroApacheAvroCompressionCodecDeflateCodecEnum","DestinationGcsFormatAvroApacheAvroCompressionCodecNoCompression","DestinationGcsFormatAvroApacheAvroCompressionCodecNoCompressionCodecEnum","DestinationGcsFormatAvroApacheAvroCompressionCodecSnappy","DestinationGcsFormatAvroApacheAvroCompressionCodecSnappyCodecEnum","DestinationGcsFormatAvroApacheAvroCompressionCodecXz","DestinationGcsFormatAvroApacheAvroCompressionCodecXzCodecEnum","DestinationGcsFormatAvroApacheAvroCompressionCodecZstandard","DestinationGcsFormatAvroApacheAvroCompressionCodecZstandardCodecEnum","DestinationGcsFormatAvroApacheAvroFormatTypeEnum","DestinationGcsFormatCSVCommaSeparatedValues","DestinationGcsFormatCSVCommaSeparatedValuesCompressionGZIP","DestinationGcsFormatCSVCommaSeparatedValuesCompressionGZIPCompressionTypeEnum","DestinationGcsFormatCSVCommaSeparatedValuesCompressionNoCompression","DestinationGcsFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionTypeEnum","DestinationGcsFormatCSVCommaSeparatedValuesFormatTypeEnum","DestinationGcsFormatCSVCommaSeparatedValuesNormalizationEnum","DestinationGcsFormatJSONLinesNewlineDelimitedJSON","DestinationGcsFormatJSONLinesNewlineDelimitedJSONCompressionGZIP","DestinationGcsFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionTypeEnum","DestinationGcsFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression","DestinationGcsFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionTypeEnum","DestinationGcsFormatJSONLinesNewlineDelimitedJSONFormatTypeEnum","DestinationGcsFormatParquetColumnarStorage","DestinationGcsFormatParquetColumnarStorageCompressionCodecEnum","DestinationGcsFormatParquetColumnarStorageFormatTypeEnum","DestinationGcsGcsEnum","DestinationGoogleSheets","DestinationGoogleSheetsAuthenticationViaGoogleOAuth","DestinationGoogleSheetsGoogleSheetsEnum","DestinationKeen","DestinationKeenKeenEnum","DestinationKinesis","DestinationKinesisKinesisEnum","DestinationMariadbColumnstore","DestinationMariadbColumnstoreMariadbColumn
storeEnum","DestinationMariadbColumnstoreTunnelMethodNoTunnel","DestinationMariadbColumnstoreTunnelMethodNoTunnelTunnelMethodEnum","DestinationMariadbColumnstoreTunnelMethodPasswordAuthentication","DestinationMariadbColumnstoreTunnelMethodPasswordAuthenticationTunnelMethodEnum","DestinationMariadbColumnstoreTunnelMethodSSHKeyAuthentication","DestinationMariadbColumnstoreTunnelMethodSSHKeyAuthenticationTunnelMethodEnum","DestinationMeilisearch","DestinationMeilisearchMeilisearchEnum","DestinationMongodb","DestinationMongodbAuthTypeLoginPassword","DestinationMongodbAuthTypeLoginPasswordAuthorizationEnum","DestinationMongodbAuthTypeNone","DestinationMongodbAuthTypeNoneAuthorizationEnum","DestinationMongodbInstanceTypeMongoDBAtlas","DestinationMongodbInstanceTypeMongoDBAtlasInstanceEnum","DestinationMongodbInstanceTypeReplicaSet","DestinationMongodbInstanceTypeReplicaSetInstanceEnum","DestinationMongodbInstanceTypeStandaloneMongoDbInstance","DestinationMongodbInstanceTypeStandaloneMongoDbInstanceInstanceEnum","DestinationMongodbMongodbEnum","DestinationMongodbTunnelMethodNoTunnel","DestinationMongodbTunnelMethodNoTunnelTunnelMethodEnum","DestinationMongodbTunnelMethodPasswordAuthentication","DestinationMongodbTunnelMethodPasswordAuthenticationTunnelMethodEnum","DestinationMongodbTunnelMethodSSHKeyAuthentication","DestinationMongodbTunnelMethodSSHKeyAuthenticationTunnelMethodEnum","DestinationMssql","DestinationMssqlMssqlEnum","DestinationMssqlSslMethodEncryptedTrustServerCertificate","DestinationMssqlSslMethodEncryptedTrustServerCertificateSslMethodEnum","DestinationMssqlSslMethodEncryptedVerifyCertificate","DestinationMssqlSslMethodEncryptedVerifyCertificateSslMethodEnum","DestinationMssqlTunnelMethodNoTunnel","DestinationMssqlTunnelMethodNoTunnelTunnelMethodEnum","DestinationMssqlTunnelMethodPasswordAuthentication","DestinationMssqlTunnelMethodPasswordAuthenticationTunnelMethodEnum","DestinationMssqlTunnelMethodSSHKeyAuthentication","DestinationMssqlTunnelMethodSSHKeyAuthenticationTunnelMethodEnum","DestinationMysql","DestinationMysqlMysqlEnum","DestinationMysqlTunnelMethodNoTunnel","DestinationMysqlTunnelMethodNoTunnelTunnelMethodEnum","DestinationMysqlTunnelMethodPasswordAuthentication","DestinationMysqlTunnelMethodPasswordAuthenticationTunnelMethodEnum","DestinationMysqlTunnelMethodSSHKeyAuthentication","DestinationMysqlTunnelMethodSSHKeyAuthenticationTunnelMethodEnum","DestinationOracle","DestinationOracleOracleEnum","DestinationOracleTunnelMethodNoTunnel","DestinationOracleTunnelMethodNoTunnelTunnelMethodEnum","DestinationOracleTunnelMethodPasswordAuthentication","DestinationOracleTunnelMethodPasswordAuthenticationTunnelMethodEnum","DestinationOracleTunnelMethodSSHKeyAuthentication","DestinationOracleTunnelMethodSSHKeyAuthenticationTunnelMethodEnum","DestinationPostgres","DestinationPostgresPostgresEnum","DestinationPostgresSslModeAllow","DestinationPostgresSslModeAllowModeEnum","DestinationPostgresSslModeDisable","DestinationPostgresSslModeDisableModeEnum","DestinationPostgresSslModePrefer","DestinationPostgresSslModePreferModeEnum","DestinationPostgresSslModeRequire","DestinationPostgresSslModeRequireModeEnum","DestinationPostgresSslModeVerifyCa","DestinationPostgresSslModeVerifyCaModeEnum","DestinationPostgresSslModeVerifyFull","DestinationPostgresSslModeVerifyFullModeEnum","DestinationPostgresTunnelMethodNoTunnel","DestinationPostgresTunnelMethodNoTunnelTunnelMethodEnum","DestinationPostgresTunnelMethodPasswordAuthentication","DestinationPostgresTunnelMethodPasswordAuthenticationTunn
elMethodEnum","DestinationPostgresTunnelMethodSSHKeyAuthentication","DestinationPostgresTunnelMethodSSHKeyAuthenticationTunnelMethodEnum","DestinationPubsub","DestinationPubsubPubsubEnum","DestinationPulsar","DestinationPulsarCompressionTypeEnum","DestinationPulsarPulsarEnum","DestinationPulsarTopicTypeEnum","DestinationRabbitmq","DestinationRabbitmqRabbitmqEnum","DestinationRedis","DestinationRedisCacheTypeEnum","DestinationRedisRedisEnum","DestinationRedisSslModeDisable","DestinationRedisSslModeDisableModeEnum","DestinationRedisSslModeVerifyFull","DestinationRedisSslModeVerifyFullModeEnum","DestinationRedisTunnelMethodNoTunnel","DestinationRedisTunnelMethodNoTunnelTunnelMethodEnum","DestinationRedisTunnelMethodPasswordAuthentication","DestinationRedisTunnelMethodPasswordAuthenticationTunnelMethodEnum","DestinationRedisTunnelMethodSSHKeyAuthentication","DestinationRedisTunnelMethodSSHKeyAuthenticationTunnelMethodEnum","DestinationRedshift","DestinationRedshiftRedshiftEnum","DestinationRedshiftTunnelMethodNoTunnel","DestinationRedshiftTunnelMethodNoTunnelTunnelMethodEnum","DestinationRedshiftTunnelMethodPasswordAuthentication","DestinationRedshiftTunnelMethodPasswordAuthenticationTunnelMethodEnum","DestinationRedshiftTunnelMethodSSHKeyAuthentication","DestinationRedshiftTunnelMethodSSHKeyAuthenticationTunnelMethodEnum","DestinationRedshiftUploadingMethodS3Staging","DestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption","DestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryptionEncryptionTypeEnum","DestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryption","DestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryptionEncryptionTypeEnum","DestinationRedshiftUploadingMethodS3StagingMethodEnum","DestinationRedshiftUploadingMethodS3StagingS3BucketRegionEnum","DestinationRedshiftUploadingMethodStandard","DestinationRedshiftUploadingMethodStandardMethodEnum","DestinationResponse","DestinationRockset","DestinationRocksetRocksetEnum","DestinationS3","DestinationS3FormatAvroApacheAvro","DestinationS3FormatAvroApacheAvroCompressionCodecBzip2","DestinationS3FormatAvroApacheAvroCompressionCodecBzip2CodecEnum","DestinationS3FormatAvroApacheAvroCompressionCodecDeflate","DestinationS3FormatAvroApacheAvroCompressionCodecDeflateCodecEnum","DestinationS3FormatAvroApacheAvroCompressionCodecNoCompression","DestinationS3FormatAvroApacheAvroCompressionCodecNoCompressionCodecEnum","DestinationS3FormatAvroApacheAvroCompressionCodecSnappy","DestinationS3FormatAvroApacheAvroCompressionCodecSnappyCodecEnum","DestinationS3FormatAvroApacheAvroCompressionCodecXz","DestinationS3FormatAvroApacheAvroCompressionCodecXzCodecEnum","DestinationS3FormatAvroApacheAvroCompressionCodecZstandard","DestinationS3FormatAvroApacheAvroCompressionCodecZstandardCodecEnum","DestinationS3FormatAvroApacheAvroFormatTypeEnum","DestinationS3FormatCSVCommaSeparatedValues","DestinationS3FormatCSVCommaSeparatedValuesCompressionGZIP","DestinationS3FormatCSVCommaSeparatedValuesCompressionGZIPCompressionTypeEnum","DestinationS3FormatCSVCommaSeparatedValuesCompressionNoCompression","DestinationS3FormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionTypeEnum","DestinationS3FormatCSVCommaSeparatedValuesFlatteningEnum","DestinationS3FormatCSVCommaSeparatedValuesFormatTypeEnum","DestinationS3FormatJSONLinesNewlineDelimitedJSON","DestinationS3FormatJSONLinesNewlineDelimitedJSONCompressionGZIP","DestinationS3FormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionTypeEnum","DestinationS3Fo
rmatJSONLinesNewlineDelimitedJSONCompressionNoCompression","DestinationS3FormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionTypeEnum","DestinationS3FormatJSONLinesNewlineDelimitedJSONFlatteningEnum","DestinationS3FormatJSONLinesNewlineDelimitedJSONFormatTypeEnum","DestinationS3FormatParquetColumnarStorage","DestinationS3FormatParquetColumnarStorageCompressionCodecEnum","DestinationS3FormatParquetColumnarStorageFormatTypeEnum","DestinationS3Glue","DestinationS3GlueFormatJSONLinesNewlineDelimitedJSON","DestinationS3GlueFormatJSONLinesNewlineDelimitedJSONCompressionGZIP","DestinationS3GlueFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionTypeEnum","DestinationS3GlueFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression","DestinationS3GlueFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionTypeEnum","DestinationS3GlueFormatJSONLinesNewlineDelimitedJSONFlatteningEnum","DestinationS3GlueFormatJSONLinesNewlineDelimitedJSONFormatTypeEnum","DestinationS3GlueS3BucketRegionEnum","DestinationS3GlueS3GlueEnum","DestinationS3GlueSerializationLibraryEnum","DestinationS3S3BucketRegionEnum","DestinationS3S3Enum","DestinationScylla","DestinationScyllaScyllaEnum","DestinationSftpJSON","DestinationSftpJSONSftpJSONEnum","DestinationSnowflake","DestinationSnowflakeCredentialsKeyPairAuthentication","DestinationSnowflakeCredentialsKeyPairAuthenticationAuthTypeEnum","DestinationSnowflakeCredentialsOAuth20","DestinationSnowflakeCredentialsOAuth20AuthTypeEnum","DestinationSnowflakeCredentialsUsernameAndPassword","DestinationSnowflakeCredentialsUsernameAndPasswordAuthTypeEnum","DestinationSnowflakeLoadingMethodAWSS3Staging","DestinationSnowflakeLoadingMethodAWSS3StagingEncryptionAESCBCEnvelopeEncryption","DestinationSnowflakeLoadingMethodAWSS3StagingEncryptionAESCBCEnvelopeEncryptionEncryptionTypeEnum","DestinationSnowflakeLoadingMethodAWSS3StagingEncryptionNoEncryption","DestinationSnowflakeLoadingMethodAWSS3StagingEncryptionNoEncryptionEncryptionTypeEnum","DestinationSnowflakeLoadingMethodAWSS3StagingMethodEnum","DestinationSnowflakeLoadingMethodAWSS3StagingS3BucketRegionEnum","DestinationSnowflakeLoadingMethodGoogleCloudStorageStaging","DestinationSnowflakeLoadingMethodGoogleCloudStorageStagingMethodEnum","DestinationSnowflakeLoadingMethodRecommendedInternalStaging","DestinationSnowflakeLoadingMethodRecommendedInternalStagingMethodEnum","DestinationSnowflakeLoadingMethodSelectAnotherOption","DestinationSnowflakeLoadingMethodSelectAnotherOptionMethodEnum","DestinationSnowflakeSnowflakeEnum","DestinationTypesense","DestinationTypesenseTypesenseEnum","DestinationsResponse","GeographyEnumEnum","InitiateOauthRequest","JobCreateRequest","JobResponse","JobStatusEnumEnum","JobTypeEnumEnum","JobsResponse","NamespaceDefinitionEnumEnum","NonBreakingSchemaUpdatesBehaviorEnumEnum","ScheduleTypeEnumEnum","ScheduleTypeWithBasicEnumEnum","Security","SourceAircall","SourceAircallAircallEnum","SourceAirtable","SourceAirtableAirtableEnum","SourceAirtableCredentialsOAuth20","SourceAirtableCredentialsOAuth20AuthMethodEnum","SourceAirtableCredentialsPersonalAccessToken","SourceAirtableCredentialsPersonalAccessTokenAuthMethodEnum","SourceAlloydb","SourceAlloydbAlloydbEnum","SourceAlloydbReplicationMethodStandard","SourceAlloydbReplicationMethodStandardMethodEnum","SourceAlloydbTunnelMethodNoTunnel","SourceAlloydbTunnelMethodNoTunnelTunnelMethodEnum","SourceAlloydbTunnelMethodPasswordAuthentication","SourceAlloydbTunnelMethodPasswordAuthenticationTunnelMethodEnum","SourceAlloydbTunnelMe
thodSSHKeyAuthentication","SourceAlloydbTunnelMethodSSHKeyAuthenticationTunnelMethodEnum","SourceAmazonAds","SourceAmazonAdsAmazonAdsEnum","SourceAmazonAdsAuthTypeEnum","SourceAmazonAdsRegionEnum","SourceAmazonAdsReportRecordTypesEnum","SourceAmazonAdsStateFilterEnum","SourceAmazonSellerPartner","SourceAmazonSellerPartnerAWSEnvironmentEnum","SourceAmazonSellerPartnerAWSRegionEnum","SourceAmazonSellerPartnerAmazonSellerPartnerEnum","SourceAmazonSellerPartnerAuthTypeEnum","SourceAmazonSqs","SourceAmazonSqsAWSRegionEnum","SourceAmazonSqsAmazonSqsEnum","SourceAmplitude","SourceAmplitudeAmplitudeEnum","SourceAmplitudeDataRegionEnum","SourceApifyDataset","SourceApifyDatasetApifyDatasetEnum","SourceAsana","SourceAsanaAsanaEnum","SourceAsanaCredentialsAuthenticateViaAsanaOauth","SourceAsanaCredentialsAuthenticateViaAsanaOauthCredentialsTitleEnum","SourceAsanaCredentialsAuthenticateWithPersonalAccessToken","SourceAsanaCredentialsAuthenticateWithPersonalAccessTokenCredentialsTitleEnum","SourceAuth0","SourceAuth0Auth0Enum","SourceAuth0CredentialsOAuth2AccessToken","SourceAuth0CredentialsOAuth2AccessTokenAuthenticationMethodEnum","SourceAuth0CredentialsOAuth2ConfidentialApplication","SourceAuth0CredentialsOAuth2ConfidentialApplicationAuthenticationMethodEnum","SourceAwsCloudtrail","SourceAwsCloudtrailAwsCloudtrailEnum","SourceAzureBlobStorage","SourceAzureBlobStorageAzureBlobStorageEnum","SourceAzureBlobStorageFormatJSONLinesNewlineDelimitedJSON","SourceAzureBlobStorageFormatJSONLinesNewlineDelimitedJSONFormatTypeEnum","SourceAzureTable","SourceAzureTableAzureTableEnum","SourceBambooHr","SourceBambooHrBambooHrEnum","SourceBigcommerce","SourceBigcommerceBigcommerceEnum","SourceBigquery","SourceBigqueryBigqueryEnum","SourceBingAds","SourceBingAdsAuthMethodEnum","SourceBingAdsBingAdsEnum","SourceBraintree","SourceBraintreeBraintreeEnum","SourceBraintreeEnvironmentEnum","SourceBraze","SourceBrazeBrazeEnum","SourceChargebee","SourceChargebeeChargebeeEnum","SourceChargebeeProductCatalogEnum","SourceChartmogul","SourceChartmogulChartmogulEnum","SourceChartmogulIntervalEnum","SourceClickhouse","SourceClickhouseClickhouseEnum","SourceClickhouseTunnelMethodNoTunnel","SourceClickhouseTunnelMethodNoTunnelTunnelMethodEnum","SourceClickhouseTunnelMethodPasswordAuthentication","SourceClickhouseTunnelMethodPasswordAuthenticationTunnelMethodEnum","SourceClickhouseTunnelMethodSSHKeyAuthentication","SourceClickhouseTunnelMethodSSHKeyAuthenticationTunnelMethodEnum","SourceClickupAPI","SourceClickupAPIClickupAPIEnum","SourceCloseCom","SourceCloseComCloseComEnum","SourceCoda","SourceCodaCodaEnum","SourceCoinAPI","SourceCoinAPICoinAPIEnum","SourceCoinAPIEnvironmentEnum","SourceCoinmarketcap","SourceCoinmarketcapCoinmarketcapEnum","SourceCoinmarketcapDataTypeEnum","SourceConfigcat","SourceConfigcatConfigcatEnum","SourceConfluence","SourceConfluenceConfluenceEnum","SourceCreateRequest","SourceDatascope","SourceDatascopeDatascopeEnum","SourceDelighted","SourceDelightedDelightedEnum","SourceDixa","SourceDixaDixaEnum","SourceDockerhub","SourceDockerhubDockerhubEnum","SourceDremio","SourceDremioDremioEnum","SourceDynamodb","SourceDynamodbDynamodbEnum","SourceDynamodbDynamodbRegionEnum","SourceE2eTestCloud","SourceE2eTestCloudE2eTestCloudEnum","SourceE2eTestCloudMockCatalogMultiSchema","SourceE2eTestCloudMockCatalogMultiSchemaTypeEnum","SourceE2eTestCloudMockCatalogSingleSchema","SourceE2eTestCloudMockCatalogSingleSchemaTypeEnum","SourceE2eTestCloudTypeEnum","SourceEmailoctopus","SourceEmailoctopusEmailoctopusEnum","SourceExchangeR
ates","SourceExchangeRatesExchangeRatesEnum","SourceFacebookMarketing","SourceFacebookMarketingFacebookMarketingEnum","SourceFacebookMarketingInsightConfig","SourceFacebookMarketingInsightConfigLevelEnum","SourceFacebookMarketingInsightConfigValidActionBreakdownsEnum","SourceFacebookMarketingInsightConfigValidBreakdownsEnum","SourceFacebookMarketingInsightConfigValidEnumsEnum","SourceFacebookPages","SourceFacebookPagesFacebookPagesEnum","SourceFaker","SourceFakerFakerEnum","SourceFauna","SourceFaunaCollection","SourceFaunaCollectionDeletionsDisabled","SourceFaunaCollectionDeletionsDisabledDeletionModeEnum","SourceFaunaCollectionDeletionsEnabled","SourceFaunaCollectionDeletionsEnabledDeletionModeEnum","SourceFaunaFaunaEnum","SourceFileSecure","SourceFileSecureFileFormatEnum","SourceFileSecureFileSecureEnum","SourceFileSecureProviderAzBlobAzureBlobStorage","SourceFileSecureProviderAzBlobAzureBlobStorageStorageEnum","SourceFileSecureProviderGCSGoogleCloudStorage","SourceFileSecureProviderGCSGoogleCloudStorageStorageEnum","SourceFileSecureProviderHTTPSPublicWeb","SourceFileSecureProviderHTTPSPublicWebStorageEnum","SourceFileSecureProviderS3AmazonWebServices","SourceFileSecureProviderS3AmazonWebServicesStorageEnum","SourceFileSecureProviderSCPSecureCopyProtocol","SourceFileSecureProviderSCPSecureCopyProtocolStorageEnum","SourceFileSecureProviderSFTPSecureFileTransferProtocol","SourceFileSecureProviderSFTPSecureFileTransferProtocolStorageEnum","SourceFileSecureProviderSSHSecureShell","SourceFileSecureProviderSSHSecureShellStorageEnum","SourceFirebolt","SourceFireboltFireboltEnum","SourceFreshcaller","SourceFreshcallerFreshcallerEnum","SourceFreshdesk","SourceFreshdeskFreshdeskEnum","SourceFreshsales","SourceFreshsalesFreshsalesEnum","SourceGcs","SourceGcsGcsEnum","SourceGetlago","SourceGetlagoGetlagoEnum","SourceGithub","SourceGithubCredentialsOAuth","SourceGithubCredentialsOAuthOptionTitleEnum","SourceGithubCredentialsPersonalAccessToken","SourceGithubCredentialsPersonalAccessTokenOptionTitleEnum","SourceGithubGithubEnum","SourceGitlab","SourceGitlabCredentialsOAuth20","SourceGitlabCredentialsOAuth20AuthTypeEnum","SourceGitlabCredentialsPrivateToken","SourceGitlabCredentialsPrivateTokenAuthTypeEnum","SourceGitlabGitlabEnum","SourceGlassfrog","SourceGlassfrogGlassfrogEnum","SourceGnews","SourceGnewsCountryEnum","SourceGnewsGnewsEnum","SourceGnewsInEnum","SourceGnewsLanguageEnum","SourceGnewsNullableEnum","SourceGnewsSortByEnum","SourceGnewsTopHeadlinesTopicEnum","SourceGoogleAds","SourceGoogleAdsCustomQueries","SourceGoogleAdsGoogleAdsEnum","SourceGoogleAdsGoogleCredentials","SourceGoogleAnalyticsDataAPI","SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth","SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauthAuthTypeEnum","SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication","SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthenticationAuthTypeEnum","SourceGoogleAnalyticsDataAPIGoogleAnalyticsDataAPIEnum","SourceGoogleAnalyticsV4","SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth","SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauthAuthTypeEnum","SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication","SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthenticationAuthTypeEnum","SourceGoogleAnalyticsV4GoogleAnalyticsV4Enum","SourceGoogleDirectory","SourceGoogleDirectoryGoogleDirectoryEnum","SourceGoogleSearchConsole","SourceGoogleSearchConsoleAuthorizationOAuth","SourceGoogleSearchConsoleAuthorizationOAuthA
uthTypeEnum","SourceGoogleSearchConsoleAuthorizationServiceAccountKeyAuthentication","SourceGoogleSearchConsoleAuthorizationServiceAccountKeyAuthenticationAuthTypeEnum","SourceGoogleSearchConsoleGoogleSearchConsoleEnum","SourceGoogleSheets","SourceGoogleSheetsCredentialsAuthenticateViaGoogleOAuth","SourceGoogleSheetsCredentialsAuthenticateViaGoogleOAuthAuthTypeEnum","SourceGoogleSheetsCredentialsServiceAccountKeyAuthentication","SourceGoogleSheetsCredentialsServiceAccountKeyAuthenticationAuthTypeEnum","SourceGoogleSheetsGoogleSheetsEnum","SourceGoogleWebfonts","SourceGoogleWebfontsGoogleWebfontsEnum","SourceGoogleWorkspaceAdminReports","SourceGoogleWorkspaceAdminReportsGoogleWorkspaceAdminReportsEnum","SourceGreenhouse","SourceGreenhouseGreenhouseEnum","SourceGridly","SourceGridlyGridlyEnum","SourceHarvest","SourceHarvestHarvestEnum","SourceHubplanner","SourceHubplannerHubplannerEnum","SourceHubspot","SourceHubspotCredentialsOAuth","SourceHubspotCredentialsOAuthCredentialsEnum","SourceHubspotCredentialsPrivateApp","SourceHubspotCredentialsPrivateAppCredentialsEnum","SourceHubspotHubspotEnum","SourceInsightly","SourceInsightlyInsightlyEnum","SourceInstagram","SourceInstagramInstagramEnum","SourceInstatus","SourceInstatusInstatusEnum","SourceIntercom","SourceIntercomIntercomEnum","SourceIp2whois","SourceIp2whoisIp2whoisEnum","SourceIterable","SourceIterableIterableEnum","SourceJira","SourceJiraJiraEnum","SourceK6Cloud","SourceK6CloudK6CloudEnum","SourceKlarna","SourceKlarnaKlarnaEnum","SourceKlarnaRegionEnum","SourceKlaviyo","SourceKlaviyoKlaviyoEnum","SourceKustomerSinger","SourceKustomerSingerKustomerSingerEnum","SourceLaunchdarkly","SourceLaunchdarklyLaunchdarklyEnum","SourceLemlist","SourceLemlistLemlistEnum","SourceLinkedinAds","SourceLinkedinAdsCredentialsAccessToken","SourceLinkedinAdsCredentialsAccessTokenAuthMethodEnum","SourceLinkedinAdsCredentialsOAuth20","SourceLinkedinAdsCredentialsOAuth20AuthMethodEnum","SourceLinkedinAdsLinkedinAdsEnum","SourceLinkedinPages","SourceLinkedinPagesCredentialsAccessToken","SourceLinkedinPagesCredentialsAccessTokenAuthMethodEnum","SourceLinkedinPagesCredentialsOAuth20","SourceLinkedinPagesCredentialsOAuth20AuthMethodEnum","SourceLinkedinPagesLinkedinPagesEnum","SourceLinnworks","SourceLinnworksLinnworksEnum","SourceLokalise","SourceLokaliseLokaliseEnum","SourceMailchimp","SourceMailchimpCredentialsAPIKey","SourceMailchimpCredentialsAPIKeyAuthTypeEnum","SourceMailchimpCredentialsOAuth20","SourceMailchimpCredentialsOAuth20AuthTypeEnum","SourceMailchimpMailchimpEnum","SourceMailgun","SourceMailgunMailgunEnum","SourceMailjetSms","SourceMailjetSmsMailjetSmsEnum","SourceMarketo","SourceMarketoMarketoEnum","SourceMetabase","SourceMetabaseMetabaseEnum","SourceMicrosoftTeams","SourceMicrosoftTeamsCredentialsAuthenticateViaMicrosoft","SourceMicrosoftTeamsCredentialsAuthenticateViaMicrosoftAuthTypeEnum","SourceMicrosoftTeamsCredentialsAuthenticateViaMicrosoftOAuth20","SourceMicrosoftTeamsCredentialsAuthenticateViaMicrosoftOAuth20AuthTypeEnum","SourceMicrosoftTeamsMicrosoftTeamsEnum","SourceMixpanel","SourceMixpanelCredentialsProjectSecret","SourceMixpanelCredentialsProjectSecretOptionTitleEnum","SourceMixpanelCredentialsServiceAccount","SourceMixpanelCredentialsServiceAccountOptionTitleEnum","SourceMixpanelMixpanelEnum","SourceMixpanelRegionEnum","SourceMonday","SourceMondayCredentialsAPIToken","SourceMondayCredentialsAPITokenAuthTypeEnum","SourceMondayCredentialsOAuth20","SourceMondayCredentialsOAuth20AuthTypeEnum","SourceMondayMondayEnum","SourceMongodb","So
urceMongodbInstanceTypeMongoDBAtlas","SourceMongodbInstanceTypeMongoDBAtlasInstanceEnum","SourceMongodbInstanceTypeReplicaSet","SourceMongodbInstanceTypeReplicaSetInstanceEnum","SourceMongodbInstanceTypeStandaloneMongoDbInstance","SourceMongodbInstanceTypeStandaloneMongoDbInstanceInstanceEnum","SourceMongodbMongodbEnum","SourceMssql","SourceMssqlMssqlEnum","SourceMssqlReplicationMethodLogicalReplicationCDC","SourceMssqlReplicationMethodLogicalReplicationCDCDataToSyncEnum","SourceMssqlReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevelEnum","SourceMssqlReplicationMethodLogicalReplicationCDCMethodEnum","SourceMssqlReplicationMethodStandard","SourceMssqlReplicationMethodStandardMethodEnum","SourceMssqlSslMethodEncryptedTrustServerCertificate","SourceMssqlSslMethodEncryptedTrustServerCertificateSslMethodEnum","SourceMssqlSslMethodEncryptedVerifyCertificate","SourceMssqlSslMethodEncryptedVerifyCertificateSslMethodEnum","SourceMssqlTunnelMethodNoTunnel","SourceMssqlTunnelMethodNoTunnelTunnelMethodEnum","SourceMssqlTunnelMethodPasswordAuthentication","SourceMssqlTunnelMethodPasswordAuthenticationTunnelMethodEnum","SourceMssqlTunnelMethodSSHKeyAuthentication","SourceMssqlTunnelMethodSSHKeyAuthenticationTunnelMethodEnum","SourceMyHours","SourceMyHoursMyHoursEnum","SourceMysql","SourceMysqlMysqlEnum","SourceMysqlReplicationMethodLogicalReplicationCDC","SourceMysqlReplicationMethodLogicalReplicationCDCMethodEnum","SourceMysqlReplicationMethodStandard","SourceMysqlReplicationMethodStandardMethodEnum","SourceMysqlSslModePreferred","SourceMysqlSslModePreferredModeEnum","SourceMysqlSslModeRequired","SourceMysqlSslModeRequiredModeEnum","SourceMysqlSslModeVerifyCA","SourceMysqlSslModeVerifyCAModeEnum","SourceMysqlSslModeVerifyIdentity","SourceMysqlSslModeVerifyIdentityModeEnum","SourceMysqlTunnelMethodNoTunnel","SourceMysqlTunnelMethodNoTunnelTunnelMethodEnum","SourceMysqlTunnelMethodPasswordAuthentication","SourceMysqlTunnelMethodPasswordAuthenticationTunnelMethodEnum","SourceMysqlTunnelMethodSSHKeyAuthentication","SourceMysqlTunnelMethodSSHKeyAuthenticationTunnelMethodEnum","SourceNetsuite","SourceNetsuiteNetsuiteEnum","SourceNotion","SourceNotionCredentialsAccessToken","SourceNotionCredentialsAccessTokenAuthTypeEnum","SourceNotionCredentialsOAuth20","SourceNotionCredentialsOAuth20AuthTypeEnum","SourceNotionNotionEnum","SourceNytimes","SourceNytimesNytimesEnum","SourceNytimesPeriodUsedForMostPopularStreamsEnum","SourceNytimesShareTypeUsedForMostPopularSharedStreamEnum","SourceOkta","SourceOktaCredentialsAPIToken","SourceOktaCredentialsAPITokenAuthTypeEnum","SourceOktaCredentialsOAuth20","SourceOktaCredentialsOAuth20AuthTypeEnum","SourceOktaOktaEnum","SourceOmnisend","SourceOmnisendOmnisendEnum","SourceOnesignal","SourceOnesignalApplications","SourceOnesignalOnesignalEnum","SourceOpenweather","SourceOpenweatherLanguageEnum","SourceOpenweatherOpenweatherEnum","SourceOpenweatherUnitsEnum","SourceOracle","SourceOracleConnectionDataServiceName","SourceOracleConnectionDataServiceNameConnectionTypeEnum","SourceOracleConnectionDataSystemIDSID","SourceOracleConnectionDataSystemIDSIDConnectionTypeEnum","SourceOracleEncryptionNativeNetworkEncryptionNNE","SourceOracleEncryptionNativeNetworkEncryptionNNEEncryptionAlgorithmEnum","SourceOracleEncryptionNativeNetworkEncryptionNNEEncryptionMethodEnum","SourceOracleEncryptionTLSEncryptedVerifyCertificate","SourceOracleEncryptionTLSEncryptedVerifyCertificateEncryptionMethodEnum","SourceOracleOracleEnum","SourceOracleTunnelMethodNoTunnel","SourceOracleTunnelM
ethodNoTunnelTunnelMethodEnum","SourceOracleTunnelMethodPasswordAuthentication","SourceOracleTunnelMethodPasswordAuthenticationTunnelMethodEnum","SourceOracleTunnelMethodSSHKeyAuthentication","SourceOracleTunnelMethodSSHKeyAuthenticationTunnelMethodEnum","SourceOrb","SourceOrbOrbEnum","SourceOrbit","SourceOrbitOrbitEnum","SourceOutreach","SourceOutreachOutreachEnum","SourcePaypalTransaction","SourcePaypalTransactionPaypalTransactionEnum","SourcePaystack","SourcePaystackPaystackEnum","SourcePendo","SourcePendoPendoEnum","SourcePersistiq","SourcePersistiqPersistiqEnum","SourcePexelsAPI","SourcePexelsAPIPexelsAPIEnum","SourcePinterest","SourcePinterestCredentialsAccessToken","SourcePinterestCredentialsAccessTokenAuthMethodEnum","SourcePinterestCredentialsOAuth20","SourcePinterestCredentialsOAuth20AuthMethodEnum","SourcePinterestPinterestEnum","SourcePinterestStatusEnum","SourcePipedrive","SourcePipedriveAPIKeyAuthentication","SourcePipedriveAPIKeyAuthenticationAuthTypeEnum","SourcePipedrivePipedriveEnum","SourcePocket","SourcePocketContentTypeEnum","SourcePocketDetailTypeEnum","SourcePocketPocketEnum","SourcePocketSortByEnum","SourcePocketStateEnum","SourcePokeapi","SourcePokeapiPokeapiEnum","SourcePolygonStockAPI","SourcePolygonStockAPIPolygonStockAPIEnum","SourcePostgres","SourcePostgresPostgresEnum","SourcePostgresReplicationMethodStandard","SourcePostgresReplicationMethodStandardMethodEnum","SourcePostgresTunnelMethodNoTunnel","SourcePostgresTunnelMethodNoTunnelTunnelMethodEnum","SourcePostgresTunnelMethodPasswordAuthentication","SourcePostgresTunnelMethodPasswordAuthenticationTunnelMethodEnum","SourcePostgresTunnelMethodSSHKeyAuthentication","SourcePostgresTunnelMethodSSHKeyAuthenticationTunnelMethodEnum","SourcePosthog","SourcePosthogPosthogEnum","SourcePostmarkapp","SourcePostmarkappPostmarkappEnum","SourcePrestashop","SourcePrestashopPrestashopEnum","SourcePublicApis","SourcePublicApisPublicApisEnum","SourcePunkAPI","SourcePunkAPIPunkAPIEnum","SourcePypi","SourcePypiPypiEnum","SourceQualaroo","SourceQualarooQualarooEnum","SourceQuickbooks","SourceQuickbooksCredentialsOAuth20","SourceQuickbooksCredentialsOAuth20AuthTypeEnum","SourceQuickbooksQuickbooksEnum","SourceRailz","SourceRailzRailzEnum","SourceRecharge","SourceRechargeRechargeEnum","SourceRecreation","SourceRecreationRecreationEnum","SourceRecruitee","SourceRecruiteeRecruiteeEnum","SourceRecurly","SourceRecurlyRecurlyEnum","SourceRedshift","SourceRedshiftRedshiftEnum","SourceResponse","SourceRetently","SourceRetentlyRetentlyEnum","SourceRkiCovid","SourceRkiCovidRkiCovidEnum","SourceRss","SourceRssRssEnum","SourceS3","SourceS3FormatAvro","SourceS3FormatAvroFiletypeEnum","SourceS3FormatCSV","SourceS3FormatCSVFiletypeEnum","SourceS3FormatJsonl","SourceS3FormatJsonlFiletypeEnum","SourceS3FormatJsonlUnexpectedFieldBehaviorEnum","SourceS3FormatParquet","SourceS3FormatParquetFiletypeEnum","SourceS3S3AmazonWebServices","SourceS3S3Enum","SourceSalesforce","SourceSalesforceAuthTypeEnum","SourceSalesforceSalesforceEnum","SourceSalesforceSinger","SourceSalesforceSingerAPITypeEnum","SourceSalesforceSingerSalesforceSingerEnum","SourceSalesforceStreamsCriteria","SourceSalesforceStreamsCriteriaSearchCriteriaEnum","SourceSalesloft","SourceSalesloftCredentialsAuthenticateViaAPIKey","SourceSalesloftCredentialsAuthenticateViaAPIKeyAuthTypeEnum","SourceSalesloftCredentialsAuthenticateViaOAuth","SourceSalesloftCredentialsAuthenticateViaOAuthAuthTypeEnum","SourceSalesloftSalesloftEnum","SourceSapFieldglass","SourceSapFieldglassSapFieldglassEnum","Sourc
eSecoda","SourceSecodaSecodaEnum","SourceSendgrid","SourceSendgridSendgridEnum","SourceSendinblue","SourceSendinblueSendinblueEnum","SourceSenseforce","SourceSenseforceSenseforceEnum","SourceSentry","SourceSentrySentryEnum","SourceSftp","SourceSftpBulk","SourceSftpBulkFileTypeEnum","SourceSftpBulkSftpBulkEnum","SourceSftpCredentialsPasswordAuthentication","SourceSftpCredentialsPasswordAuthenticationAuthMethodEnum","SourceSftpCredentialsSSHKeyAuthentication","SourceSftpCredentialsSSHKeyAuthenticationAuthMethodEnum","SourceSftpSftpEnum","SourceShopify","SourceShopifyCredentialsAPIPassword","SourceShopifyCredentialsAPIPasswordAuthMethodEnum","SourceShopifyCredentialsOAuth20","SourceShopifyCredentialsOAuth20AuthMethodEnum","SourceShopifyShopifyEnum","SourceShortio","SourceShortioShortioEnum","SourceSlack","SourceSlackCredentialsAPIToken","SourceSlackCredentialsAPITokenOptionTitleEnum","SourceSlackCredentialsSignInViaSlackOAuth","SourceSlackCredentialsSignInViaSlackOAuthOptionTitleEnum","SourceSlackSlackEnum","SourceSmaily","SourceSmailySmailyEnum","SourceSmartengage","SourceSmartengageSmartengageEnum","SourceSmartsheets","SourceSmartsheetsCredentialsAPIAccessToken","SourceSmartsheetsCredentialsAPIAccessTokenAuthTypeEnum","SourceSmartsheetsCredentialsOAuth20","SourceSmartsheetsCredentialsOAuth20AuthTypeEnum","SourceSmartsheetsSmartsheetsEnum","SourceSnapchatMarketing","SourceSnapchatMarketingSnapchatMarketingEnum","SourceSnowflake","SourceSnowflakeCredentialsOAuth20","SourceSnowflakeCredentialsOAuth20AuthTypeEnum","SourceSnowflakeCredentialsUsernameAndPassword","SourceSnowflakeCredentialsUsernameAndPasswordAuthTypeEnum","SourceSnowflakeSnowflakeEnum","SourceSonarCloud","SourceSonarCloudSonarCloudEnum","SourceSpacexAPI","SourceSpacexAPISpacexAPIEnum","SourceSquare","SourceSquareCredentialsAPIKey","SourceSquareCredentialsAPIKeyAuthTypeEnum","SourceSquareCredentialsOauthAuthentication","SourceSquareCredentialsOauthAuthenticationAuthTypeEnum","SourceSquareSquareEnum","SourceStrava","SourceStravaAuthTypeEnum","SourceStravaStravaEnum","SourceStripe","SourceStripeStripeEnum","SourceSurveySparrow","SourceSurveySparrowRegionEUBasedAccount","SourceSurveySparrowRegionEUBasedAccountURLBaseEnum","SourceSurveySparrowRegionGlobalAccount","SourceSurveySparrowRegionGlobalAccountURLBaseEnum","SourceSurveySparrowSurveySparrowEnum","SourceSurveymonkey","SourceSurveymonkeyOriginDatacenterOfTheSurveyMonkeyAccountEnum","SourceSurveymonkeySurveyMonkeyAuthorizationMethod","SourceSurveymonkeySurveyMonkeyAuthorizationMethodAuthMethodEnum","SourceSurveymonkeySurveymonkeyEnum","SourceTempo","SourceTempoTempoEnum","SourceTheGuardianAPI","SourceTheGuardianAPITheGuardianAPIEnum","SourceTiktokMarketing","SourceTiktokMarketingCredentialsOAuth20","SourceTiktokMarketingCredentialsOAuth20AuthTypeEnum","SourceTiktokMarketingCredentialsSandboxAccessToken","SourceTiktokMarketingCredentialsSandboxAccessTokenAuthTypeEnum","SourceTiktokMarketingTiktokMarketingEnum","SourceTodoist","SourceTodoistTodoistEnum","SourceTrello","SourceTrelloTrelloEnum","SourceTrustpilot","SourceTrustpilotCredentialsAPIKey","SourceTrustpilotCredentialsAPIKeyAuthTypeEnum","SourceTrustpilotCredentialsOAuth20","SourceTrustpilotCredentialsOAuth20AuthTypeEnum","SourceTrustpilotTrustpilotEnum","SourceTvmazeSchedule","SourceTvmazeScheduleTvmazeScheduleEnum","SourceTwilio","SourceTwilioTaskrouter","SourceTwilioTaskrouterTwilioTaskrouterEnum","SourceTwilioTwilioEnum","SourceTwitter","SourceTwitterTwitterEnum","SourceTypeform","SourceTypeformTypeformEnum","SourceUsCensus
","SourceUsCensusUsCensusEnum","SourceVantage","SourceVantageVantageEnum","SourceWebflow","SourceWebflowWebflowEnum","SourceWhiskyHunter","SourceWhiskyHunterWhiskyHunterEnum","SourceWikipediaPageviews","SourceWikipediaPageviewsWikipediaPageviewsEnum","SourceWoocommerce","SourceWoocommerceWoocommerceEnum","SourceXero","SourceXeroAuthenticateViaXeroOAuth","SourceXeroXeroEnum","SourceXkcd","SourceXkcdXkcdEnum","SourceYandexMetrica","SourceYandexMetricaYandexMetricaEnum","SourceYounium","SourceYouniumYouniumEnum","SourceYoutubeAnalytics","SourceYoutubeAnalyticsYoutubeAnalyticsEnum","SourceZendeskChat","SourceZendeskChatCredentialsAccessToken","SourceZendeskChatCredentialsAccessTokenCredentialsEnum","SourceZendeskChatCredentialsOAuth20","SourceZendeskChatCredentialsOAuth20CredentialsEnum","SourceZendeskChatZendeskChatEnum","SourceZendeskSunshine","SourceZendeskSunshineCredentialsAPIToken","SourceZendeskSunshineCredentialsAPITokenAuthMethodEnum","SourceZendeskSunshineCredentialsOAuth20","SourceZendeskSunshineCredentialsOAuth20AuthMethodEnum","SourceZendeskSunshineZendeskSunshineEnum","SourceZendeskSupport","SourceZendeskSupportZendeskSupportEnum","SourceZendeskTalk","SourceZendeskTalkZendeskTalkEnum","SourceZenloop","SourceZenloopZenloopEnum","SourceZohoCRMZohoCRMEditionEnum","SourceZohoCrm","SourceZohoCrmDataCenterLocationEnum","SourceZohoCrmEnvironmentEnum","SourceZohoCrmZohoCrmEnum","SourceZoom","SourceZoomZoomEnum","SourceZuora","SourceZuoraDataQueryTypeEnum","SourceZuoraTenantEndpointLocationEnum","SourceZuoraZuoraEnum","SourcesResponse","StreamConfiguration","StreamConfigurations","StreamProperties","WorkspaceCreateRequest","WorkspaceOAuthCredentialsRequest","WorkspaceResponse","WorkspaceUpdateRequest","WorkspacesResponse"]
+__all__ = ["ActorTypeEnum","ConnectionCreateRequest","ConnectionResponse","ConnectionScheduleCreate","ConnectionScheduleResponse","ConnectionStatusEnum","ConnectionSyncModeEnum","ConnectionsResponse","DestinationAmazonSqs","DestinationAmazonSqsAWSRegion","DestinationAmazonSqsAmazonSqs","DestinationAwsDatalake","DestinationAwsDatalakeAwsDatalake","DestinationAwsDatalakeChooseHowToPartitionData","DestinationAwsDatalakeCredentialsIAMRole","DestinationAwsDatalakeCredentialsIAMRoleCredentialsTitle","DestinationAwsDatalakeCredentialsIAMUser","DestinationAwsDatalakeCredentialsIAMUserCredentialsTitle","DestinationAwsDatalakeFormatJSONLinesNewlineDelimitedJSON","DestinationAwsDatalakeFormatJSONLinesNewlineDelimitedJSONCompressionCodecOptional","DestinationAwsDatalakeFormatJSONLinesNewlineDelimitedJSONFormatTypeWildcard","DestinationAwsDatalakeFormatParquetColumnarStorage","DestinationAwsDatalakeFormatParquetColumnarStorageCompressionCodecOptional","DestinationAwsDatalakeFormatParquetColumnarStorageFormatTypeWildcard","DestinationAwsDatalakeS3BucketRegion","DestinationAzureBlobStorage","DestinationAzureBlobStorageAzureBlobStorage","DestinationAzureBlobStorageFormatCSVCommaSeparatedValues","DestinationAzureBlobStorageFormatCSVCommaSeparatedValuesFormatType","DestinationAzureBlobStorageFormatCSVCommaSeparatedValuesNormalizationFlattening","DestinationAzureBlobStorageFormatJSONLinesNewlineDelimitedJSON","DestinationAzureBlobStorageFormatJSONLinesNewlineDelimitedJSONFormatType","DestinationBigquery","DestinationBigqueryBigquery","DestinationBigqueryDatasetLocation","DestinationBigqueryDenormalized","DestinationBigqueryDenormalizedBigqueryDenormalized","DestinationBigqueryDenormalizedDatasetLocation","DestinationBigqueryDenormalizedLoadingMethodGCSStaging","DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKey","DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKeyCredentialType","DestinationBigqueryDenormalizedLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing","DestinationBigqueryDenormalizedLoadingMethodGCSStagingMethod","DestinationBigqueryDenormalizedLoadingMethodStandardInserts","DestinationBigqueryDenormalizedLoadingMethodStandardInsertsMethod","DestinationBigqueryLoadingMethodGCSStaging","DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKey","DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKeyCredentialType","DestinationBigqueryLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing","DestinationBigqueryLoadingMethodGCSStagingMethod","DestinationBigqueryLoadingMethodStandardInserts","DestinationBigqueryLoadingMethodStandardInsertsMethod","DestinationBigqueryTransformationQueryRunType","DestinationCassandra","DestinationCassandraCassandra","DestinationClickhouse","DestinationClickhouseClickhouse","DestinationClickhouseTunnelMethodNoTunnel","DestinationClickhouseTunnelMethodNoTunnelTunnelMethod","DestinationClickhouseTunnelMethodPasswordAuthentication","DestinationClickhouseTunnelMethodPasswordAuthenticationTunnelMethod","DestinationClickhouseTunnelMethodSSHKeyAuthentication","DestinationClickhouseTunnelMethodSSHKeyAuthenticationTunnelMethod","DestinationConvex","DestinationConvexConvex","DestinationCreateRequest","DestinationCumulio","DestinationCumulioCumulio","DestinationDatabend","DestinationDatabendDatabend","DestinationDatabricks","DestinationDatabricksDataSourceAmazonS3","DestinationDatabricksDataSourceAmazonS3DataSourceType","DestinationDatabricksDataSourceAmazonS3S3BucketRegion","DestinationDatabricksDataSourceAzureBlobStorage","Destinati
onDatabricksDataSourceAzureBlobStorageDataSourceType","DestinationDatabricksDataSourceRecommendedManagedTables","DestinationDatabricksDataSourceRecommendedManagedTablesDataSourceType","DestinationDatabricksDatabricks","DestinationDevNull","DestinationDevNullDevNull","DestinationDevNullTestDestinationSilent","DestinationDevNullTestDestinationSilentTestDestinationType","DestinationDynamodb","DestinationDynamodbDynamoDBRegion","DestinationDynamodbDynamodb","DestinationElasticsearch","DestinationElasticsearchAuthenticationMethodAPIKeySecret","DestinationElasticsearchAuthenticationMethodAPIKeySecretMethod","DestinationElasticsearchAuthenticationMethodUsernamePassword","DestinationElasticsearchAuthenticationMethodUsernamePasswordMethod","DestinationElasticsearchElasticsearch","DestinationFirebolt","DestinationFireboltFirebolt","DestinationFireboltLoadingMethodExternalTableViaS3","DestinationFireboltLoadingMethodExternalTableViaS3Method","DestinationFireboltLoadingMethodSQLInserts","DestinationFireboltLoadingMethodSQLInsertsMethod","DestinationFirestore","DestinationFirestoreFirestore","DestinationGCSGCSBucketRegion","DestinationGcs","DestinationGcsCredentialHMACKey","DestinationGcsCredentialHMACKeyCredentialType","DestinationGcsFormatAvroApacheAvro","DestinationGcsFormatAvroApacheAvroCompressionCodecBzip2","DestinationGcsFormatAvroApacheAvroCompressionCodecBzip2Codec","DestinationGcsFormatAvroApacheAvroCompressionCodecDeflate","DestinationGcsFormatAvroApacheAvroCompressionCodecDeflateCodec","DestinationGcsFormatAvroApacheAvroCompressionCodecNoCompression","DestinationGcsFormatAvroApacheAvroCompressionCodecNoCompressionCodec","DestinationGcsFormatAvroApacheAvroCompressionCodecSnappy","DestinationGcsFormatAvroApacheAvroCompressionCodecSnappyCodec","DestinationGcsFormatAvroApacheAvroCompressionCodecXz","DestinationGcsFormatAvroApacheAvroCompressionCodecXzCodec","DestinationGcsFormatAvroApacheAvroCompressionCodecZstandard","DestinationGcsFormatAvroApacheAvroCompressionCodecZstandardCodec","DestinationGcsFormatAvroApacheAvroFormatType","DestinationGcsFormatCSVCommaSeparatedValues","DestinationGcsFormatCSVCommaSeparatedValuesCompressionGZIP","DestinationGcsFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType","DestinationGcsFormatCSVCommaSeparatedValuesCompressionNoCompression","DestinationGcsFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType","DestinationGcsFormatCSVCommaSeparatedValuesFormatType","DestinationGcsFormatCSVCommaSeparatedValuesNormalization","DestinationGcsFormatJSONLinesNewlineDelimitedJSON","DestinationGcsFormatJSONLinesNewlineDelimitedJSONCompressionGZIP","DestinationGcsFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType","DestinationGcsFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression","DestinationGcsFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType","DestinationGcsFormatJSONLinesNewlineDelimitedJSONFormatType","DestinationGcsFormatParquetColumnarStorage","DestinationGcsFormatParquetColumnarStorageCompressionCodec","DestinationGcsFormatParquetColumnarStorageFormatType","DestinationGcsGcs","DestinationGoogleSheets","DestinationGoogleSheetsAuthenticationViaGoogleOAuth","DestinationGoogleSheetsGoogleSheets","DestinationKeen","DestinationKeenKeen","DestinationKinesis","DestinationKinesisKinesis","DestinationMariadbColumnstore","DestinationMariadbColumnstoreMariadbColumnstore","DestinationMariadbColumnstoreTunnelMethodNoTunnel","DestinationMariadbColumnstoreTunnelMethodNoTunnelTunnelMethod","DestinationMariadbColumn
storeTunnelMethodPasswordAuthentication","DestinationMariadbColumnstoreTunnelMethodPasswordAuthenticationTunnelMethod","DestinationMariadbColumnstoreTunnelMethodSSHKeyAuthentication","DestinationMariadbColumnstoreTunnelMethodSSHKeyAuthenticationTunnelMethod","DestinationMeilisearch","DestinationMeilisearchMeilisearch","DestinationMongodb","DestinationMongodbAuthTypeLoginPassword","DestinationMongodbAuthTypeLoginPasswordAuthorization","DestinationMongodbAuthTypeNone","DestinationMongodbAuthTypeNoneAuthorization","DestinationMongodbInstanceTypeMongoDBAtlas","DestinationMongodbInstanceTypeMongoDBAtlasInstance","DestinationMongodbInstanceTypeReplicaSet","DestinationMongodbInstanceTypeReplicaSetInstance","DestinationMongodbInstanceTypeStandaloneMongoDbInstance","DestinationMongodbInstanceTypeStandaloneMongoDbInstanceInstance","DestinationMongodbMongodb","DestinationMongodbTunnelMethodNoTunnel","DestinationMongodbTunnelMethodNoTunnelTunnelMethod","DestinationMongodbTunnelMethodPasswordAuthentication","DestinationMongodbTunnelMethodPasswordAuthenticationTunnelMethod","DestinationMongodbTunnelMethodSSHKeyAuthentication","DestinationMongodbTunnelMethodSSHKeyAuthenticationTunnelMethod","DestinationMssql","DestinationMssqlMssql","DestinationMssqlSslMethodEncryptedTrustServerCertificate","DestinationMssqlSslMethodEncryptedTrustServerCertificateSslMethod","DestinationMssqlSslMethodEncryptedVerifyCertificate","DestinationMssqlSslMethodEncryptedVerifyCertificateSslMethod","DestinationMssqlTunnelMethodNoTunnel","DestinationMssqlTunnelMethodNoTunnelTunnelMethod","DestinationMssqlTunnelMethodPasswordAuthentication","DestinationMssqlTunnelMethodPasswordAuthenticationTunnelMethod","DestinationMssqlTunnelMethodSSHKeyAuthentication","DestinationMssqlTunnelMethodSSHKeyAuthenticationTunnelMethod","DestinationMysql","DestinationMysqlMysql","DestinationMysqlTunnelMethodNoTunnel","DestinationMysqlTunnelMethodNoTunnelTunnelMethod","DestinationMysqlTunnelMethodPasswordAuthentication","DestinationMysqlTunnelMethodPasswordAuthenticationTunnelMethod","DestinationMysqlTunnelMethodSSHKeyAuthentication","DestinationMysqlTunnelMethodSSHKeyAuthenticationTunnelMethod","DestinationOracle","DestinationOracleOracle","DestinationOracleTunnelMethodNoTunnel","DestinationOracleTunnelMethodNoTunnelTunnelMethod","DestinationOracleTunnelMethodPasswordAuthentication","DestinationOracleTunnelMethodPasswordAuthenticationTunnelMethod","DestinationOracleTunnelMethodSSHKeyAuthentication","DestinationOracleTunnelMethodSSHKeyAuthenticationTunnelMethod","DestinationPostgres","DestinationPostgresPostgres","DestinationPostgresSslModeAllow","DestinationPostgresSslModeAllowMode","DestinationPostgresSslModeDisable","DestinationPostgresSslModeDisableMode","DestinationPostgresSslModePrefer","DestinationPostgresSslModePreferMode","DestinationPostgresSslModeRequire","DestinationPostgresSslModeRequireMode","DestinationPostgresSslModeVerifyCa","DestinationPostgresSslModeVerifyCaMode","DestinationPostgresSslModeVerifyFull","DestinationPostgresSslModeVerifyFullMode","DestinationPostgresTunnelMethodNoTunnel","DestinationPostgresTunnelMethodNoTunnelTunnelMethod","DestinationPostgresTunnelMethodPasswordAuthentication","DestinationPostgresTunnelMethodPasswordAuthenticationTunnelMethod","DestinationPostgresTunnelMethodSSHKeyAuthentication","DestinationPostgresTunnelMethodSSHKeyAuthenticationTunnelMethod","DestinationPubsub","DestinationPubsubPubsub","DestinationPulsar","DestinationPulsarCompressionType","DestinationPulsarPulsar","DestinationPulsarTopicType","Desti
nationRabbitmq","DestinationRabbitmqRabbitmq","DestinationRedis","DestinationRedisCacheType","DestinationRedisRedis","DestinationRedisSslModeDisable","DestinationRedisSslModeDisableMode","DestinationRedisSslModeVerifyFull","DestinationRedisSslModeVerifyFullMode","DestinationRedisTunnelMethodNoTunnel","DestinationRedisTunnelMethodNoTunnelTunnelMethod","DestinationRedisTunnelMethodPasswordAuthentication","DestinationRedisTunnelMethodPasswordAuthenticationTunnelMethod","DestinationRedisTunnelMethodSSHKeyAuthentication","DestinationRedisTunnelMethodSSHKeyAuthenticationTunnelMethod","DestinationRedshift","DestinationRedshiftRedshift","DestinationRedshiftTunnelMethodNoTunnel","DestinationRedshiftTunnelMethodNoTunnelTunnelMethod","DestinationRedshiftTunnelMethodPasswordAuthentication","DestinationRedshiftTunnelMethodPasswordAuthenticationTunnelMethod","DestinationRedshiftTunnelMethodSSHKeyAuthentication","DestinationRedshiftTunnelMethodSSHKeyAuthenticationTunnelMethod","DestinationRedshiftUploadingMethodS3Staging","DestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption","DestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryptionEncryptionType","DestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryption","DestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryptionEncryptionType","DestinationRedshiftUploadingMethodS3StagingMethod","DestinationRedshiftUploadingMethodS3StagingS3BucketRegion","DestinationRedshiftUploadingMethodStandard","DestinationRedshiftUploadingMethodStandardMethod","DestinationResponse","DestinationRockset","DestinationRocksetRockset","DestinationS3","DestinationS3FormatAvroApacheAvro","DestinationS3FormatAvroApacheAvroCompressionCodecBzip2","DestinationS3FormatAvroApacheAvroCompressionCodecBzip2Codec","DestinationS3FormatAvroApacheAvroCompressionCodecDeflate","DestinationS3FormatAvroApacheAvroCompressionCodecDeflateCodec","DestinationS3FormatAvroApacheAvroCompressionCodecNoCompression","DestinationS3FormatAvroApacheAvroCompressionCodecNoCompressionCodec","DestinationS3FormatAvroApacheAvroCompressionCodecSnappy","DestinationS3FormatAvroApacheAvroCompressionCodecSnappyCodec","DestinationS3FormatAvroApacheAvroCompressionCodecXz","DestinationS3FormatAvroApacheAvroCompressionCodecXzCodec","DestinationS3FormatAvroApacheAvroCompressionCodecZstandard","DestinationS3FormatAvroApacheAvroCompressionCodecZstandardCodec","DestinationS3FormatAvroApacheAvroFormatType","DestinationS3FormatCSVCommaSeparatedValues","DestinationS3FormatCSVCommaSeparatedValuesCompressionGZIP","DestinationS3FormatCSVCommaSeparatedValuesCompressionGZIPCompressionType","DestinationS3FormatCSVCommaSeparatedValuesCompressionNoCompression","DestinationS3FormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType","DestinationS3FormatCSVCommaSeparatedValuesFlattening","DestinationS3FormatCSVCommaSeparatedValuesFormatType","DestinationS3FormatJSONLinesNewlineDelimitedJSON","DestinationS3FormatJSONLinesNewlineDelimitedJSONCompressionGZIP","DestinationS3FormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType","DestinationS3FormatJSONLinesNewlineDelimitedJSONCompressionNoCompression","DestinationS3FormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType","DestinationS3FormatJSONLinesNewlineDelimitedJSONFlattening","DestinationS3FormatJSONLinesNewlineDelimitedJSONFormatType","DestinationS3FormatParquetColumnarStorage","DestinationS3FormatParquetColumnarStorageCompressionCodec","DestinationS3FormatParquetColumnarStorageFormatType","Destin
ationS3Glue","DestinationS3GlueFormatJSONLinesNewlineDelimitedJSON","DestinationS3GlueFormatJSONLinesNewlineDelimitedJSONCompressionGZIP","DestinationS3GlueFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType","DestinationS3GlueFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression","DestinationS3GlueFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType","DestinationS3GlueFormatJSONLinesNewlineDelimitedJSONFlattening","DestinationS3GlueFormatJSONLinesNewlineDelimitedJSONFormatType","DestinationS3GlueS3BucketRegion","DestinationS3GlueS3Glue","DestinationS3GlueSerializationLibrary","DestinationS3S3","DestinationS3S3BucketRegion","DestinationScylla","DestinationScyllaScylla","DestinationSftpJSON","DestinationSftpJSONSftpJSON","DestinationSnowflake","DestinationSnowflakeCredentialsKeyPairAuthentication","DestinationSnowflakeCredentialsKeyPairAuthenticationAuthType","DestinationSnowflakeCredentialsOAuth20","DestinationSnowflakeCredentialsOAuth20AuthType","DestinationSnowflakeCredentialsUsernameAndPassword","DestinationSnowflakeCredentialsUsernameAndPasswordAuthType","DestinationSnowflakeLoadingMethodAWSS3Staging","DestinationSnowflakeLoadingMethodAWSS3StagingEncryptionAESCBCEnvelopeEncryption","DestinationSnowflakeLoadingMethodAWSS3StagingEncryptionAESCBCEnvelopeEncryptionEncryptionType","DestinationSnowflakeLoadingMethodAWSS3StagingEncryptionNoEncryption","DestinationSnowflakeLoadingMethodAWSS3StagingEncryptionNoEncryptionEncryptionType","DestinationSnowflakeLoadingMethodAWSS3StagingMethod","DestinationSnowflakeLoadingMethodAWSS3StagingS3BucketRegion","DestinationSnowflakeLoadingMethodGoogleCloudStorageStaging","DestinationSnowflakeLoadingMethodGoogleCloudStorageStagingMethod","DestinationSnowflakeLoadingMethodRecommendedInternalStaging","DestinationSnowflakeLoadingMethodRecommendedInternalStagingMethod","DestinationSnowflakeLoadingMethodSelectAnotherOption","DestinationSnowflakeLoadingMethodSelectAnotherOptionMethod","DestinationSnowflakeSnowflake","DestinationTypesense","DestinationTypesenseTypesense","DestinationsResponse","GeographyEnum","InitiateOauthRequest","JobCreateRequest","JobResponse","JobStatusEnum","JobTypeEnum","JobsResponse","NamespaceDefinitionEnum","NonBreakingSchemaUpdatesBehaviorEnum","ScheduleTypeEnum","ScheduleTypeWithBasicEnum","Security","SourceAircall","SourceAircallAircall","SourceAirtable","SourceAirtableAirtable","SourceAirtableCredentialsOAuth20","SourceAirtableCredentialsOAuth20AuthMethod","SourceAirtableCredentialsPersonalAccessToken","SourceAirtableCredentialsPersonalAccessTokenAuthMethod","SourceAlloydb","SourceAlloydbAlloydb","SourceAlloydbReplicationMethodStandard","SourceAlloydbReplicationMethodStandardMethod","SourceAlloydbTunnelMethodNoTunnel","SourceAlloydbTunnelMethodNoTunnelTunnelMethod","SourceAlloydbTunnelMethodPasswordAuthentication","SourceAlloydbTunnelMethodPasswordAuthenticationTunnelMethod","SourceAlloydbTunnelMethodSSHKeyAuthentication","SourceAlloydbTunnelMethodSSHKeyAuthenticationTunnelMethod","SourceAmazonAds","SourceAmazonAdsAmazonAds","SourceAmazonAdsAuthType","SourceAmazonAdsRegion","SourceAmazonAdsReportRecordTypes","SourceAmazonAdsStateFilter","SourceAmazonSellerPartner","SourceAmazonSellerPartnerAWSEnvironment","SourceAmazonSellerPartnerAWSRegion","SourceAmazonSellerPartnerAmazonSellerPartner","SourceAmazonSellerPartnerAuthType","SourceAmazonSqs","SourceAmazonSqsAWSRegion","SourceAmazonSqsAmazonSqs","SourceAmplitude","SourceAmplitudeAmplitude","SourceAmplitudeDataRegion","SourceApifyDataset","Source
ApifyDatasetApifyDataset","SourceAsana","SourceAsanaAsana","SourceAsanaCredentialsAuthenticateViaAsanaOauth","SourceAsanaCredentialsAuthenticateViaAsanaOauthCredentialsTitle","SourceAsanaCredentialsAuthenticateWithPersonalAccessToken","SourceAsanaCredentialsAuthenticateWithPersonalAccessTokenCredentialsTitle","SourceAuth0","SourceAuth0Auth0","SourceAuth0CredentialsOAuth2AccessToken","SourceAuth0CredentialsOAuth2AccessTokenAuthenticationMethod","SourceAuth0CredentialsOAuth2ConfidentialApplication","SourceAuth0CredentialsOAuth2ConfidentialApplicationAuthenticationMethod","SourceAwsCloudtrail","SourceAwsCloudtrailAwsCloudtrail","SourceAzureBlobStorage","SourceAzureBlobStorageAzureBlobStorage","SourceAzureBlobStorageFormatJSONLinesNewlineDelimitedJSON","SourceAzureBlobStorageFormatJSONLinesNewlineDelimitedJSONFormatType","SourceAzureTable","SourceAzureTableAzureTable","SourceBambooHr","SourceBambooHrBambooHr","SourceBigcommerce","SourceBigcommerceBigcommerce","SourceBigquery","SourceBigqueryBigquery","SourceBingAds","SourceBingAdsAuthMethod","SourceBingAdsBingAds","SourceBraintree","SourceBraintreeBraintree","SourceBraintreeEnvironment","SourceBraze","SourceBrazeBraze","SourceChargebee","SourceChargebeeChargebee","SourceChargebeeProductCatalog","SourceChartmogul","SourceChartmogulChartmogul","SourceChartmogulInterval","SourceClickhouse","SourceClickhouseClickhouse","SourceClickhouseTunnelMethodNoTunnel","SourceClickhouseTunnelMethodNoTunnelTunnelMethod","SourceClickhouseTunnelMethodPasswordAuthentication","SourceClickhouseTunnelMethodPasswordAuthenticationTunnelMethod","SourceClickhouseTunnelMethodSSHKeyAuthentication","SourceClickhouseTunnelMethodSSHKeyAuthenticationTunnelMethod","SourceClickupAPI","SourceClickupAPIClickupAPI","SourceCloseCom","SourceCloseComCloseCom","SourceCoda","SourceCodaCoda","SourceCoinAPI","SourceCoinAPICoinAPI","SourceCoinAPIEnvironment","SourceCoinmarketcap","SourceCoinmarketcapCoinmarketcap","SourceCoinmarketcapDataType","SourceConfigcat","SourceConfigcatConfigcat","SourceConfluence","SourceConfluenceConfluence","SourceCreateRequest","SourceDatascope","SourceDatascopeDatascope","SourceDelighted","SourceDelightedDelighted","SourceDixa","SourceDixaDixa","SourceDockerhub","SourceDockerhubDockerhub","SourceDremio","SourceDremioDremio","SourceDynamodb","SourceDynamodbDynamodb","SourceDynamodbDynamodbRegion","SourceE2eTestCloud","SourceE2eTestCloudE2eTestCloud","SourceE2eTestCloudMockCatalogMultiSchema","SourceE2eTestCloudMockCatalogMultiSchemaType","SourceE2eTestCloudMockCatalogSingleSchema","SourceE2eTestCloudMockCatalogSingleSchemaType","SourceE2eTestCloudType","SourceEmailoctopus","SourceEmailoctopusEmailoctopus","SourceExchangeRates","SourceExchangeRatesExchangeRates","SourceFacebookMarketing","SourceFacebookMarketingFacebookMarketing","SourceFacebookMarketingInsightConfig","SourceFacebookMarketingInsightConfigLevel","SourceFacebookMarketingInsightConfigValidActionBreakdowns","SourceFacebookMarketingInsightConfigValidBreakdowns","SourceFacebookMarketingInsightConfigValidEnums","SourceFacebookPages","SourceFacebookPagesFacebookPages","SourceFaker","SourceFakerFaker","SourceFauna","SourceFaunaCollection","SourceFaunaCollectionDeletionsDisabled","SourceFaunaCollectionDeletionsDisabledDeletionMode","SourceFaunaCollectionDeletionsEnabled","SourceFaunaCollectionDeletionsEnabledDeletionMode","SourceFaunaFauna","SourceFileSecure","SourceFileSecureFileFormat","SourceFileSecureFileSecure","SourceFileSecureProviderAzBlobAzureBlobStorage","SourceFileSecureProviderAzBlobAzureBlobS
torageStorage","SourceFileSecureProviderGCSGoogleCloudStorage","SourceFileSecureProviderGCSGoogleCloudStorageStorage","SourceFileSecureProviderHTTPSPublicWeb","SourceFileSecureProviderHTTPSPublicWebStorage","SourceFileSecureProviderS3AmazonWebServices","SourceFileSecureProviderS3AmazonWebServicesStorage","SourceFileSecureProviderSCPSecureCopyProtocol","SourceFileSecureProviderSCPSecureCopyProtocolStorage","SourceFileSecureProviderSFTPSecureFileTransferProtocol","SourceFileSecureProviderSFTPSecureFileTransferProtocolStorage","SourceFileSecureProviderSSHSecureShell","SourceFileSecureProviderSSHSecureShellStorage","SourceFirebolt","SourceFireboltFirebolt","SourceFreshcaller","SourceFreshcallerFreshcaller","SourceFreshdesk","SourceFreshdeskFreshdesk","SourceFreshsales","SourceFreshsalesFreshsales","SourceGcs","SourceGcsGcs","SourceGetlago","SourceGetlagoGetlago","SourceGithub","SourceGithubCredentialsOAuth","SourceGithubCredentialsOAuthOptionTitle","SourceGithubCredentialsPersonalAccessToken","SourceGithubCredentialsPersonalAccessTokenOptionTitle","SourceGithubGithub","SourceGitlab","SourceGitlabCredentialsOAuth20","SourceGitlabCredentialsOAuth20AuthType","SourceGitlabCredentialsPrivateToken","SourceGitlabCredentialsPrivateTokenAuthType","SourceGitlabGitlab","SourceGlassfrog","SourceGlassfrogGlassfrog","SourceGnews","SourceGnewsCountry","SourceGnewsGnews","SourceGnewsIn","SourceGnewsLanguage","SourceGnewsNullable","SourceGnewsSortBy","SourceGnewsTopHeadlinesTopic","SourceGoogleAds","SourceGoogleAdsCustomQueries","SourceGoogleAdsGoogleAds","SourceGoogleAdsGoogleCredentials","SourceGoogleAnalyticsDataAPI","SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth","SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauthAuthType","SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication","SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthenticationAuthType","SourceGoogleAnalyticsDataAPIGoogleAnalyticsDataAPI","SourceGoogleAnalyticsV4","SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth","SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauthAuthType","SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication","SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthenticationAuthType","SourceGoogleAnalyticsV4GoogleAnalyticsV4","SourceGoogleDirectory","SourceGoogleDirectoryGoogleDirectory","SourceGoogleSearchConsole","SourceGoogleSearchConsoleAuthorizationOAuth","SourceGoogleSearchConsoleAuthorizationOAuthAuthType","SourceGoogleSearchConsoleAuthorizationServiceAccountKeyAuthentication","SourceGoogleSearchConsoleAuthorizationServiceAccountKeyAuthenticationAuthType","SourceGoogleSearchConsoleGoogleSearchConsole","SourceGoogleSheets","SourceGoogleSheetsCredentialsAuthenticateViaGoogleOAuth","SourceGoogleSheetsCredentialsAuthenticateViaGoogleOAuthAuthType","SourceGoogleSheetsCredentialsServiceAccountKeyAuthentication","SourceGoogleSheetsCredentialsServiceAccountKeyAuthenticationAuthType","SourceGoogleSheetsGoogleSheets","SourceGoogleWebfonts","SourceGoogleWebfontsGoogleWebfonts","SourceGoogleWorkspaceAdminReports","SourceGoogleWorkspaceAdminReportsGoogleWorkspaceAdminReports","SourceGreenhouse","SourceGreenhouseGreenhouse","SourceGridly","SourceGridlyGridly","SourceHarvest","SourceHarvestHarvest","SourceHubplanner","SourceHubplannerHubplanner","SourceHubspot","SourceHubspotCredentialsOAuth","SourceHubspotCredentialsOAuthCredentials","SourceHubspotCredentialsPrivateApp","SourceHubspotCredentialsPrivateAppCredentials","SourceHubspotHubspot"
,"SourceInsightly","SourceInsightlyInsightly","SourceInstagram","SourceInstagramInstagram","SourceInstatus","SourceInstatusInstatus","SourceIntercom","SourceIntercomIntercom","SourceIp2whois","SourceIp2whoisIp2whois","SourceIterable","SourceIterableIterable","SourceJira","SourceJiraJira","SourceK6Cloud","SourceK6CloudK6Cloud","SourceKlarna","SourceKlarnaKlarna","SourceKlarnaRegion","SourceKlaviyo","SourceKlaviyoKlaviyo","SourceKustomerSinger","SourceKustomerSingerKustomerSinger","SourceLaunchdarkly","SourceLaunchdarklyLaunchdarkly","SourceLemlist","SourceLemlistLemlist","SourceLinkedinAds","SourceLinkedinAdsCredentialsAccessToken","SourceLinkedinAdsCredentialsAccessTokenAuthMethod","SourceLinkedinAdsCredentialsOAuth20","SourceLinkedinAdsCredentialsOAuth20AuthMethod","SourceLinkedinAdsLinkedinAds","SourceLinkedinPages","SourceLinkedinPagesCredentialsAccessToken","SourceLinkedinPagesCredentialsAccessTokenAuthMethod","SourceLinkedinPagesCredentialsOAuth20","SourceLinkedinPagesCredentialsOAuth20AuthMethod","SourceLinkedinPagesLinkedinPages","SourceLinnworks","SourceLinnworksLinnworks","SourceLokalise","SourceLokaliseLokalise","SourceMailchimp","SourceMailchimpCredentialsAPIKey","SourceMailchimpCredentialsAPIKeyAuthType","SourceMailchimpCredentialsOAuth20","SourceMailchimpCredentialsOAuth20AuthType","SourceMailchimpMailchimp","SourceMailgun","SourceMailgunMailgun","SourceMailjetSms","SourceMailjetSmsMailjetSms","SourceMarketo","SourceMarketoMarketo","SourceMetabase","SourceMetabaseMetabase","SourceMicrosoftTeams","SourceMicrosoftTeamsCredentialsAuthenticateViaMicrosoft","SourceMicrosoftTeamsCredentialsAuthenticateViaMicrosoftAuthType","SourceMicrosoftTeamsCredentialsAuthenticateViaMicrosoftOAuth20","SourceMicrosoftTeamsCredentialsAuthenticateViaMicrosoftOAuth20AuthType","SourceMicrosoftTeamsMicrosoftTeams","SourceMixpanel","SourceMixpanelCredentialsProjectSecret","SourceMixpanelCredentialsProjectSecretOptionTitle","SourceMixpanelCredentialsServiceAccount","SourceMixpanelCredentialsServiceAccountOptionTitle","SourceMixpanelMixpanel","SourceMixpanelRegion","SourceMonday","SourceMondayCredentialsAPIToken","SourceMondayCredentialsAPITokenAuthType","SourceMondayCredentialsOAuth20","SourceMondayCredentialsOAuth20AuthType","SourceMondayMonday","SourceMongodb","SourceMongodbInstanceTypeMongoDBAtlas","SourceMongodbInstanceTypeMongoDBAtlasInstance","SourceMongodbInstanceTypeReplicaSet","SourceMongodbInstanceTypeReplicaSetInstance","SourceMongodbInstanceTypeStandaloneMongoDbInstance","SourceMongodbInstanceTypeStandaloneMongoDbInstanceInstance","SourceMongodbMongodb","SourceMssql","SourceMssqlMssql","SourceMssqlReplicationMethodLogicalReplicationCDC","SourceMssqlReplicationMethodLogicalReplicationCDCDataToSync","SourceMssqlReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel","SourceMssqlReplicationMethodLogicalReplicationCDCMethod","SourceMssqlReplicationMethodStandard","SourceMssqlReplicationMethodStandardMethod","SourceMssqlSslMethodEncryptedTrustServerCertificate","SourceMssqlSslMethodEncryptedTrustServerCertificateSslMethod","SourceMssqlSslMethodEncryptedVerifyCertificate","SourceMssqlSslMethodEncryptedVerifyCertificateSslMethod","SourceMssqlTunnelMethodNoTunnel","SourceMssqlTunnelMethodNoTunnelTunnelMethod","SourceMssqlTunnelMethodPasswordAuthentication","SourceMssqlTunnelMethodPasswordAuthenticationTunnelMethod","SourceMssqlTunnelMethodSSHKeyAuthentication","SourceMssqlTunnelMethodSSHKeyAuthenticationTunnelMethod","SourceMyHours","SourceMyHoursMyHours","SourceMysql","SourceMysqlMysql",
"SourceMysqlReplicationMethodLogicalReplicationCDC","SourceMysqlReplicationMethodLogicalReplicationCDCMethod","SourceMysqlReplicationMethodStandard","SourceMysqlReplicationMethodStandardMethod","SourceMysqlSslModePreferred","SourceMysqlSslModePreferredMode","SourceMysqlSslModeRequired","SourceMysqlSslModeRequiredMode","SourceMysqlSslModeVerifyCA","SourceMysqlSslModeVerifyCAMode","SourceMysqlSslModeVerifyIdentity","SourceMysqlSslModeVerifyIdentityMode","SourceMysqlTunnelMethodNoTunnel","SourceMysqlTunnelMethodNoTunnelTunnelMethod","SourceMysqlTunnelMethodPasswordAuthentication","SourceMysqlTunnelMethodPasswordAuthenticationTunnelMethod","SourceMysqlTunnelMethodSSHKeyAuthentication","SourceMysqlTunnelMethodSSHKeyAuthenticationTunnelMethod","SourceNetsuite","SourceNetsuiteNetsuite","SourceNotion","SourceNotionCredentialsAccessToken","SourceNotionCredentialsAccessTokenAuthType","SourceNotionCredentialsOAuth20","SourceNotionCredentialsOAuth20AuthType","SourceNotionNotion","SourceNytimes","SourceNytimesNytimes","SourceNytimesPeriodUsedForMostPopularStreams","SourceNytimesShareTypeUsedForMostPopularSharedStream","SourceOkta","SourceOktaCredentialsAPIToken","SourceOktaCredentialsAPITokenAuthType","SourceOktaCredentialsOAuth20","SourceOktaCredentialsOAuth20AuthType","SourceOktaOkta","SourceOmnisend","SourceOmnisendOmnisend","SourceOnesignal","SourceOnesignalApplications","SourceOnesignalOnesignal","SourceOpenweather","SourceOpenweatherLanguage","SourceOpenweatherOpenweather","SourceOpenweatherUnits","SourceOracle","SourceOracleConnectionDataServiceName","SourceOracleConnectionDataServiceNameConnectionType","SourceOracleConnectionDataSystemIDSID","SourceOracleConnectionDataSystemIDSIDConnectionType","SourceOracleEncryptionNativeNetworkEncryptionNNE","SourceOracleEncryptionNativeNetworkEncryptionNNEEncryptionAlgorithm","SourceOracleEncryptionNativeNetworkEncryptionNNEEncryptionMethod","SourceOracleEncryptionTLSEncryptedVerifyCertificate","SourceOracleEncryptionTLSEncryptedVerifyCertificateEncryptionMethod","SourceOracleOracle","SourceOracleTunnelMethodNoTunnel","SourceOracleTunnelMethodNoTunnelTunnelMethod","SourceOracleTunnelMethodPasswordAuthentication","SourceOracleTunnelMethodPasswordAuthenticationTunnelMethod","SourceOracleTunnelMethodSSHKeyAuthentication","SourceOracleTunnelMethodSSHKeyAuthenticationTunnelMethod","SourceOrb","SourceOrbOrb","SourceOrbit","SourceOrbitOrbit","SourceOutreach","SourceOutreachOutreach","SourcePaypalTransaction","SourcePaypalTransactionPaypalTransaction","SourcePaystack","SourcePaystackPaystack","SourcePendo","SourcePendoPendo","SourcePersistiq","SourcePersistiqPersistiq","SourcePexelsAPI","SourcePexelsAPIPexelsAPI","SourcePinterest","SourcePinterestCredentialsAccessToken","SourcePinterestCredentialsAccessTokenAuthMethod","SourcePinterestCredentialsOAuth20","SourcePinterestCredentialsOAuth20AuthMethod","SourcePinterestPinterest","SourcePinterestStatus","SourcePipedrive","SourcePipedriveAPIKeyAuthentication","SourcePipedriveAPIKeyAuthenticationAuthType","SourcePipedrivePipedrive","SourcePocket","SourcePocketContentType","SourcePocketDetailType","SourcePocketPocket","SourcePocketSortBy","SourcePocketState","SourcePokeapi","SourcePokeapiPokeapi","SourcePolygonStockAPI","SourcePolygonStockAPIPolygonStockAPI","SourcePostgres","SourcePostgresPostgres","SourcePostgresReplicationMethodStandard","SourcePostgresReplicationMethodStandardMethod","SourcePostgresTunnelMethodNoTunnel","SourcePostgresTunnelMethodNoTunnelTunnelMethod","SourcePostgresTunnelMethodPasswordAuthentication",
"SourcePostgresTunnelMethodPasswordAuthenticationTunnelMethod","SourcePostgresTunnelMethodSSHKeyAuthentication","SourcePostgresTunnelMethodSSHKeyAuthenticationTunnelMethod","SourcePosthog","SourcePosthogPosthog","SourcePostmarkapp","SourcePostmarkappPostmarkapp","SourcePrestashop","SourcePrestashopPrestashop","SourcePublicApis","SourcePublicApisPublicApis","SourcePunkAPI","SourcePunkAPIPunkAPI","SourcePypi","SourcePypiPypi","SourceQualaroo","SourceQualarooQualaroo","SourceQuickbooks","SourceQuickbooksCredentialsOAuth20","SourceQuickbooksCredentialsOAuth20AuthType","SourceQuickbooksQuickbooks","SourceRailz","SourceRailzRailz","SourceRecharge","SourceRechargeRecharge","SourceRecreation","SourceRecreationRecreation","SourceRecruitee","SourceRecruiteeRecruitee","SourceRecurly","SourceRecurlyRecurly","SourceRedshift","SourceRedshiftRedshift","SourceResponse","SourceRetently","SourceRetentlyRetently","SourceRkiCovid","SourceRkiCovidRkiCovid","SourceRss","SourceRssRss","SourceS3","SourceS3FormatAvro","SourceS3FormatAvroFiletype","SourceS3FormatCSV","SourceS3FormatCSVFiletype","SourceS3FormatJsonl","SourceS3FormatJsonlFiletype","SourceS3FormatJsonlUnexpectedFieldBehavior","SourceS3FormatParquet","SourceS3FormatParquetFiletype","SourceS3S3","SourceS3S3AmazonWebServices","SourceSalesforce","SourceSalesforceAuthType","SourceSalesforceSalesforce","SourceSalesforceSinger","SourceSalesforceSingerAPIType","SourceSalesforceSingerSalesforceSinger","SourceSalesforceStreamsCriteria","SourceSalesforceStreamsCriteriaSearchCriteria","SourceSalesloft","SourceSalesloftCredentialsAuthenticateViaAPIKey","SourceSalesloftCredentialsAuthenticateViaAPIKeyAuthType","SourceSalesloftCredentialsAuthenticateViaOAuth","SourceSalesloftCredentialsAuthenticateViaOAuthAuthType","SourceSalesloftSalesloft","SourceSapFieldglass","SourceSapFieldglassSapFieldglass","SourceSecoda","SourceSecodaSecoda","SourceSendgrid","SourceSendgridSendgrid","SourceSendinblue","SourceSendinblueSendinblue","SourceSenseforce","SourceSenseforceSenseforce","SourceSentry","SourceSentrySentry","SourceSftp","SourceSftpBulk","SourceSftpBulkFileType","SourceSftpBulkSftpBulk","SourceSftpCredentialsPasswordAuthentication","SourceSftpCredentialsPasswordAuthenticationAuthMethod","SourceSftpCredentialsSSHKeyAuthentication","SourceSftpCredentialsSSHKeyAuthenticationAuthMethod","SourceSftpSftp","SourceShopify","SourceShopifyCredentialsAPIPassword","SourceShopifyCredentialsAPIPasswordAuthMethod","SourceShopifyCredentialsOAuth20","SourceShopifyCredentialsOAuth20AuthMethod","SourceShopifyShopify","SourceShortio","SourceShortioShortio","SourceSlack","SourceSlackCredentialsAPIToken","SourceSlackCredentialsAPITokenOptionTitle","SourceSlackCredentialsSignInViaSlackOAuth","SourceSlackCredentialsSignInViaSlackOAuthOptionTitle","SourceSlackSlack","SourceSmaily","SourceSmailySmaily","SourceSmartengage","SourceSmartengageSmartengage","SourceSmartsheets","SourceSmartsheetsCredentialsAPIAccessToken","SourceSmartsheetsCredentialsAPIAccessTokenAuthType","SourceSmartsheetsCredentialsOAuth20","SourceSmartsheetsCredentialsOAuth20AuthType","SourceSmartsheetsSmartsheets","SourceSnapchatMarketing","SourceSnapchatMarketingSnapchatMarketing","SourceSnowflake","SourceSnowflakeCredentialsOAuth20","SourceSnowflakeCredentialsOAuth20AuthType","SourceSnowflakeCredentialsUsernameAndPassword","SourceSnowflakeCredentialsUsernameAndPasswordAuthType","SourceSnowflakeSnowflake","SourceSonarCloud","SourceSonarCloudSonarCloud","SourceSpacexAPI","SourceSpacexAPISpacexAPI","SourceSquare","SourceSquareCrede
ntialsAPIKey","SourceSquareCredentialsAPIKeyAuthType","SourceSquareCredentialsOauthAuthentication","SourceSquareCredentialsOauthAuthenticationAuthType","SourceSquareSquare","SourceStrava","SourceStravaAuthType","SourceStravaStrava","SourceStripe","SourceStripeStripe","SourceSurveySparrow","SourceSurveySparrowRegionEUBasedAccount","SourceSurveySparrowRegionEUBasedAccountURLBase","SourceSurveySparrowRegionGlobalAccount","SourceSurveySparrowRegionGlobalAccountURLBase","SourceSurveySparrowSurveySparrow","SourceSurveymonkey","SourceSurveymonkeyOriginDatacenterOfTheSurveyMonkeyAccount","SourceSurveymonkeySurveyMonkeyAuthorizationMethod","SourceSurveymonkeySurveyMonkeyAuthorizationMethodAuthMethod","SourceSurveymonkeySurveymonkey","SourceTempo","SourceTempoTempo","SourceTheGuardianAPI","SourceTheGuardianAPITheGuardianAPI","SourceTiktokMarketing","SourceTiktokMarketingCredentialsOAuth20","SourceTiktokMarketingCredentialsOAuth20AuthType","SourceTiktokMarketingCredentialsSandboxAccessToken","SourceTiktokMarketingCredentialsSandboxAccessTokenAuthType","SourceTiktokMarketingTiktokMarketing","SourceTodoist","SourceTodoistTodoist","SourceTrello","SourceTrelloTrello","SourceTrustpilot","SourceTrustpilotCredentialsAPIKey","SourceTrustpilotCredentialsAPIKeyAuthType","SourceTrustpilotCredentialsOAuth20","SourceTrustpilotCredentialsOAuth20AuthType","SourceTrustpilotTrustpilot","SourceTvmazeSchedule","SourceTvmazeScheduleTvmazeSchedule","SourceTwilio","SourceTwilioTaskrouter","SourceTwilioTaskrouterTwilioTaskrouter","SourceTwilioTwilio","SourceTwitter","SourceTwitterTwitter","SourceTypeform","SourceTypeformTypeform","SourceUsCensus","SourceUsCensusUsCensus","SourceVantage","SourceVantageVantage","SourceWebflow","SourceWebflowWebflow","SourceWhiskyHunter","SourceWhiskyHunterWhiskyHunter","SourceWikipediaPageviews","SourceWikipediaPageviewsWikipediaPageviews","SourceWoocommerce","SourceWoocommerceWoocommerce","SourceXero","SourceXeroAuthenticateViaXeroOAuth","SourceXeroXero","SourceXkcd","SourceXkcdXkcd","SourceYandexMetrica","SourceYandexMetricaYandexMetrica","SourceYounium","SourceYouniumYounium","SourceYoutubeAnalytics","SourceYoutubeAnalyticsYoutubeAnalytics","SourceZendeskChat","SourceZendeskChatCredentialsAccessToken","SourceZendeskChatCredentialsAccessTokenCredentials","SourceZendeskChatCredentialsOAuth20","SourceZendeskChatCredentialsOAuth20Credentials","SourceZendeskChatZendeskChat","SourceZendeskSunshine","SourceZendeskSunshineCredentialsAPIToken","SourceZendeskSunshineCredentialsAPITokenAuthMethod","SourceZendeskSunshineCredentialsOAuth20","SourceZendeskSunshineCredentialsOAuth20AuthMethod","SourceZendeskSunshineZendeskSunshine","SourceZendeskSupport","SourceZendeskSupportZendeskSupport","SourceZendeskTalk","SourceZendeskTalkZendeskTalk","SourceZenloop","SourceZenloopZenloop","SourceZohoCRMZohoCRMEdition","SourceZohoCrm","SourceZohoCrmDataCenterLocation","SourceZohoCrmEnvironment","SourceZohoCrmZohoCrm","SourceZoom","SourceZoomZoom","SourceZuora","SourceZuoraDataQueryType","SourceZuoraTenantEndpointLocation","SourceZuoraZuora","SourcesResponse","StreamConfiguration","StreamConfigurations","StreamProperties","WorkspaceCreateRequest","WorkspaceOAuthCredentialsRequest","WorkspaceResponse","WorkspaceUpdateRequest","WorkspacesResponse"]
diff --git a/src/airbyte/models/shared/actortypeenum_enum.py b/src/airbyte/models/shared/actortypeenum.py
similarity index 88%
rename from src/airbyte/models/shared/actortypeenum_enum.py
rename to src/airbyte/models/shared/actortypeenum.py
index 26a44ac9..d76a86ff 100755
--- a/src/airbyte/models/shared/actortypeenum_enum.py
+++ b/src/airbyte/models/shared/actortypeenum.py
@@ -3,7 +3,7 @@
from __future__ import annotations
from enum import Enum
-class ActorTypeEnumEnum(str, Enum):
+class ActorTypeEnum(str, Enum):
r"""Whether you're setting this override for a source or destination"""
SOURCE = 'source'
DESTINATION = 'destination'
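
These model enums subclass both `str` and `Enum`, so members compare equal to their raw wire values and can be looked up from plain strings. A minimal sketch against the renamed module:

    from airbyte.models.shared.actortypeenum import ActorTypeEnum

    assert ActorTypeEnum.SOURCE == 'source'  # str subclass: equal to its raw value
    assert ActorTypeEnum('destination') is ActorTypeEnum.DESTINATION  # lookup by value
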
diff --git a/src/airbyte/models/shared/connectioncreaterequest.py b/src/airbyte/models/shared/connectioncreaterequest.py
index b3d30f38..028fd473 100755
--- a/src/airbyte/models/shared/connectioncreaterequest.py
+++ b/src/airbyte/models/shared/connectioncreaterequest.py
@@ -3,9 +3,9 @@
from __future__ import annotations
import dataclasses
from ..shared import connectionschedulecreate as shared_connectionschedulecreate
-from ..shared import geographyenum_enum as shared_geographyenum_enum
-from ..shared import namespacedefinitionenum_enum as shared_namespacedefinitionenum_enum
-from ..shared import nonbreakingschemaupdatesbehaviorenum_enum as shared_nonbreakingschemaupdatesbehaviorenum_enum
+from ..shared import geographyenum as shared_geographyenum
+from ..shared import namespacedefinitionenum as shared_namespacedefinitionenum
+from ..shared import nonbreakingschemaupdatesbehaviorenum as shared_nonbreakingschemaupdatesbehaviorenum
from ..shared import streamconfigurations as shared_streamconfigurations
from airbyte import utils
from dataclasses_json import Undefined, dataclass_json
@@ -20,14 +20,14 @@ class ConnectionCreateRequest:
source_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceId') }})
configurations: Optional[shared_streamconfigurations.StreamConfigurations] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('configurations'), 'exclude': lambda f: f is None }})
r"""A list of configured stream options for a connection."""
- data_residency: Optional[shared_geographyenum_enum.GeographyEnumEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dataResidency'), 'exclude': lambda f: f is None }})
+ data_residency: Optional[shared_geographyenum.GeographyEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dataResidency'), 'exclude': lambda f: f is None }})
 name: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name'), 'exclude': lambda f: f is None }})
 r"""Optional name of the connection"""
- namespace_definition: Optional[shared_namespacedefinitionenum_enum.NamespaceDefinitionEnumEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('namespaceDefinition'), 'exclude': lambda f: f is None }})
+ namespace_definition: Optional[shared_namespacedefinitionenum.NamespaceDefinitionEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('namespaceDefinition'), 'exclude': lambda f: f is None }})
 r"""Define the location where the data will be stored in the destination"""
 namespace_format: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('namespaceFormat'), 'exclude': lambda f: f is None }})
 r"""Used when namespaceDefinition is 'custom_format'. If blank then behaves like namespaceDefinition = 'destination'. If \\"${SOURCE_NAMESPACE}\\" then behaves like namespaceDefinition = 'source'."""
- non_breaking_schema_updates_behavior: Optional[shared_nonbreakingschemaupdatesbehaviorenum_enum.NonBreakingSchemaUpdatesBehaviorEnumEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('nonBreakingSchemaUpdatesBehavior'), 'exclude': lambda f: f is None }})
+ non_breaking_schema_updates_behavior: Optional[shared_nonbreakingschemaupdatesbehaviorenum.NonBreakingSchemaUpdatesBehaviorEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('nonBreakingSchemaUpdatesBehavior'), 'exclude': lambda f: f is None }})
 r"""Set how Airbyte handles syncs when it detects a non-breaking schema change in the source"""
 prefix: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('prefix'), 'exclude': lambda f: f is None }})
 r"""Prefix that will be prepended to the name of each stream when it is written to the destination (ex. "airbyte_" causes "projects" => "airbyte_projects")."""
diff --git a/src/airbyte/models/shared/connectionresponse.py b/src/airbyte/models/shared/connectionresponse.py
index 5785048a..5404a918 100755
--- a/src/airbyte/models/shared/connectionresponse.py
+++ b/src/airbyte/models/shared/connectionresponse.py
@@ -3,10 +3,10 @@
from __future__ import annotations
import dataclasses
from ..shared import connectionscheduleresponse as shared_connectionscheduleresponse
-from ..shared import connectionstatusenum_enum as shared_connectionstatusenum_enum
-from ..shared import geographyenum_enum as shared_geographyenum_enum
-from ..shared import namespacedefinitionenum_enum as shared_namespacedefinitionenum_enum
-from ..shared import nonbreakingschemaupdatesbehaviorenum_enum as shared_nonbreakingschemaupdatesbehaviorenum_enum
+from ..shared import connectionstatusenum as shared_connectionstatusenum
+from ..shared import geographyenum as shared_geographyenum
+from ..shared import namespacedefinitionenum as shared_namespacedefinitionenum
+from ..shared import nonbreakingschemaupdatesbehaviorenum as shared_nonbreakingschemaupdatesbehaviorenum
from airbyte import utils
from dataclasses_json import Undefined, dataclass_json
from typing import Optional
@@ -18,18 +18,18 @@ class ConnectionResponse:
r"""Provides details of a single connection."""
connection_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('connectionId') }})
- data_residency: shared_geographyenum_enum.GeographyEnumEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dataResidency') }})
+ data_residency: shared_geographyenum.GeographyEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dataResidency') }})
destination_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationId') }})
name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name') }})
schedule: shared_connectionscheduleresponse.ConnectionScheduleResponse = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('schedule') }})
r"""schedule for when the the connection should run, per the schedule type"""
source_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceId') }})
- status: shared_connectionstatusenum_enum.ConnectionStatusEnumEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('status') }})
+ status: shared_connectionstatusenum.ConnectionStatusEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('status') }})
workspace_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('workspaceId') }})
- namespace_definition: Optional[shared_namespacedefinitionenum_enum.NamespaceDefinitionEnumEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('namespaceDefinition'), 'exclude': lambda f: f is None }})
+ namespace_definition: Optional[shared_namespacedefinitionenum.NamespaceDefinitionEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('namespaceDefinition'), 'exclude': lambda f: f is None }})
r"""Define the location where the data will be stored in the destination"""
namespace_format: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('namespaceFormat'), 'exclude': lambda f: f is None }})
- non_breaking_schema_updates_behavior: Optional[shared_nonbreakingschemaupdatesbehaviorenum_enum.NonBreakingSchemaUpdatesBehaviorEnumEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('nonBreakingSchemaUpdatesBehavior'), 'exclude': lambda f: f is None }})
+ non_breaking_schema_updates_behavior: Optional[shared_nonbreakingschemaupdatesbehaviorenum.NonBreakingSchemaUpdatesBehaviorEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('nonBreakingSchemaUpdatesBehavior'), 'exclude': lambda f: f is None }})
r"""Set how Airbyte handles syncs when it detects a non-breaking schema change in the source"""
prefix: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('prefix'), 'exclude': lambda f: f is None }})
\ No newline at end of file
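
Since `data_residency` is now typed as `GeographyEnum`, response handling can compare directly against members. A hedged sketch, assuming `shared` re-exports the classes listed in `__all__` and that `conn` comes from a prior get-connection call:

    from airbyte.models import shared

    def residency_is_eu(conn: shared.ConnectionResponse) -> bool:
        # direct member comparison; equality with the raw string value also holds
        return conn.data_residency == shared.GeographyEnum.EU
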
diff --git a/src/airbyte/models/shared/connectionschedulecreate.py b/src/airbyte/models/shared/connectionschedulecreate.py
index eaaac30f..6eee9b5e 100755
--- a/src/airbyte/models/shared/connectionschedulecreate.py
+++ b/src/airbyte/models/shared/connectionschedulecreate.py
@@ -2,7 +2,7 @@
from __future__ import annotations
import dataclasses
-from ..shared import scheduletypeenum_enum as shared_scheduletypeenum_enum
+from ..shared import scheduletypeenum as shared_scheduletypeenum
from airbyte import utils
from dataclasses_json import Undefined, dataclass_json
from typing import Optional
@@ -13,6 +13,6 @@
class ConnectionScheduleCreate:
r"""schedule for when the the connection should run, per the schedule type"""
- schedule_type: shared_scheduletypeenum_enum.ScheduleTypeEnumEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('scheduleType') }})
+ schedule_type: shared_scheduletypeenum.ScheduleTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('scheduleType') }})
cron_expression: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('cronExpression'), 'exclude': lambda f: f is None }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/connectionscheduleresponse.py b/src/airbyte/models/shared/connectionscheduleresponse.py
index 3ec5dd1f..e62beaf4 100755
--- a/src/airbyte/models/shared/connectionscheduleresponse.py
+++ b/src/airbyte/models/shared/connectionscheduleresponse.py
@@ -2,7 +2,7 @@
from __future__ import annotations
import dataclasses
-from ..shared import scheduletypewithbasicenum_enum as shared_scheduletypewithbasicenum_enum
+from ..shared import scheduletypewithbasicenum as shared_scheduletypewithbasicenum
from airbyte import utils
from dataclasses_json import Undefined, dataclass_json
from typing import Optional
@@ -13,7 +13,7 @@
class ConnectionScheduleResponse:
r"""schedule for when the the connection should run, per the schedule type"""
- schedule_type: shared_scheduletypewithbasicenum_enum.ScheduleTypeWithBasicEnumEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('scheduleType') }})
+ schedule_type: shared_scheduletypewithbasicenum.ScheduleTypeWithBasicEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('scheduleType') }})
basic_timing: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('basicTiming'), 'exclude': lambda f: f is None }})
cron_expression: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('cronExpression'), 'exclude': lambda f: f is None }})
\ No newline at end of file
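
Both `cron_expression` and `basic_timing` are optional here, and which one is populated depends on the schedule type. A small dispatch sketch under that assumption:

    from airbyte.models import shared

    def schedule_summary(s: shared.ConnectionScheduleResponse) -> str:
        # prefer whichever timing field is populated; fall back to the type itself
        if s.cron_expression is not None:
            return f'cron: {s.cron_expression}'
        if s.basic_timing is not None:
            return f'basic: {s.basic_timing}'
        return str(s.schedule_type)
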
diff --git a/src/airbyte/models/shared/connectionstatusenum_enum.py b/src/airbyte/models/shared/connectionstatusenum.py
similarity index 83%
rename from src/airbyte/models/shared/connectionstatusenum_enum.py
rename to src/airbyte/models/shared/connectionstatusenum.py
index f7bf0320..cd9a5537 100755
--- a/src/airbyte/models/shared/connectionstatusenum_enum.py
+++ b/src/airbyte/models/shared/connectionstatusenum.py
@@ -3,7 +3,7 @@
from __future__ import annotations
from enum import Enum
-class ConnectionStatusEnumEnum(str, Enum):
+class ConnectionStatusEnum(str, Enum):
ACTIVE = 'active'
INACTIVE = 'inactive'
DEPRECATED = 'deprecated'
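
The status enum keeps its members and wire values; only the class name loses the doubled `Enum` suffix. A quick sketch of comparing against it (assuming the same `shared` re-exports used in the README):

from airbyte.models import shared

status = shared.ConnectionStatusEnum.ACTIVE
if status is not shared.ConnectionStatusEnum.DEPRECATED:
    print(f'connection is {status.value}')  # -> connection is active
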
diff --git a/src/airbyte/models/shared/connectionsyncmodeenum_enum.py b/src/airbyte/models/shared/connectionsyncmodeenum.py
similarity index 88%
rename from src/airbyte/models/shared/connectionsyncmodeenum_enum.py
rename to src/airbyte/models/shared/connectionsyncmodeenum.py
index c747a311..658f10ca 100755
--- a/src/airbyte/models/shared/connectionsyncmodeenum_enum.py
+++ b/src/airbyte/models/shared/connectionsyncmodeenum.py
@@ -3,7 +3,7 @@
from __future__ import annotations
from enum import Enum
-class ConnectionSyncModeEnumEnum(str, Enum):
+class ConnectionSyncModeEnum(str, Enum):
FULL_REFRESH_OVERWRITE = 'full_refresh_overwrite'
FULL_REFRESH_APPEND = 'full_refresh_append'
INCREMENTAL_APPEND = 'incremental_append'
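
Because the class subclasses `str, Enum`, the underlying API strings are unchanged by the rename; only call sites need updating. A small sketch:

from airbyte.models import shared

# Iterate the renamed sync-mode enum; .value still yields the wire strings.
for mode in shared.ConnectionSyncModeEnum:
    print(mode.value)  # full_refresh_overwrite, full_refresh_append, incremental_append, ...
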
diff --git a/src/airbyte/models/shared/destination_amazon_sqs.py b/src/airbyte/models/shared/destination_amazon_sqs.py
index aad10420..f0d77a1e 100755
--- a/src/airbyte/models/shared/destination_amazon_sqs.py
+++ b/src/airbyte/models/shared/destination_amazon_sqs.py
@@ -7,10 +7,10 @@
from enum import Enum
from typing import Optional
-class DestinationAmazonSqsAmazonSqsEnum(str, Enum):
+class DestinationAmazonSqsAmazonSqs(str, Enum):
AMAZON_SQS = 'amazon-sqs'
-class DestinationAmazonSqsAWSRegionEnum(str, Enum):
+class DestinationAmazonSqsAWSRegion(str, Enum):
r"""AWS Region of the SQS Queue"""
US_EAST_1 = 'us-east-1'
US_EAST_2 = 'us-east-2'
@@ -44,10 +44,10 @@ class DestinationAmazonSqsAWSRegionEnum(str, Enum):
class DestinationAmazonSqs:
r"""The values required to configure the destination."""
- destination_type: DestinationAmazonSqsAmazonSqsEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
+ destination_type: DestinationAmazonSqsAmazonSqs = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
queue_url: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('queue_url') }})
r"""URL of the SQS Queue"""
- region: DestinationAmazonSqsAWSRegionEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('region') }})
+ region: DestinationAmazonSqsAWSRegion = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('region') }})
r"""AWS Region of the SQS Queue"""
access_key: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_key'), 'exclude': lambda f: f is None }})
r"""The Access Key ID of the AWS IAM Role to use for sending messages"""
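
A minimal configuration sketch under the renamed Amazon SQS classes (the queue URL and access key are hypothetical placeholders):

from airbyte.models import shared

dest = shared.DestinationAmazonSqs(
    destination_type=shared.DestinationAmazonSqsAmazonSqs.AMAZON_SQS,
    queue_url='https://sqs.us-east-1.amazonaws.com/123456789012/example-queue',  # hypothetical
    region=shared.DestinationAmazonSqsAWSRegion.US_EAST_1,
    access_key='AKIAEXAMPLE',  # optional IAM access key ID, hypothetical
)
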
diff --git a/src/airbyte/models/shared/destination_aws_datalake.py b/src/airbyte/models/shared/destination_aws_datalake.py
index 6616a3d8..fa3e9f1c 100755
--- a/src/airbyte/models/shared/destination_aws_datalake.py
+++ b/src/airbyte/models/shared/destination_aws_datalake.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Any, Optional
-class DestinationAwsDatalakeCredentialsIAMUserCredentialsTitleEnum(str, Enum):
+class DestinationAwsDatalakeCredentialsIAMUserCredentialsTitle(str, Enum):
r"""Name of the credentials"""
IAM_USER = 'IAM User'
@@ -21,10 +21,10 @@ class DestinationAwsDatalakeCredentialsIAMUser:
r"""AWS User Access Key Id"""
aws_secret_access_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('aws_secret_access_key') }})
r"""Secret Access Key"""
- credentials_title: DestinationAwsDatalakeCredentialsIAMUserCredentialsTitleEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials_title') }})
+ credentials_title: DestinationAwsDatalakeCredentialsIAMUserCredentialsTitle = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials_title') }})
r"""Name of the credentials"""
-class DestinationAwsDatalakeCredentialsIAMRoleCredentialsTitleEnum(str, Enum):
+class DestinationAwsDatalakeCredentialsIAMRoleCredentialsTitle(str, Enum):
r"""Name of the credentials"""
IAM_ROLE = 'IAM Role'
@@ -34,22 +34,22 @@ class DestinationAwsDatalakeCredentialsIAMRoleCredentialsTitleEnum(str, Enum):
class DestinationAwsDatalakeCredentialsIAMRole:
r"""Choose How to Authenticate to AWS."""
- credentials_title: DestinationAwsDatalakeCredentialsIAMRoleCredentialsTitleEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials_title') }})
+ credentials_title: DestinationAwsDatalakeCredentialsIAMRoleCredentialsTitle = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials_title') }})
r"""Name of the credentials"""
role_arn: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('role_arn') }})
r"""Will assume this role to write data to s3"""
-class DestinationAwsDatalakeAwsDatalakeEnum(str, Enum):
+class DestinationAwsDatalakeAwsDatalake(str, Enum):
AWS_DATALAKE = 'aws-datalake'
-class DestinationAwsDatalakeFormatParquetColumnarStorageCompressionCodecOptionalEnum(str, Enum):
+class DestinationAwsDatalakeFormatParquetColumnarStorageCompressionCodecOptional(str, Enum):
r"""The compression algorithm used to compress data."""
UNCOMPRESSED = 'UNCOMPRESSED'
SNAPPY = 'SNAPPY'
GZIP = 'GZIP'
ZSTD = 'ZSTD'
-class DestinationAwsDatalakeFormatParquetColumnarStorageFormatTypeWildcardEnum(str, Enum):
+class DestinationAwsDatalakeFormatParquetColumnarStorageFormatTypeWildcard(str, Enum):
PARQUET = 'Parquet'
@@ -58,16 +58,16 @@ class DestinationAwsDatalakeFormatParquetColumnarStorageFormatTypeWildcardEnum(s
class DestinationAwsDatalakeFormatParquetColumnarStorage:
r"""Format of the data output."""
- format_type: DestinationAwsDatalakeFormatParquetColumnarStorageFormatTypeWildcardEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format_type') }})
- compression_codec: Optional[DestinationAwsDatalakeFormatParquetColumnarStorageCompressionCodecOptionalEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_codec'), 'exclude': lambda f: f is None }})
+ format_type: DestinationAwsDatalakeFormatParquetColumnarStorageFormatTypeWildcard = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format_type') }})
+ compression_codec: Optional[DestinationAwsDatalakeFormatParquetColumnarStorageCompressionCodecOptional] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_codec'), 'exclude': lambda f: f is None }})
r"""The compression algorithm used to compress data."""
-class DestinationAwsDatalakeFormatJSONLinesNewlineDelimitedJSONCompressionCodecOptionalEnum(str, Enum):
+class DestinationAwsDatalakeFormatJSONLinesNewlineDelimitedJSONCompressionCodecOptional(str, Enum):
r"""The compression algorithm used to compress data."""
UNCOMPRESSED = 'UNCOMPRESSED'
GZIP = 'GZIP'
-class DestinationAwsDatalakeFormatJSONLinesNewlineDelimitedJSONFormatTypeWildcardEnum(str, Enum):
+class DestinationAwsDatalakeFormatJSONLinesNewlineDelimitedJSONFormatTypeWildcard(str, Enum):
JSONL = 'JSONL'
@@ -76,11 +76,11 @@ class DestinationAwsDatalakeFormatJSONLinesNewlineDelimitedJSONFormatTypeWildcar
class DestinationAwsDatalakeFormatJSONLinesNewlineDelimitedJSON:
r"""Format of the data output."""
- format_type: DestinationAwsDatalakeFormatJSONLinesNewlineDelimitedJSONFormatTypeWildcardEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format_type') }})
- compression_codec: Optional[DestinationAwsDatalakeFormatJSONLinesNewlineDelimitedJSONCompressionCodecOptionalEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_codec'), 'exclude': lambda f: f is None }})
+ format_type: DestinationAwsDatalakeFormatJSONLinesNewlineDelimitedJSONFormatTypeWildcard = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format_type') }})
+ compression_codec: Optional[DestinationAwsDatalakeFormatJSONLinesNewlineDelimitedJSONCompressionCodecOptional] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_codec'), 'exclude': lambda f: f is None }})
r"""The compression algorithm used to compress data."""
-class DestinationAwsDatalakeChooseHowToPartitionDataEnum(str, Enum):
+class DestinationAwsDatalakeChooseHowToPartitionData(str, Enum):
r"""Partition data by cursor fields when a cursor field is a date"""
NO_PARTITIONING = 'NO PARTITIONING'
DATE = 'DATE'
@@ -90,7 +90,7 @@ class DestinationAwsDatalakeChooseHowToPartitionDataEnum(str, Enum):
YEAR_MONTH = 'YEAR/MONTH'
YEAR_MONTH_DAY = 'YEAR/MONTH/DAY'
-class DestinationAwsDatalakeS3BucketRegionEnum(str, Enum):
+class DestinationAwsDatalakeS3BucketRegion(str, Enum):
r"""The region of the S3 bucket. See here for all region codes."""
UNKNOWN = ''
US_EAST_1 = 'us-east-1'
@@ -129,10 +129,10 @@ class DestinationAwsDatalake:
r"""The name of the S3 bucket. Read more here."""
credentials: Any = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials') }})
r"""Choose How to Authenticate to AWS."""
- destination_type: DestinationAwsDatalakeAwsDatalakeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
+ destination_type: DestinationAwsDatalakeAwsDatalake = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
lakeformation_database_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('lakeformation_database_name') }})
r"""The default database this destination will use to create tables in per stream. Can be changed per connection by customizing the namespace."""
- region: DestinationAwsDatalakeS3BucketRegionEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('region') }})
+ region: DestinationAwsDatalakeS3BucketRegion = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('region') }})
r"""The region of the S3 bucket. See here for all region codes."""
aws_account_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('aws_account_id'), 'exclude': lambda f: f is None }})
r"""target aws account id"""
@@ -148,6 +148,6 @@ class DestinationAwsDatalake:
r"""Add default values for the `Tag Key` to databases created by this destination. Comma separate for multiple values."""
lakeformation_governed_tables: Optional[bool] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('lakeformation_governed_tables'), 'exclude': lambda f: f is None }})
r"""Whether to create tables as LF governed tables."""
- partitioning: Optional[DestinationAwsDatalakeChooseHowToPartitionDataEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('partitioning'), 'exclude': lambda f: f is None }})
+ partitioning: Optional[DestinationAwsDatalakeChooseHowToPartitionData] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('partitioning'), 'exclude': lambda f: f is None }})
r"""Partition data by cursor fields when a cursor field is a date"""
\ No newline at end of file
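
A sketch of the renamed credential and partitioning types for this destination (the role ARN is a hypothetical placeholder):

from airbyte.models import shared

creds = shared.DestinationAwsDatalakeCredentialsIAMRole(
    credentials_title=shared.DestinationAwsDatalakeCredentialsIAMRoleCredentialsTitle.IAM_ROLE,
    role_arn='arn:aws:iam::123456789012:role/airbyte-writer',  # hypothetical role
)
# Optional partitioning now uses the shortened enum name.
partitioning = shared.DestinationAwsDatalakeChooseHowToPartitionData.YEAR_MONTH_DAY
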
diff --git a/src/airbyte/models/shared/destination_azure_blob_storage.py b/src/airbyte/models/shared/destination_azure_blob_storage.py
index 363a785d..489c40a1 100755
--- a/src/airbyte/models/shared/destination_azure_blob_storage.py
+++ b/src/airbyte/models/shared/destination_azure_blob_storage.py
@@ -7,10 +7,10 @@
from enum import Enum
from typing import Any, Optional
-class DestinationAzureBlobStorageAzureBlobStorageEnum(str, Enum):
+class DestinationAzureBlobStorageAzureBlobStorage(str, Enum):
AZURE_BLOB_STORAGE = 'azure-blob-storage'
-class DestinationAzureBlobStorageFormatJSONLinesNewlineDelimitedJSONFormatTypeEnum(str, Enum):
+class DestinationAzureBlobStorageFormatJSONLinesNewlineDelimitedJSONFormatType(str, Enum):
JSONL = 'JSONL'
@@ -19,14 +19,14 @@ class DestinationAzureBlobStorageFormatJSONLinesNewlineDelimitedJSONFormatTypeEn
class DestinationAzureBlobStorageFormatJSONLinesNewlineDelimitedJSON:
r"""Output data format"""
- format_type: DestinationAzureBlobStorageFormatJSONLinesNewlineDelimitedJSONFormatTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format_type') }})
+ format_type: DestinationAzureBlobStorageFormatJSONLinesNewlineDelimitedJSONFormatType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format_type') }})
-class DestinationAzureBlobStorageFormatCSVCommaSeparatedValuesNormalizationFlatteningEnum(str, Enum):
+class DestinationAzureBlobStorageFormatCSVCommaSeparatedValuesNormalizationFlattening(str, Enum):
r"""Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details."""
NO_FLATTENING = 'No flattening'
ROOT_LEVEL_FLATTENING = 'Root level flattening'
-class DestinationAzureBlobStorageFormatCSVCommaSeparatedValuesFormatTypeEnum(str, Enum):
+class DestinationAzureBlobStorageFormatCSVCommaSeparatedValuesFormatType(str, Enum):
CSV = 'CSV'
@@ -35,9 +35,9 @@ class DestinationAzureBlobStorageFormatCSVCommaSeparatedValuesFormatTypeEnum(str
class DestinationAzureBlobStorageFormatCSVCommaSeparatedValues:
r"""Output data format"""
- flattening: DestinationAzureBlobStorageFormatCSVCommaSeparatedValuesNormalizationFlatteningEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('flattening') }})
+ flattening: DestinationAzureBlobStorageFormatCSVCommaSeparatedValuesNormalizationFlattening = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('flattening') }})
r"""Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details."""
- format_type: DestinationAzureBlobStorageFormatCSVCommaSeparatedValuesFormatTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format_type') }})
+ format_type: DestinationAzureBlobStorageFormatCSVCommaSeparatedValuesFormatType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format_type') }})
@dataclass_json(undefined=Undefined.EXCLUDE)
@@ -49,7 +49,7 @@ class DestinationAzureBlobStorage:
r"""The Azure blob storage account key."""
azure_blob_storage_account_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('azure_blob_storage_account_name') }})
r"""The account's name of the Azure Blob Storage."""
- destination_type: DestinationAzureBlobStorageAzureBlobStorageEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
+ destination_type: DestinationAzureBlobStorageAzureBlobStorage = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
format: Any = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format') }})
r"""Output data format"""
azure_blob_storage_container_name: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('azure_blob_storage_container_name'), 'exclude': lambda f: f is None }})
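
The CSV format object under the renamed classes; both fields are visible in this diff, so the sketch only drops the `Enum` suffixes:

from airbyte.models import shared

csv_format = shared.DestinationAzureBlobStorageFormatCSVCommaSeparatedValues(
    flattening=shared.DestinationAzureBlobStorageFormatCSVCommaSeparatedValuesNormalizationFlattening.ROOT_LEVEL_FLATTENING,
    format_type=shared.DestinationAzureBlobStorageFormatCSVCommaSeparatedValuesFormatType.CSV,
)
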
diff --git a/src/airbyte/models/shared/destination_bigquery.py b/src/airbyte/models/shared/destination_bigquery.py
index 4f995673..09d590df 100755
--- a/src/airbyte/models/shared/destination_bigquery.py
+++ b/src/airbyte/models/shared/destination_bigquery.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Any, Optional
-class DestinationBigqueryDatasetLocationEnum(str, Enum):
+class DestinationBigqueryDatasetLocation(str, Enum):
r"""The location of the dataset. Warning: Changes made after creation will not be applied. Read more here."""
US = 'US'
EU = 'EU'
@@ -50,10 +50,10 @@ class DestinationBigqueryDatasetLocationEnum(str, Enum):
US_WEST3 = 'us-west3'
US_WEST4 = 'us-west4'
-class DestinationBigqueryBigqueryEnum(str, Enum):
+class DestinationBigqueryBigquery(str, Enum):
BIGQUERY = 'bigquery'
-class DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKeyCredentialTypeEnum(str, Enum):
+class DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKeyCredentialType(str, Enum):
HMAC_KEY = 'HMAC_KEY'
@@ -62,18 +62,18 @@ class DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKeyCredentialTypeE
class DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKey:
r"""An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here."""
- credential_type: DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKeyCredentialTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credential_type') }})
+ credential_type: DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKeyCredentialType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credential_type') }})
hmac_key_access_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('hmac_key_access_id') }})
r"""HMAC key access ID. When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long."""
hmac_key_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('hmac_key_secret') }})
r"""The corresponding secret for the access ID. It is a 40-character base-64 encoded string."""
-class DestinationBigqueryLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessingEnum(str, Enum):
+class DestinationBigqueryLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing(str, Enum):
r"""This upload method temporarily stores records in a GCS bucket. With this option you can choose whether these records should be removed from GCS when the migration has finished. The default \\"Delete all tmp files from GCS\\" value is used if not set explicitly."""
DELETE_ALL_TMP_FILES_FROM_GCS = 'Delete all tmp files from GCS'
KEEP_ALL_TMP_FILES_IN_GCS = 'Keep all tmp files in GCS'
-class DestinationBigqueryLoadingMethodGCSStagingMethodEnum(str, Enum):
+class DestinationBigqueryLoadingMethodGCSStagingMethod(str, Enum):
GCS_STAGING = 'GCS Staging'
@@ -88,13 +88,13 @@ class DestinationBigqueryLoadingMethodGCSStaging:
r"""The name of the GCS bucket. Read more here."""
gcs_bucket_path: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('gcs_bucket_path') }})
r"""Directory under the GCS bucket where data will be written."""
- method: DestinationBigqueryLoadingMethodGCSStagingMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
+ method: DestinationBigqueryLoadingMethodGCSStagingMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
file_buffer_count: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('file_buffer_count'), 'exclude': lambda f: f is None }})
r"""Number of file buffers allocated for writing data. Increasing this number is beneficial for connections using Change Data Capture (CDC), up to the number of streams within a connection. Increasing the number of file buffers past the number of streams has a detrimental effect."""
- keep_files_in_gcs_bucket: Optional[DestinationBigqueryLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessingEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('keep_files_in_gcs-bucket'), 'exclude': lambda f: f is None }})
+ keep_files_in_gcs_bucket: Optional[DestinationBigqueryLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('keep_files_in_gcs-bucket'), 'exclude': lambda f: f is None }})
r"""This upload method temporarily stores records in a GCS bucket. With this option you can choose whether these records should be removed from GCS when the migration has finished. The default \\"Delete all tmp files from GCS\\" value is used if not set explicitly."""
-class DestinationBigqueryLoadingMethodStandardInsertsMethodEnum(str, Enum):
+class DestinationBigqueryLoadingMethodStandardInsertsMethod(str, Enum):
STANDARD = 'Standard'
@@ -103,9 +103,9 @@ class DestinationBigqueryLoadingMethodStandardInsertsMethodEnum(str, Enum):
class DestinationBigqueryLoadingMethodStandardInserts:
r"""Loading method used to select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here."""
- method: DestinationBigqueryLoadingMethodStandardInsertsMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
+ method: DestinationBigqueryLoadingMethodStandardInsertsMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
-class DestinationBigqueryTransformationQueryRunTypeEnum(str, Enum):
+class DestinationBigqueryTransformationQueryRunType(str, Enum):
r"""Interactive run type means that the query is executed as soon as possible, and these queries count towards concurrent rate limit and daily limit. Read more about interactive run type here. Batch queries are queued and started as soon as idle resources are available in the BigQuery shared resource pool, which usually occurs within a few minutes. Batch queries don’t count towards your concurrent rate limit. Read more about batch queries here. The default \\"interactive\\" value is used if not set explicitly."""
INTERACTIVE = 'interactive'
BATCH = 'batch'
@@ -118,9 +118,9 @@ class DestinationBigquery:
dataset_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dataset_id') }})
r"""The default BigQuery Dataset ID that tables are replicated to if the source does not specify a namespace. Read more here."""
- dataset_location: DestinationBigqueryDatasetLocationEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dataset_location') }})
+ dataset_location: DestinationBigqueryDatasetLocation = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dataset_location') }})
r"""The location of the dataset. Warning: Changes made after creation will not be applied. Read more here."""
- destination_type: DestinationBigqueryBigqueryEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
+ destination_type: DestinationBigqueryBigquery = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
project_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('project_id') }})
r"""The GCP project ID for the project containing the target BigQuery dataset. Read more here."""
big_query_client_buffer_size_mb: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('big_query_client_buffer_size_mb'), 'exclude': lambda f: f is None }})
@@ -129,6 +129,6 @@ class DestinationBigquery:
r"""The contents of the JSON service account key. Check out the docs if you need help generating this key. Default credentials will be used if this field is left empty."""
loading_method: Optional[Any] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('loading_method'), 'exclude': lambda f: f is None }})
r"""Loading method used to select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here."""
- transformation_priority: Optional[DestinationBigqueryTransformationQueryRunTypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('transformation_priority'), 'exclude': lambda f: f is None }})
+ transformation_priority: Optional[DestinationBigqueryTransformationQueryRunType] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('transformation_priority'), 'exclude': lambda f: f is None }})
r"""Interactive run type means that the query is executed as soon as possible, and these queries count towards concurrent rate limit and daily limit. Read more about interactive run type here. Batch queries are queued and started as soon as idle resources are available in the BigQuery shared resource pool, which usually occurs within a few minutes. Batch queries don’t count towards your concurrent rate limit. Read more about batch queries here. The default \\"interactive\\" value is used if not set explicitly."""
\ No newline at end of file
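
A sketch of the renamed HMAC-key credential for GCS staging (the key values are hypothetical placeholders):

from airbyte.models import shared

hmac = shared.DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKey(
    credential_type=shared.DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKeyCredentialType.HMAC_KEY,
    hmac_key_access_id='GOOG1EXAMPLE',   # hypothetical; 61 chars when linked to a service account
    hmac_key_secret='aBcDeFgExample',    # hypothetical 40-char base-64 secret
)
# Tmp-file cleanup behavior also uses a shortened enum name now.
cleanup = shared.DestinationBigqueryLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing.DELETE_ALL_TMP_FILES_FROM_GCS
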
diff --git a/src/airbyte/models/shared/destination_bigquery_denormalized.py b/src/airbyte/models/shared/destination_bigquery_denormalized.py
index a50d6c22..af849a3b 100755
--- a/src/airbyte/models/shared/destination_bigquery_denormalized.py
+++ b/src/airbyte/models/shared/destination_bigquery_denormalized.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Any, Optional
-class DestinationBigqueryDenormalizedDatasetLocationEnum(str, Enum):
+class DestinationBigqueryDenormalizedDatasetLocation(str, Enum):
r"""The location of the dataset. Warning: Changes made after creation will not be applied. The default \\"US\\" value is used if not set explicitly. Read more here."""
US = 'US'
EU = 'EU'
@@ -50,10 +50,10 @@ class DestinationBigqueryDenormalizedDatasetLocationEnum(str, Enum):
US_WEST3 = 'us-west3'
US_WEST4 = 'us-west4'
-class DestinationBigqueryDenormalizedBigqueryDenormalizedEnum(str, Enum):
+class DestinationBigqueryDenormalizedBigqueryDenormalized(str, Enum):
BIGQUERY_DENORMALIZED = 'bigquery-denormalized'
-class DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKeyCredentialTypeEnum(str, Enum):
+class DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKeyCredentialType(str, Enum):
HMAC_KEY = 'HMAC_KEY'
@@ -62,18 +62,18 @@ class DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKeyCre
class DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKey:
r"""An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here."""
- credential_type: DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKeyCredentialTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credential_type') }})
+ credential_type: DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKeyCredentialType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credential_type') }})
hmac_key_access_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('hmac_key_access_id') }})
r"""HMAC key access ID. When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long."""
hmac_key_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('hmac_key_secret') }})
r"""The corresponding secret for the access ID. It is a 40-character base-64 encoded string."""
-class DestinationBigqueryDenormalizedLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessingEnum(str, Enum):
+class DestinationBigqueryDenormalizedLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing(str, Enum):
r"""This upload method temporarily stores records in a GCS bucket. With this option you can choose whether these records should be removed from GCS when the migration has finished. The default \\"Delete all tmp files from GCS\\" value is used if not set explicitly."""
DELETE_ALL_TMP_FILES_FROM_GCS = 'Delete all tmp files from GCS'
KEEP_ALL_TMP_FILES_IN_GCS = 'Keep all tmp files in GCS'
-class DestinationBigqueryDenormalizedLoadingMethodGCSStagingMethodEnum(str, Enum):
+class DestinationBigqueryDenormalizedLoadingMethodGCSStagingMethod(str, Enum):
GCS_STAGING = 'GCS Staging'
@@ -88,13 +88,13 @@ class DestinationBigqueryDenormalizedLoadingMethodGCSStaging:
r"""The name of the GCS bucket. Read more here."""
gcs_bucket_path: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('gcs_bucket_path') }})
r"""Directory under the GCS bucket where data will be written. Read more here."""
- method: DestinationBigqueryDenormalizedLoadingMethodGCSStagingMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
+ method: DestinationBigqueryDenormalizedLoadingMethodGCSStagingMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
file_buffer_count: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('file_buffer_count'), 'exclude': lambda f: f is None }})
r"""Number of file buffers allocated for writing data. Increasing this number is beneficial for connections using Change Data Capture (CDC), up to the number of streams within a connection. Increasing the number of file buffers past the number of streams has a detrimental effect."""
- keep_files_in_gcs_bucket: Optional[DestinationBigqueryDenormalizedLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessingEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('keep_files_in_gcs-bucket'), 'exclude': lambda f: f is None }})
+ keep_files_in_gcs_bucket: Optional[DestinationBigqueryDenormalizedLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('keep_files_in_gcs-bucket'), 'exclude': lambda f: f is None }})
r"""This upload method temporarily stores records in a GCS bucket. With this option you can choose whether these records should be removed from GCS when the migration has finished. The default \\"Delete all tmp files from GCS\\" value is used if not set explicitly."""
-class DestinationBigqueryDenormalizedLoadingMethodStandardInsertsMethodEnum(str, Enum):
+class DestinationBigqueryDenormalizedLoadingMethodStandardInsertsMethod(str, Enum):
STANDARD = 'Standard'
@@ -103,7 +103,7 @@ class DestinationBigqueryDenormalizedLoadingMethodStandardInsertsMethodEnum(str,
class DestinationBigqueryDenormalizedLoadingMethodStandardInserts:
r"""Loading method used to select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here."""
- method: DestinationBigqueryDenormalizedLoadingMethodStandardInsertsMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
+ method: DestinationBigqueryDenormalizedLoadingMethodStandardInsertsMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
@dataclass_json(undefined=Undefined.EXCLUDE)
@@ -113,14 +113,14 @@ class DestinationBigqueryDenormalized:
dataset_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dataset_id') }})
r"""The default BigQuery Dataset ID that tables are replicated to if the source does not specify a namespace. Read more here."""
- destination_type: DestinationBigqueryDenormalizedBigqueryDenormalizedEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
+ destination_type: DestinationBigqueryDenormalizedBigqueryDenormalized = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
project_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('project_id') }})
r"""The GCP project ID for the project containing the target BigQuery dataset. Read more here."""
big_query_client_buffer_size_mb: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('big_query_client_buffer_size_mb'), 'exclude': lambda f: f is None }})
r"""Google BigQuery client's chunk (buffer) size (MIN=1, MAX = 15) for each table. The size that will be written by a single RPC. Written data will be buffered and only flushed upon reaching this size or closing the channel. The default 15MB value is used if not set explicitly. Read more here."""
credentials_json: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials_json'), 'exclude': lambda f: f is None }})
r"""The contents of the JSON service account key. Check out the docs if you need help generating this key. Default credentials will be used if this field is left empty."""
- dataset_location: Optional[DestinationBigqueryDenormalizedDatasetLocationEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dataset_location'), 'exclude': lambda f: f is None }})
+ dataset_location: Optional[DestinationBigqueryDenormalizedDatasetLocation] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dataset_location'), 'exclude': lambda f: f is None }})
r"""The location of the dataset. Warning: Changes made after creation will not be applied. The default \\"US\\" value is used if not set explicitly. Read more here."""
loading_method: Optional[Any] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('loading_method'), 'exclude': lambda f: f is None }})
r"""Loading method used to select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here."""
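
The denormalized BigQuery destination after the rename; a sketch with hypothetical project and dataset IDs:

from airbyte.models import shared

dest = shared.DestinationBigqueryDenormalized(
    dataset_id='analytics',        # hypothetical dataset
    destination_type=shared.DestinationBigqueryDenormalizedBigqueryDenormalized.BIGQUERY_DENORMALIZED,
    project_id='my-gcp-project',   # hypothetical project
    dataset_location=shared.DestinationBigqueryDenormalizedDatasetLocation.EU,  # optional
)
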
diff --git a/src/airbyte/models/shared/destination_cassandra.py b/src/airbyte/models/shared/destination_cassandra.py
index 09845495..1e04a7bd 100755
--- a/src/airbyte/models/shared/destination_cassandra.py
+++ b/src/airbyte/models/shared/destination_cassandra.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class DestinationCassandraCassandraEnum(str, Enum):
+class DestinationCassandraCassandra(str, Enum):
CASSANDRA = 'cassandra'
@@ -18,7 +18,7 @@ class DestinationCassandra:
address: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('address') }})
r"""Address to connect to."""
- destination_type: DestinationCassandraCassandraEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
+ destination_type: DestinationCassandraCassandra = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
keyspace: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('keyspace') }})
r"""Default Cassandra keyspace to create data in."""
password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password') }})
diff --git a/src/airbyte/models/shared/destination_clickhouse.py b/src/airbyte/models/shared/destination_clickhouse.py
index 13ad930d..65ab09e6 100755
--- a/src/airbyte/models/shared/destination_clickhouse.py
+++ b/src/airbyte/models/shared/destination_clickhouse.py
@@ -7,10 +7,10 @@
from enum import Enum
from typing import Any, Optional
-class DestinationClickhouseClickhouseEnum(str, Enum):
+class DestinationClickhouseClickhouse(str, Enum):
CLICKHOUSE = 'clickhouse'
-class DestinationClickhouseTunnelMethodPasswordAuthenticationTunnelMethodEnum(str, Enum):
+class DestinationClickhouseTunnelMethodPasswordAuthenticationTunnelMethod(str, Enum):
r"""Connect through a jump server tunnel host using username and password authentication"""
SSH_PASSWORD_AUTH = 'SSH_PASSWORD_AUTH'
@@ -22,7 +22,7 @@ class DestinationClickhouseTunnelMethodPasswordAuthentication:
tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }})
r"""Hostname of the jump server host that allows inbound ssh tunnel."""
- tunnel_method: DestinationClickhouseTunnelMethodPasswordAuthenticationTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: DestinationClickhouseTunnelMethodPasswordAuthenticationTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""Connect through a jump server tunnel host using username and password authentication"""
tunnel_port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port') }})
r"""Port on the proxy/jump server that accepts inbound ssh connections."""
@@ -31,7 +31,7 @@ class DestinationClickhouseTunnelMethodPasswordAuthentication:
tunnel_user_password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user_password') }})
r"""OS-level password for logging into the jump server host"""
-class DestinationClickhouseTunnelMethodSSHKeyAuthenticationTunnelMethodEnum(str, Enum):
+class DestinationClickhouseTunnelMethodSSHKeyAuthenticationTunnelMethod(str, Enum):
r"""Connect through a jump server tunnel host using username and ssh key"""
SSH_KEY_AUTH = 'SSH_KEY_AUTH'
@@ -45,14 +45,14 @@ class DestinationClickhouseTunnelMethodSSHKeyAuthentication:
r"""OS-level user account ssh key credentials in RSA PEM format (created with ssh-keygen -t rsa -m PEM -f myuser_rsa)"""
tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }})
r"""Hostname of the jump server host that allows inbound ssh tunnel."""
- tunnel_method: DestinationClickhouseTunnelMethodSSHKeyAuthenticationTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: DestinationClickhouseTunnelMethodSSHKeyAuthenticationTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""Connect through a jump server tunnel host using username and ssh key"""
tunnel_port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port') }})
r"""Port on the proxy/jump server that accepts inbound ssh connections."""
tunnel_user: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user') }})
r"""OS-level username for logging into the jump server host."""
-class DestinationClickhouseTunnelMethodNoTunnelTunnelMethodEnum(str, Enum):
+class DestinationClickhouseTunnelMethodNoTunnelTunnelMethod(str, Enum):
r"""No ssh tunnel needed to connect to database"""
NO_TUNNEL = 'NO_TUNNEL'
@@ -62,7 +62,7 @@ class DestinationClickhouseTunnelMethodNoTunnelTunnelMethodEnum(str, Enum):
class DestinationClickhouseTunnelMethodNoTunnel:
r"""Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use."""
- tunnel_method: DestinationClickhouseTunnelMethodNoTunnelTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: DestinationClickhouseTunnelMethodNoTunnelTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""No ssh tunnel needed to connect to database"""
@@ -73,7 +73,7 @@ class DestinationClickhouse:
database: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('database') }})
r"""Name of the database."""
- destination_type: DestinationClickhouseClickhouseEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
+ destination_type: DestinationClickhouseClickhouse = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('host') }})
r"""Hostname of the database."""
port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('port') }})
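
The SSH tunnel option objects follow the same rename pattern; a sketch of the no-tunnel variant:

from airbyte.models import shared

no_tunnel = shared.DestinationClickhouseTunnelMethodNoTunnel(
    tunnel_method=shared.DestinationClickhouseTunnelMethodNoTunnelTunnelMethod.NO_TUNNEL,
)
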
diff --git a/src/airbyte/models/shared/destination_convex.py b/src/airbyte/models/shared/destination_convex.py
index c68f44c7..5c3d07b8 100755
--- a/src/airbyte/models/shared/destination_convex.py
+++ b/src/airbyte/models/shared/destination_convex.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class DestinationConvexConvexEnum(str, Enum):
+class DestinationConvexConvex(str, Enum):
CONVEX = 'convex'
@@ -19,5 +19,5 @@ class DestinationConvex:
r"""API access key used to send data to a Convex deployment."""
deployment_url: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('deployment_url') }})
r"""URL of the Convex deployment that is the destination"""
- destination_type: DestinationConvexConvexEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
+ destination_type: DestinationConvexConvex = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/destination_cumulio.py b/src/airbyte/models/shared/destination_cumulio.py
index ba31008f..4db2a076 100755
--- a/src/airbyte/models/shared/destination_cumulio.py
+++ b/src/airbyte/models/shared/destination_cumulio.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class DestinationCumulioCumulioEnum(str, Enum):
+class DestinationCumulioCumulio(str, Enum):
CUMULIO = 'cumulio'
@@ -21,5 +21,5 @@ class DestinationCumulio:
r"""An API key generated in Cumul.io's platform (can be generated here: https://app.cumul.io/start/profile/integration)."""
api_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_token') }})
r"""The corresponding API token generated in Cumul.io's platform (can be generated here: https://app.cumul.io/start/profile/integration)."""
- destination_type: DestinationCumulioCumulioEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
+ destination_type: DestinationCumulioCumulio = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/destination_databend.py b/src/airbyte/models/shared/destination_databend.py
index fecdfc3b..4e810648 100755
--- a/src/airbyte/models/shared/destination_databend.py
+++ b/src/airbyte/models/shared/destination_databend.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class DestinationDatabendDatabendEnum(str, Enum):
+class DestinationDatabendDatabend(str, Enum):
DATABEND = 'databend'
@@ -18,7 +18,7 @@ class DestinationDatabend:
database: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('database') }})
r"""Name of the database."""
- destination_type: DestinationDatabendDatabendEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
+ destination_type: DestinationDatabendDatabend = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('host') }})
r"""Hostname of the database."""
username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }})
diff --git a/src/airbyte/models/shared/destination_databricks.py b/src/airbyte/models/shared/destination_databricks.py
index 6ec5cd5d..98ea87e3 100755
--- a/src/airbyte/models/shared/destination_databricks.py
+++ b/src/airbyte/models/shared/destination_databricks.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Any, Optional
-class DestinationDatabricksDataSourceAzureBlobStorageDataSourceTypeEnum(str, Enum):
+class DestinationDatabricksDataSourceAzureBlobStorageDataSourceType(str, Enum):
AZURE_BLOB_STORAGE = 'AZURE_BLOB_STORAGE'
@@ -22,14 +22,14 @@ class DestinationDatabricksDataSourceAzureBlobStorage:
r"""The name of the Azure blob storage container."""
azure_blob_storage_sas_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('azure_blob_storage_sas_token') }})
r"""Shared access signature (SAS) token to grant limited access to objects in your storage account."""
- data_source_type: DestinationDatabricksDataSourceAzureBlobStorageDataSourceTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('data_source_type') }})
+ data_source_type: DestinationDatabricksDataSourceAzureBlobStorageDataSourceType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('data_source_type') }})
azure_blob_storage_endpoint_domain_name: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('azure_blob_storage_endpoint_domain_name'), 'exclude': lambda f: f is None }})
r"""This is the Azure Blob Storage endpoint domain name. Leave the default value (or leave it empty if running the container from the command line) to use the native Microsoft endpoint."""
-class DestinationDatabricksDataSourceAmazonS3DataSourceTypeEnum(str, Enum):
+class DestinationDatabricksDataSourceAmazonS3DataSourceType(str, Enum):
S3_STORAGE = 'S3_STORAGE'
-class DestinationDatabricksDataSourceAmazonS3S3BucketRegionEnum(str, Enum):
+class DestinationDatabricksDataSourceAmazonS3S3BucketRegion(str, Enum):
r"""The region of the S3 staging bucket to use if utilising a copy strategy."""
UNKNOWN = ''
US_EAST_1 = 'us-east-1'
@@ -64,21 +64,21 @@ class DestinationDatabricksDataSourceAmazonS3S3BucketRegionEnum(str, Enum):
class DestinationDatabricksDataSourceAmazonS3:
r"""Storage on which the delta lake is built."""
- data_source_type: DestinationDatabricksDataSourceAmazonS3DataSourceTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('data_source_type') }})
+ data_source_type: DestinationDatabricksDataSourceAmazonS3DataSourceType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('data_source_type') }})
s3_access_key_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_access_key_id') }})
r"""The Access Key Id granting access to the above S3 staging bucket. Airbyte requires Read and Write permissions to the given bucket."""
s3_bucket_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_bucket_name') }})
r"""The name of the S3 bucket to use for intermittent staging of the data."""
s3_bucket_path: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_bucket_path') }})
r"""The directory under the S3 bucket where data will be written."""
- s3_bucket_region: DestinationDatabricksDataSourceAmazonS3S3BucketRegionEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_bucket_region') }})
+ s3_bucket_region: DestinationDatabricksDataSourceAmazonS3S3BucketRegion = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_bucket_region') }})
r"""The region of the S3 staging bucket to use if utilising a copy strategy."""
s3_secret_access_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_secret_access_key') }})
r"""The corresponding secret to the above access key id."""
file_name_pattern: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('file_name_pattern'), 'exclude': lambda f: f is None }})
r"""The pattern allows you to set the file-name format for the S3 staging file(s)"""
-class DestinationDatabricksDataSourceRecommendedManagedTablesDataSourceTypeEnum(str, Enum):
+class DestinationDatabricksDataSourceRecommendedManagedTablesDataSourceType(str, Enum):
MANAGED_TABLES_STORAGE = 'MANAGED_TABLES_STORAGE'
@@ -87,9 +87,9 @@ class DestinationDatabricksDataSourceRecommendedManagedTablesDataSourceTypeEnum(
class DestinationDatabricksDataSourceRecommendedManagedTables:
r"""Storage on which the delta lake is built."""
- data_source_type: DestinationDatabricksDataSourceRecommendedManagedTablesDataSourceTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('data_source_type') }})
+ data_source_type: DestinationDatabricksDataSourceRecommendedManagedTablesDataSourceType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('data_source_type') }})
-class DestinationDatabricksDatabricksEnum(str, Enum):
+class DestinationDatabricksDatabricks(str, Enum):
DATABRICKS = 'databricks'
@@ -108,7 +108,7 @@ class DestinationDatabricks:
r"""Databricks Personal Access Token for making authenticated requests."""
databricks_server_hostname: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('databricks_server_hostname') }})
r"""Databricks Cluster Server Hostname."""
- destination_type: DestinationDatabricksDatabricksEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
+ destination_type: DestinationDatabricksDatabricks = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
database: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('database'), 'exclude': lambda f: f is None }})
r"""The name of the catalog. If not specified otherwise, the \\"hive_metastore\\" will be used."""
databricks_port: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('databricks_port'), 'exclude': lambda f: f is None }})
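
A sketch of the renamed S3 data-source model for Databricks (keys and bucket names are hypothetical placeholders):

from airbyte.models import shared

s3_source = shared.DestinationDatabricksDataSourceAmazonS3(
    data_source_type=shared.DestinationDatabricksDataSourceAmazonS3DataSourceType.S3_STORAGE,
    s3_access_key_id='AKIAEXAMPLE',       # hypothetical key ID
    s3_bucket_name='staging-bucket',      # hypothetical bucket
    s3_bucket_path='airbyte-staging',
    s3_bucket_region=shared.DestinationDatabricksDataSourceAmazonS3S3BucketRegion.US_EAST_1,
    s3_secret_access_key='EXAMPLESECRET', # hypothetical secret
)
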
diff --git a/src/airbyte/models/shared/destination_dev_null.py b/src/airbyte/models/shared/destination_dev_null.py
new file mode 100755
index 00000000..d852ef91
--- /dev/null
+++ b/src/airbyte/models/shared/destination_dev_null.py
@@ -0,0 +1,33 @@
+"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT."""
+
+from __future__ import annotations
+import dataclasses
+from airbyte import utils
+from dataclasses_json import Undefined, dataclass_json
+from enum import Enum
+from typing import Any
+
+class DestinationDevNullDevNull(str, Enum):
+ DEV_NULL = 'dev-null'
+
+class DestinationDevNullTestDestinationSilentTestDestinationType(str, Enum):
+ SILENT = 'SILENT'
+
+
+@dataclass_json(undefined=Undefined.EXCLUDE)
+@dataclasses.dataclass
+class DestinationDevNullTestDestinationSilent:
+ r"""The type of destination to be used"""
+
+ test_destination_type: DestinationDevNullTestDestinationSilentTestDestinationType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('test_destination_type') }})
+
+
+@dataclass_json(undefined=Undefined.EXCLUDE)
+@dataclasses.dataclass
+class DestinationDevNull:
+ r"""The values required to configure the destination."""
+
+ destination_type: DestinationDevNullDevNull = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
+ test_destination: Any = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('test_destination') }})
+ r"""The type of destination to be used"""
+
\ No newline at end of file
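
Since this file is new and shown in full, a complete construction sketch is possible without guessing any field names:

from airbyte.models import shared

dest = shared.DestinationDevNull(
    destination_type=shared.DestinationDevNullDevNull.DEV_NULL,
    test_destination=shared.DestinationDevNullTestDestinationSilent(
        test_destination_type=shared.DestinationDevNullTestDestinationSilentTestDestinationType.SILENT,
    ),
)
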
diff --git a/src/airbyte/models/shared/destination_dynamodb.py b/src/airbyte/models/shared/destination_dynamodb.py
index d4b97004..e46f3eef 100755
--- a/src/airbyte/models/shared/destination_dynamodb.py
+++ b/src/airbyte/models/shared/destination_dynamodb.py
@@ -7,10 +7,10 @@
from enum import Enum
from typing import Optional
-class DestinationDynamodbDynamodbEnum(str, Enum):
+class DestinationDynamodbDynamodb(str, Enum):
DYNAMODB = 'dynamodb'
-class DestinationDynamodbDynamoDBRegionEnum(str, Enum):
+class DestinationDynamodbDynamoDBRegion(str, Enum):
r"""The region of the DynamoDB."""
UNKNOWN = ''
US_EAST_1 = 'us-east-1'
@@ -47,8 +47,8 @@ class DestinationDynamodb:
access_key_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_key_id') }})
r"""The access key id to access the DynamoDB. Airbyte requires Read and Write permissions to the DynamoDB."""
- destination_type: DestinationDynamodbDynamodbEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
- dynamodb_region: DestinationDynamodbDynamoDBRegionEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dynamodb_region') }})
+ destination_type: DestinationDynamodbDynamodb = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
+ dynamodb_region: DestinationDynamodbDynamoDBRegion = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dynamodb_region') }})
r"""The region of the DynamoDB."""
dynamodb_table_name_prefix: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dynamodb_table_name_prefix') }})
r"""The prefix to use when naming DynamoDB tables."""
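
The rename leaves the region strings intact; only the type names change:

from airbyte.models import shared

# The renamed region enum keeps the same wire values.
assert shared.DestinationDynamodbDynamoDBRegion.US_EAST_1.value == 'us-east-1'
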
diff --git a/src/airbyte/models/shared/destination_elasticsearch.py b/src/airbyte/models/shared/destination_elasticsearch.py
index bfad007f..85dc12bf 100755
--- a/src/airbyte/models/shared/destination_elasticsearch.py
+++ b/src/airbyte/models/shared/destination_elasticsearch.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Any, Optional
-class DestinationElasticsearchAuthenticationMethodUsernamePasswordMethodEnum(str, Enum):
+class DestinationElasticsearchAuthenticationMethodUsernamePasswordMethod(str, Enum):
BASIC = 'basic'
@@ -16,13 +16,13 @@ class DestinationElasticsearchAuthenticationMethodUsernamePasswordMethodEnum(str
class DestinationElasticsearchAuthenticationMethodUsernamePassword:
r"""Basic auth header with a username and password"""
- method: DestinationElasticsearchAuthenticationMethodUsernamePasswordMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
+ method: DestinationElasticsearchAuthenticationMethodUsernamePasswordMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password') }})
r"""Basic auth password to access a secure Elasticsearch server"""
username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }})
r"""Basic auth username to access a secure Elasticsearch server"""
-class DestinationElasticsearchAuthenticationMethodAPIKeySecretMethodEnum(str, Enum):
+class DestinationElasticsearchAuthenticationMethodAPIKeySecretMethod(str, Enum):
SECRET = 'secret'
@@ -35,9 +35,9 @@ class DestinationElasticsearchAuthenticationMethodAPIKeySecret:
r"""The Key ID to used when accessing an enterprise Elasticsearch instance."""
api_key_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('apiKeySecret') }})
r"""The secret associated with the API Key ID."""
- method: DestinationElasticsearchAuthenticationMethodAPIKeySecretMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
+ method: DestinationElasticsearchAuthenticationMethodAPIKeySecretMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
-class DestinationElasticsearchElasticsearchEnum(str, Enum):
+class DestinationElasticsearchElasticsearch(str, Enum):
ELASTICSEARCH = 'elasticsearch'
@@ -46,7 +46,7 @@ class DestinationElasticsearchElasticsearchEnum(str, Enum):
class DestinationElasticsearch:
r"""The values required to configure the destination."""
- destination_type: DestinationElasticsearchElasticsearchEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
+ destination_type: DestinationElasticsearchElasticsearch = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
endpoint: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('endpoint') }})
r"""The full url of the Elasticsearch server"""
authentication_method: Optional[Any] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('authenticationMethod'), 'exclude': lambda f: f is None }})
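For the basic-auth variant, a minimal sketch under the same rename (the username and password are placeholders, not real credentials):

```python
from airbyte.models.shared.destination_elasticsearch import (
    DestinationElasticsearchAuthenticationMethodUsernamePassword,
    DestinationElasticsearchAuthenticationMethodUsernamePasswordMethod,
)

# Placeholder credentials for illustration only.
auth = DestinationElasticsearchAuthenticationMethodUsernamePassword(
    method=DestinationElasticsearchAuthenticationMethodUsernamePasswordMethod.BASIC,
    password='changeme',
    username='elastic',
)
```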
diff --git a/src/airbyte/models/shared/destination_firebolt.py b/src/airbyte/models/shared/destination_firebolt.py
index 882dad65..0db44548 100755
--- a/src/airbyte/models/shared/destination_firebolt.py
+++ b/src/airbyte/models/shared/destination_firebolt.py
@@ -7,10 +7,10 @@
from enum import Enum
from typing import Any, Optional
-class DestinationFireboltFireboltEnum(str, Enum):
+class DestinationFireboltFirebolt(str, Enum):
FIREBOLT = 'firebolt'
-class DestinationFireboltLoadingMethodExternalTableViaS3MethodEnum(str, Enum):
+class DestinationFireboltLoadingMethodExternalTableViaS3Method(str, Enum):
S3 = 'S3'
@@ -23,13 +23,13 @@ class DestinationFireboltLoadingMethodExternalTableViaS3:
r"""AWS access key granting read and write access to S3."""
aws_key_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('aws_key_secret') }})
r"""Corresponding secret part of the AWS Key"""
- method: DestinationFireboltLoadingMethodExternalTableViaS3MethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
+ method: DestinationFireboltLoadingMethodExternalTableViaS3Method = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
s3_bucket: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_bucket') }})
r"""The name of the S3 bucket."""
s3_region: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_region') }})
r"""Region name of the S3 bucket."""
-class DestinationFireboltLoadingMethodSQLInsertsMethodEnum(str, Enum):
+class DestinationFireboltLoadingMethodSQLInsertsMethod(str, Enum):
SQL = 'SQL'
@@ -38,7 +38,7 @@ class DestinationFireboltLoadingMethodSQLInsertsMethodEnum(str, Enum):
class DestinationFireboltLoadingMethodSQLInserts:
r"""Loading method used to select the way data will be uploaded to Firebolt"""
- method: DestinationFireboltLoadingMethodSQLInsertsMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
+ method: DestinationFireboltLoadingMethodSQLInsertsMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
@dataclass_json(undefined=Undefined.EXCLUDE)
@@ -48,7 +48,7 @@ class DestinationFirebolt:
database: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('database') }})
r"""The database to connect to."""
- destination_type: DestinationFireboltFireboltEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
+ destination_type: DestinationFireboltFirebolt = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password') }})
r"""Firebolt password."""
username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }})
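The single-member method enums rename the same way; a sketch of the SQL-inserts loading method as it now reads:

```python
from airbyte.models.shared.destination_firebolt import (
    DestinationFireboltLoadingMethodSQLInserts,
    DestinationFireboltLoadingMethodSQLInsertsMethod,
)

# Was DestinationFireboltLoadingMethodSQLInsertsMethodEnum.SQL before this change.
loading_method = DestinationFireboltLoadingMethodSQLInserts(
    method=DestinationFireboltLoadingMethodSQLInsertsMethod.SQL,
)
```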
diff --git a/src/airbyte/models/shared/destination_firestore.py b/src/airbyte/models/shared/destination_firestore.py
index 4746fe3a..7ef965d9 100755
--- a/src/airbyte/models/shared/destination_firestore.py
+++ b/src/airbyte/models/shared/destination_firestore.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class DestinationFirestoreFirestoreEnum(str, Enum):
+class DestinationFirestoreFirestore(str, Enum):
FIRESTORE = 'firestore'
@@ -16,7 +16,7 @@ class DestinationFirestoreFirestoreEnum(str, Enum):
class DestinationFirestore:
r"""The values required to configure the destination."""
- destination_type: DestinationFirestoreFirestoreEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
+ destination_type: DestinationFirestoreFirestore = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
project_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('project_id') }})
r"""The GCP project ID for the project containing the target BigQuery dataset."""
credentials_json: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials_json'), 'exclude': lambda f: f is None }})
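A sketch of the renamed Firestore classes, using only the fields visible in this hunk (the project ID is a placeholder):

```python
from airbyte.models.shared.destination_firestore import (
    DestinationFirestore,
    DestinationFirestoreFirestore,
)

config = DestinationFirestore(
    destination_type=DestinationFirestoreFirestore.FIRESTORE,
    project_id='my-gcp-project',  # placeholder
)
```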
diff --git a/src/airbyte/models/shared/destination_gcs.py b/src/airbyte/models/shared/destination_gcs.py
index a8e53d3b..c43ed924 100755
--- a/src/airbyte/models/shared/destination_gcs.py
+++ b/src/airbyte/models/shared/destination_gcs.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Any, Optional
-class DestinationGcsCredentialHMACKeyCredentialTypeEnum(str, Enum):
+class DestinationGcsCredentialHMACKeyCredentialType(str, Enum):
HMAC_KEY = 'HMAC_KEY'
@@ -16,16 +16,16 @@ class DestinationGcsCredentialHMACKeyCredentialTypeEnum(str, Enum):
class DestinationGcsCredentialHMACKey:
r"""An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here."""
- credential_type: DestinationGcsCredentialHMACKeyCredentialTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credential_type') }})
+ credential_type: DestinationGcsCredentialHMACKeyCredentialType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credential_type') }})
hmac_key_access_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('hmac_key_access_id') }})
r"""When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long. Read more here."""
hmac_key_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('hmac_key_secret') }})
r"""The corresponding secret for the access ID. It is a 40-character base-64 encoded string. Read more here."""
-class DestinationGcsGcsEnum(str, Enum):
+class DestinationGcsGcs(str, Enum):
GCS = 'gcs'
-class DestinationGcsFormatParquetColumnarStorageCompressionCodecEnum(str, Enum):
+class DestinationGcsFormatParquetColumnarStorageCompressionCodec(str, Enum):
r"""The compression algorithm used to compress data pages."""
UNCOMPRESSED = 'UNCOMPRESSED'
SNAPPY = 'SNAPPY'
@@ -35,7 +35,7 @@ class DestinationGcsFormatParquetColumnarStorageCompressionCodecEnum(str, Enum):
LZ4 = 'LZ4'
ZSTD = 'ZSTD'
-class DestinationGcsFormatParquetColumnarStorageFormatTypeEnum(str, Enum):
+class DestinationGcsFormatParquetColumnarStorageFormatType(str, Enum):
PARQUET = 'Parquet'
@@ -44,10 +44,10 @@ class DestinationGcsFormatParquetColumnarStorageFormatTypeEnum(str, Enum):
class DestinationGcsFormatParquetColumnarStorage:
r"""Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format."""
- format_type: DestinationGcsFormatParquetColumnarStorageFormatTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format_type') }})
+ format_type: DestinationGcsFormatParquetColumnarStorageFormatType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format_type') }})
block_size_mb: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('block_size_mb'), 'exclude': lambda f: f is None }})
r"""This is the size of a row group being buffered in memory. It limits the memory usage when writing. Larger values will improve the IO when reading, but consume more memory when writing. Default: 128 MB."""
- compression_codec: Optional[DestinationGcsFormatParquetColumnarStorageCompressionCodecEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_codec'), 'exclude': lambda f: f is None }})
+ compression_codec: Optional[DestinationGcsFormatParquetColumnarStorageCompressionCodec] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_codec'), 'exclude': lambda f: f is None }})
r"""The compression algorithm used to compress data pages."""
dictionary_encoding: Optional[bool] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dictionary_encoding'), 'exclude': lambda f: f is None }})
r"""Default: true."""
@@ -58,7 +58,7 @@ class DestinationGcsFormatParquetColumnarStorage:
page_size_kb: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('page_size_kb'), 'exclude': lambda f: f is None }})
r"""The page size is for compression. A block is composed of pages. A page is the smallest unit that must be read fully to access a single record. If this value is too small, the compression will deteriorate. Default: 1024 KB."""
-class DestinationGcsFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionTypeEnum(str, Enum):
+class DestinationGcsFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType(str, Enum):
GZIP = 'GZIP'
@@ -67,9 +67,9 @@ class DestinationGcsFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressio
class DestinationGcsFormatJSONLinesNewlineDelimitedJSONCompressionGZIP:
r"""Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: \\".jsonl.gz\\")."""
- compression_type: Optional[DestinationGcsFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionTypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_type'), 'exclude': lambda f: f is None }})
+ compression_type: Optional[DestinationGcsFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_type'), 'exclude': lambda f: f is None }})
-class DestinationGcsFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionTypeEnum(str, Enum):
+class DestinationGcsFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType(str, Enum):
NO_COMPRESSION = 'No Compression'
@@ -78,9 +78,9 @@ class DestinationGcsFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionC
class DestinationGcsFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression:
r"""Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: \\".jsonl.gz\\")."""
- compression_type: Optional[DestinationGcsFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionTypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_type'), 'exclude': lambda f: f is None }})
+ compression_type: Optional[DestinationGcsFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_type'), 'exclude': lambda f: f is None }})
-class DestinationGcsFormatJSONLinesNewlineDelimitedJSONFormatTypeEnum(str, Enum):
+class DestinationGcsFormatJSONLinesNewlineDelimitedJSONFormatType(str, Enum):
JSONL = 'JSONL'
@@ -89,11 +89,11 @@ class DestinationGcsFormatJSONLinesNewlineDelimitedJSONFormatTypeEnum(str, Enum)
class DestinationGcsFormatJSONLinesNewlineDelimitedJSON:
r"""Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format."""
- format_type: DestinationGcsFormatJSONLinesNewlineDelimitedJSONFormatTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format_type') }})
+ format_type: DestinationGcsFormatJSONLinesNewlineDelimitedJSONFormatType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format_type') }})
compression: Optional[Any] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression'), 'exclude': lambda f: f is None }})
r"""Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: \\".jsonl.gz\\")."""
-class DestinationGcsFormatCSVCommaSeparatedValuesCompressionGZIPCompressionTypeEnum(str, Enum):
+class DestinationGcsFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType(str, Enum):
GZIP = 'GZIP'
@@ -102,9 +102,9 @@ class DestinationGcsFormatCSVCommaSeparatedValuesCompressionGZIPCompressionTypeE
class DestinationGcsFormatCSVCommaSeparatedValuesCompressionGZIP:
r"""Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: \\".csv.gz\\")."""
- compression_type: Optional[DestinationGcsFormatCSVCommaSeparatedValuesCompressionGZIPCompressionTypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_type'), 'exclude': lambda f: f is None }})
+ compression_type: Optional[DestinationGcsFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_type'), 'exclude': lambda f: f is None }})
-class DestinationGcsFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionTypeEnum(str, Enum):
+class DestinationGcsFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType(str, Enum):
NO_COMPRESSION = 'No Compression'
@@ -113,14 +113,14 @@ class DestinationGcsFormatCSVCommaSeparatedValuesCompressionNoCompressionCompres
class DestinationGcsFormatCSVCommaSeparatedValuesCompressionNoCompression:
r"""Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: \\".csv.gz\\")."""
- compression_type: Optional[DestinationGcsFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionTypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_type'), 'exclude': lambda f: f is None }})
+ compression_type: Optional[DestinationGcsFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_type'), 'exclude': lambda f: f is None }})
-class DestinationGcsFormatCSVCommaSeparatedValuesNormalizationEnum(str, Enum):
+class DestinationGcsFormatCSVCommaSeparatedValuesNormalization(str, Enum):
r"""Whether the input JSON data should be normalized (flattened) in the output CSV. Please refer to docs for details."""
NO_FLATTENING = 'No flattening'
ROOT_LEVEL_FLATTENING = 'Root level flattening'
-class DestinationGcsFormatCSVCommaSeparatedValuesFormatTypeEnum(str, Enum):
+class DestinationGcsFormatCSVCommaSeparatedValuesFormatType(str, Enum):
CSV = 'CSV'
@@ -129,13 +129,13 @@ class DestinationGcsFormatCSVCommaSeparatedValuesFormatTypeEnum(str, Enum):
class DestinationGcsFormatCSVCommaSeparatedValues:
r"""Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format."""
- format_type: DestinationGcsFormatCSVCommaSeparatedValuesFormatTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format_type') }})
+ format_type: DestinationGcsFormatCSVCommaSeparatedValuesFormatType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format_type') }})
compression: Optional[Any] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression'), 'exclude': lambda f: f is None }})
r"""Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: \\".csv.gz\\")."""
- flattening: Optional[DestinationGcsFormatCSVCommaSeparatedValuesNormalizationEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('flattening'), 'exclude': lambda f: f is None }})
+ flattening: Optional[DestinationGcsFormatCSVCommaSeparatedValuesNormalization] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('flattening'), 'exclude': lambda f: f is None }})
r"""Whether the input JSON data should be normalized (flattened) in the output CSV. Please refer to docs for details."""
-class DestinationGcsFormatAvroApacheAvroCompressionCodecSnappyCodecEnum(str, Enum):
+class DestinationGcsFormatAvroApacheAvroCompressionCodecSnappyCodec(str, Enum):
SNAPPY = 'snappy'
@@ -144,9 +144,9 @@ class DestinationGcsFormatAvroApacheAvroCompressionCodecSnappyCodecEnum(str, Enu
class DestinationGcsFormatAvroApacheAvroCompressionCodecSnappy:
r"""The compression algorithm used to compress data. Default to no compression."""
- codec: DestinationGcsFormatAvroApacheAvroCompressionCodecSnappyCodecEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('codec') }})
+ codec: DestinationGcsFormatAvroApacheAvroCompressionCodecSnappyCodec = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('codec') }})
-class DestinationGcsFormatAvroApacheAvroCompressionCodecZstandardCodecEnum(str, Enum):
+class DestinationGcsFormatAvroApacheAvroCompressionCodecZstandardCodec(str, Enum):
ZSTANDARD = 'zstandard'
@@ -155,13 +155,13 @@ class DestinationGcsFormatAvroApacheAvroCompressionCodecZstandardCodecEnum(str,
class DestinationGcsFormatAvroApacheAvroCompressionCodecZstandard:
r"""The compression algorithm used to compress data. Default to no compression."""
- codec: DestinationGcsFormatAvroApacheAvroCompressionCodecZstandardCodecEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('codec') }})
+ codec: DestinationGcsFormatAvroApacheAvroCompressionCodecZstandardCodec = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('codec') }})
compression_level: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_level'), 'exclude': lambda f: f is None }})
r"""Negative levels are 'fast' modes akin to lz4 or snappy, levels above 9 are generally for archival purposes, and levels above 18 use a lot of memory."""
include_checksum: Optional[bool] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('include_checksum'), 'exclude': lambda f: f is None }})
r"""If true, include a checksum with each data block."""
-class DestinationGcsFormatAvroApacheAvroCompressionCodecXzCodecEnum(str, Enum):
+class DestinationGcsFormatAvroApacheAvroCompressionCodecXzCodec(str, Enum):
XZ = 'xz'
@@ -170,11 +170,11 @@ class DestinationGcsFormatAvroApacheAvroCompressionCodecXzCodecEnum(str, Enum):
class DestinationGcsFormatAvroApacheAvroCompressionCodecXz:
r"""The compression algorithm used to compress data. Default to no compression."""
- codec: DestinationGcsFormatAvroApacheAvroCompressionCodecXzCodecEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('codec') }})
+ codec: DestinationGcsFormatAvroApacheAvroCompressionCodecXzCodec = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('codec') }})
compression_level: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_level'), 'exclude': lambda f: f is None }})
r"""The presets 0-3 are fast presets with medium compression. The presets 4-6 are fairly slow presets with high compression. The default preset is 6. The presets 7-9 are like the preset 6 but use bigger dictionaries and have higher compressor and decompressor memory requirements. Unless the uncompressed size of the file exceeds 8 MiB, 16 MiB, or 32 MiB, it is waste of memory to use the presets 7, 8, or 9, respectively. Read more here for details."""
-class DestinationGcsFormatAvroApacheAvroCompressionCodecBzip2CodecEnum(str, Enum):
+class DestinationGcsFormatAvroApacheAvroCompressionCodecBzip2Codec(str, Enum):
BZIP2 = 'bzip2'
@@ -183,9 +183,9 @@ class DestinationGcsFormatAvroApacheAvroCompressionCodecBzip2CodecEnum(str, Enum
class DestinationGcsFormatAvroApacheAvroCompressionCodecBzip2:
r"""The compression algorithm used to compress data. Default to no compression."""
- codec: DestinationGcsFormatAvroApacheAvroCompressionCodecBzip2CodecEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('codec') }})
+ codec: DestinationGcsFormatAvroApacheAvroCompressionCodecBzip2Codec = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('codec') }})
-class DestinationGcsFormatAvroApacheAvroCompressionCodecDeflateCodecEnum(str, Enum):
+class DestinationGcsFormatAvroApacheAvroCompressionCodecDeflateCodec(str, Enum):
DEFLATE = 'Deflate'
@@ -194,11 +194,11 @@ class DestinationGcsFormatAvroApacheAvroCompressionCodecDeflateCodecEnum(str, En
class DestinationGcsFormatAvroApacheAvroCompressionCodecDeflate:
r"""The compression algorithm used to compress data. Default to no compression."""
- codec: DestinationGcsFormatAvroApacheAvroCompressionCodecDeflateCodecEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('codec') }})
+ codec: DestinationGcsFormatAvroApacheAvroCompressionCodecDeflateCodec = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('codec') }})
compression_level: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_level'), 'exclude': lambda f: f is None }})
r"""0: no compression & fastest, 9: best compression & slowest."""
-class DestinationGcsFormatAvroApacheAvroCompressionCodecNoCompressionCodecEnum(str, Enum):
+class DestinationGcsFormatAvroApacheAvroCompressionCodecNoCompressionCodec(str, Enum):
NO_COMPRESSION = 'no compression'
@@ -207,9 +207,9 @@ class DestinationGcsFormatAvroApacheAvroCompressionCodecNoCompressionCodecEnum(s
class DestinationGcsFormatAvroApacheAvroCompressionCodecNoCompression:
r"""The compression algorithm used to compress data. Default to no compression."""
- codec: DestinationGcsFormatAvroApacheAvroCompressionCodecNoCompressionCodecEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('codec') }})
+ codec: DestinationGcsFormatAvroApacheAvroCompressionCodecNoCompressionCodec = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('codec') }})
-class DestinationGcsFormatAvroApacheAvroFormatTypeEnum(str, Enum):
+class DestinationGcsFormatAvroApacheAvroFormatType(str, Enum):
AVRO = 'Avro'
@@ -220,9 +220,9 @@ class DestinationGcsFormatAvroApacheAvro:
compression_codec: Any = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_codec') }})
r"""The compression algorithm used to compress data. Default to no compression."""
- format_type: DestinationGcsFormatAvroApacheAvroFormatTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format_type') }})
+ format_type: DestinationGcsFormatAvroApacheAvroFormatType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format_type') }})
-class DestinationGCSGCSBucketRegionEnum(str, Enum):
+class DestinationGCSGCSBucketRegion(str, Enum):
r"""Select a Region of the GCS Bucket. Read more here."""
NORTHAMERICA_NORTHEAST1 = 'northamerica-northeast1'
NORTHAMERICA_NORTHEAST2 = 'northamerica-northeast2'
@@ -268,13 +268,13 @@ class DestinationGcs:
credential: Any = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credential') }})
r"""An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here."""
- destination_type: DestinationGcsGcsEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
+ destination_type: DestinationGcsGcs = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
format: Any = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format') }})
r"""Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format."""
gcs_bucket_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('gcs_bucket_name') }})
r"""You can find the bucket name in the App Engine Admin console Application Settings page, under the label Google Cloud Storage Bucket. Read more here."""
gcs_bucket_path: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('gcs_bucket_path') }})
r"""GCS Bucket Path string Subdirectory under the above bucket to sync the data into."""
- gcs_bucket_region: Optional[DestinationGCSGCSBucketRegionEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('gcs_bucket_region'), 'exclude': lambda f: f is None }})
+ gcs_bucket_region: Optional[DestinationGCSGCSBucketRegion] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('gcs_bucket_region'), 'exclude': lambda f: f is None }})
r"""Select a Region of the GCS Bucket. Read more here."""
\ No newline at end of file
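destination_gcs.py carries the largest batch of renames, but one sketch covers the pattern; here Parquet with Snappy compression, both members taken from the enums above:

```python
from airbyte.models.shared.destination_gcs import (
    DestinationGcsFormatParquetColumnarStorage,
    DestinationGcsFormatParquetColumnarStorageCompressionCodec,
    DestinationGcsFormatParquetColumnarStorageFormatType,
)

fmt = DestinationGcsFormatParquetColumnarStorage(
    format_type=DestinationGcsFormatParquetColumnarStorageFormatType.PARQUET,
    compression_codec=DestinationGcsFormatParquetColumnarStorageCompressionCodec.SNAPPY,
)
```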
diff --git a/src/airbyte/models/shared/destination_google_sheets.py b/src/airbyte/models/shared/destination_google_sheets.py
index 014a6495..d8e19b1b 100755
--- a/src/airbyte/models/shared/destination_google_sheets.py
+++ b/src/airbyte/models/shared/destination_google_sheets.py
@@ -19,7 +19,7 @@ class DestinationGoogleSheetsAuthenticationViaGoogleOAuth:
refresh_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('refresh_token') }})
r"""The token for obtaining new access token."""
-class DestinationGoogleSheetsGoogleSheetsEnum(str, Enum):
+class DestinationGoogleSheetsGoogleSheets(str, Enum):
GOOGLE_SHEETS = 'google-sheets'
@@ -30,7 +30,7 @@ class DestinationGoogleSheets:
credentials: DestinationGoogleSheetsAuthenticationViaGoogleOAuth = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials') }})
r"""Google API Credentials for connecting to Google Sheets and Google Drive APIs"""
- destination_type: DestinationGoogleSheetsGoogleSheetsEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
+ destination_type: DestinationGoogleSheetsGoogleSheets = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
spreadsheet_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('spreadsheet_id') }})
r"""The link to your spreadsheet. See this guide for more details."""
\ No newline at end of file
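Because these classes subclass (str, Enum), the rename leaves string comparisons intact; a quick sketch:

```python
from airbyte.models.shared.destination_google_sheets import DestinationGoogleSheetsGoogleSheets

# The member still compares equal to its wire value.
assert DestinationGoogleSheetsGoogleSheets.GOOGLE_SHEETS == 'google-sheets'
```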
diff --git a/src/airbyte/models/shared/destination_keen.py b/src/airbyte/models/shared/destination_keen.py
index adeb0461..0d977488 100755
--- a/src/airbyte/models/shared/destination_keen.py
+++ b/src/airbyte/models/shared/destination_keen.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class DestinationKeenKeenEnum(str, Enum):
+class DestinationKeenKeen(str, Enum):
KEEN = 'keen'
@@ -18,7 +18,7 @@ class DestinationKeen:
api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }})
r"""To get Keen Master API Key, navigate to the Access tab from the left-hand, side panel and check the Project Details section."""
- destination_type: DestinationKeenKeenEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
+ destination_type: DestinationKeenKeen = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
project_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('project_id') }})
r"""To get Keen Project ID, navigate to the Access tab from the left-hand, side panel and check the Project Details section."""
infer_timestamp: Optional[bool] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('infer_timestamp'), 'exclude': lambda f: f is None }})
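A sketch with the renamed Keen classes, limited to the fields shown in this hunk (the API key and project ID are placeholders):

```python
from airbyte.models.shared.destination_keen import DestinationKeen, DestinationKeenKeen

config = DestinationKeen(
    api_key='KEEN_MASTER_API_KEY',  # placeholder
    destination_type=DestinationKeenKeen.KEEN,
    project_id='keen-project-id',   # placeholder
)
```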
diff --git a/src/airbyte/models/shared/destination_kinesis.py b/src/airbyte/models/shared/destination_kinesis.py
index 60849ecb..136ae572 100755
--- a/src/airbyte/models/shared/destination_kinesis.py
+++ b/src/airbyte/models/shared/destination_kinesis.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class DestinationKinesisKinesisEnum(str, Enum):
+class DestinationKinesisKinesis(str, Enum):
KINESIS = 'kinesis'
@@ -19,7 +19,7 @@ class DestinationKinesis:
r"""Generate the AWS Access Key for current user."""
buffer_size: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('bufferSize') }})
r"""Buffer size for storing kinesis records before being batch streamed."""
- destination_type: DestinationKinesisKinesisEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
+ destination_type: DestinationKinesisKinesis = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
endpoint: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('endpoint') }})
r"""AWS Kinesis endpoint."""
private_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('privateKey') }})
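As elsewhere in this change, only the enum's class name is touched here; a one-line sketch:

```python
from airbyte.models.shared.destination_kinesis import DestinationKinesisKinesis

destination_type = DestinationKinesisKinesis.KINESIS  # value 'kinesis' is unchanged
```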
diff --git a/src/airbyte/models/shared/destination_mariadb_columnstore.py b/src/airbyte/models/shared/destination_mariadb_columnstore.py
index 0f1b910e..860b13b6 100755
--- a/src/airbyte/models/shared/destination_mariadb_columnstore.py
+++ b/src/airbyte/models/shared/destination_mariadb_columnstore.py
@@ -7,10 +7,10 @@
from enum import Enum
from typing import Any, Optional
-class DestinationMariadbColumnstoreMariadbColumnstoreEnum(str, Enum):
+class DestinationMariadbColumnstoreMariadbColumnstore(str, Enum):
MARIADB_COLUMNSTORE = 'mariadb-columnstore'
-class DestinationMariadbColumnstoreTunnelMethodPasswordAuthenticationTunnelMethodEnum(str, Enum):
+class DestinationMariadbColumnstoreTunnelMethodPasswordAuthenticationTunnelMethod(str, Enum):
r"""Connect through a jump server tunnel host using username and password authentication"""
SSH_PASSWORD_AUTH = 'SSH_PASSWORD_AUTH'
@@ -22,7 +22,7 @@ class DestinationMariadbColumnstoreTunnelMethodPasswordAuthentication:
tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }})
r"""Hostname of the jump server host that allows inbound ssh tunnel."""
- tunnel_method: DestinationMariadbColumnstoreTunnelMethodPasswordAuthenticationTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: DestinationMariadbColumnstoreTunnelMethodPasswordAuthenticationTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""Connect through a jump server tunnel host using username and password authentication"""
tunnel_port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port') }})
r"""Port on the proxy/jump server that accepts inbound ssh connections."""
@@ -31,7 +31,7 @@ class DestinationMariadbColumnstoreTunnelMethodPasswordAuthentication:
tunnel_user_password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user_password') }})
r"""OS-level password for logging into the jump server host"""
-class DestinationMariadbColumnstoreTunnelMethodSSHKeyAuthenticationTunnelMethodEnum(str, Enum):
+class DestinationMariadbColumnstoreTunnelMethodSSHKeyAuthenticationTunnelMethod(str, Enum):
r"""Connect through a jump server tunnel host using username and ssh key"""
SSH_KEY_AUTH = 'SSH_KEY_AUTH'
@@ -45,14 +45,14 @@ class DestinationMariadbColumnstoreTunnelMethodSSHKeyAuthentication:
r"""OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )"""
tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }})
r"""Hostname of the jump server host that allows inbound ssh tunnel."""
- tunnel_method: DestinationMariadbColumnstoreTunnelMethodSSHKeyAuthenticationTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: DestinationMariadbColumnstoreTunnelMethodSSHKeyAuthenticationTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""Connect through a jump server tunnel host using username and ssh key"""
tunnel_port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port') }})
r"""Port on the proxy/jump server that accepts inbound ssh connections."""
tunnel_user: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user') }})
r"""OS-level username for logging into the jump server host."""
-class DestinationMariadbColumnstoreTunnelMethodNoTunnelTunnelMethodEnum(str, Enum):
+class DestinationMariadbColumnstoreTunnelMethodNoTunnelTunnelMethod(str, Enum):
r"""No ssh tunnel needed to connect to database"""
NO_TUNNEL = 'NO_TUNNEL'
@@ -62,7 +62,7 @@ class DestinationMariadbColumnstoreTunnelMethodNoTunnelTunnelMethodEnum(str, Enu
class DestinationMariadbColumnstoreTunnelMethodNoTunnel:
r"""Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use."""
- tunnel_method: DestinationMariadbColumnstoreTunnelMethodNoTunnelTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: DestinationMariadbColumnstoreTunnelMethodNoTunnelTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""No ssh tunnel needed to connect to database"""
@@ -73,7 +73,7 @@ class DestinationMariadbColumnstore:
database: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('database') }})
r"""Name of the database."""
- destination_type: DestinationMariadbColumnstoreMariadbColumnstoreEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
+ destination_type: DestinationMariadbColumnstoreMariadbColumnstore = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('host') }})
r"""The Hostname of the database."""
port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('port') }})
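All three tunnel-method variants rename identically; the no-tunnel case, for instance, now reads:

```python
from airbyte.models.shared.destination_mariadb_columnstore import (
    DestinationMariadbColumnstoreTunnelMethodNoTunnel,
    DestinationMariadbColumnstoreTunnelMethodNoTunnelTunnelMethod,
)

tunnel = DestinationMariadbColumnstoreTunnelMethodNoTunnel(
    tunnel_method=DestinationMariadbColumnstoreTunnelMethodNoTunnelTunnelMethod.NO_TUNNEL,
)
```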
diff --git a/src/airbyte/models/shared/destination_meilisearch.py b/src/airbyte/models/shared/destination_meilisearch.py
index 61f72508..745c267d 100755
--- a/src/airbyte/models/shared/destination_meilisearch.py
+++ b/src/airbyte/models/shared/destination_meilisearch.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class DestinationMeilisearchMeilisearchEnum(str, Enum):
+class DestinationMeilisearchMeilisearch(str, Enum):
MEILISEARCH = 'meilisearch'
@@ -16,7 +16,7 @@ class DestinationMeilisearchMeilisearchEnum(str, Enum):
class DestinationMeilisearch:
r"""The values required to configure the destination."""
- destination_type: DestinationMeilisearchMeilisearchEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
+ destination_type: DestinationMeilisearchMeilisearch = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('host') }})
r"""Hostname of the MeiliSearch instance."""
api_key: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key'), 'exclude': lambda f: f is None }})
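A sketch with the renamed MeiliSearch classes (the host value is a placeholder local instance):

```python
from airbyte.models.shared.destination_meilisearch import (
    DestinationMeilisearch,
    DestinationMeilisearchMeilisearch,
)

config = DestinationMeilisearch(
    destination_type=DestinationMeilisearchMeilisearch.MEILISEARCH,
    host='http://localhost:7700',  # placeholder
)
```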
diff --git a/src/airbyte/models/shared/destination_mongodb.py b/src/airbyte/models/shared/destination_mongodb.py
index ecd255ea..f1ce26b2 100755
--- a/src/airbyte/models/shared/destination_mongodb.py
+++ b/src/airbyte/models/shared/destination_mongodb.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Any, Optional
-class DestinationMongodbAuthTypeLoginPasswordAuthorizationEnum(str, Enum):
+class DestinationMongodbAuthTypeLoginPasswordAuthorization(str, Enum):
LOGIN_PASSWORD = 'login/password'
@@ -16,13 +16,13 @@ class DestinationMongodbAuthTypeLoginPasswordAuthorizationEnum(str, Enum):
class DestinationMongodbAuthTypeLoginPassword:
r"""Login/Password."""
- authorization: DestinationMongodbAuthTypeLoginPasswordAuthorizationEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('authorization') }})
+ authorization: DestinationMongodbAuthTypeLoginPasswordAuthorization = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('authorization') }})
password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password') }})
r"""Password associated with the username."""
username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }})
r"""Username to use to access the database."""
-class DestinationMongodbAuthTypeNoneAuthorizationEnum(str, Enum):
+class DestinationMongodbAuthTypeNoneAuthorization(str, Enum):
NONE = 'none'
@@ -31,12 +31,12 @@ class DestinationMongodbAuthTypeNoneAuthorizationEnum(str, Enum):
class DestinationMongodbAuthTypeNone:
r"""None."""
- authorization: DestinationMongodbAuthTypeNoneAuthorizationEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('authorization') }})
+ authorization: DestinationMongodbAuthTypeNoneAuthorization = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('authorization') }})
-class DestinationMongodbMongodbEnum(str, Enum):
+class DestinationMongodbMongodb(str, Enum):
MONGODB = 'mongodb'
-class DestinationMongodbInstanceTypeMongoDBAtlasInstanceEnum(str, Enum):
+class DestinationMongodbInstanceTypeMongoDBAtlasInstance(str, Enum):
ATLAS = 'atlas'
@@ -47,9 +47,9 @@ class DestinationMongodbInstanceTypeMongoDBAtlas:
cluster_url: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('cluster_url') }})
r"""URL of a cluster to connect to."""
- instance: DestinationMongodbInstanceTypeMongoDBAtlasInstanceEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('instance') }})
+ instance: DestinationMongodbInstanceTypeMongoDBAtlasInstance = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('instance') }})
-class DestinationMongodbInstanceTypeReplicaSetInstanceEnum(str, Enum):
+class DestinationMongodbInstanceTypeReplicaSetInstance(str, Enum):
REPLICA = 'replica'
@@ -58,13 +58,13 @@ class DestinationMongodbInstanceTypeReplicaSetInstanceEnum(str, Enum):
class DestinationMongodbInstanceTypeReplicaSet:
r"""MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default."""
- instance: DestinationMongodbInstanceTypeReplicaSetInstanceEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('instance') }})
+ instance: DestinationMongodbInstanceTypeReplicaSetInstance = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('instance') }})
server_addresses: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('server_addresses') }})
r"""The members of a replica set. Please specify `host`:`port` of each member seperated by comma."""
replica_set: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('replica_set'), 'exclude': lambda f: f is None }})
r"""A replica set name."""
-class DestinationMongodbInstanceTypeStandaloneMongoDbInstanceInstanceEnum(str, Enum):
+class DestinationMongodbInstanceTypeStandaloneMongoDbInstanceInstance(str, Enum):
STANDALONE = 'standalone'
@@ -75,11 +75,11 @@ class DestinationMongodbInstanceTypeStandaloneMongoDbInstance:
host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('host') }})
r"""The Host of a Mongo database to be replicated."""
- instance: DestinationMongodbInstanceTypeStandaloneMongoDbInstanceInstanceEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('instance') }})
+ instance: DestinationMongodbInstanceTypeStandaloneMongoDbInstanceInstance = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('instance') }})
port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('port') }})
r"""The Port of a Mongo database to be replicated."""
-class DestinationMongodbTunnelMethodPasswordAuthenticationTunnelMethodEnum(str, Enum):
+class DestinationMongodbTunnelMethodPasswordAuthenticationTunnelMethod(str, Enum):
r"""Connect through a jump server tunnel host using username and password authentication"""
SSH_PASSWORD_AUTH = 'SSH_PASSWORD_AUTH'
@@ -91,7 +91,7 @@ class DestinationMongodbTunnelMethodPasswordAuthentication:
tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }})
r"""Hostname of the jump server host that allows inbound ssh tunnel."""
- tunnel_method: DestinationMongodbTunnelMethodPasswordAuthenticationTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: DestinationMongodbTunnelMethodPasswordAuthenticationTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""Connect through a jump server tunnel host using username and password authentication"""
tunnel_port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port') }})
r"""Port on the proxy/jump server that accepts inbound ssh connections."""
@@ -100,7 +100,7 @@ class DestinationMongodbTunnelMethodPasswordAuthentication:
tunnel_user_password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user_password') }})
r"""OS-level password for logging into the jump server host"""
-class DestinationMongodbTunnelMethodSSHKeyAuthenticationTunnelMethodEnum(str, Enum):
+class DestinationMongodbTunnelMethodSSHKeyAuthenticationTunnelMethod(str, Enum):
r"""Connect through a jump server tunnel host using username and ssh key"""
SSH_KEY_AUTH = 'SSH_KEY_AUTH'
@@ -114,14 +114,14 @@ class DestinationMongodbTunnelMethodSSHKeyAuthentication:
r"""OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )"""
tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }})
r"""Hostname of the jump server host that allows inbound ssh tunnel."""
- tunnel_method: DestinationMongodbTunnelMethodSSHKeyAuthenticationTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: DestinationMongodbTunnelMethodSSHKeyAuthenticationTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""Connect through a jump server tunnel host using username and ssh key"""
tunnel_port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port') }})
r"""Port on the proxy/jump server that accepts inbound ssh connections."""
tunnel_user: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user') }})
r"""OS-level username for logging into the jump server host."""
-class DestinationMongodbTunnelMethodNoTunnelTunnelMethodEnum(str, Enum):
+class DestinationMongodbTunnelMethodNoTunnelTunnelMethod(str, Enum):
r"""No ssh tunnel needed to connect to database"""
NO_TUNNEL = 'NO_TUNNEL'
@@ -131,7 +131,7 @@ class DestinationMongodbTunnelMethodNoTunnelTunnelMethodEnum(str, Enum):
class DestinationMongodbTunnelMethodNoTunnel:
r"""Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use."""
- tunnel_method: DestinationMongodbTunnelMethodNoTunnelTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: DestinationMongodbTunnelMethodNoTunnelTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""No ssh tunnel needed to connect to database"""
@@ -144,7 +144,7 @@ class DestinationMongodb:
r"""Authorization type."""
database: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('database') }})
r"""Name of the database."""
- destination_type: DestinationMongodbMongodbEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
+ destination_type: DestinationMongodbMongodb = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
instance_type: Optional[Any] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('instance_type'), 'exclude': lambda f: f is None }})
r"""MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default."""
tunnel_method: Optional[Any] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method'), 'exclude': lambda f: f is None }})
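With the renames above, a sketch of the passwordless authorization option:

```python
from airbyte.models.shared.destination_mongodb import (
    DestinationMongodbAuthTypeNone,
    DestinationMongodbAuthTypeNoneAuthorization,
)

auth = DestinationMongodbAuthTypeNone(
    authorization=DestinationMongodbAuthTypeNoneAuthorization.NONE,
)
```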
diff --git a/src/airbyte/models/shared/destination_mssql.py b/src/airbyte/models/shared/destination_mssql.py
index b31651fc..ffbb7ba3 100755
--- a/src/airbyte/models/shared/destination_mssql.py
+++ b/src/airbyte/models/shared/destination_mssql.py
@@ -7,10 +7,10 @@
from enum import Enum
from typing import Any, Optional
-class DestinationMssqlMssqlEnum(str, Enum):
+class DestinationMssqlMssql(str, Enum):
MSSQL = 'mssql'
-class DestinationMssqlSslMethodEncryptedVerifyCertificateSslMethodEnum(str, Enum):
+class DestinationMssqlSslMethodEncryptedVerifyCertificateSslMethod(str, Enum):
ENCRYPTED_VERIFY_CERTIFICATE = 'encrypted_verify_certificate'
@@ -19,11 +19,11 @@ class DestinationMssqlSslMethodEncryptedVerifyCertificateSslMethodEnum(str, Enum
class DestinationMssqlSslMethodEncryptedVerifyCertificate:
r"""Verify and use the certificate provided by the server."""
- ssl_method: DestinationMssqlSslMethodEncryptedVerifyCertificateSslMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl_method') }})
+ ssl_method: DestinationMssqlSslMethodEncryptedVerifyCertificateSslMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl_method') }})
host_name_in_certificate: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('hostNameInCertificate'), 'exclude': lambda f: f is None }})
r"""Specifies the host name of the server. The value of this property must match the subject property of the certificate."""
-class DestinationMssqlSslMethodEncryptedTrustServerCertificateSslMethodEnum(str, Enum):
+class DestinationMssqlSslMethodEncryptedTrustServerCertificateSslMethod(str, Enum):
ENCRYPTED_TRUST_SERVER_CERTIFICATE = 'encrypted_trust_server_certificate'
@@ -32,9 +32,9 @@ class DestinationMssqlSslMethodEncryptedTrustServerCertificateSslMethodEnum(str,
class DestinationMssqlSslMethodEncryptedTrustServerCertificate:
r"""Use the certificate provided by the server without verification. (For testing purposes only!)"""
- ssl_method: DestinationMssqlSslMethodEncryptedTrustServerCertificateSslMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl_method') }})
+ ssl_method: DestinationMssqlSslMethodEncryptedTrustServerCertificateSslMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl_method') }})
-class DestinationMssqlTunnelMethodPasswordAuthenticationTunnelMethodEnum(str, Enum):
+class DestinationMssqlTunnelMethodPasswordAuthenticationTunnelMethod(str, Enum):
r"""Connect through a jump server tunnel host using username and password authentication"""
SSH_PASSWORD_AUTH = 'SSH_PASSWORD_AUTH'
@@ -46,7 +46,7 @@ class DestinationMssqlTunnelMethodPasswordAuthentication:
tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }})
r"""Hostname of the jump server host that allows inbound ssh tunnel."""
- tunnel_method: DestinationMssqlTunnelMethodPasswordAuthenticationTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: DestinationMssqlTunnelMethodPasswordAuthenticationTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""Connect through a jump server tunnel host using username and password authentication"""
tunnel_port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port') }})
r"""Port on the proxy/jump server that accepts inbound ssh connections."""
@@ -55,7 +55,7 @@ class DestinationMssqlTunnelMethodPasswordAuthentication:
tunnel_user_password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user_password') }})
r"""OS-level password for logging into the jump server host"""
-class DestinationMssqlTunnelMethodSSHKeyAuthenticationTunnelMethodEnum(str, Enum):
+class DestinationMssqlTunnelMethodSSHKeyAuthenticationTunnelMethod(str, Enum):
r"""Connect through a jump server tunnel host using username and ssh key"""
SSH_KEY_AUTH = 'SSH_KEY_AUTH'
@@ -69,14 +69,14 @@ class DestinationMssqlTunnelMethodSSHKeyAuthentication:
r"""OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )"""
tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }})
r"""Hostname of the jump server host that allows inbound ssh tunnel."""
- tunnel_method: DestinationMssqlTunnelMethodSSHKeyAuthenticationTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: DestinationMssqlTunnelMethodSSHKeyAuthenticationTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""Connect through a jump server tunnel host using username and ssh key"""
tunnel_port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port') }})
r"""Port on the proxy/jump server that accepts inbound ssh connections."""
tunnel_user: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user') }})
r"""OS-level username for logging into the jump server host."""
-class DestinationMssqlTunnelMethodNoTunnelTunnelMethodEnum(str, Enum):
+class DestinationMssqlTunnelMethodNoTunnelTunnelMethod(str, Enum):
r"""No ssh tunnel needed to connect to database"""
NO_TUNNEL = 'NO_TUNNEL'
@@ -86,7 +86,7 @@ class DestinationMssqlTunnelMethodNoTunnelTunnelMethodEnum(str, Enum):
class DestinationMssqlTunnelMethodNoTunnel:
r"""Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use."""
- tunnel_method: DestinationMssqlTunnelMethodNoTunnelTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: DestinationMssqlTunnelMethodNoTunnelTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""No ssh tunnel needed to connect to database"""
@@ -97,7 +97,7 @@ class DestinationMssql:
database: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('database') }})
r"""The name of the MSSQL database."""
- destination_type: DestinationMssqlMssqlEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
+ destination_type: DestinationMssqlMssql = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('host') }})
r"""The host name of the MSSQL database."""
port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('port') }})
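And a sketch of the trust-server-certificate SSL option, which its docstring flags as for testing only:

```python
from airbyte.models.shared.destination_mssql import (
    DestinationMssqlSslMethodEncryptedTrustServerCertificate,
    DestinationMssqlSslMethodEncryptedTrustServerCertificateSslMethod,
)

# For testing purposes only: accepts the server certificate without verification.
ssl_method = DestinationMssqlSslMethodEncryptedTrustServerCertificate(
    ssl_method=DestinationMssqlSslMethodEncryptedTrustServerCertificateSslMethod.ENCRYPTED_TRUST_SERVER_CERTIFICATE,
)
```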
diff --git a/src/airbyte/models/shared/destination_mysql.py b/src/airbyte/models/shared/destination_mysql.py
index dbfd384f..2b69b2f7 100755
--- a/src/airbyte/models/shared/destination_mysql.py
+++ b/src/airbyte/models/shared/destination_mysql.py
@@ -7,10 +7,10 @@
from enum import Enum
from typing import Any, Optional
-class DestinationMysqlMysqlEnum(str, Enum):
+class DestinationMysqlMysql(str, Enum):
MYSQL = 'mysql'
-class DestinationMysqlTunnelMethodPasswordAuthenticationTunnelMethodEnum(str, Enum):
+class DestinationMysqlTunnelMethodPasswordAuthenticationTunnelMethod(str, Enum):
r"""Connect through a jump server tunnel host using username and password authentication"""
SSH_PASSWORD_AUTH = 'SSH_PASSWORD_AUTH'
@@ -22,7 +22,7 @@ class DestinationMysqlTunnelMethodPasswordAuthentication:
tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }})
r"""Hostname of the jump server host that allows inbound ssh tunnel."""
- tunnel_method: DestinationMysqlTunnelMethodPasswordAuthenticationTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: DestinationMysqlTunnelMethodPasswordAuthenticationTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""Connect through a jump server tunnel host using username and password authentication"""
tunnel_port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port') }})
r"""Port on the proxy/jump server that accepts inbound ssh connections."""
@@ -31,7 +31,7 @@ class DestinationMysqlTunnelMethodPasswordAuthentication:
tunnel_user_password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user_password') }})
r"""OS-level password for logging into the jump server host"""
-class DestinationMysqlTunnelMethodSSHKeyAuthenticationTunnelMethodEnum(str, Enum):
+class DestinationMysqlTunnelMethodSSHKeyAuthenticationTunnelMethod(str, Enum):
r"""Connect through a jump server tunnel host using username and ssh key"""
SSH_KEY_AUTH = 'SSH_KEY_AUTH'
@@ -45,14 +45,14 @@ class DestinationMysqlTunnelMethodSSHKeyAuthentication:
r"""OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )"""
tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }})
r"""Hostname of the jump server host that allows inbound ssh tunnel."""
- tunnel_method: DestinationMysqlTunnelMethodSSHKeyAuthenticationTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: DestinationMysqlTunnelMethodSSHKeyAuthenticationTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""Connect through a jump server tunnel host using username and ssh key"""
tunnel_port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port') }})
r"""Port on the proxy/jump server that accepts inbound ssh connections."""
tunnel_user: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user') }})
r"""OS-level username for logging into the jump server host."""
-class DestinationMysqlTunnelMethodNoTunnelTunnelMethodEnum(str, Enum):
+class DestinationMysqlTunnelMethodNoTunnelTunnelMethod(str, Enum):
r"""No ssh tunnel needed to connect to database"""
NO_TUNNEL = 'NO_TUNNEL'
@@ -62,7 +62,7 @@ class DestinationMysqlTunnelMethodNoTunnelTunnelMethodEnum(str, Enum):
class DestinationMysqlTunnelMethodNoTunnel:
r"""Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use."""
- tunnel_method: DestinationMysqlTunnelMethodNoTunnelTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: DestinationMysqlTunnelMethodNoTunnelTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""No ssh tunnel needed to connect to database"""
@@ -73,7 +73,7 @@ class DestinationMysql:
database: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('database') }})
r"""Name of the database."""
- destination_type: DestinationMysqlMysqlEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
+ destination_type: DestinationMysqlMysql = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('host') }})
r"""Hostname of the database."""
port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('port') }})
diff --git a/src/airbyte/models/shared/destination_oracle.py b/src/airbyte/models/shared/destination_oracle.py
index 24e4c8c7..d2c0dd6c 100755
--- a/src/airbyte/models/shared/destination_oracle.py
+++ b/src/airbyte/models/shared/destination_oracle.py
@@ -7,10 +7,10 @@
from enum import Enum
from typing import Any, Optional
-class DestinationOracleOracleEnum(str, Enum):
+class DestinationOracleOracle(str, Enum):
ORACLE = 'oracle'
-class DestinationOracleTunnelMethodPasswordAuthenticationTunnelMethodEnum(str, Enum):
+class DestinationOracleTunnelMethodPasswordAuthenticationTunnelMethod(str, Enum):
r"""Connect through a jump server tunnel host using username and password authentication"""
SSH_PASSWORD_AUTH = 'SSH_PASSWORD_AUTH'
@@ -22,7 +22,7 @@ class DestinationOracleTunnelMethodPasswordAuthentication:
tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }})
r"""Hostname of the jump server host that allows inbound ssh tunnel."""
- tunnel_method: DestinationOracleTunnelMethodPasswordAuthenticationTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: DestinationOracleTunnelMethodPasswordAuthenticationTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""Connect through a jump server tunnel host using username and password authentication"""
tunnel_port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port') }})
r"""Port on the proxy/jump server that accepts inbound ssh connections."""
@@ -31,7 +31,7 @@ class DestinationOracleTunnelMethodPasswordAuthentication:
tunnel_user_password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user_password') }})
r"""OS-level password for logging into the jump server host"""
-class DestinationOracleTunnelMethodSSHKeyAuthenticationTunnelMethodEnum(str, Enum):
+class DestinationOracleTunnelMethodSSHKeyAuthenticationTunnelMethod(str, Enum):
r"""Connect through a jump server tunnel host using username and ssh key"""
SSH_KEY_AUTH = 'SSH_KEY_AUTH'
@@ -45,14 +45,14 @@ class DestinationOracleTunnelMethodSSHKeyAuthentication:
r"""OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )"""
tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }})
r"""Hostname of the jump server host that allows inbound ssh tunnel."""
- tunnel_method: DestinationOracleTunnelMethodSSHKeyAuthenticationTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: DestinationOracleTunnelMethodSSHKeyAuthenticationTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""Connect through a jump server tunnel host using username and ssh key"""
tunnel_port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port') }})
r"""Port on the proxy/jump server that accepts inbound ssh connections."""
tunnel_user: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user') }})
r"""OS-level username for logging into the jump server host."""
-class DestinationOracleTunnelMethodNoTunnelTunnelMethodEnum(str, Enum):
+class DestinationOracleTunnelMethodNoTunnelTunnelMethod(str, Enum):
r"""No ssh tunnel needed to connect to database"""
NO_TUNNEL = 'NO_TUNNEL'
@@ -62,7 +62,7 @@ class DestinationOracleTunnelMethodNoTunnelTunnelMethodEnum(str, Enum):
class DestinationOracleTunnelMethodNoTunnel:
r"""Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use."""
- tunnel_method: DestinationOracleTunnelMethodNoTunnelTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: DestinationOracleTunnelMethodNoTunnelTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""No ssh tunnel needed to connect to database"""
@@ -71,7 +71,7 @@ class DestinationOracleTunnelMethodNoTunnel:
class DestinationOracle:
r"""The values required to configure the destination."""
- destination_type: DestinationOracleOracleEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
+ destination_type: DestinationOracleOracle = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('host') }})
r"""The hostname of the database."""
port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('port') }})
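With the suffix-free names, selecting the Oracle destination's no-tunnel option reads as below. A minimal sketch, assuming tunnel_method is the only required field of DestinationOracleTunnelMethodNoTunnel, as the hunk above suggests:

from airbyte.models.shared.destination_oracle import (
    DestinationOracleTunnelMethodNoTunnel,
    DestinationOracleTunnelMethodNoTunnelTunnelMethod,
)

# Connect directly, with no SSH tunnel in front of the database.
tunnel = DestinationOracleTunnelMethodNoTunnel(
    tunnel_method=DestinationOracleTunnelMethodNoTunnelTunnelMethod.NO_TUNNEL,
)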
diff --git a/src/airbyte/models/shared/destination_postgres.py b/src/airbyte/models/shared/destination_postgres.py
index f3f33763..9cc075c6 100755
--- a/src/airbyte/models/shared/destination_postgres.py
+++ b/src/airbyte/models/shared/destination_postgres.py
@@ -7,10 +7,10 @@
from enum import Enum
from typing import Any, Optional
-class DestinationPostgresPostgresEnum(str, Enum):
+class DestinationPostgresPostgres(str, Enum):
POSTGRES = 'postgres'
-class DestinationPostgresSslModeVerifyFullModeEnum(str, Enum):
+class DestinationPostgresSslModeVerifyFullMode(str, Enum):
VERIFY_FULL = 'verify-full'
@@ -25,11 +25,11 @@ class DestinationPostgresSslModeVerifyFull:
r"""Client certificate"""
client_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_key') }})
r"""Client key"""
- mode: DestinationPostgresSslModeVerifyFullModeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode') }})
+ mode: DestinationPostgresSslModeVerifyFullMode = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode') }})
client_key_password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_key_password'), 'exclude': lambda f: f is None }})
r"""Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically."""
-class DestinationPostgresSslModeVerifyCaModeEnum(str, Enum):
+class DestinationPostgresSslModeVerifyCaMode(str, Enum):
VERIFY_CA = 'verify-ca'
@@ -40,11 +40,11 @@ class DestinationPostgresSslModeVerifyCa:
ca_certificate: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ca_certificate') }})
r"""CA certificate"""
- mode: DestinationPostgresSslModeVerifyCaModeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode') }})
+ mode: DestinationPostgresSslModeVerifyCaMode = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode') }})
client_key_password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_key_password'), 'exclude': lambda f: f is None }})
r"""Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically."""
-class DestinationPostgresSslModeRequireModeEnum(str, Enum):
+class DestinationPostgresSslModeRequireMode(str, Enum):
REQUIRE = 'require'
@@ -53,9 +53,9 @@ class DestinationPostgresSslModeRequireModeEnum(str, Enum):
class DestinationPostgresSslModeRequire:
r"""Require SSL mode."""
- mode: DestinationPostgresSslModeRequireModeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode') }})
+ mode: DestinationPostgresSslModeRequireMode = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode') }})
-class DestinationPostgresSslModePreferModeEnum(str, Enum):
+class DestinationPostgresSslModePreferMode(str, Enum):
PREFER = 'prefer'
@@ -64,9 +64,9 @@ class DestinationPostgresSslModePreferModeEnum(str, Enum):
class DestinationPostgresSslModePrefer:
r"""Prefer SSL mode."""
- mode: DestinationPostgresSslModePreferModeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode') }})
+ mode: DestinationPostgresSslModePreferMode = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode') }})
-class DestinationPostgresSslModeAllowModeEnum(str, Enum):
+class DestinationPostgresSslModeAllowMode(str, Enum):
ALLOW = 'allow'
@@ -75,9 +75,9 @@ class DestinationPostgresSslModeAllowModeEnum(str, Enum):
class DestinationPostgresSslModeAllow:
r"""Allow SSL mode."""
- mode: DestinationPostgresSslModeAllowModeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode') }})
+ mode: DestinationPostgresSslModeAllowMode = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode') }})
-class DestinationPostgresSslModeDisableModeEnum(str, Enum):
+class DestinationPostgresSslModeDisableMode(str, Enum):
DISABLE = 'disable'
@@ -86,9 +86,9 @@ class DestinationPostgresSslModeDisableModeEnum(str, Enum):
class DestinationPostgresSslModeDisable:
r"""Disable SSL."""
- mode: DestinationPostgresSslModeDisableModeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode') }})
+ mode: DestinationPostgresSslModeDisableMode = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode') }})
-class DestinationPostgresTunnelMethodPasswordAuthenticationTunnelMethodEnum(str, Enum):
+class DestinationPostgresTunnelMethodPasswordAuthenticationTunnelMethod(str, Enum):
r"""Connect through a jump server tunnel host using username and password authentication"""
SSH_PASSWORD_AUTH = 'SSH_PASSWORD_AUTH'
@@ -100,7 +100,7 @@ class DestinationPostgresTunnelMethodPasswordAuthentication:
tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }})
r"""Hostname of the jump server host that allows inbound ssh tunnel."""
- tunnel_method: DestinationPostgresTunnelMethodPasswordAuthenticationTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: DestinationPostgresTunnelMethodPasswordAuthenticationTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""Connect through a jump server tunnel host using username and password authentication"""
tunnel_port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port') }})
r"""Port on the proxy/jump server that accepts inbound ssh connections."""
@@ -109,7 +109,7 @@ class DestinationPostgresTunnelMethodPasswordAuthentication:
tunnel_user_password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user_password') }})
r"""OS-level password for logging into the jump server host"""
-class DestinationPostgresTunnelMethodSSHKeyAuthenticationTunnelMethodEnum(str, Enum):
+class DestinationPostgresTunnelMethodSSHKeyAuthenticationTunnelMethod(str, Enum):
r"""Connect through a jump server tunnel host using username and ssh key"""
SSH_KEY_AUTH = 'SSH_KEY_AUTH'
@@ -123,14 +123,14 @@ class DestinationPostgresTunnelMethodSSHKeyAuthentication:
r"""OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )"""
tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }})
r"""Hostname of the jump server host that allows inbound ssh tunnel."""
- tunnel_method: DestinationPostgresTunnelMethodSSHKeyAuthenticationTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: DestinationPostgresTunnelMethodSSHKeyAuthenticationTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""Connect through a jump server tunnel host using username and ssh key"""
tunnel_port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port') }})
r"""Port on the proxy/jump server that accepts inbound ssh connections."""
tunnel_user: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user') }})
r"""OS-level username for logging into the jump server host."""
-class DestinationPostgresTunnelMethodNoTunnelTunnelMethodEnum(str, Enum):
+class DestinationPostgresTunnelMethodNoTunnelTunnelMethod(str, Enum):
r"""No ssh tunnel needed to connect to database"""
NO_TUNNEL = 'NO_TUNNEL'
@@ -140,7 +140,7 @@ class DestinationPostgresTunnelMethodNoTunnelTunnelMethodEnum(str, Enum):
class DestinationPostgresTunnelMethodNoTunnel:
r"""Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use."""
- tunnel_method: DestinationPostgresTunnelMethodNoTunnelTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: DestinationPostgresTunnelMethodNoTunnelTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""No ssh tunnel needed to connect to database"""
@@ -151,7 +151,7 @@ class DestinationPostgres:
database: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('database') }})
r"""Name of the database."""
- destination_type: DestinationPostgresPostgresEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
+ destination_type: DestinationPostgresPostgres = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('host') }})
r"""Hostname of the database."""
port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('port') }})
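The Postgres file applies the same rename across six SSL modes and three tunnel methods. A minimal sketch of one call site, assuming the fields visible in the hunks above are the only required ones; the certificate value is a placeholder:

from airbyte.models.shared.destination_postgres import (
    DestinationPostgresSslModeVerifyCa,
    DestinationPostgresSslModeVerifyCaMode,
)

ssl_mode = DestinationPostgresSslModeVerifyCa(
    ca_certificate='-----BEGIN CERTIFICATE-----...',  # placeholder, not a real cert
    mode=DestinationPostgresSslModeVerifyCaMode.VERIFY_CA,
)
# client_key_password is optional and omitted here; per the field
# docstring, a password is then generated automatically.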
diff --git a/src/airbyte/models/shared/destination_pubsub.py b/src/airbyte/models/shared/destination_pubsub.py
index 9a262865..42a61719 100755
--- a/src/airbyte/models/shared/destination_pubsub.py
+++ b/src/airbyte/models/shared/destination_pubsub.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class DestinationPubsubPubsubEnum(str, Enum):
+class DestinationPubsubPubsub(str, Enum):
PUBSUB = 'pubsub'
@@ -20,7 +20,7 @@ class DestinationPubsub:
r"""If TRUE messages will be buffered instead of sending them one by one"""
credentials_json: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials_json') }})
r"""The contents of the JSON service account key. Check out the docs if you need help generating this key."""
- destination_type: DestinationPubsubPubsubEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
+ destination_type: DestinationPubsubPubsub = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
ordering_enabled: bool = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ordering_enabled') }})
r"""If TRUE PubSub publisher will have message ordering enabled. Every message will have an ordering key of stream"""
project_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('project_id') }})
diff --git a/src/airbyte/models/shared/destination_pulsar.py b/src/airbyte/models/shared/destination_pulsar.py
index 69d33d89..ce7f9a8f 100755
--- a/src/airbyte/models/shared/destination_pulsar.py
+++ b/src/airbyte/models/shared/destination_pulsar.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class DestinationPulsarCompressionTypeEnum(str, Enum):
+class DestinationPulsarCompressionType(str, Enum):
r"""Compression type for the producer."""
NONE = 'NONE'
LZ4 = 'LZ4'
@@ -15,10 +15,10 @@ class DestinationPulsarCompressionTypeEnum(str, Enum):
ZSTD = 'ZSTD'
SNAPPY = 'SNAPPY'
-class DestinationPulsarPulsarEnum(str, Enum):
+class DestinationPulsarPulsar(str, Enum):
PULSAR = 'pulsar'
-class DestinationPulsarTopicTypeEnum(str, Enum):
+class DestinationPulsarTopicType(str, Enum):
r"""It identifies type of topic. Pulsar supports two kind of topics: persistent and non-persistent. In persistent topic, all messages are durably persisted on disk (that means on multiple disks unless the broker is standalone), whereas non-persistent topic does not persist message into storage disk."""
PERSISTENT = 'persistent'
NON_PERSISTENT = 'non-persistent'
@@ -39,9 +39,9 @@ class DestinationPulsar:
r"""If the send operation should block when the outgoing message queue is full."""
brokers: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('brokers') }})
r"""A list of host/port pairs to use for establishing the initial connection to the Pulsar cluster."""
- compression_type: DestinationPulsarCompressionTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_type') }})
+ compression_type: DestinationPulsarCompressionType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_type') }})
r"""Compression type for the producer."""
- destination_type: DestinationPulsarPulsarEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
+ destination_type: DestinationPulsarPulsar = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
max_pending_messages: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('max_pending_messages') }})
r"""The maximum size of a queue holding pending messages."""
max_pending_messages_across_partitions: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('max_pending_messages_across_partitions') }})
@@ -54,7 +54,7 @@ class DestinationPulsar:
r"""Topic pattern in which the records will be sent. You can use patterns like '{namespace}' and/or '{stream}' to send the message to a specific topic based on these values. Notice that the topic name will be transformed to a standard naming convention."""
topic_tenant: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('topic_tenant') }})
r"""The topic tenant within the instance. Tenants are essential to multi-tenancy in Pulsar, and spread across clusters."""
- topic_type: DestinationPulsarTopicTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('topic_type') }})
+ topic_type: DestinationPulsarTopicType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('topic_type') }})
r"""It identifies type of topic. Pulsar supports two kind of topics: persistent and non-persistent. In persistent topic, all messages are durably persisted on disk (that means on multiple disks unless the broker is standalone), whereas non-persistent topic does not persist message into storage disk."""
use_tls: bool = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('use_tls') }})
r"""Whether to use TLS encryption on the connection."""
diff --git a/src/airbyte/models/shared/destination_rabbitmq.py b/src/airbyte/models/shared/destination_rabbitmq.py
index 4e8ff55c..2d4920cc 100755
--- a/src/airbyte/models/shared/destination_rabbitmq.py
+++ b/src/airbyte/models/shared/destination_rabbitmq.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class DestinationRabbitmqRabbitmqEnum(str, Enum):
+class DestinationRabbitmqRabbitmq(str, Enum):
RABBITMQ = 'rabbitmq'
@@ -16,7 +16,7 @@ class DestinationRabbitmqRabbitmqEnum(str, Enum):
class DestinationRabbitmq:
r"""The values required to configure the destination."""
- destination_type: DestinationRabbitmqRabbitmqEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
+ destination_type: DestinationRabbitmqRabbitmq = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('host') }})
r"""The RabbitMQ host name."""
routing_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('routing_key') }})
diff --git a/src/airbyte/models/shared/destination_redis.py b/src/airbyte/models/shared/destination_redis.py
index 8c0be0d1..43be1319 100755
--- a/src/airbyte/models/shared/destination_redis.py
+++ b/src/airbyte/models/shared/destination_redis.py
@@ -7,14 +7,14 @@
from enum import Enum
from typing import Any, Optional
-class DestinationRedisCacheTypeEnum(str, Enum):
+class DestinationRedisCacheType(str, Enum):
r"""Redis cache type to store data in."""
HASH = 'hash'
-class DestinationRedisRedisEnum(str, Enum):
+class DestinationRedisRedis(str, Enum):
REDIS = 'redis'
-class DestinationRedisSslModeVerifyFullModeEnum(str, Enum):
+class DestinationRedisSslModeVerifyFullMode(str, Enum):
VERIFY_FULL = 'verify-full'
@@ -29,11 +29,11 @@ class DestinationRedisSslModeVerifyFull:
r"""Client certificate"""
client_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_key') }})
r"""Client key"""
- mode: DestinationRedisSslModeVerifyFullModeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode') }})
+ mode: DestinationRedisSslModeVerifyFullMode = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode') }})
client_key_password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_key_password'), 'exclude': lambda f: f is None }})
r"""Password for keystorage. If you do not add it - the password will be generated automatically."""
-class DestinationRedisSslModeDisableModeEnum(str, Enum):
+class DestinationRedisSslModeDisableMode(str, Enum):
DISABLE = 'disable'
@@ -42,9 +42,9 @@ class DestinationRedisSslModeDisableModeEnum(str, Enum):
class DestinationRedisSslModeDisable:
r"""Disable SSL."""
- mode: DestinationRedisSslModeDisableModeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode') }})
+ mode: DestinationRedisSslModeDisableMode = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode') }})
-class DestinationRedisTunnelMethodPasswordAuthenticationTunnelMethodEnum(str, Enum):
+class DestinationRedisTunnelMethodPasswordAuthenticationTunnelMethod(str, Enum):
r"""Connect through a jump server tunnel host using username and password authentication"""
SSH_PASSWORD_AUTH = 'SSH_PASSWORD_AUTH'
@@ -56,7 +56,7 @@ class DestinationRedisTunnelMethodPasswordAuthentication:
tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }})
r"""Hostname of the jump server host that allows inbound ssh tunnel."""
- tunnel_method: DestinationRedisTunnelMethodPasswordAuthenticationTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: DestinationRedisTunnelMethodPasswordAuthenticationTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""Connect through a jump server tunnel host using username and password authentication"""
tunnel_port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port') }})
r"""Port on the proxy/jump server that accepts inbound ssh connections."""
@@ -65,7 +65,7 @@ class DestinationRedisTunnelMethodPasswordAuthentication:
tunnel_user_password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user_password') }})
r"""OS-level password for logging into the jump server host"""
-class DestinationRedisTunnelMethodSSHKeyAuthenticationTunnelMethodEnum(str, Enum):
+class DestinationRedisTunnelMethodSSHKeyAuthenticationTunnelMethod(str, Enum):
r"""Connect through a jump server tunnel host using username and ssh key"""
SSH_KEY_AUTH = 'SSH_KEY_AUTH'
@@ -79,14 +79,14 @@ class DestinationRedisTunnelMethodSSHKeyAuthentication:
r"""OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )"""
tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }})
r"""Hostname of the jump server host that allows inbound ssh tunnel."""
- tunnel_method: DestinationRedisTunnelMethodSSHKeyAuthenticationTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: DestinationRedisTunnelMethodSSHKeyAuthenticationTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""Connect through a jump server tunnel host using username and ssh key"""
tunnel_port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port') }})
r"""Port on the proxy/jump server that accepts inbound ssh connections."""
tunnel_user: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user') }})
r"""OS-level username for logging into the jump server host."""
-class DestinationRedisTunnelMethodNoTunnelTunnelMethodEnum(str, Enum):
+class DestinationRedisTunnelMethodNoTunnelTunnelMethod(str, Enum):
r"""No ssh tunnel needed to connect to database"""
NO_TUNNEL = 'NO_TUNNEL'
@@ -96,7 +96,7 @@ class DestinationRedisTunnelMethodNoTunnelTunnelMethodEnum(str, Enum):
class DestinationRedisTunnelMethodNoTunnel:
r"""Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use."""
- tunnel_method: DestinationRedisTunnelMethodNoTunnelTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: DestinationRedisTunnelMethodNoTunnelTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""No ssh tunnel needed to connect to database"""
@@ -105,9 +105,9 @@ class DestinationRedisTunnelMethodNoTunnel:
class DestinationRedis:
r"""The values required to configure the destination."""
- cache_type: DestinationRedisCacheTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('cache_type') }})
+ cache_type: DestinationRedisCacheType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('cache_type') }})
r"""Redis cache type to store data in."""
- destination_type: DestinationRedisRedisEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
+ destination_type: DestinationRedisRedis = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('host') }})
r"""Redis host to connect to."""
port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('port') }})
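The Redis password-authentication tunnel takes the same five fields as the other destinations' variants. A hedged sketch; the host, user, and password values are placeholders:

from airbyte.models.shared.destination_redis import (
    DestinationRedisTunnelMethodPasswordAuthentication,
    DestinationRedisTunnelMethodPasswordAuthenticationTunnelMethod,
)

tunnel = DestinationRedisTunnelMethodPasswordAuthentication(
    tunnel_host='bastion.example.com',  # placeholder jump host
    tunnel_method=DestinationRedisTunnelMethodPasswordAuthenticationTunnelMethod.SSH_PASSWORD_AUTH,
    tunnel_port=22,
    tunnel_user='airbyte',              # placeholder OS-level user
    tunnel_user_password='...',         # placeholder secret
)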
diff --git a/src/airbyte/models/shared/destination_redshift.py b/src/airbyte/models/shared/destination_redshift.py
index 6848e316..ad8df6bf 100755
--- a/src/airbyte/models/shared/destination_redshift.py
+++ b/src/airbyte/models/shared/destination_redshift.py
@@ -7,10 +7,10 @@
from enum import Enum
from typing import Any, Optional
-class DestinationRedshiftRedshiftEnum(str, Enum):
+class DestinationRedshiftRedshift(str, Enum):
REDSHIFT = 'redshift'
-class DestinationRedshiftTunnelMethodPasswordAuthenticationTunnelMethodEnum(str, Enum):
+class DestinationRedshiftTunnelMethodPasswordAuthenticationTunnelMethod(str, Enum):
r"""Connect through a jump server tunnel host using username and password authentication"""
SSH_PASSWORD_AUTH = 'SSH_PASSWORD_AUTH'
@@ -22,7 +22,7 @@ class DestinationRedshiftTunnelMethodPasswordAuthentication:
tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }})
r"""Hostname of the jump server host that allows inbound ssh tunnel."""
- tunnel_method: DestinationRedshiftTunnelMethodPasswordAuthenticationTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: DestinationRedshiftTunnelMethodPasswordAuthenticationTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""Connect through a jump server tunnel host using username and password authentication"""
tunnel_port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port') }})
r"""Port on the proxy/jump server that accepts inbound ssh connections."""
@@ -31,7 +31,7 @@ class DestinationRedshiftTunnelMethodPasswordAuthentication:
tunnel_user_password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user_password') }})
r"""OS-level password for logging into the jump server host"""
-class DestinationRedshiftTunnelMethodSSHKeyAuthenticationTunnelMethodEnum(str, Enum):
+class DestinationRedshiftTunnelMethodSSHKeyAuthenticationTunnelMethod(str, Enum):
r"""Connect through a jump server tunnel host using username and ssh key"""
SSH_KEY_AUTH = 'SSH_KEY_AUTH'
@@ -45,14 +45,14 @@ class DestinationRedshiftTunnelMethodSSHKeyAuthentication:
r"""OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )"""
tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }})
r"""Hostname of the jump server host that allows inbound ssh tunnel."""
- tunnel_method: DestinationRedshiftTunnelMethodSSHKeyAuthenticationTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: DestinationRedshiftTunnelMethodSSHKeyAuthenticationTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""Connect through a jump server tunnel host using username and ssh key"""
tunnel_port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port') }})
r"""Port on the proxy/jump server that accepts inbound ssh connections."""
tunnel_user: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user') }})
r"""OS-level username for logging into the jump server host."""
-class DestinationRedshiftTunnelMethodNoTunnelTunnelMethodEnum(str, Enum):
+class DestinationRedshiftTunnelMethodNoTunnelTunnelMethod(str, Enum):
r"""No ssh tunnel needed to connect to database"""
NO_TUNNEL = 'NO_TUNNEL'
@@ -62,10 +62,10 @@ class DestinationRedshiftTunnelMethodNoTunnelTunnelMethodEnum(str, Enum):
class DestinationRedshiftTunnelMethodNoTunnel:
r"""Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use."""
- tunnel_method: DestinationRedshiftTunnelMethodNoTunnelTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: DestinationRedshiftTunnelMethodNoTunnelTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""No ssh tunnel needed to connect to database"""
-class DestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryptionEncryptionTypeEnum(str, Enum):
+class DestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryptionEncryptionType(str, Enum):
AES_CBC_ENVELOPE = 'aes_cbc_envelope'
@@ -74,11 +74,11 @@ class DestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncrypt
class DestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption:
r"""Staging data will be encrypted using AES-CBC envelope encryption."""
- encryption_type: DestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryptionEncryptionTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption_type') }})
+ encryption_type: DestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryptionEncryptionType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption_type') }})
key_encrypting_key: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('key_encrypting_key'), 'exclude': lambda f: f is None }})
r"""The key, base64-encoded. Must be either 128, 192, or 256 bits. Leave blank to have Airbyte generate an ephemeral key for each sync."""
-class DestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryptionEncryptionTypeEnum(str, Enum):
+class DestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryptionEncryptionType(str, Enum):
NONE = 'none'
@@ -87,12 +87,12 @@ class DestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryptionEncryptio
class DestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryption:
r"""Staging data will be stored in plaintext."""
- encryption_type: DestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryptionEncryptionTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption_type') }})
+ encryption_type: DestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryptionEncryptionType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption_type') }})
-class DestinationRedshiftUploadingMethodS3StagingMethodEnum(str, Enum):
+class DestinationRedshiftUploadingMethodS3StagingMethod(str, Enum):
S3_STAGING = 'S3 Staging'
-class DestinationRedshiftUploadingMethodS3StagingS3BucketRegionEnum(str, Enum):
+class DestinationRedshiftUploadingMethodS3StagingS3BucketRegion(str, Enum):
r"""The region of the S3 staging bucket to use if utilising a COPY strategy. See AWS docs for details."""
UNKNOWN = ''
US_EAST_1 = 'us-east-1'
@@ -127,10 +127,10 @@ class DestinationRedshiftUploadingMethodS3Staging:
access_key_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_key_id') }})
r"""This ID grants access to the above S3 staging bucket. Airbyte requires Read and Write permissions to the given bucket. See AWS docs on how to generate an access key ID and secret access key."""
- method: DestinationRedshiftUploadingMethodS3StagingMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
+ method: DestinationRedshiftUploadingMethodS3StagingMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
s3_bucket_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_bucket_name') }})
r"""The name of the staging S3 bucket to use if utilising a COPY strategy. COPY is recommended for production workloads for better speed and scalability. See AWS docs for more details."""
- s3_bucket_region: DestinationRedshiftUploadingMethodS3StagingS3BucketRegionEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_bucket_region') }})
+ s3_bucket_region: DestinationRedshiftUploadingMethodS3StagingS3BucketRegion = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_bucket_region') }})
r"""The region of the S3 staging bucket to use if utilising a COPY strategy. See AWS docs for details."""
secret_access_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('secret_access_key') }})
r"""The corresponding secret to the above access key id. See AWS docs on how to generate an access key ID and secret access key."""
@@ -145,7 +145,7 @@ class DestinationRedshiftUploadingMethodS3Staging:
s3_bucket_path: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_bucket_path'), 'exclude': lambda f: f is None }})
r"""The directory under the S3 bucket where data will be written. If not provided, then defaults to the root directory. See path's name recommendations for more details."""
-class DestinationRedshiftUploadingMethodStandardMethodEnum(str, Enum):
+class DestinationRedshiftUploadingMethodStandardMethod(str, Enum):
STANDARD = 'Standard'
@@ -154,7 +154,7 @@ class DestinationRedshiftUploadingMethodStandardMethodEnum(str, Enum):
class DestinationRedshiftUploadingMethodStandard:
r"""The method how the data will be uploaded to the database."""
- method: DestinationRedshiftUploadingMethodStandardMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
+ method: DestinationRedshiftUploadingMethodStandardMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
@dataclass_json(undefined=Undefined.EXCLUDE)
@@ -164,7 +164,7 @@ class DestinationRedshift:
database: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('database') }})
r"""Name of the database."""
- destination_type: DestinationRedshiftRedshiftEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
+ destination_type: DestinationRedshiftRedshift = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('host') }})
r"""Host Endpoint of the Redshift Cluster (must include the cluster-id, region and end with .redshift.amazonaws.com)"""
password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password') }})
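destination_redshift renames the most classes in one file, including the nested S3 staging method. A sketch of the staging config under the new names, with the caveat that only fields visible in the hunks above are passed; the credentials and bucket are placeholders, and any required fields outside the visible hunks would also need to be supplied:

from airbyte.models.shared.destination_redshift import (
    DestinationRedshiftUploadingMethodS3Staging,
    DestinationRedshiftUploadingMethodS3StagingMethod,
    DestinationRedshiftUploadingMethodS3StagingS3BucketRegion,
)

staging = DestinationRedshiftUploadingMethodS3Staging(
    access_key_id='AKIA...',             # placeholder AWS access key ID
    method=DestinationRedshiftUploadingMethodS3StagingMethod.S3_STAGING,
    s3_bucket_name='my-staging-bucket',  # placeholder bucket name
    s3_bucket_region=DestinationRedshiftUploadingMethodS3StagingS3BucketRegion.US_EAST_1,
    secret_access_key='...',             # placeholder secret
)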
diff --git a/src/airbyte/models/shared/destination_rockset.py b/src/airbyte/models/shared/destination_rockset.py
index 4aef63f9..c3edb281 100755
--- a/src/airbyte/models/shared/destination_rockset.py
+++ b/src/airbyte/models/shared/destination_rockset.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class DestinationRocksetRocksetEnum(str, Enum):
+class DestinationRocksetRockset(str, Enum):
ROCKSET = 'rockset'
@@ -18,7 +18,7 @@ class DestinationRockset:
api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }})
r"""Rockset api key"""
- destination_type: DestinationRocksetRocksetEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
+ destination_type: DestinationRocksetRockset = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
workspace: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('workspace') }})
r"""The Rockset workspace in which collections will be created + written to."""
api_server: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_server'), 'exclude': lambda f: f is None }})
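destination_rockset is small enough that the whole model fits in the visible hunks, so a full construction can be sketched; the API key and workspace are placeholders:

from airbyte.models.shared.destination_rockset import (
    DestinationRockset,
    DestinationRocksetRockset,
)

config = DestinationRockset(
    api_key='...',          # placeholder Rockset API key
    destination_type=DestinationRocksetRockset.ROCKSET,
    workspace='commons',    # placeholder workspace name
)
# api_server is optional and defaults to None when omitted.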
diff --git a/src/airbyte/models/shared/destination_s3.py b/src/airbyte/models/shared/destination_s3.py
index fd6c74e1..9babf5ce 100755
--- a/src/airbyte/models/shared/destination_s3.py
+++ b/src/airbyte/models/shared/destination_s3.py
@@ -7,10 +7,10 @@
from enum import Enum
from typing import Any, Optional
-class DestinationS3S3Enum(str, Enum):
+class DestinationS3S3(str, Enum):
S3 = 's3'
-class DestinationS3FormatParquetColumnarStorageCompressionCodecEnum(str, Enum):
+class DestinationS3FormatParquetColumnarStorageCompressionCodec(str, Enum):
r"""The compression algorithm used to compress data pages."""
UNCOMPRESSED = 'UNCOMPRESSED'
SNAPPY = 'SNAPPY'
@@ -20,7 +20,7 @@ class DestinationS3FormatParquetColumnarStorageCompressionCodecEnum(str, Enum):
LZ4 = 'LZ4'
ZSTD = 'ZSTD'
-class DestinationS3FormatParquetColumnarStorageFormatTypeEnum(str, Enum):
+class DestinationS3FormatParquetColumnarStorageFormatType(str, Enum):
PARQUET = 'Parquet'
@@ -29,10 +29,10 @@ class DestinationS3FormatParquetColumnarStorageFormatTypeEnum(str, Enum):
class DestinationS3FormatParquetColumnarStorage:
r"""Format of the data output. See here for more details"""
- format_type: DestinationS3FormatParquetColumnarStorageFormatTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format_type') }})
+ format_type: DestinationS3FormatParquetColumnarStorageFormatType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format_type') }})
block_size_mb: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('block_size_mb'), 'exclude': lambda f: f is None }})
r"""This is the size of a row group being buffered in memory. It limits the memory usage when writing. Larger values will improve the IO when reading, but consume more memory when writing. Default: 128 MB."""
- compression_codec: Optional[DestinationS3FormatParquetColumnarStorageCompressionCodecEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_codec'), 'exclude': lambda f: f is None }})
+ compression_codec: Optional[DestinationS3FormatParquetColumnarStorageCompressionCodec] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_codec'), 'exclude': lambda f: f is None }})
r"""The compression algorithm used to compress data pages."""
dictionary_encoding: Optional[bool] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dictionary_encoding'), 'exclude': lambda f: f is None }})
r"""Default: true."""
@@ -43,7 +43,7 @@ class DestinationS3FormatParquetColumnarStorage:
page_size_kb: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('page_size_kb'), 'exclude': lambda f: f is None }})
r"""The page size is for compression. A block is composed of pages. A page is the smallest unit that must be read fully to access a single record. If this value is too small, the compression will deteriorate. Default: 1024 KB."""
-class DestinationS3FormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionTypeEnum(str, Enum):
+class DestinationS3FormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType(str, Enum):
GZIP = 'GZIP'
@@ -52,9 +52,9 @@ class DestinationS3FormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompression
class DestinationS3FormatJSONLinesNewlineDelimitedJSONCompressionGZIP:
r"""Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: \\".jsonl.gz\\")."""
- compression_type: Optional[DestinationS3FormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionTypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_type'), 'exclude': lambda f: f is None }})
+ compression_type: Optional[DestinationS3FormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_type'), 'exclude': lambda f: f is None }})
-class DestinationS3FormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionTypeEnum(str, Enum):
+class DestinationS3FormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType(str, Enum):
NO_COMPRESSION = 'No Compression'
@@ -63,14 +63,14 @@ class DestinationS3FormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCo
class DestinationS3FormatJSONLinesNewlineDelimitedJSONCompressionNoCompression:
r"""Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: \\".jsonl.gz\\")."""
- compression_type: Optional[DestinationS3FormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionTypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_type'), 'exclude': lambda f: f is None }})
+ compression_type: Optional[DestinationS3FormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_type'), 'exclude': lambda f: f is None }})
-class DestinationS3FormatJSONLinesNewlineDelimitedJSONFlatteningEnum(str, Enum):
+class DestinationS3FormatJSONLinesNewlineDelimitedJSONFlattening(str, Enum):
r"""Whether the input json data should be normalized (flattened) in the output JSON Lines. Please refer to docs for details."""
NO_FLATTENING = 'No flattening'
ROOT_LEVEL_FLATTENING = 'Root level flattening'
-class DestinationS3FormatJSONLinesNewlineDelimitedJSONFormatTypeEnum(str, Enum):
+class DestinationS3FormatJSONLinesNewlineDelimitedJSONFormatType(str, Enum):
JSONL = 'JSONL'
@@ -79,13 +79,13 @@ class DestinationS3FormatJSONLinesNewlineDelimitedJSONFormatTypeEnum(str, Enum):
class DestinationS3FormatJSONLinesNewlineDelimitedJSON:
r"""Format of the data output. See here for more details"""
- format_type: DestinationS3FormatJSONLinesNewlineDelimitedJSONFormatTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format_type') }})
+ format_type: DestinationS3FormatJSONLinesNewlineDelimitedJSONFormatType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format_type') }})
compression: Optional[Any] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression'), 'exclude': lambda f: f is None }})
r"""Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: \\".jsonl.gz\\")."""
- flattening: Optional[DestinationS3FormatJSONLinesNewlineDelimitedJSONFlatteningEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('flattening'), 'exclude': lambda f: f is None }})
+ flattening: Optional[DestinationS3FormatJSONLinesNewlineDelimitedJSONFlattening] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('flattening'), 'exclude': lambda f: f is None }})
r"""Whether the input json data should be normalized (flattened) in the output JSON Lines. Please refer to docs for details."""
-class DestinationS3FormatCSVCommaSeparatedValuesCompressionGZIPCompressionTypeEnum(str, Enum):
+class DestinationS3FormatCSVCommaSeparatedValuesCompressionGZIPCompressionType(str, Enum):
GZIP = 'GZIP'
@@ -94,9 +94,9 @@ class DestinationS3FormatCSVCommaSeparatedValuesCompressionGZIPCompressionTypeEn
class DestinationS3FormatCSVCommaSeparatedValuesCompressionGZIP:
r"""Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: \\".csv.gz\\")."""
- compression_type: Optional[DestinationS3FormatCSVCommaSeparatedValuesCompressionGZIPCompressionTypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_type'), 'exclude': lambda f: f is None }})
+ compression_type: Optional[DestinationS3FormatCSVCommaSeparatedValuesCompressionGZIPCompressionType] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_type'), 'exclude': lambda f: f is None }})
-class DestinationS3FormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionTypeEnum(str, Enum):
+class DestinationS3FormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType(str, Enum):
NO_COMPRESSION = 'No Compression'
@@ -105,14 +105,14 @@ class DestinationS3FormatCSVCommaSeparatedValuesCompressionNoCompressionCompress
class DestinationS3FormatCSVCommaSeparatedValuesCompressionNoCompression:
r"""Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: \\".csv.gz\\")."""
- compression_type: Optional[DestinationS3FormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionTypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_type'), 'exclude': lambda f: f is None }})
+ compression_type: Optional[DestinationS3FormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_type'), 'exclude': lambda f: f is None }})
-class DestinationS3FormatCSVCommaSeparatedValuesFlatteningEnum(str, Enum):
+class DestinationS3FormatCSVCommaSeparatedValuesFlattening(str, Enum):
r"""Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details."""
NO_FLATTENING = 'No flattening'
ROOT_LEVEL_FLATTENING = 'Root level flattening'
-class DestinationS3FormatCSVCommaSeparatedValuesFormatTypeEnum(str, Enum):
+class DestinationS3FormatCSVCommaSeparatedValuesFormatType(str, Enum):
CSV = 'CSV'
@@ -121,13 +121,13 @@ class DestinationS3FormatCSVCommaSeparatedValuesFormatTypeEnum(str, Enum):
class DestinationS3FormatCSVCommaSeparatedValues:
r"""Format of the data output. See here for more details"""
- flattening: DestinationS3FormatCSVCommaSeparatedValuesFlatteningEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('flattening') }})
+ flattening: DestinationS3FormatCSVCommaSeparatedValuesFlattening = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('flattening') }})
r"""Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details."""
- format_type: DestinationS3FormatCSVCommaSeparatedValuesFormatTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format_type') }})
+ format_type: DestinationS3FormatCSVCommaSeparatedValuesFormatType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format_type') }})
compression: Optional[Any] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression'), 'exclude': lambda f: f is None }})
r"""Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: \\".csv.gz\\")."""
-class DestinationS3FormatAvroApacheAvroCompressionCodecSnappyCodecEnum(str, Enum):
+class DestinationS3FormatAvroApacheAvroCompressionCodecSnappyCodec(str, Enum):
SNAPPY = 'snappy'
@@ -136,9 +136,9 @@ class DestinationS3FormatAvroApacheAvroCompressionCodecSnappyCodecEnum(str, Enum
class DestinationS3FormatAvroApacheAvroCompressionCodecSnappy:
r"""The compression algorithm used to compress data. Default to no compression."""
- codec: DestinationS3FormatAvroApacheAvroCompressionCodecSnappyCodecEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('codec') }})
+ codec: DestinationS3FormatAvroApacheAvroCompressionCodecSnappyCodec = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('codec') }})
-class DestinationS3FormatAvroApacheAvroCompressionCodecZstandardCodecEnum(str, Enum):
+class DestinationS3FormatAvroApacheAvroCompressionCodecZstandardCodec(str, Enum):
ZSTANDARD = 'zstandard'
@@ -147,13 +147,13 @@ class DestinationS3FormatAvroApacheAvroCompressionCodecZstandardCodecEnum(str, E
class DestinationS3FormatAvroApacheAvroCompressionCodecZstandard:
r"""The compression algorithm used to compress data. Default to no compression."""
- codec: DestinationS3FormatAvroApacheAvroCompressionCodecZstandardCodecEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('codec') }})
+ codec: DestinationS3FormatAvroApacheAvroCompressionCodecZstandardCodec = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('codec') }})
compression_level: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_level') }})
r"""Negative levels are 'fast' modes akin to lz4 or snappy, levels above 9 are generally for archival purposes, and levels above 18 use a lot of memory."""
include_checksum: Optional[bool] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('include_checksum'), 'exclude': lambda f: f is None }})
r"""If true, include a checksum with each data block."""
-class DestinationS3FormatAvroApacheAvroCompressionCodecXzCodecEnum(str, Enum):
+class DestinationS3FormatAvroApacheAvroCompressionCodecXzCodec(str, Enum):
XZ = 'xz'
@@ -162,11 +162,11 @@ class DestinationS3FormatAvroApacheAvroCompressionCodecXzCodecEnum(str, Enum):
class DestinationS3FormatAvroApacheAvroCompressionCodecXz:
r"""The compression algorithm used to compress data. Default to no compression."""
- codec: DestinationS3FormatAvroApacheAvroCompressionCodecXzCodecEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('codec') }})
+ codec: DestinationS3FormatAvroApacheAvroCompressionCodecXzCodec = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('codec') }})
compression_level: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_level') }})
r"""See here for details."""
-class DestinationS3FormatAvroApacheAvroCompressionCodecBzip2CodecEnum(str, Enum):
+class DestinationS3FormatAvroApacheAvroCompressionCodecBzip2Codec(str, Enum):
BZIP2 = 'bzip2'
@@ -175,9 +175,9 @@ class DestinationS3FormatAvroApacheAvroCompressionCodecBzip2CodecEnum(str, Enum)
class DestinationS3FormatAvroApacheAvroCompressionCodecBzip2:
r"""The compression algorithm used to compress data. Default to no compression."""
- codec: DestinationS3FormatAvroApacheAvroCompressionCodecBzip2CodecEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('codec') }})
+ codec: DestinationS3FormatAvroApacheAvroCompressionCodecBzip2Codec = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('codec') }})
-class DestinationS3FormatAvroApacheAvroCompressionCodecDeflateCodecEnum(str, Enum):
+class DestinationS3FormatAvroApacheAvroCompressionCodecDeflateCodec(str, Enum):
DEFLATE = 'Deflate'
@@ -186,11 +186,11 @@ class DestinationS3FormatAvroApacheAvroCompressionCodecDeflateCodecEnum(str, Enu
class DestinationS3FormatAvroApacheAvroCompressionCodecDeflate:
r"""The compression algorithm used to compress data. Default to no compression."""
- codec: DestinationS3FormatAvroApacheAvroCompressionCodecDeflateCodecEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('codec') }})
+ codec: DestinationS3FormatAvroApacheAvroCompressionCodecDeflateCodec = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('codec') }})
compression_level: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_level') }})
r"""0: no compression & fastest, 9: best compression & slowest."""
-class DestinationS3FormatAvroApacheAvroCompressionCodecNoCompressionCodecEnum(str, Enum):
+class DestinationS3FormatAvroApacheAvroCompressionCodecNoCompressionCodec(str, Enum):
NO_COMPRESSION = 'no compression'
@@ -199,9 +199,9 @@ class DestinationS3FormatAvroApacheAvroCompressionCodecNoCompressionCodecEnum(st
class DestinationS3FormatAvroApacheAvroCompressionCodecNoCompression:
r"""The compression algorithm used to compress data. Default to no compression."""
- codec: DestinationS3FormatAvroApacheAvroCompressionCodecNoCompressionCodecEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('codec') }})
+ codec: DestinationS3FormatAvroApacheAvroCompressionCodecNoCompressionCodec = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('codec') }})
-class DestinationS3FormatAvroApacheAvroFormatTypeEnum(str, Enum):
+class DestinationS3FormatAvroApacheAvroFormatType(str, Enum):
AVRO = 'Avro'
@@ -212,9 +212,9 @@ class DestinationS3FormatAvroApacheAvro:
compression_codec: Any = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_codec') }})
r"""The compression algorithm used to compress data. Default to no compression."""
- format_type: DestinationS3FormatAvroApacheAvroFormatTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format_type') }})
+ format_type: DestinationS3FormatAvroApacheAvroFormatType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format_type') }})
-class DestinationS3S3BucketRegionEnum(str, Enum):
+class DestinationS3S3BucketRegion(str, Enum):
r"""The region of the S3 bucket. See here for all region codes."""
UNKNOWN = ''
US_EAST_1 = 'us-east-1'
@@ -249,14 +249,14 @@ class DestinationS3S3BucketRegionEnum(str, Enum):
class DestinationS3:
r"""The values required to configure the destination."""
- destination_type: DestinationS3S3Enum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
+ destination_type: DestinationS3S3 = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
format: Any = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format') }})
r"""Format of the data output. See here for more details"""
s3_bucket_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_bucket_name') }})
r"""The name of the S3 bucket. Read more here."""
s3_bucket_path: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_bucket_path') }})
r"""Directory under the S3 bucket where data will be written. Read more here"""
- s3_bucket_region: DestinationS3S3BucketRegionEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_bucket_region') }})
+ s3_bucket_region: DestinationS3S3BucketRegion = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_bucket_region') }})
r"""The region of the S3 bucket. See here for all region codes."""
access_key_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_key_id'), 'exclude': lambda f: f is None }})
r"""The access key ID to access the S3 bucket. Airbyte requires Read and Write permissions to the given bucket. Read more here."""
diff --git a/src/airbyte/models/shared/destination_s3_glue.py b/src/airbyte/models/shared/destination_s3_glue.py
index 7d9daf35..7998c2ee 100755
--- a/src/airbyte/models/shared/destination_s3_glue.py
+++ b/src/airbyte/models/shared/destination_s3_glue.py
@@ -7,10 +7,10 @@
from enum import Enum
from typing import Any, Optional
-class DestinationS3GlueS3GlueEnum(str, Enum):
+class DestinationS3GlueS3Glue(str, Enum):
S3_GLUE = 's3-glue'
-class DestinationS3GlueFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionTypeEnum(str, Enum):
+class DestinationS3GlueFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType(str, Enum):
GZIP = 'GZIP'
@@ -19,9 +19,9 @@ class DestinationS3GlueFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompres
class DestinationS3GlueFormatJSONLinesNewlineDelimitedJSONCompressionGZIP:
r"""Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: \\".jsonl.gz\\")."""
- compression_type: Optional[DestinationS3GlueFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionTypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_type'), 'exclude': lambda f: f is None }})
+ compression_type: Optional[DestinationS3GlueFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_type'), 'exclude': lambda f: f is None }})
-class DestinationS3GlueFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionTypeEnum(str, Enum):
+class DestinationS3GlueFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType(str, Enum):
NO_COMPRESSION = 'No Compression'
@@ -30,14 +30,14 @@ class DestinationS3GlueFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressi
class DestinationS3GlueFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression:
r"""Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: \\".jsonl.gz\\")."""
- compression_type: Optional[DestinationS3GlueFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionTypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_type'), 'exclude': lambda f: f is None }})
+ compression_type: Optional[DestinationS3GlueFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_type'), 'exclude': lambda f: f is None }})
-class DestinationS3GlueFormatJSONLinesNewlineDelimitedJSONFlatteningEnum(str, Enum):
+class DestinationS3GlueFormatJSONLinesNewlineDelimitedJSONFlattening(str, Enum):
r"""Whether the input json data should be normalized (flattened) in the output JSON Lines. Please refer to docs for details."""
NO_FLATTENING = 'No flattening'
ROOT_LEVEL_FLATTENING = 'Root level flattening'
-class DestinationS3GlueFormatJSONLinesNewlineDelimitedJSONFormatTypeEnum(str, Enum):
+class DestinationS3GlueFormatJSONLinesNewlineDelimitedJSONFormatType(str, Enum):
JSONL = 'JSONL'
@@ -46,18 +46,18 @@ class DestinationS3GlueFormatJSONLinesNewlineDelimitedJSONFormatTypeEnum(str, En
class DestinationS3GlueFormatJSONLinesNewlineDelimitedJSON:
r"""Format of the data output. See here for more details"""
- format_type: DestinationS3GlueFormatJSONLinesNewlineDelimitedJSONFormatTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format_type') }})
+ format_type: DestinationS3GlueFormatJSONLinesNewlineDelimitedJSONFormatType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format_type') }})
compression: Optional[Any] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression'), 'exclude': lambda f: f is None }})
r"""Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: \\".jsonl.gz\\")."""
- flattening: Optional[DestinationS3GlueFormatJSONLinesNewlineDelimitedJSONFlatteningEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('flattening'), 'exclude': lambda f: f is None }})
+ flattening: Optional[DestinationS3GlueFormatJSONLinesNewlineDelimitedJSONFlattening] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('flattening'), 'exclude': lambda f: f is None }})
r"""Whether the input json data should be normalized (flattened) in the output JSON Lines. Please refer to docs for details."""
-class DestinationS3GlueSerializationLibraryEnum(str, Enum):
+class DestinationS3GlueSerializationLibrary(str, Enum):
r"""The library that your query engine will use for reading and writing data in your lake."""
ORG_OPENX_DATA_JSONSERDE_JSON_SER_DE = 'org.openx.data.jsonserde.JsonSerDe'
ORG_APACHE_HIVE_HCATALOG_DATA_JSON_SER_DE = 'org.apache.hive.hcatalog.data.JsonSerDe'
-class DestinationS3GlueS3BucketRegionEnum(str, Enum):
+class DestinationS3GlueS3BucketRegion(str, Enum):
r"""The region of the S3 bucket. See here for all region codes."""
UNKNOWN = ''
US_EAST_1 = 'us-east-1'
@@ -92,18 +92,18 @@ class DestinationS3GlueS3BucketRegionEnum(str, Enum):
class DestinationS3Glue:
r"""The values required to configure the destination."""
- destination_type: DestinationS3GlueS3GlueEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
+ destination_type: DestinationS3GlueS3Glue = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
format: Any = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format') }})
r"""Format of the data output. See here for more details"""
glue_database: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('glue_database') }})
r"""Name of the glue database for creating the tables, leave blank if no integration"""
- glue_serialization_library: DestinationS3GlueSerializationLibraryEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('glue_serialization_library') }})
+ glue_serialization_library: DestinationS3GlueSerializationLibrary = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('glue_serialization_library') }})
r"""The library that your query engine will use for reading and writing data in your lake."""
s3_bucket_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_bucket_name') }})
r"""The name of the S3 bucket. Read more here."""
s3_bucket_path: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_bucket_path') }})
r"""Directory under the S3 bucket where data will be written. Read more here"""
- s3_bucket_region: DestinationS3GlueS3BucketRegionEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_bucket_region') }})
+ s3_bucket_region: DestinationS3GlueS3BucketRegion = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_bucket_region') }})
r"""The region of the S3 bucket. See here for all region codes."""
access_key_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_key_id'), 'exclude': lambda f: f is None }})
r"""The access key ID to access the S3 bucket. Airbyte requires Read and Write permissions to the given bucket. Read more here."""
diff --git a/src/airbyte/models/shared/destination_scylla.py b/src/airbyte/models/shared/destination_scylla.py
index eba7ab90..e5e82a1d 100755
--- a/src/airbyte/models/shared/destination_scylla.py
+++ b/src/airbyte/models/shared/destination_scylla.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class DestinationScyllaScyllaEnum(str, Enum):
+class DestinationScyllaScylla(str, Enum):
SCYLLA = 'scylla'
@@ -18,7 +18,7 @@ class DestinationScylla:
address: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('address') }})
r"""Address to connect to."""
- destination_type: DestinationScyllaScyllaEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
+ destination_type: DestinationScyllaScylla = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
keyspace: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('keyspace') }})
r"""Default Scylla keyspace to create data in."""
password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password') }})
diff --git a/src/airbyte/models/shared/destination_sftp_json.py b/src/airbyte/models/shared/destination_sftp_json.py
index 80878366..02fa61f3 100755
--- a/src/airbyte/models/shared/destination_sftp_json.py
+++ b/src/airbyte/models/shared/destination_sftp_json.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class DestinationSftpJSONSftpJSONEnum(str, Enum):
+class DestinationSftpJSONSftpJSON(str, Enum):
SFTP_JSON = 'sftp-json'
@@ -18,7 +18,7 @@ class DestinationSftpJSON:
destination_path: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destination_path') }})
r"""Path to the directory where json files will be written."""
- destination_type: DestinationSftpJSONSftpJSONEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
+ destination_type: DestinationSftpJSONSftpJSON = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('host') }})
r"""Hostname of the SFTP server."""
password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password') }})
diff --git a/src/airbyte/models/shared/destination_snowflake.py b/src/airbyte/models/shared/destination_snowflake.py
index 9875b540..e1acd1fd 100755
--- a/src/airbyte/models/shared/destination_snowflake.py
+++ b/src/airbyte/models/shared/destination_snowflake.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Any, Optional
-class DestinationSnowflakeCredentialsUsernameAndPasswordAuthTypeEnum(str, Enum):
+class DestinationSnowflakeCredentialsUsernameAndPasswordAuthType(str, Enum):
USERNAME_AND_PASSWORD = 'Username and Password'
@@ -17,9 +17,9 @@ class DestinationSnowflakeCredentialsUsernameAndPassword:
password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password') }})
r"""Enter the password associated with the username."""
- auth_type: Optional[DestinationSnowflakeCredentialsUsernameAndPasswordAuthTypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
+ auth_type: Optional[DestinationSnowflakeCredentialsUsernameAndPasswordAuthType] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
-class DestinationSnowflakeCredentialsKeyPairAuthenticationAuthTypeEnum(str, Enum):
+class DestinationSnowflakeCredentialsKeyPairAuthenticationAuthType(str, Enum):
KEY_PAIR_AUTHENTICATION = 'Key Pair Authentication'
@@ -29,11 +29,11 @@ class DestinationSnowflakeCredentialsKeyPairAuthentication:
private_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('private_key') }})
r"""RSA Private key to use for Snowflake connection. See the docs for more information on how to obtain this key."""
- auth_type: Optional[DestinationSnowflakeCredentialsKeyPairAuthenticationAuthTypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
+ auth_type: Optional[DestinationSnowflakeCredentialsKeyPairAuthenticationAuthType] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
private_key_password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('private_key_password'), 'exclude': lambda f: f is None }})
r"""Passphrase for private key"""
-class DestinationSnowflakeCredentialsOAuth20AuthTypeEnum(str, Enum):
+class DestinationSnowflakeCredentialsOAuth20AuthType(str, Enum):
O_AUTH2_0 = 'OAuth2.0'
@@ -45,16 +45,16 @@ class DestinationSnowflakeCredentialsOAuth20:
r"""Enter you application's Access Token"""
refresh_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('refresh_token') }})
r"""Enter your application's Refresh Token"""
- auth_type: Optional[DestinationSnowflakeCredentialsOAuth20AuthTypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
+ auth_type: Optional[DestinationSnowflakeCredentialsOAuth20AuthType] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
client_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id'), 'exclude': lambda f: f is None }})
r"""Enter your application's Client ID"""
client_secret: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret'), 'exclude': lambda f: f is None }})
r"""Enter your application's Client secret"""
-class DestinationSnowflakeSnowflakeEnum(str, Enum):
+class DestinationSnowflakeSnowflake(str, Enum):
SNOWFLAKE = 'snowflake'
-class DestinationSnowflakeLoadingMethodGoogleCloudStorageStagingMethodEnum(str, Enum):
+class DestinationSnowflakeLoadingMethodGoogleCloudStorageStagingMethod(str, Enum):
GCS_STAGING = 'GCS Staging'
@@ -67,11 +67,11 @@ class DestinationSnowflakeLoadingMethodGoogleCloudStorageStaging:
r"""Enter the Cloud Storage bucket name"""
credentials_json: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials_json') }})
r"""Enter your Google Cloud service account key in the JSON format with read/write access to your Cloud Storage staging bucket"""
- method: DestinationSnowflakeLoadingMethodGoogleCloudStorageStagingMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
+ method: DestinationSnowflakeLoadingMethodGoogleCloudStorageStagingMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
project_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('project_id') }})
r"""Enter the Google Cloud project ID"""
-class DestinationSnowflakeLoadingMethodAWSS3StagingEncryptionAESCBCEnvelopeEncryptionEncryptionTypeEnum(str, Enum):
+class DestinationSnowflakeLoadingMethodAWSS3StagingEncryptionAESCBCEnvelopeEncryptionEncryptionType(str, Enum):
AES_CBC_ENVELOPE = 'aes_cbc_envelope'
@@ -80,11 +80,11 @@ class DestinationSnowflakeLoadingMethodAWSS3StagingEncryptionAESCBCEnvelopeEncry
class DestinationSnowflakeLoadingMethodAWSS3StagingEncryptionAESCBCEnvelopeEncryption:
r"""Staging data will be encrypted using AES-CBC envelope encryption."""
- encryption_type: DestinationSnowflakeLoadingMethodAWSS3StagingEncryptionAESCBCEnvelopeEncryptionEncryptionTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption_type') }})
+ encryption_type: DestinationSnowflakeLoadingMethodAWSS3StagingEncryptionAESCBCEnvelopeEncryptionEncryptionType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption_type') }})
key_encrypting_key: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('key_encrypting_key'), 'exclude': lambda f: f is None }})
r"""The key, base64-encoded. Must be either 128, 192, or 256 bits. Leave blank to have Airbyte generate an ephemeral key for each sync."""
-class DestinationSnowflakeLoadingMethodAWSS3StagingEncryptionNoEncryptionEncryptionTypeEnum(str, Enum):
+class DestinationSnowflakeLoadingMethodAWSS3StagingEncryptionNoEncryptionEncryptionType(str, Enum):
NONE = 'none'
@@ -93,12 +93,12 @@ class DestinationSnowflakeLoadingMethodAWSS3StagingEncryptionNoEncryptionEncrypt
class DestinationSnowflakeLoadingMethodAWSS3StagingEncryptionNoEncryption:
r"""Staging data will be stored in plaintext."""
- encryption_type: DestinationSnowflakeLoadingMethodAWSS3StagingEncryptionNoEncryptionEncryptionTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption_type') }})
+ encryption_type: DestinationSnowflakeLoadingMethodAWSS3StagingEncryptionNoEncryptionEncryptionType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption_type') }})
-class DestinationSnowflakeLoadingMethodAWSS3StagingMethodEnum(str, Enum):
+class DestinationSnowflakeLoadingMethodAWSS3StagingMethod(str, Enum):
S3_STAGING = 'S3 Staging'
-class DestinationSnowflakeLoadingMethodAWSS3StagingS3BucketRegionEnum(str, Enum):
+class DestinationSnowflakeLoadingMethodAWSS3StagingS3BucketRegion(str, Enum):
r"""Enter the region where your S3 bucket resides"""
UNKNOWN = ''
US_EAST_1 = 'us-east-1'
@@ -133,7 +133,7 @@ class DestinationSnowflakeLoadingMethodAWSS3Staging:
access_key_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_key_id') }})
r"""Enter your AWS access key ID. Airbyte requires Read and Write permissions on your S3 bucket"""
- method: DestinationSnowflakeLoadingMethodAWSS3StagingMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
+ method: DestinationSnowflakeLoadingMethodAWSS3StagingMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
s3_bucket_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_bucket_name') }})
r"""Enter your S3 bucket name"""
secret_access_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('secret_access_key') }})
@@ -144,10 +144,10 @@ class DestinationSnowflakeLoadingMethodAWSS3Staging:
r"""The pattern allows you to set the file-name format for the S3 staging file(s)"""
purge_staging_data: Optional[bool] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('purge_staging_data'), 'exclude': lambda f: f is None }})
r"""Toggle to delete staging files from the S3 bucket after a successful sync"""
- s3_bucket_region: Optional[DestinationSnowflakeLoadingMethodAWSS3StagingS3BucketRegionEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_bucket_region'), 'exclude': lambda f: f is None }})
+ s3_bucket_region: Optional[DestinationSnowflakeLoadingMethodAWSS3StagingS3BucketRegion] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_bucket_region'), 'exclude': lambda f: f is None }})
r"""Enter the region where your S3 bucket resides"""
-class DestinationSnowflakeLoadingMethodRecommendedInternalStagingMethodEnum(str, Enum):
+class DestinationSnowflakeLoadingMethodRecommendedInternalStagingMethod(str, Enum):
INTERNAL_STAGING = 'Internal Staging'
@@ -156,9 +156,9 @@ class DestinationSnowflakeLoadingMethodRecommendedInternalStagingMethodEnum(str,
class DestinationSnowflakeLoadingMethodRecommendedInternalStaging:
r"""Recommended for large production workloads for better speed and scalability."""
- method: DestinationSnowflakeLoadingMethodRecommendedInternalStagingMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
+ method: DestinationSnowflakeLoadingMethodRecommendedInternalStagingMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
-class DestinationSnowflakeLoadingMethodSelectAnotherOptionMethodEnum(str, Enum):
+class DestinationSnowflakeLoadingMethodSelectAnotherOptionMethod(str, Enum):
STANDARD = 'Standard'
@@ -167,7 +167,7 @@ class DestinationSnowflakeLoadingMethodSelectAnotherOptionMethodEnum(str, Enum):
class DestinationSnowflakeLoadingMethodSelectAnotherOption:
r"""Select another option"""
- method: DestinationSnowflakeLoadingMethodSelectAnotherOptionMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
+ method: DestinationSnowflakeLoadingMethodSelectAnotherOptionMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
@dataclass_json(undefined=Undefined.EXCLUDE)
@@ -177,7 +177,7 @@ class DestinationSnowflake:
database: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('database') }})
r"""Enter the name of the database you want to sync data into"""
- destination_type: DestinationSnowflakeSnowflakeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
+ destination_type: DestinationSnowflakeSnowflake = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('host') }})
r"""Enter your Snowflake account's locator (in the format ...snowflakecomputing.com)"""
role: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('role') }})
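
Snowflake has the deepest nesting of the renamed types; the credential and loading-method variants are unchanged apart from their names. A sketch of two sub-objects whose fields are fully shown above (the key material is a placeholder):

from airbyte.models.shared.destination_snowflake import (
    DestinationSnowflakeCredentialsKeyPairAuthentication,
    DestinationSnowflakeCredentialsKeyPairAuthenticationAuthType,
    DestinationSnowflakeLoadingMethodRecommendedInternalStaging,
    DestinationSnowflakeLoadingMethodRecommendedInternalStagingMethod,
)

# Key-pair credentials; auth_type is optional and defaults to None.
credentials = DestinationSnowflakeCredentialsKeyPairAuthentication(
    private_key='-----BEGIN PRIVATE KEY-----\n...',  # placeholder
    auth_type=DestinationSnowflakeCredentialsKeyPairAuthenticationAuthType.KEY_PAIR_AUTHENTICATION,
)

# Internal staging, the loading method recommended for large workloads.
loading_method = DestinationSnowflakeLoadingMethodRecommendedInternalStaging(
    method=DestinationSnowflakeLoadingMethodRecommendedInternalStagingMethod.INTERNAL_STAGING,
)
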
diff --git a/src/airbyte/models/shared/destination_typesense.py b/src/airbyte/models/shared/destination_typesense.py
index 2ea48a75..0e612353 100755
--- a/src/airbyte/models/shared/destination_typesense.py
+++ b/src/airbyte/models/shared/destination_typesense.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class DestinationTypesenseTypesenseEnum(str, Enum):
+class DestinationTypesenseTypesense(str, Enum):
TYPESENSE = 'typesense'
@@ -18,7 +18,7 @@ class DestinationTypesense:
api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }})
r"""Typesense API Key"""
- destination_type: DestinationTypesenseTypesenseEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
+ destination_type: DestinationTypesenseTypesense = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('host') }})
r"""Hostname of the Typesense instance without protocol."""
batch_size: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('batch_size'), 'exclude': lambda f: f is None }})
diff --git a/src/airbyte/models/shared/geographyenum_enum.py b/src/airbyte/models/shared/geographyenum.py
similarity index 83%
rename from src/airbyte/models/shared/geographyenum_enum.py
rename to src/airbyte/models/shared/geographyenum.py
index 521f692c..9b352d57 100755
--- a/src/airbyte/models/shared/geographyenum_enum.py
+++ b/src/airbyte/models/shared/geographyenum.py
@@ -3,7 +3,7 @@
from __future__ import annotations
from enum import Enum
-class GeographyEnumEnum(str, Enum):
+class GeographyEnum(str, Enum):
AUTO = 'auto'
US = 'us'
EU = 'eu'
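
Because the module is renamed along with the class, both halves of an import change together. A quick check, assuming this package layout:

from airbyte.models.shared.geographyenum import GeographyEnum

# Only the names changed; members and their wire values are identical.
assert GeographyEnum.EU.value == 'eu'
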
diff --git a/src/airbyte/models/shared/jobcreaterequest.py b/src/airbyte/models/shared/jobcreaterequest.py
index fc331008..ea8e73da 100755
--- a/src/airbyte/models/shared/jobcreaterequest.py
+++ b/src/airbyte/models/shared/jobcreaterequest.py
@@ -2,7 +2,7 @@
from __future__ import annotations
import dataclasses
-from ..shared import jobtypeenum_enum as shared_jobtypeenum_enum
+from ..shared import jobtypeenum as shared_jobtypeenum
from airbyte import utils
from dataclasses_json import Undefined, dataclass_json
@@ -13,6 +13,6 @@ class JobCreateRequest:
r"""Creates a new Job from the configuration provided in the request body."""
connection_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('connectionId') }})
- job_type: shared_jobtypeenum_enum.JobTypeEnumEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('jobType') }})
+ job_type: shared_jobtypeenum.JobTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('jobType') }})
r"""Enum that describes the different types of jobs that the platform runs."""
\ No newline at end of file
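
Call sites only swap the enum name; the request shape is untouched. A sketch (the connection ID is a placeholder):

from airbyte.models.shared.jobcreaterequest import JobCreateRequest
from airbyte.models.shared.jobtypeenum import JobTypeEnum

req = JobCreateRequest(
    connection_id='00000000-0000-0000-0000-000000000000',  # placeholder
    job_type=JobTypeEnum.SYNC,
)
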
diff --git a/src/airbyte/models/shared/jobresponse.py b/src/airbyte/models/shared/jobresponse.py
index 8ec3b8e8..cd09eaaa 100755
--- a/src/airbyte/models/shared/jobresponse.py
+++ b/src/airbyte/models/shared/jobresponse.py
@@ -2,8 +2,8 @@
from __future__ import annotations
import dataclasses
-from ..shared import jobstatusenum_enum as shared_jobstatusenum_enum
-from ..shared import jobtypeenum_enum as shared_jobtypeenum_enum
+from ..shared import jobstatusenum as shared_jobstatusenum
+from ..shared import jobtypeenum as shared_jobtypeenum
from airbyte import utils
from dataclasses_json import Undefined, dataclass_json
from typing import Optional
@@ -15,10 +15,10 @@ class JobResponse:
r"""Provides details of a single job."""
job_id: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('jobId') }})
- job_type: shared_jobtypeenum_enum.JobTypeEnumEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('jobType') }})
+ job_type: shared_jobtypeenum.JobTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('jobType') }})
r"""Enum that describes the different types of jobs that the platform runs."""
start_time: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('startTime') }})
- status: shared_jobstatusenum_enum.JobStatusEnumEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('status') }})
+ status: shared_jobstatusenum.JobStatusEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('status') }})
bytes_synced: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('bytesSynced'), 'exclude': lambda f: f is None }})
duration: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('duration'), 'exclude': lambda f: f is None }})
r"""Duration of a sync in ISO_8601 format"""
diff --git a/src/airbyte/models/shared/jobstatusenum_enum.py b/src/airbyte/models/shared/jobstatusenum.py
similarity index 88%
rename from src/airbyte/models/shared/jobstatusenum_enum.py
rename to src/airbyte/models/shared/jobstatusenum.py
index 73adb10a..6145c2e5 100755
--- a/src/airbyte/models/shared/jobstatusenum_enum.py
+++ b/src/airbyte/models/shared/jobstatusenum.py
@@ -3,7 +3,7 @@
from __future__ import annotations
from enum import Enum
-class JobStatusEnumEnum(str, Enum):
+class JobStatusEnum(str, Enum):
PENDING = 'pending'
RUNNING = 'running'
INCOMPLETE = 'incomplete'
diff --git a/src/airbyte/models/shared/jobtypeenum_enum.py b/src/airbyte/models/shared/jobtypeenum.py
similarity index 88%
rename from src/airbyte/models/shared/jobtypeenum_enum.py
rename to src/airbyte/models/shared/jobtypeenum.py
index 73287a74..8df6f7e6 100755
--- a/src/airbyte/models/shared/jobtypeenum_enum.py
+++ b/src/airbyte/models/shared/jobtypeenum.py
@@ -3,7 +3,7 @@
from __future__ import annotations
from enum import Enum
-class JobTypeEnumEnum(str, Enum):
+class JobTypeEnum(str, Enum):
r"""Enum that describes the different types of jobs that the platform runs."""
SYNC = 'sync'
RESET = 'reset'
diff --git a/src/airbyte/models/shared/namespacedefinitionenum_enum.py b/src/airbyte/models/shared/namespacedefinitionenum.py
similarity index 86%
rename from src/airbyte/models/shared/namespacedefinitionenum_enum.py
rename to src/airbyte/models/shared/namespacedefinitionenum.py
index 7af37b1e..d3fdcc21 100755
--- a/src/airbyte/models/shared/namespacedefinitionenum_enum.py
+++ b/src/airbyte/models/shared/namespacedefinitionenum.py
@@ -3,7 +3,7 @@
from __future__ import annotations
from enum import Enum
-class NamespaceDefinitionEnumEnum(str, Enum):
+class NamespaceDefinitionEnum(str, Enum):
r"""Define the location where the data will be stored in the destination"""
SOURCE = 'source'
DESTINATION = 'destination'
diff --git a/src/airbyte/models/shared/nonbreakingschemaupdatesbehaviorenum_enum.py b/src/airbyte/models/shared/nonbreakingschemaupdatesbehaviorenum.py
similarity index 83%
rename from src/airbyte/models/shared/nonbreakingschemaupdatesbehaviorenum_enum.py
rename to src/airbyte/models/shared/nonbreakingschemaupdatesbehaviorenum.py
index dfae4cdf..f1898dba 100755
--- a/src/airbyte/models/shared/nonbreakingschemaupdatesbehaviorenum_enum.py
+++ b/src/airbyte/models/shared/nonbreakingschemaupdatesbehaviorenum.py
@@ -3,7 +3,7 @@
from __future__ import annotations
from enum import Enum
-class NonBreakingSchemaUpdatesBehaviorEnumEnum(str, Enum):
+class NonBreakingSchemaUpdatesBehaviorEnum(str, Enum):
r"""Set how Airbyte handles syncs when it detects a non-breaking schema change in the source"""
IGNORE = 'ignore'
DISABLE_CONNECTION = 'disable_connection'
diff --git a/src/airbyte/models/shared/scheduletypeenum_enum.py b/src/airbyte/models/shared/scheduletypeenum.py
similarity index 81%
rename from src/airbyte/models/shared/scheduletypeenum_enum.py
rename to src/airbyte/models/shared/scheduletypeenum.py
index a3d32477..cd5af7b5 100755
--- a/src/airbyte/models/shared/scheduletypeenum_enum.py
+++ b/src/airbyte/models/shared/scheduletypeenum.py
@@ -3,6 +3,6 @@
from __future__ import annotations
from enum import Enum
-class ScheduleTypeEnumEnum(str, Enum):
+class ScheduleTypeEnum(str, Enum):
MANUAL = 'manual'
CRON = 'cron'
diff --git a/src/airbyte/models/shared/scheduletypewithbasicenum_enum.py b/src/airbyte/models/shared/scheduletypewithbasicenum.py
similarity index 80%
rename from src/airbyte/models/shared/scheduletypewithbasicenum_enum.py
rename to src/airbyte/models/shared/scheduletypewithbasicenum.py
index 9f02ac8b..70fbbfa1 100755
--- a/src/airbyte/models/shared/scheduletypewithbasicenum_enum.py
+++ b/src/airbyte/models/shared/scheduletypewithbasicenum.py
@@ -3,7 +3,7 @@
from __future__ import annotations
from enum import Enum
-class ScheduleTypeWithBasicEnumEnum(str, Enum):
+class ScheduleTypeWithBasicEnum(str, Enum):
MANUAL = 'manual'
CRON = 'cron'
BASIC = 'basic'
diff --git a/src/airbyte/models/shared/source_aircall.py b/src/airbyte/models/shared/source_aircall.py
index b76618b3..1d28d9ec 100755
--- a/src/airbyte/models/shared/source_aircall.py
+++ b/src/airbyte/models/shared/source_aircall.py
@@ -9,7 +9,7 @@
from enum import Enum
from marshmallow import fields
-class SourceAircallAircallEnum(str, Enum):
+class SourceAircallAircall(str, Enum):
AIRCALL = 'aircall'
@@ -22,7 +22,7 @@ class SourceAircall:
r"""App ID found at settings https://dashboard.aircall.io/integrations/api-keys"""
api_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_token') }})
r"""App token found at settings (Ref- https://dashboard.aircall.io/integrations/api-keys)"""
- source_type: SourceAircallAircallEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceAircallAircall = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso') }})
r"""Date time filter for incremental filter, Specify which date to extract from."""
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_airtable.py b/src/airbyte/models/shared/source_airtable.py
index 434541f1..97e23119 100755
--- a/src/airbyte/models/shared/source_airtable.py
+++ b/src/airbyte/models/shared/source_airtable.py
@@ -10,7 +10,7 @@
from marshmallow import fields
from typing import Any, Optional
-class SourceAirtableCredentialsPersonalAccessTokenAuthMethodEnum(str, Enum):
+class SourceAirtableCredentialsPersonalAccessTokenAuthMethod(str, Enum):
API_KEY = 'api_key'
@@ -20,9 +20,9 @@ class SourceAirtableCredentialsPersonalAccessToken:
api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }})
r"""The Personal Access Token for the Airtable account. See the Support Guide for more information on how to obtain this token."""
- auth_method: Optional[SourceAirtableCredentialsPersonalAccessTokenAuthMethodEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_method'), 'exclude': lambda f: f is None }})
+ auth_method: Optional[SourceAirtableCredentialsPersonalAccessTokenAuthMethod] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_method'), 'exclude': lambda f: f is None }})
-class SourceAirtableCredentialsOAuth20AuthMethodEnum(str, Enum):
+class SourceAirtableCredentialsOAuth20AuthMethod(str, Enum):
OAUTH2_0 = 'oauth2.0'
@@ -38,11 +38,11 @@ class SourceAirtableCredentialsOAuth20:
r"""The key to refresh the expired access token."""
access_token: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token'), 'exclude': lambda f: f is None }})
r"""Access Token for making authenticated requests."""
- auth_method: Optional[SourceAirtableCredentialsOAuth20AuthMethodEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_method'), 'exclude': lambda f: f is None }})
+ auth_method: Optional[SourceAirtableCredentialsOAuth20AuthMethod] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_method'), 'exclude': lambda f: f is None }})
token_expiry_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('token_expiry_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso'), 'exclude': lambda f: f is None }})
r"""The date-time when the access token should be refreshed."""
-class SourceAirtableAirtableEnum(str, Enum):
+class SourceAirtableAirtable(str, Enum):
AIRTABLE = 'airtable'
@@ -51,6 +51,6 @@ class SourceAirtableAirtableEnum(str, Enum):
class SourceAirtable:
r"""The values required to configure the source."""
- source_type: SourceAirtableAirtableEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceAirtableAirtable = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
credentials: Optional[Any] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials'), 'exclude': lambda f: f is None }})
\ No newline at end of file
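
Source models follow the same convention as destinations: a source_type discriminator enum plus optional credential variants. A sketch using the personal-access-token variant (the token value is a placeholder):

from airbyte.models.shared.source_airtable import (
    SourceAirtable,
    SourceAirtableAirtable,
    SourceAirtableCredentialsPersonalAccessToken,
    SourceAirtableCredentialsPersonalAccessTokenAuthMethod,
)

config = SourceAirtable(
    source_type=SourceAirtableAirtable.AIRTABLE,
    credentials=SourceAirtableCredentialsPersonalAccessToken(
        api_key='pat0000000000',  # placeholder
        auth_method=SourceAirtableCredentialsPersonalAccessTokenAuthMethod.API_KEY,
    ),
)
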
diff --git a/src/airbyte/models/shared/source_alloydb.py b/src/airbyte/models/shared/source_alloydb.py
index 2133da19..773969d2 100755
--- a/src/airbyte/models/shared/source_alloydb.py
+++ b/src/airbyte/models/shared/source_alloydb.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Any, Optional
-class SourceAlloydbReplicationMethodStandardMethodEnum(str, Enum):
+class SourceAlloydbReplicationMethodStandardMethod(str, Enum):
STANDARD = 'Standard'
@@ -16,12 +16,12 @@ class SourceAlloydbReplicationMethodStandardMethodEnum(str, Enum):
class SourceAlloydbReplicationMethodStandard:
r"""Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally."""
- method: SourceAlloydbReplicationMethodStandardMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
+ method: SourceAlloydbReplicationMethodStandardMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
-class SourceAlloydbAlloydbEnum(str, Enum):
+class SourceAlloydbAlloydb(str, Enum):
ALLOYDB = 'alloydb'
-class SourceAlloydbTunnelMethodPasswordAuthenticationTunnelMethodEnum(str, Enum):
+class SourceAlloydbTunnelMethodPasswordAuthenticationTunnelMethod(str, Enum):
r"""Connect through a jump server tunnel host using username and password authentication"""
SSH_PASSWORD_AUTH = 'SSH_PASSWORD_AUTH'
@@ -33,7 +33,7 @@ class SourceAlloydbTunnelMethodPasswordAuthentication:
tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }})
r"""Hostname of the jump server host that allows inbound ssh tunnel."""
- tunnel_method: SourceAlloydbTunnelMethodPasswordAuthenticationTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: SourceAlloydbTunnelMethodPasswordAuthenticationTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""Connect through a jump server tunnel host using username and password authentication"""
tunnel_port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port') }})
r"""Port on the proxy/jump server that accepts inbound ssh connections."""
@@ -42,7 +42,7 @@ class SourceAlloydbTunnelMethodPasswordAuthentication:
tunnel_user_password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user_password') }})
r"""OS-level password for logging into the jump server host"""
-class SourceAlloydbTunnelMethodSSHKeyAuthenticationTunnelMethodEnum(str, Enum):
+class SourceAlloydbTunnelMethodSSHKeyAuthenticationTunnelMethod(str, Enum):
r"""Connect through a jump server tunnel host using username and ssh key"""
SSH_KEY_AUTH = 'SSH_KEY_AUTH'
@@ -56,14 +56,14 @@ class SourceAlloydbTunnelMethodSSHKeyAuthentication:
r"""OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )"""
tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }})
r"""Hostname of the jump server host that allows inbound ssh tunnel."""
- tunnel_method: SourceAlloydbTunnelMethodSSHKeyAuthenticationTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: SourceAlloydbTunnelMethodSSHKeyAuthenticationTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""Connect through a jump server tunnel host using username and ssh key"""
tunnel_port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port') }})
r"""Port on the proxy/jump server that accepts inbound ssh connections."""
tunnel_user: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user') }})
r"""OS-level username for logging into the jump server host."""
-class SourceAlloydbTunnelMethodNoTunnelTunnelMethodEnum(str, Enum):
+class SourceAlloydbTunnelMethodNoTunnelTunnelMethod(str, Enum):
r"""No ssh tunnel needed to connect to database"""
NO_TUNNEL = 'NO_TUNNEL'
@@ -73,7 +73,7 @@ class SourceAlloydbTunnelMethodNoTunnelTunnelMethodEnum(str, Enum):
class SourceAlloydbTunnelMethodNoTunnel:
r"""Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use."""
- tunnel_method: SourceAlloydbTunnelMethodNoTunnelTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: SourceAlloydbTunnelMethodNoTunnelTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""No ssh tunnel needed to connect to database"""
@@ -88,7 +88,7 @@ class SourceAlloydb:
r"""Hostname of the database."""
port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('port') }})
r"""Port of the database."""
- source_type: SourceAlloydbAlloydbEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceAlloydbAlloydb = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }})
r"""Username to access the database."""
jdbc_url_params: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('jdbc_url_params'), 'exclude': lambda f: f is None }})
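
The three tunnel-method variants are discriminated the same way; each carries a single-member enum naming its mode. The no-tunnel case is the smallest (a sketch):

from airbyte.models.shared.source_alloydb import (
    SourceAlloydbTunnelMethodNoTunnel,
    SourceAlloydbTunnelMethodNoTunnelTunnelMethod,
)

# Direct connection: no jump server involved.
tunnel = SourceAlloydbTunnelMethodNoTunnel(
    tunnel_method=SourceAlloydbTunnelMethodNoTunnelTunnelMethod.NO_TUNNEL,
)
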
diff --git a/src/airbyte/models/shared/source_amazon_ads.py b/src/airbyte/models/shared/source_amazon_ads.py
index 4deb71b0..a3622155 100755
--- a/src/airbyte/models/shared/source_amazon_ads.py
+++ b/src/airbyte/models/shared/source_amazon_ads.py
@@ -7,16 +7,16 @@
from enum import Enum
from typing import Optional
-class SourceAmazonAdsAuthTypeEnum(str, Enum):
+class SourceAmazonAdsAuthType(str, Enum):
OAUTH2_0 = 'oauth2.0'
-class SourceAmazonAdsRegionEnum(str, Enum):
+class SourceAmazonAdsRegion(str, Enum):
r"""Region to pull data from (EU/NA/FE). See docs for more details."""
NA = 'NA'
EU = 'EU'
FE = 'FE'
-class SourceAmazonAdsReportRecordTypesEnum(str, Enum):
+class SourceAmazonAdsReportRecordTypes(str, Enum):
AD_GROUPS = 'adGroups'
ASINS = 'asins'
ASINS_KEYWORDS = 'asins_keywords'
@@ -26,10 +26,10 @@ class SourceAmazonAdsReportRecordTypesEnum(str, Enum):
PRODUCT_ADS = 'productAds'
TARGETS = 'targets'
-class SourceAmazonAdsAmazonAdsEnum(str, Enum):
+class SourceAmazonAdsAmazonAds(str, Enum):
AMAZON_ADS = 'amazon-ads'
-class SourceAmazonAdsStateFilterEnum(str, Enum):
+class SourceAmazonAdsStateFilter(str, Enum):
ENABLED = 'enabled'
PAUSED = 'paused'
ARCHIVED = 'archived'
@@ -46,18 +46,18 @@ class SourceAmazonAds:
r"""The client secret of your Amazon Ads developer application. See the docs for more information."""
refresh_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('refresh_token') }})
r"""Amazon Ads refresh token. See the docs for more information on how to obtain this token."""
- source_type: SourceAmazonAdsAmazonAdsEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
- auth_type: Optional[SourceAmazonAdsAuthTypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
+ source_type: SourceAmazonAdsAmazonAds = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ auth_type: Optional[SourceAmazonAdsAuthType] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
look_back_window: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('look_back_window'), 'exclude': lambda f: f is None }})
r"""The amount of days to go back in time to get the updated data from Amazon Ads"""
profiles: Optional[list[int]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('profiles'), 'exclude': lambda f: f is None }})
r"""Profile IDs you want to fetch data for. See docs for more details."""
- region: Optional[SourceAmazonAdsRegionEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('region'), 'exclude': lambda f: f is None }})
+ region: Optional[SourceAmazonAdsRegion] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('region'), 'exclude': lambda f: f is None }})
r"""Region to pull data from (EU/NA/FE). See docs for more details."""
- report_record_types: Optional[list[SourceAmazonAdsReportRecordTypesEnum]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('report_record_types'), 'exclude': lambda f: f is None }})
+ report_record_types: Optional[list[SourceAmazonAdsReportRecordTypes]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('report_record_types'), 'exclude': lambda f: f is None }})
r"""Optional configuration which accepts an array of string of record types. Leave blank for default behaviour to pull all report types. Use this config option only if you want to pull specific report type(s). See docs for more details"""
start_date: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'exclude': lambda f: f is None }})
r"""The Start date for collecting reports, should not be more than 60 days in the past. In YYYY-MM-DD format"""
- state_filter: Optional[list[SourceAmazonAdsStateFilterEnum]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('state_filter'), 'exclude': lambda f: f is None }})
+ state_filter: Optional[list[SourceAmazonAdsStateFilter]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('state_filter'), 'exclude': lambda f: f is None }})
r"""Reflects the state of the Display, Product, and Brand Campaign streams as enabled, paused, or archived. If you do not populate this field, it will be ignored completely."""
\ No newline at end of file
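
Fields typed as lists of enums simply take lists of the shorter names now (values illustrative):

from airbyte.models.shared.source_amazon_ads import (
    SourceAmazonAdsReportRecordTypes,
    SourceAmazonAdsStateFilter,
)

report_record_types = [
    SourceAmazonAdsReportRecordTypes.AD_GROUPS,
    SourceAmazonAdsReportRecordTypes.ASINS,
]
state_filter = [
    SourceAmazonAdsStateFilter.ENABLED,
    SourceAmazonAdsStateFilter.PAUSED,
]
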
diff --git a/src/airbyte/models/shared/source_amazon_seller_partner.py b/src/airbyte/models/shared/source_amazon_seller_partner.py
index 2202f5fa..15dfcb1a 100755
--- a/src/airbyte/models/shared/source_amazon_seller_partner.py
+++ b/src/airbyte/models/shared/source_amazon_seller_partner.py
@@ -7,15 +7,15 @@
from enum import Enum
from typing import Optional
-class SourceAmazonSellerPartnerAuthTypeEnum(str, Enum):
+class SourceAmazonSellerPartnerAuthType(str, Enum):
OAUTH2_0 = 'oauth2.0'
-class SourceAmazonSellerPartnerAWSEnvironmentEnum(str, Enum):
+class SourceAmazonSellerPartnerAWSEnvironment(str, Enum):
r"""An enumeration."""
PRODUCTION = 'PRODUCTION'
SANDBOX = 'SANDBOX'
-class SourceAmazonSellerPartnerAWSRegionEnum(str, Enum):
+class SourceAmazonSellerPartnerAWSRegion(str, Enum):
r"""An enumeration."""
AE = 'AE'
AU = 'AU'
@@ -40,7 +40,7 @@ class SourceAmazonSellerPartnerAWSRegionEnum(str, Enum):
UK = 'UK'
US = 'US'
-class SourceAmazonSellerPartnerAmazonSellerPartnerEnum(str, Enum):
+class SourceAmazonSellerPartnerAmazonSellerPartner(str, Enum):
AMAZON_SELLER_PARTNER = 'amazon-seller-partner'
@@ -51,7 +51,7 @@ class SourceAmazonSellerPartner:
app_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('app_id') }})
r"""Your Amazon App ID"""
- aws_environment: SourceAmazonSellerPartnerAWSEnvironmentEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('aws_environment') }})
+ aws_environment: SourceAmazonSellerPartnerAWSEnvironment = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('aws_environment') }})
r"""An enumeration."""
lwa_app_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('lwa_app_id') }})
r"""Your Login with Amazon Client ID."""
@@ -59,12 +59,12 @@ class SourceAmazonSellerPartner:
r"""Your Login with Amazon Client Secret."""
refresh_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('refresh_token') }})
r"""The Refresh Token obtained via OAuth flow authorization."""
- region: SourceAmazonSellerPartnerAWSRegionEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('region') }})
+ region: SourceAmazonSellerPartnerAWSRegion = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('region') }})
r"""An enumeration."""
replication_start_date: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('replication_start_date') }})
r"""UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated."""
- source_type: SourceAmazonSellerPartnerAmazonSellerPartnerEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
- auth_type: Optional[SourceAmazonSellerPartnerAuthTypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
+ source_type: SourceAmazonSellerPartnerAmazonSellerPartner = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ auth_type: Optional[SourceAmazonSellerPartnerAuthType] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
aws_access_key: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('aws_access_key'), 'exclude': lambda f: f is None }})
r"""Specifies the AWS access key used as part of the credentials to authenticate the user."""
aws_secret_key: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('aws_secret_key'), 'exclude': lambda f: f is None }})
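
With the `Enum` suffix dropped, the Seller Partner enums are referenced by their new class names. A minimal sketch (member choices are illustrative; the `from airbyte.models import shared` import path is assumed from this SDK's usage examples):

```python
from airbyte.models import shared

# Renamed enum classes; the members themselves are unchanged.
environment = shared.SourceAmazonSellerPartnerAWSEnvironment.SANDBOX
region = shared.SourceAmazonSellerPartnerAWSRegion.US
auth_type = shared.SourceAmazonSellerPartnerAuthType.OAUTH2_0
```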
diff --git a/src/airbyte/models/shared/source_amazon_sqs.py b/src/airbyte/models/shared/source_amazon_sqs.py
index 4255c33d..a78c94e7 100755
--- a/src/airbyte/models/shared/source_amazon_sqs.py
+++ b/src/airbyte/models/shared/source_amazon_sqs.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class SourceAmazonSqsAWSRegionEnum(str, Enum):
+class SourceAmazonSqsAWSRegion(str, Enum):
r"""AWS Region of the SQS Queue"""
US_EAST_1 = 'us-east-1'
US_EAST_2 = 'us-east-2'
@@ -35,7 +35,7 @@ class SourceAmazonSqsAWSRegionEnum(str, Enum):
US_GOV_EAST_1 = 'us-gov-east-1'
US_GOV_WEST_1 = 'us-gov-west-1'
-class SourceAmazonSqsAmazonSqsEnum(str, Enum):
+class SourceAmazonSqsAmazonSqs(str, Enum):
AMAZON_SQS = 'amazon-sqs'
@@ -48,9 +48,9 @@ class SourceAmazonSqs:
r"""If Enabled, messages will be deleted from the SQS Queue after being read. If Disabled, messages are left in the queue and can be read more than once. WARNING: Enabling this option can result in data loss in cases of failure, use with caution, see documentation for more detail."""
queue_url: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('queue_url') }})
r"""URL of the SQS Queue"""
- region: SourceAmazonSqsAWSRegionEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('region') }})
+ region: SourceAmazonSqsAWSRegion = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('region') }})
r"""AWS Region of the SQS Queue"""
- source_type: SourceAmazonSqsAmazonSqsEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceAmazonSqsAmazonSqs = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
access_key: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_key'), 'exclude': lambda f: f is None }})
r"""The Access Key ID of the AWS IAM Role to use for pulling messages"""
attributes_to_return: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('attributes_to_return'), 'exclude': lambda f: f is None }})
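
Since the renamed classes still subclass `(str, Enum)`, members keep comparing equal to their raw string values. A quick sketch:

```python
from airbyte.models import shared

region = shared.SourceAmazonSqsAWSRegion.US_EAST_1
assert region == 'us-east-1'  # str-backed enum: a member compares equal to its value
```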
diff --git a/src/airbyte/models/shared/source_amplitude.py b/src/airbyte/models/shared/source_amplitude.py
index 9acb713d..d272c8d2 100755
--- a/src/airbyte/models/shared/source_amplitude.py
+++ b/src/airbyte/models/shared/source_amplitude.py
@@ -7,12 +7,12 @@
from enum import Enum
from typing import Optional
-class SourceAmplitudeDataRegionEnum(str, Enum):
+class SourceAmplitudeDataRegion(str, Enum):
r"""Amplitude data region server"""
STANDARD_SERVER = 'Standard Server'
EU_RESIDENCY_SERVER = 'EU Residency Server'
-class SourceAmplitudeAmplitudeEnum(str, Enum):
+class SourceAmplitudeAmplitude(str, Enum):
AMPLITUDE = 'amplitude'
@@ -25,10 +25,10 @@ class SourceAmplitude:
r"""Amplitude API Key. See the setup guide for more information on how to obtain this key."""
secret_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('secret_key') }})
r"""Amplitude Secret Key. See the setup guide for more information on how to obtain this key."""
- source_type: SourceAmplitudeAmplitudeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceAmplitudeAmplitude = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date') }})
r"""UTC date and time in the format 2021-01-25T00:00:00Z. Any data before this date will not be replicated."""
- data_region: Optional[SourceAmplitudeDataRegionEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('data_region'), 'exclude': lambda f: f is None }})
+ data_region: Optional[SourceAmplitudeDataRegion] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('data_region'), 'exclude': lambda f: f is None }})
r"""Amplitude data region server"""
request_time_range: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('request_time_range'), 'exclude': lambda f: f is None }})
r"""According to Considerations too big time range in request can cause a timeout error. In this case, set shorter time interval in hours."""
diff --git a/src/airbyte/models/shared/source_apify_dataset.py b/src/airbyte/models/shared/source_apify_dataset.py
index 0bdfcdb3..5ae4af7f 100755
--- a/src/airbyte/models/shared/source_apify_dataset.py
+++ b/src/airbyte/models/shared/source_apify_dataset.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class SourceApifyDatasetApifyDatasetEnum(str, Enum):
+class SourceApifyDatasetApifyDataset(str, Enum):
APIFY_DATASET = 'apify-dataset'
@@ -18,7 +18,7 @@ class SourceApifyDataset:
dataset_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('datasetId') }})
r"""ID of the dataset you would like to load to Airbyte."""
- source_type: SourceApifyDatasetApifyDatasetEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceApifyDatasetApifyDataset = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
clean: Optional[bool] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('clean'), 'exclude': lambda f: f is None }})
r"""If set to true, only clean items will be downloaded from the dataset. See description of what clean means in Apify API docs. If not sure, set clean to false."""
\ No newline at end of file
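
All of this model's fields appear in the hunk, so a full construction sketch is possible (values illustrative):

```python
from airbyte.models import shared

source = shared.SourceApifyDataset(
    dataset_id='<dataset id>',  # placeholder
    source_type=shared.SourceApifyDatasetApifyDataset.APIFY_DATASET,
    clean=True,  # optional: download only clean items
)
```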
diff --git a/src/airbyte/models/shared/source_asana.py b/src/airbyte/models/shared/source_asana.py
index b726c7f6..46008e52 100755
--- a/src/airbyte/models/shared/source_asana.py
+++ b/src/airbyte/models/shared/source_asana.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Any, Optional
-class SourceAsanaCredentialsAuthenticateViaAsanaOauthCredentialsTitleEnum(str, Enum):
+class SourceAsanaCredentialsAuthenticateViaAsanaOauthCredentialsTitle(str, Enum):
r"""OAuth Credentials"""
O_AUTH_CREDENTIALS = 'OAuth Credentials'
@@ -20,10 +20,10 @@ class SourceAsanaCredentialsAuthenticateViaAsanaOauth:
client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }})
client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }})
refresh_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('refresh_token') }})
- option_title: Optional[SourceAsanaCredentialsAuthenticateViaAsanaOauthCredentialsTitleEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('option_title'), 'exclude': lambda f: f is None }})
+ option_title: Optional[SourceAsanaCredentialsAuthenticateViaAsanaOauthCredentialsTitle] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('option_title'), 'exclude': lambda f: f is None }})
r"""OAuth Credentials"""
-class SourceAsanaCredentialsAuthenticateWithPersonalAccessTokenCredentialsTitleEnum(str, Enum):
+class SourceAsanaCredentialsAuthenticateWithPersonalAccessTokenCredentialsTitle(str, Enum):
r"""PAT Credentials"""
PAT_CREDENTIALS = 'PAT Credentials'
@@ -35,10 +35,10 @@ class SourceAsanaCredentialsAuthenticateWithPersonalAccessToken:
personal_access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('personal_access_token') }})
r"""Asana Personal Access Token (generate yours here)."""
- option_title: Optional[SourceAsanaCredentialsAuthenticateWithPersonalAccessTokenCredentialsTitleEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('option_title'), 'exclude': lambda f: f is None }})
+ option_title: Optional[SourceAsanaCredentialsAuthenticateWithPersonalAccessTokenCredentialsTitle] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('option_title'), 'exclude': lambda f: f is None }})
r"""PAT Credentials"""
-class SourceAsanaAsanaEnum(str, Enum):
+class SourceAsanaAsana(str, Enum):
ASANA = 'asana'
@@ -47,7 +47,7 @@ class SourceAsanaAsanaEnum(str, Enum):
class SourceAsana:
r"""The values required to configure the source."""
- source_type: SourceAsanaAsanaEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceAsanaAsana = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
credentials: Optional[Any] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials'), 'exclude': lambda f: f is None }})
r"""Choose how to authenticate to Github"""
\ No newline at end of file
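
Because `credentials` is typed `Any` (a oneOf), callers pass one of the credential dataclasses directly. A sketch with the OAuth variant (secrets are placeholders):

```python
from airbyte.models import shared

credentials = shared.SourceAsanaCredentialsAuthenticateViaAsanaOauth(
    client_id='<client id>',          # placeholder
    client_secret='<client secret>',  # placeholder
    refresh_token='<refresh token>',  # placeholder
    option_title=shared.SourceAsanaCredentialsAuthenticateViaAsanaOauthCredentialsTitle.O_AUTH_CREDENTIALS,
)
source = shared.SourceAsana(
    source_type=shared.SourceAsanaAsana.ASANA,
    credentials=credentials,
)
```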
diff --git a/src/airbyte/models/shared/source_auth0.py b/src/airbyte/models/shared/source_auth0.py
index 14c2250e..526c42d1 100755
--- a/src/airbyte/models/shared/source_auth0.py
+++ b/src/airbyte/models/shared/source_auth0.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Any
-class SourceAuth0CredentialsOAuth2AccessTokenAuthenticationMethodEnum(str, Enum):
+class SourceAuth0CredentialsOAuth2AccessTokenAuthenticationMethod(str, Enum):
OAUTH2_ACCESS_TOKEN = 'oauth2_access_token'
@@ -17,9 +17,9 @@ class SourceAuth0CredentialsOAuth2AccessToken:
access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }})
r"""Also called API Access Token The access token used to call the Auth0 Management API Token. It's a JWT that contains specific grant permissions knowns as scopes."""
- auth_type: SourceAuth0CredentialsOAuth2AccessTokenAuthenticationMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }})
+ auth_type: SourceAuth0CredentialsOAuth2AccessTokenAuthenticationMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }})
-class SourceAuth0CredentialsOAuth2ConfidentialApplicationAuthenticationMethodEnum(str, Enum):
+class SourceAuth0CredentialsOAuth2ConfidentialApplicationAuthenticationMethod(str, Enum):
OAUTH2_CONFIDENTIAL_APPLICATION = 'oauth2_confidential_application'
@@ -29,13 +29,13 @@ class SourceAuth0CredentialsOAuth2ConfidentialApplication:
audience: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('audience') }})
r"""The audience for the token, which is your API. You can find this in the Identifier field on your API's settings tab"""
- auth_type: SourceAuth0CredentialsOAuth2ConfidentialApplicationAuthenticationMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }})
+ auth_type: SourceAuth0CredentialsOAuth2ConfidentialApplicationAuthenticationMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }})
client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }})
r"""Your application's Client ID. You can find this value on the application's settings tab after you login the admin portal."""
client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }})
r"""Your application's Client Secret. You can find this value on the application's settings tab after you login the admin portal."""
-class SourceAuth0Auth0Enum(str, Enum):
+class SourceAuth0Auth0(str, Enum):
AUTH0 = 'auth0'
@@ -47,5 +47,5 @@ class SourceAuth0:
base_url: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('base_url') }})
r"""The Authentication API is served over HTTPS. All URLs referenced in the documentation have the following base `https://YOUR_DOMAIN`"""
credentials: Any = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials') }})
- source_type: SourceAuth0Auth0Enum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceAuth0Auth0 = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
\ No newline at end of file
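
The same oneOf pattern applies here; a sketch using the access-token variant (token and domain are placeholders):

```python
from airbyte.models import shared

credentials = shared.SourceAuth0CredentialsOAuth2AccessToken(
    access_token='<management api token>',  # placeholder
    auth_type=shared.SourceAuth0CredentialsOAuth2AccessTokenAuthenticationMethod.OAUTH2_ACCESS_TOKEN,
)
source = shared.SourceAuth0(
    base_url='https://YOUR_DOMAIN',  # placeholder domain
    credentials=credentials,
    source_type=shared.SourceAuth0Auth0.AUTH0,
)
```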
diff --git a/src/airbyte/models/shared/source_aws_cloudtrail.py b/src/airbyte/models/shared/source_aws_cloudtrail.py
index 73bd44ab..b624cc27 100755
--- a/src/airbyte/models/shared/source_aws_cloudtrail.py
+++ b/src/airbyte/models/shared/source_aws_cloudtrail.py
@@ -8,7 +8,7 @@
from enum import Enum
from marshmallow import fields
-class SourceAwsCloudtrailAwsCloudtrailEnum(str, Enum):
+class SourceAwsCloudtrailAwsCloudtrail(str, Enum):
AWS_CLOUDTRAIL = 'aws-cloudtrail'
@@ -23,7 +23,7 @@ class SourceAwsCloudtrail:
r"""The default AWS Region to use, for example, us-west-1 or us-west-2. When specifying a Region inline during client initialization, this property is named region_name."""
aws_secret_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('aws_secret_key') }})
r"""AWS CloudTrail Access Key ID. See the docs for more information on how to obtain this key."""
- source_type: SourceAwsCloudtrailAwsCloudtrailEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceAwsCloudtrailAwsCloudtrail = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: date = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.dateisoformat(False), 'decoder': utils.datefromisoformat, 'mm_field': fields.DateTime(format='iso') }})
r"""The date you would like to replicate data. Data in AWS CloudTrail is available for last 90 days only. Format: YYYY-MM-DD."""
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_azure_blob_storage.py b/src/airbyte/models/shared/source_azure_blob_storage.py
index e4f850d4..15824dec 100755
--- a/src/airbyte/models/shared/source_azure_blob_storage.py
+++ b/src/airbyte/models/shared/source_azure_blob_storage.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Any, Optional
-class SourceAzureBlobStorageFormatJSONLinesNewlineDelimitedJSONFormatTypeEnum(str, Enum):
+class SourceAzureBlobStorageFormatJSONLinesNewlineDelimitedJSONFormatType(str, Enum):
JSONL = 'JSONL'
@@ -16,9 +16,9 @@ class SourceAzureBlobStorageFormatJSONLinesNewlineDelimitedJSONFormatTypeEnum(st
class SourceAzureBlobStorageFormatJSONLinesNewlineDelimitedJSON:
r"""Input data format"""
- format_type: SourceAzureBlobStorageFormatJSONLinesNewlineDelimitedJSONFormatTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format_type') }})
+ format_type: SourceAzureBlobStorageFormatJSONLinesNewlineDelimitedJSONFormatType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format_type') }})
-class SourceAzureBlobStorageAzureBlobStorageEnum(str, Enum):
+class SourceAzureBlobStorageAzureBlobStorage(str, Enum):
AZURE_BLOB_STORAGE = 'azure-blob-storage'
@@ -35,7 +35,7 @@ class SourceAzureBlobStorage:
r"""The name of the Azure blob storage container."""
format: Any = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format') }})
r"""Input data format"""
- source_type: SourceAzureBlobStorageAzureBlobStorageEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceAzureBlobStorageAzureBlobStorage = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
azure_blob_storage_blobs_prefix: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('azure_blob_storage_blobs_prefix'), 'exclude': lambda f: f is None }})
r"""The Azure blob storage prefix to be applied"""
azure_blob_storage_endpoint: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('azure_blob_storage_endpoint'), 'exclude': lambda f: f is None }})
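
The `format` field is typed `Any`; the JSONL dataclass above is what gets passed in. A minimal sketch:

```python
from airbyte.models import shared

fmt = shared.SourceAzureBlobStorageFormatJSONLinesNewlineDelimitedJSON(
    format_type=shared.SourceAzureBlobStorageFormatJSONLinesNewlineDelimitedJSONFormatType.JSONL,
)
```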
diff --git a/src/airbyte/models/shared/source_azure_table.py b/src/airbyte/models/shared/source_azure_table.py
index 531839d9..cbe03dd7 100755
--- a/src/airbyte/models/shared/source_azure_table.py
+++ b/src/airbyte/models/shared/source_azure_table.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class SourceAzureTableAzureTableEnum(str, Enum):
+class SourceAzureTableAzureTable(str, Enum):
AZURE_TABLE = 'azure-table'
@@ -16,7 +16,7 @@ class SourceAzureTableAzureTableEnum(str, Enum):
class SourceAzureTable:
r"""The values required to configure the source."""
- source_type: SourceAzureTableAzureTableEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceAzureTableAzureTable = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
storage_access_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('storage_access_key') }})
r"""Azure Table Storage Access Key. See the docs for more information on how to obtain this key."""
storage_account_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('storage_account_name') }})
diff --git a/src/airbyte/models/shared/source_bamboo_hr.py b/src/airbyte/models/shared/source_bamboo_hr.py
index b733c886..b675ed77 100755
--- a/src/airbyte/models/shared/source_bamboo_hr.py
+++ b/src/airbyte/models/shared/source_bamboo_hr.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class SourceBambooHrBambooHrEnum(str, Enum):
+class SourceBambooHrBambooHr(str, Enum):
BAMBOO_HR = 'bamboo-hr'
@@ -18,7 +18,7 @@ class SourceBambooHr:
api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }})
r"""Api key of bamboo hr"""
- source_type: SourceBambooHrBambooHrEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceBambooHrBambooHr = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
subdomain: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('subdomain') }})
r"""Sub Domain of bamboo hr"""
custom_reports_fields: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('custom_reports_fields'), 'exclude': lambda f: f is None }})
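
A construction sketch with the renamed types (credentials are placeholders):

```python
from airbyte.models import shared

source = shared.SourceBambooHr(
    api_key='<bamboohr api key>',     # placeholder
    source_type=shared.SourceBambooHrBambooHr.BAMBOO_HR,
    subdomain='<company subdomain>',  # placeholder
)
```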
diff --git a/src/airbyte/models/shared/source_bigcommerce.py b/src/airbyte/models/shared/source_bigcommerce.py
index e3862dcc..cfcf0b42 100755
--- a/src/airbyte/models/shared/source_bigcommerce.py
+++ b/src/airbyte/models/shared/source_bigcommerce.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceBigcommerceBigcommerceEnum(str, Enum):
+class SourceBigcommerceBigcommerce(str, Enum):
BIGCOMMERCE = 'bigcommerce'
@@ -17,7 +17,7 @@ class SourceBigcommerce:
access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }})
r"""Access Token for making authenticated requests."""
- source_type: SourceBigcommerceBigcommerceEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceBigcommerceBigcommerce = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date') }})
r"""The date you would like to replicate data. Format: YYYY-MM-DD."""
store_hash: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('store_hash') }})
diff --git a/src/airbyte/models/shared/source_bigquery.py b/src/airbyte/models/shared/source_bigquery.py
index cb465f60..54268dbe 100755
--- a/src/airbyte/models/shared/source_bigquery.py
+++ b/src/airbyte/models/shared/source_bigquery.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class SourceBigqueryBigqueryEnum(str, Enum):
+class SourceBigqueryBigquery(str, Enum):
BIGQUERY = 'bigquery'
@@ -20,7 +20,7 @@ class SourceBigquery:
r"""The contents of your Service Account Key JSON file. See the docs for more information on how to obtain this key."""
project_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('project_id') }})
r"""The GCP project ID for the project containing the target BigQuery dataset."""
- source_type: SourceBigqueryBigqueryEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceBigqueryBigquery = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
dataset_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dataset_id'), 'exclude': lambda f: f is None }})
r"""The dataset ID to search for tables and views. If you are only loading data from one dataset, setting this option could result in much faster schema discovery."""
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_bing_ads.py b/src/airbyte/models/shared/source_bing_ads.py
index 31a96ba7..7e54692e 100755
--- a/src/airbyte/models/shared/source_bing_ads.py
+++ b/src/airbyte/models/shared/source_bing_ads.py
@@ -9,10 +9,10 @@
from marshmallow import fields
from typing import Optional
-class SourceBingAdsAuthMethodEnum(str, Enum):
+class SourceBingAdsAuthMethod(str, Enum):
OAUTH2_0 = 'oauth2.0'
-class SourceBingAdsBingAdsEnum(str, Enum):
+class SourceBingAdsBingAds(str, Enum):
BING_ADS = 'bing-ads'
@@ -29,8 +29,8 @@ class SourceBingAds:
r"""Refresh Token to renew the expired Access Token."""
reports_start_date: date = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('reports_start_date'), 'encoder': utils.dateisoformat(False), 'decoder': utils.datefromisoformat, 'mm_field': fields.DateTime(format='iso') }})
r"""The start date from which to begin replicating report data. Any data generated before this date will not be replicated in reports. This is a UTC date in YYYY-MM-DD format."""
- source_type: SourceBingAdsBingAdsEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
- auth_method: Optional[SourceBingAdsAuthMethodEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_method'), 'exclude': lambda f: f is None }})
+ source_type: SourceBingAdsBingAds = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ auth_method: Optional[SourceBingAdsAuthMethod] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_method'), 'exclude': lambda f: f is None }})
client_secret: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret'), 'exclude': lambda f: f is None }})
r"""The Client Secret of your Microsoft Advertising developer application."""
lookback_window: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('lookback_window'), 'exclude': lambda f: f is None }})
diff --git a/src/airbyte/models/shared/source_braintree.py b/src/airbyte/models/shared/source_braintree.py
index f3161adc..51e482d0 100755
--- a/src/airbyte/models/shared/source_braintree.py
+++ b/src/airbyte/models/shared/source_braintree.py
@@ -10,14 +10,14 @@
from marshmallow import fields
from typing import Optional
-class SourceBraintreeEnvironmentEnum(str, Enum):
+class SourceBraintreeEnvironment(str, Enum):
r"""Environment specifies where the data will come from."""
DEVELOPMENT = 'Development'
SANDBOX = 'Sandbox'
QA = 'Qa'
PRODUCTION = 'Production'
-class SourceBraintreeBraintreeEnum(str, Enum):
+class SourceBraintreeBraintree(str, Enum):
BRAINTREE = 'braintree'
@@ -26,7 +26,7 @@ class SourceBraintreeBraintreeEnum(str, Enum):
class SourceBraintree:
r"""The values required to configure the source."""
- environment: SourceBraintreeEnvironmentEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('environment') }})
+ environment: SourceBraintreeEnvironment = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('environment') }})
r"""Environment specifies where the data will come from."""
merchant_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('merchant_id') }})
r"""The unique identifier for your entire gateway account. See the docs for more information on how to obtain this ID."""
@@ -34,7 +34,7 @@ class SourceBraintree:
r"""Braintree Private Key. See the docs for more information on how to obtain this key."""
public_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('public_key') }})
r"""Braintree Public Key. See the docs for more information on how to obtain this key."""
- source_type: SourceBraintreeBraintreeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceBraintreeBraintree = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso'), 'exclude': lambda f: f is None }})
r"""UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated."""
\ No newline at end of file
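
A sketch for the renamed Braintree types; the `private_key` field name is assumed from its docstring (the field line falls between hunks), and `start_date` is the optional `datetime` field:

```python
from datetime import datetime

from airbyte.models import shared

source = shared.SourceBraintree(
    environment=shared.SourceBraintreeEnvironment.SANDBOX,
    merchant_id='<merchant id>',  # placeholder
    private_key='<private key>',  # field name assumed from the docstring context
    public_key='<public key>',    # placeholder
    source_type=shared.SourceBraintreeBraintree.BRAINTREE,
    start_date=datetime(2017, 1, 25),  # optional; encoded as an ISO timestamp
)
```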
diff --git a/src/airbyte/models/shared/source_braze.py b/src/airbyte/models/shared/source_braze.py
index b62c811d..9cbf4e9c 100755
--- a/src/airbyte/models/shared/source_braze.py
+++ b/src/airbyte/models/shared/source_braze.py
@@ -8,7 +8,7 @@
from enum import Enum
from marshmallow import fields
-class SourceBrazeBrazeEnum(str, Enum):
+class SourceBrazeBraze(str, Enum):
BRAZE = 'braze'
@@ -19,7 +19,7 @@ class SourceBraze:
api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }})
r"""Braze REST API key"""
- source_type: SourceBrazeBrazeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceBrazeBraze = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: date = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.dateisoformat(False), 'decoder': utils.datefromisoformat, 'mm_field': fields.DateTime(format='iso') }})
r"""Rows after this date will be synced"""
url: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('url') }})
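
Here `start_date` is a `datetime.date`, serialized through the field's ISO encoder. A sketch (the endpoint URL is a placeholder):

```python
from datetime import date

from airbyte.models import shared

source = shared.SourceBraze(
    api_key='<braze rest api key>',  # placeholder
    source_type=shared.SourceBrazeBraze.BRAZE,
    start_date=date(2023, 1, 1),     # rows after this date will be synced
    url='https://rest.example.braze.com',  # placeholder endpoint
)
```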
diff --git a/src/airbyte/models/shared/source_chargebee.py b/src/airbyte/models/shared/source_chargebee.py
index 63fe7070..a8ab2f19 100755
--- a/src/airbyte/models/shared/source_chargebee.py
+++ b/src/airbyte/models/shared/source_chargebee.py
@@ -9,12 +9,12 @@
from enum import Enum
from marshmallow import fields
-class SourceChargebeeProductCatalogEnum(str, Enum):
+class SourceChargebeeProductCatalog(str, Enum):
r"""Product Catalog version of your Chargebee site. Instructions on how to find your version you may find here under `API Version` section."""
ONE_0 = '1.0'
TWO_0 = '2.0'
-class SourceChargebeeChargebeeEnum(str, Enum):
+class SourceChargebeeChargebee(str, Enum):
CHARGEBEE = 'chargebee'
@@ -23,13 +23,13 @@ class SourceChargebeeChargebeeEnum(str, Enum):
class SourceChargebee:
r"""The values required to configure the source."""
- product_catalog: SourceChargebeeProductCatalogEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('product_catalog') }})
+ product_catalog: SourceChargebeeProductCatalog = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('product_catalog') }})
r"""Product Catalog version of your Chargebee site. Instructions on how to find your version you may find here under `API Version` section."""
site: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('site') }})
r"""The site prefix for your Chargebee instance."""
site_api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('site_api_key') }})
r"""Chargebee API Key. See the docs for more information on how to obtain this key."""
- source_type: SourceChargebeeChargebeeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceChargebeeChargebee = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso') }})
r"""UTC date and time in the format 2021-01-25T00:00:00Z. Any data before this date will not be replicated."""
\ No newline at end of file
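
All required fields are visible in this hunk; a construction sketch with placeholder values:

```python
from datetime import datetime

from airbyte.models import shared

source = shared.SourceChargebee(
    product_catalog=shared.SourceChargebeeProductCatalog.TWO_0,
    site='<site prefix>',                # placeholder
    site_api_key='<chargebee api key>',  # placeholder
    source_type=shared.SourceChargebeeChargebee.CHARGEBEE,
    start_date=datetime(2021, 1, 25),
)
```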
diff --git a/src/airbyte/models/shared/source_chartmogul.py b/src/airbyte/models/shared/source_chartmogul.py
index e36b47ae..f5f6b2ed 100755
--- a/src/airbyte/models/shared/source_chartmogul.py
+++ b/src/airbyte/models/shared/source_chartmogul.py
@@ -9,14 +9,14 @@
from enum import Enum
from marshmallow import fields
-class SourceChartmogulIntervalEnum(str, Enum):
+class SourceChartmogulInterval(str, Enum):
r"""Some APIs such as Metrics require intervals to cluster data."""
DAY = 'day'
WEEK = 'week'
MONTH = 'month'
QUARTER = 'quarter'
-class SourceChartmogulChartmogulEnum(str, Enum):
+class SourceChartmogulChartmogul(str, Enum):
CHARTMOGUL = 'chartmogul'
@@ -27,9 +27,9 @@ class SourceChartmogul:
api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }})
r"""Your Chartmogul API key. See the docs for info on how to obtain this."""
- interval: SourceChartmogulIntervalEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('interval') }})
+ interval: SourceChartmogulInterval = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('interval') }})
r"""Some APIs such as Metrics require intervals to cluster data."""
- source_type: SourceChartmogulChartmogulEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceChartmogulChartmogul = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso') }})
r"""UTC date and time in the format 2017-01-25T00:00:00Z. When feasible, any data before this date will not be replicated."""
\ No newline at end of file
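
Likewise for Chartmogul, whose required fields are all shown (values illustrative):

```python
from datetime import datetime

from airbyte.models import shared

source = shared.SourceChartmogul(
    api_key='<chartmogul api key>',  # placeholder
    interval=shared.SourceChartmogulInterval.MONTH,
    source_type=shared.SourceChartmogulChartmogul.CHARTMOGUL,
    start_date=datetime(2017, 1, 25),
)
```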
diff --git a/src/airbyte/models/shared/source_clickhouse.py b/src/airbyte/models/shared/source_clickhouse.py
index 23bcce46..478d3c24 100755
--- a/src/airbyte/models/shared/source_clickhouse.py
+++ b/src/airbyte/models/shared/source_clickhouse.py
@@ -7,10 +7,10 @@
from enum import Enum
from typing import Any, Optional
-class SourceClickhouseClickhouseEnum(str, Enum):
+class SourceClickhouseClickhouse(str, Enum):
CLICKHOUSE = 'clickhouse'
-class SourceClickhouseTunnelMethodPasswordAuthenticationTunnelMethodEnum(str, Enum):
+class SourceClickhouseTunnelMethodPasswordAuthenticationTunnelMethod(str, Enum):
r"""Connect through a jump server tunnel host using username and password authentication"""
SSH_PASSWORD_AUTH = 'SSH_PASSWORD_AUTH'
@@ -22,7 +22,7 @@ class SourceClickhouseTunnelMethodPasswordAuthentication:
tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }})
r"""Hostname of the jump server host that allows inbound ssh tunnel."""
- tunnel_method: SourceClickhouseTunnelMethodPasswordAuthenticationTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: SourceClickhouseTunnelMethodPasswordAuthenticationTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""Connect through a jump server tunnel host using username and password authentication"""
tunnel_port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port') }})
r"""Port on the proxy/jump server that accepts inbound ssh connections."""
@@ -31,7 +31,7 @@ class SourceClickhouseTunnelMethodPasswordAuthentication:
tunnel_user_password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user_password') }})
r"""OS-level password for logging into the jump server host"""
-class SourceClickhouseTunnelMethodSSHKeyAuthenticationTunnelMethodEnum(str, Enum):
+class SourceClickhouseTunnelMethodSSHKeyAuthenticationTunnelMethod(str, Enum):
r"""Connect through a jump server tunnel host using username and ssh key"""
SSH_KEY_AUTH = 'SSH_KEY_AUTH'
@@ -45,14 +45,14 @@ class SourceClickhouseTunnelMethodSSHKeyAuthentication:
r"""OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )"""
tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }})
r"""Hostname of the jump server host that allows inbound ssh tunnel."""
- tunnel_method: SourceClickhouseTunnelMethodSSHKeyAuthenticationTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: SourceClickhouseTunnelMethodSSHKeyAuthenticationTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""Connect through a jump server tunnel host using username and ssh key"""
tunnel_port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port') }})
r"""Port on the proxy/jump server that accepts inbound ssh connections."""
tunnel_user: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user') }})
r"""OS-level username for logging into the jump server host."""
-class SourceClickhouseTunnelMethodNoTunnelTunnelMethodEnum(str, Enum):
+class SourceClickhouseTunnelMethodNoTunnelTunnelMethod(str, Enum):
r"""No ssh tunnel needed to connect to database"""
NO_TUNNEL = 'NO_TUNNEL'
@@ -62,7 +62,7 @@ class SourceClickhouseTunnelMethodNoTunnelTunnelMethodEnum(str, Enum):
class SourceClickhouseTunnelMethodNoTunnel:
r"""Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use."""
- tunnel_method: SourceClickhouseTunnelMethodNoTunnelTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: SourceClickhouseTunnelMethodNoTunnelTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""No ssh tunnel needed to connect to database"""
@@ -77,7 +77,7 @@ class SourceClickhouse:
r"""The host endpoint of the Clickhouse cluster."""
port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('port') }})
r"""The port of the database."""
- source_type: SourceClickhouseClickhouseEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceClickhouseClickhouse = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }})
r"""The username which is used to access the database."""
password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password'), 'exclude': lambda f: f is None }})
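
The three tunnel-method dataclasses form the `tunnel_method` oneOf; the no-tunnel variant needs only its discriminator. A sketch:

```python
from airbyte.models import shared

tunnel = shared.SourceClickhouseTunnelMethodNoTunnel(
    tunnel_method=shared.SourceClickhouseTunnelMethodNoTunnelTunnelMethod.NO_TUNNEL,
)
```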
diff --git a/src/airbyte/models/shared/source_clickup_api.py b/src/airbyte/models/shared/source_clickup_api.py
index d8b894c0..f1f53395 100755
--- a/src/airbyte/models/shared/source_clickup_api.py
+++ b/src/airbyte/models/shared/source_clickup_api.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class SourceClickupAPIClickupAPIEnum(str, Enum):
+class SourceClickupAPIClickupAPI(str, Enum):
CLICKUP_API = 'clickup-api'
@@ -18,7 +18,7 @@ class SourceClickupAPI:
api_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_token') }})
r"""Every ClickUp API call required authentication. This field is your personal API token. See here."""
- source_type: SourceClickupAPIClickupAPIEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceClickupAPIClickupAPI = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
folder_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('folder_id'), 'exclude': lambda f: f is None }})
r"""The ID of your folder in your space. Retrieve it from the `/space/{space_id}/folder` of the ClickUp API. See here."""
include_closed_tasks: Optional[bool] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('include_closed_tasks'), 'exclude': lambda f: f is None }})
diff --git a/src/airbyte/models/shared/source_close_com.py b/src/airbyte/models/shared/source_close_com.py
index 3c9ed287..86fb8327 100755
--- a/src/airbyte/models/shared/source_close_com.py
+++ b/src/airbyte/models/shared/source_close_com.py
@@ -10,7 +10,7 @@
from marshmallow import fields
from typing import Optional
-class SourceCloseComCloseComEnum(str, Enum):
+class SourceCloseComCloseCom(str, Enum):
CLOSE_COM = 'close-com'
@@ -21,7 +21,7 @@ class SourceCloseCom:
api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }})
r"""Close.com API key (usually starts with 'api_'; find yours here)."""
- source_type: SourceCloseComCloseComEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceCloseComCloseCom = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso'), 'exclude': lambda f: f is None }})
r"""The start date to sync data. Leave blank for full sync. Format: YYYY-MM-DD."""
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_coda.py b/src/airbyte/models/shared/source_coda.py
index 46d35187..110eaaf7 100755
--- a/src/airbyte/models/shared/source_coda.py
+++ b/src/airbyte/models/shared/source_coda.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceCodaCodaEnum(str, Enum):
+class SourceCodaCoda(str, Enum):
CODA = 'coda'
@@ -17,5 +17,5 @@ class SourceCoda:
auth_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_token') }})
r"""Bearer token"""
- source_type: SourceCodaCodaEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceCodaCoda = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
\ No newline at end of file
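
This model has exactly two fields, both shown above, so the sketch is fully determined (the token is a placeholder):

```python
from airbyte.models import shared

source = shared.SourceCoda(
    auth_token='<bearer token>',  # placeholder
    source_type=shared.SourceCodaCoda.CODA,
)
```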
diff --git a/src/airbyte/models/shared/source_coin_api.py b/src/airbyte/models/shared/source_coin_api.py
index e8eb9493..3ac082ed 100755
--- a/src/airbyte/models/shared/source_coin_api.py
+++ b/src/airbyte/models/shared/source_coin_api.py
@@ -7,12 +7,12 @@
from enum import Enum
from typing import Optional
-class SourceCoinAPIEnvironmentEnum(str, Enum):
+class SourceCoinAPIEnvironment(str, Enum):
r"""The environment to use. Either sandbox or production."""
SANDBOX = 'sandbox'
PRODUCTION = 'production'
-class SourceCoinAPICoinAPIEnum(str, Enum):
+class SourceCoinAPICoinAPI(str, Enum):
COIN_API = 'coin-api'
@@ -23,11 +23,11 @@ class SourceCoinAPI:
api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }})
r"""API Key"""
- environment: SourceCoinAPIEnvironmentEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('environment') }})
+ environment: SourceCoinAPIEnvironment = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('environment') }})
r"""The environment to use. Either sandbox or production."""
period: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('period') }})
r"""The period to use. See the documentation for a list. https://docs.coinapi.io/#list-all-periods-get"""
- source_type: SourceCoinAPICoinAPIEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceCoinAPICoinAPI = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date') }})
r"""The start date in ISO 8601 format."""
symbol_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('symbol_id') }})
diff --git a/src/airbyte/models/shared/source_coinmarketcap.py b/src/airbyte/models/shared/source_coinmarketcap.py
index 6864c697..3d439713 100755
--- a/src/airbyte/models/shared/source_coinmarketcap.py
+++ b/src/airbyte/models/shared/source_coinmarketcap.py
@@ -7,12 +7,12 @@
from enum import Enum
from typing import Optional
-class SourceCoinmarketcapDataTypeEnum(str, Enum):
+class SourceCoinmarketcapDataType(str, Enum):
r"""/latest: Latest market ticker quotes and averages for cryptocurrencies and exchanges. /historical: Intervals of historic market data like OHLCV data or data for use in charting libraries. See here."""
LATEST = 'latest'
HISTORICAL = 'historical'
-class SourceCoinmarketcapCoinmarketcapEnum(str, Enum):
+class SourceCoinmarketcapCoinmarketcap(str, Enum):
COINMARKETCAP = 'coinmarketcap'
@@ -23,9 +23,9 @@ class SourceCoinmarketcap:
api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }})
r"""Your API Key. See here. The token is case sensitive."""
- data_type: SourceCoinmarketcapDataTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('data_type') }})
+ data_type: SourceCoinmarketcapDataType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('data_type') }})
r"""/latest: Latest market ticker quotes and averages for cryptocurrencies and exchanges. /historical: Intervals of historic market data like OHLCV data or data for use in charting libraries. See here."""
- source_type: SourceCoinmarketcapCoinmarketcapEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceCoinmarketcapCoinmarketcap = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
symbols: Optional[list[str]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('symbols'), 'exclude': lambda f: f is None }})
r"""Cryptocurrency symbols. (only used for quotes stream)"""
\ No newline at end of file
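
A construction sketch for the renamed Coinmarketcap types (placeholder key; symbol list illustrative):

```python
from airbyte.models import shared

source = shared.SourceCoinmarketcap(
    api_key='<coinmarketcap api key>',  # placeholder; the token is case sensitive
    data_type=shared.SourceCoinmarketcapDataType.LATEST,
    source_type=shared.SourceCoinmarketcapCoinmarketcap.COINMARKETCAP,
    symbols=['BTC', 'ETH'],  # optional; only used for the quotes stream
)
```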
diff --git a/src/airbyte/models/shared/source_configcat.py b/src/airbyte/models/shared/source_configcat.py
index a34cde0a..ff88ecba 100755
--- a/src/airbyte/models/shared/source_configcat.py
+++ b/src/airbyte/models/shared/source_configcat.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceConfigcatConfigcatEnum(str, Enum):
+class SourceConfigcatConfigcat(str, Enum):
CONFIGCAT = 'configcat'
@@ -17,7 +17,7 @@ class SourceConfigcat:
password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password') }})
r"""Basic auth password. See here."""
- source_type: SourceConfigcatConfigcatEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceConfigcatConfigcat = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }})
r"""Basic auth user name. See here."""
\ No newline at end of file
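
Another fully visible model; a sketch with placeholder basic-auth values:

```python
from airbyte.models import shared

source = shared.SourceConfigcat(
    password='<basic auth password>',   # placeholder
    source_type=shared.SourceConfigcatConfigcat.CONFIGCAT,
    username='<basic auth user name>',  # placeholder
)
```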
diff --git a/src/airbyte/models/shared/source_confluence.py b/src/airbyte/models/shared/source_confluence.py
index 84f78ac6..9c34a765 100755
--- a/src/airbyte/models/shared/source_confluence.py
+++ b/src/airbyte/models/shared/source_confluence.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceConfluenceConfluenceEnum(str, Enum):
+class SourceConfluenceConfluence(str, Enum):
CONFLUENCE = 'confluence'
@@ -21,5 +21,5 @@ class SourceConfluence:
r"""Your Confluence domain name"""
email: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('email') }})
r"""Your Confluence login email"""
- source_type: SourceConfluenceConfluenceEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceConfluenceConfluence = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_datascope.py b/src/airbyte/models/shared/source_datascope.py
index 85fd4cf3..031d2a24 100755
--- a/src/airbyte/models/shared/source_datascope.py
+++ b/src/airbyte/models/shared/source_datascope.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceDatascopeDatascopeEnum(str, Enum):
+class SourceDatascopeDatascope(str, Enum):
DATASCOPE = 'datascope'
@@ -17,7 +17,7 @@ class SourceDatascope:
api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }})
r"""API Key"""
- source_type: SourceDatascopeDatascopeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceDatascopeDatascope = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date') }})
r"""Start date for the data to be replicated"""
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_delighted.py b/src/airbyte/models/shared/source_delighted.py
index af2b5b68..4d30c7cf 100755
--- a/src/airbyte/models/shared/source_delighted.py
+++ b/src/airbyte/models/shared/source_delighted.py
@@ -9,7 +9,7 @@
from enum import Enum
from marshmallow import fields
-class SourceDelightedDelightedEnum(str, Enum):
+class SourceDelightedDelighted(str, Enum):
DELIGHTED = 'delighted'
@@ -22,5 +22,5 @@ class SourceDelighted:
r"""A Delighted API key."""
since: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('since'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso') }})
r"""The date from which you'd like to replicate the data"""
- source_type: SourceDelightedDelightedEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceDelightedDelighted = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_dixa.py b/src/airbyte/models/shared/source_dixa.py
index 9d1969d3..fe08ac43 100755
--- a/src/airbyte/models/shared/source_dixa.py
+++ b/src/airbyte/models/shared/source_dixa.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class SourceDixaDixaEnum(str, Enum):
+class SourceDixaDixa(str, Enum):
DIXA = 'dixa'
@@ -18,7 +18,7 @@ class SourceDixa:
api_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_token') }})
r"""Dixa API token"""
- source_type: SourceDixaDixaEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceDixaDixa = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date') }})
r"""The connector pulls records updated from this date onwards."""
batch_size: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('batch_size'), 'exclude': lambda f: f is None }})
diff --git a/src/airbyte/models/shared/source_dockerhub.py b/src/airbyte/models/shared/source_dockerhub.py
index db894a18..fe5e8cfe 100755
--- a/src/airbyte/models/shared/source_dockerhub.py
+++ b/src/airbyte/models/shared/source_dockerhub.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceDockerhubDockerhubEnum(str, Enum):
+class SourceDockerhubDockerhub(str, Enum):
DOCKERHUB = 'dockerhub'
@@ -17,5 +17,5 @@ class SourceDockerhub:
docker_username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('docker_username') }})
r"""Username of DockerHub person or organization (for https://hub.docker.com/v2/repositories/USERNAME/ API call)"""
- source_type: SourceDockerhubDockerhubEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceDockerhubDockerhub = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_dremio.py b/src/airbyte/models/shared/source_dremio.py
index f8a6caf9..8f49697d 100755
--- a/src/airbyte/models/shared/source_dremio.py
+++ b/src/airbyte/models/shared/source_dremio.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceDremioDremioEnum(str, Enum):
+class SourceDremioDremio(str, Enum):
DREMIO = 'dremio'
@@ -19,5 +19,5 @@ class SourceDremio:
r"""API Key that is generated when you authenticate to Dremio API"""
base_url: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('base_url') }})
r"""URL of your Dremio instance"""
- source_type: SourceDremioDremioEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceDremioDremio = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_dynamodb.py b/src/airbyte/models/shared/source_dynamodb.py
index 8ddaace8..18b1509e 100755
--- a/src/airbyte/models/shared/source_dynamodb.py
+++ b/src/airbyte/models/shared/source_dynamodb.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class SourceDynamodbDynamodbRegionEnum(str, Enum):
+class SourceDynamodbDynamodbRegion(str, Enum):
r"""The region of the Dynamodb database"""
UNKNOWN = ''
US_EAST_1 = 'us-east-1'
@@ -36,7 +36,7 @@ class SourceDynamodbDynamodbRegionEnum(str, Enum):
US_GOV_EAST_1 = 'us-gov-east-1'
US_GOV_WEST_1 = 'us-gov-west-1'
-class SourceDynamodbDynamodbEnum(str, Enum):
+class SourceDynamodbDynamodb(str, Enum):
DYNAMODB = 'dynamodb'
@@ -49,10 +49,10 @@ class SourceDynamodb:
r"""The access key id to access Dynamodb. Airbyte requires read permissions to the database"""
secret_access_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('secret_access_key') }})
r"""The corresponding secret to the access key id."""
- source_type: SourceDynamodbDynamodbEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceDynamodbDynamodb = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
endpoint: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('endpoint'), 'exclude': lambda f: f is None }})
r"""the URL of the Dynamodb database"""
- region: Optional[SourceDynamodbDynamodbRegionEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('region'), 'exclude': lambda f: f is None }})
+ region: Optional[SourceDynamodbDynamodbRegion] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('region'), 'exclude': lambda f: f is None }})
r"""The region of the Dynamodb database"""
reserved_attribute_names: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('reserved_attribute_names'), 'exclude': lambda f: f is None }})
r"""Comma separated reserved attribute names present in your tables"""
diff --git a/src/airbyte/models/shared/source_e2e_test_cloud.py b/src/airbyte/models/shared/source_e2e_test_cloud.py
index 55a50462..202d20c5 100755
--- a/src/airbyte/models/shared/source_e2e_test_cloud.py
+++ b/src/airbyte/models/shared/source_e2e_test_cloud.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Any, Optional
-class SourceE2eTestCloudMockCatalogMultiSchemaTypeEnum(str, Enum):
+class SourceE2eTestCloudMockCatalogMultiSchemaType(str, Enum):
MULTI_STREAM = 'MULTI_STREAM'
@@ -18,9 +18,9 @@ class SourceE2eTestCloudMockCatalogMultiSchema:
stream_schemas: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('stream_schemas') }})
r"""A Json object specifying multiple data streams and their schemas. Each key in this object is one stream name. Each value is the schema for that stream. The schema should be compatible with draft-07. See this doc for examples."""
- type: SourceE2eTestCloudMockCatalogMultiSchemaTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('type') }})
+ type: SourceE2eTestCloudMockCatalogMultiSchemaType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('type') }})
-class SourceE2eTestCloudMockCatalogSingleSchemaTypeEnum(str, Enum):
+class SourceE2eTestCloudMockCatalogSingleSchemaType(str, Enum):
SINGLE_STREAM = 'SINGLE_STREAM'
@@ -33,14 +33,14 @@ class SourceE2eTestCloudMockCatalogSingleSchema:
r"""Name of the data stream."""
stream_schema: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('stream_schema') }})
r"""A Json schema for the stream. The schema should be compatible with draft-07. See this doc for examples."""
- type: SourceE2eTestCloudMockCatalogSingleSchemaTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('type') }})
+ type: SourceE2eTestCloudMockCatalogSingleSchemaType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('type') }})
stream_duplication: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('stream_duplication'), 'exclude': lambda f: f is None }})
r"""Duplicate the stream for easy load testing. Each stream name will have a number suffix. For example, if the stream name is \\"ds\\", the duplicated streams will be \\"ds_0\\", \\"ds_1\\", etc."""
-class SourceE2eTestCloudE2eTestCloudEnum(str, Enum):
+class SourceE2eTestCloudE2eTestCloud(str, Enum):
E2E_TEST_CLOUD = 'e2e-test-cloud'
-class SourceE2eTestCloudTypeEnum(str, Enum):
+class SourceE2eTestCloudType(str, Enum):
CONTINUOUS_FEED = 'CONTINUOUS_FEED'
@@ -52,10 +52,10 @@ class SourceE2eTestCloud:
max_messages: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('max_messages') }})
r"""Number of records to emit per stream. Min 1. Max 100 billion."""
mock_catalog: Any = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mock_catalog') }})
- source_type: SourceE2eTestCloudE2eTestCloudEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceE2eTestCloudE2eTestCloud = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
message_interval_ms: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('message_interval_ms'), 'exclude': lambda f: f is None }})
r"""Interval between messages in ms. Min 0 ms. Max 60000 ms (1 minute)."""
seed: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('seed'), 'exclude': lambda f: f is None }})
r"""When the seed is unspecified, the current time millis will be used as the seed. Range: [0, 1000000]."""
- type: Optional[SourceE2eTestCloudTypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('type'), 'exclude': lambda f: f is None }})
+ type: Optional[SourceE2eTestCloudType] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('type'), 'exclude': lambda f: f is None }})
\ No newline at end of file
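`mock_catalog` stays typed `Any` because it accepts either catalog variant. A sketch using the multi-schema variant (the schema string is illustrative):

from airbyte.models import shared

catalog = shared.SourceE2eTestCloudMockCatalogMultiSchema(
    stream_schemas='{"stream_a": {"type": "object"}}',  # JSON object mapping stream name -> draft-07 schema
    type=shared.SourceE2eTestCloudMockCatalogMultiSchemaType.MULTI_STREAM,
)
source = shared.SourceE2eTestCloud(
    max_messages=100,
    mock_catalog=catalog,
    source_type=shared.SourceE2eTestCloudE2eTestCloud.E2E_TEST_CLOUD,
    type=shared.SourceE2eTestCloudType.CONTINUOUS_FEED,
)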
diff --git a/src/airbyte/models/shared/source_emailoctopus.py b/src/airbyte/models/shared/source_emailoctopus.py
index 9d770003..939f2cfe 100755
--- a/src/airbyte/models/shared/source_emailoctopus.py
+++ b/src/airbyte/models/shared/source_emailoctopus.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceEmailoctopusEmailoctopusEnum(str, Enum):
+class SourceEmailoctopusEmailoctopus(str, Enum):
EMAILOCTOPUS = 'emailoctopus'
@@ -17,5 +17,5 @@ class SourceEmailoctopus:
api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }})
r"""EmailOctopus API Key. See the docs for information on how to generate this key."""
- source_type: SourceEmailoctopusEmailoctopusEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceEmailoctopusEmailoctopus = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_exchange_rates.py b/src/airbyte/models/shared/source_exchange_rates.py
index 2e43aff7..5f4dc691 100755
--- a/src/airbyte/models/shared/source_exchange_rates.py
+++ b/src/airbyte/models/shared/source_exchange_rates.py
@@ -9,7 +9,7 @@
from marshmallow import fields
from typing import Optional
-class SourceExchangeRatesExchangeRatesEnum(str, Enum):
+class SourceExchangeRatesExchangeRates(str, Enum):
EXCHANGE_RATES = 'exchange-rates'
@@ -20,7 +20,7 @@ class SourceExchangeRates:
access_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_key') }})
r"""Your API Key. See here. The key is case sensitive."""
- source_type: SourceExchangeRatesExchangeRatesEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceExchangeRatesExchangeRates = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: date = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.dateisoformat(False), 'decoder': utils.datefromisoformat, 'mm_field': fields.DateTime(format='iso') }})
r"""Start getting data from that date."""
base: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('base'), 'exclude': lambda f: f is None }})
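`start_date` is a `datetime.date` run through the ISO encoder declared in the field metadata. A sketch with placeholder values:

from datetime import date
from airbyte.models import shared

source = shared.SourceExchangeRates(
    access_key='<api-key>',
    source_type=shared.SourceExchangeRatesExchangeRates.EXCHANGE_RATES,
    start_date=date(2023, 1, 1),  # serialized as '2023-01-01'
    base='USD',                   # optional
)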
diff --git a/src/airbyte/models/shared/source_facebook_marketing.py b/src/airbyte/models/shared/source_facebook_marketing.py
index ef5eb569..82cded11 100755
--- a/src/airbyte/models/shared/source_facebook_marketing.py
+++ b/src/airbyte/models/shared/source_facebook_marketing.py
@@ -10,7 +10,7 @@
from marshmallow import fields
from typing import Optional
-class SourceFacebookMarketingInsightConfigValidActionBreakdownsEnum(str, Enum):
+class SourceFacebookMarketingInsightConfigValidActionBreakdowns(str, Enum):
r"""Generic enumeration.
Derive from this class to define new enumerations.
@@ -26,7 +26,7 @@ class SourceFacebookMarketingInsightConfigValidActionBreakdownsEnum(str, Enum):
ACTION_VIDEO_SOUND = 'action_video_sound'
ACTION_VIDEO_TYPE = 'action_video_type'
-class SourceFacebookMarketingInsightConfigValidBreakdownsEnum(str, Enum):
+class SourceFacebookMarketingInsightConfigValidBreakdowns(str, Enum):
r"""Generic enumeration.
Derive from this class to define new enumerations.
@@ -59,7 +59,7 @@ class SourceFacebookMarketingInsightConfigValidBreakdownsEnum(str, Enum):
TITLE_ASSET = 'title_asset'
VIDEO_ASSET = 'video_asset'
-class SourceFacebookMarketingInsightConfigValidEnumsEnum(str, Enum):
+class SourceFacebookMarketingInsightConfigValidEnums(str, Enum):
r"""Generic enumeration.
Derive from this class to define new enumerations.
@@ -196,7 +196,7 @@ class SourceFacebookMarketingInsightConfigValidEnumsEnum(str, Enum):
WEBSITE_PURCHASE_ROAS = 'website_purchase_roas'
WISH_BID = 'wish_bid'
-class SourceFacebookMarketingInsightConfigLevelEnum(str, Enum):
+class SourceFacebookMarketingInsightConfigLevel(str, Enum):
r"""Chosen level for API"""
AD = 'ad'
ADSET = 'adset'
@@ -211,24 +211,24 @@ class SourceFacebookMarketingInsightConfig:
name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name') }})
r"""The name value of insight"""
- action_breakdowns: Optional[list[SourceFacebookMarketingInsightConfigValidActionBreakdownsEnum]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('action_breakdowns'), 'exclude': lambda f: f is None }})
+ action_breakdowns: Optional[list[SourceFacebookMarketingInsightConfigValidActionBreakdowns]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('action_breakdowns'), 'exclude': lambda f: f is None }})
r"""A list of chosen action_breakdowns for action_breakdowns"""
- breakdowns: Optional[list[SourceFacebookMarketingInsightConfigValidBreakdownsEnum]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('breakdowns'), 'exclude': lambda f: f is None }})
+ breakdowns: Optional[list[SourceFacebookMarketingInsightConfigValidBreakdowns]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('breakdowns'), 'exclude': lambda f: f is None }})
r"""A list of chosen breakdowns for breakdowns"""
end_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('end_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso'), 'exclude': lambda f: f is None }})
r"""The date until which you'd like to replicate data for this stream, in the format YYYY-MM-DDT00:00:00Z. All data generated between the start date and this end date will be replicated. Not setting this option will result in always syncing the latest data."""
- fields_: Optional[list[SourceFacebookMarketingInsightConfigValidEnumsEnum]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('fields'), 'exclude': lambda f: f is None }})
+ fields_: Optional[list[SourceFacebookMarketingInsightConfigValidEnums]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('fields'), 'exclude': lambda f: f is None }})
r"""A list of chosen fields for fields parameter"""
insights_lookback_window: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('insights_lookback_window'), 'exclude': lambda f: f is None }})
r"""The attribution window"""
- level: Optional[SourceFacebookMarketingInsightConfigLevelEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('level'), 'exclude': lambda f: f is None }})
+ level: Optional[SourceFacebookMarketingInsightConfigLevel] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('level'), 'exclude': lambda f: f is None }})
r"""Chosen level for API"""
start_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso'), 'exclude': lambda f: f is None }})
r"""The date from which you'd like to replicate data for this stream, in the format YYYY-MM-DDT00:00:00Z."""
time_increment: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('time_increment'), 'exclude': lambda f: f is None }})
r"""Time window in days by which to aggregate statistics. The sync will be chunked into N day intervals, where N is the number of days you specified. For example, if you set this value to 7, then all statistics will be reported as 7-day aggregates by starting from the start_date. If the start and end dates are October 1st and October 30th, then the connector will output 5 records: 01 - 06, 07 - 13, 14 - 20, 21 - 27, and 28 - 30 (3 days only)."""
-class SourceFacebookMarketingFacebookMarketingEnum(str, Enum):
+class SourceFacebookMarketingFacebookMarketing(str, Enum):
FACEBOOK_MARKETING = 'facebook-marketing'
@@ -241,7 +241,7 @@ class SourceFacebookMarketing:
r"""The value of the generated access token. From your App’s Dashboard, click on \\"Marketing API\\" then \\"Tools\\". Select permissions ads_management, ads_read, read_insights, business_management. Then click on \\"Get token\\". See the docs for more information."""
account_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('account_id') }})
r"""The Facebook Ad account ID to use when pulling data from the Facebook Marketing API. Open your Meta Ads Manager. The Ad account ID number is in the account dropdown menu or in your browser's address bar. See the docs for more information."""
- source_type: SourceFacebookMarketingFacebookMarketingEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceFacebookMarketingFacebookMarketing = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso') }})
r"""The date from which you'd like to replicate data for all incremental streams, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated."""
action_breakdowns_allow_empty: Optional[bool] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('action_breakdowns_allow_empty'), 'exclude': lambda f: f is None }})
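The insight config now takes lists of the renamed enums. A sketch of one custom insight (names and values illustrative):

from airbyte.models import shared

insight = shared.SourceFacebookMarketingInsightConfig(
    name='ad_performance',
    action_breakdowns=[shared.SourceFacebookMarketingInsightConfigValidActionBreakdowns.ACTION_VIDEO_TYPE],
    level=shared.SourceFacebookMarketingInsightConfigLevel.AD,
    time_increment=7,  # 7-day aggregates, per the docstring above
)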
diff --git a/src/airbyte/models/shared/source_facebook_pages.py b/src/airbyte/models/shared/source_facebook_pages.py
index 6bac295c..47dfea35 100755
--- a/src/airbyte/models/shared/source_facebook_pages.py
+++ b/src/airbyte/models/shared/source_facebook_pages.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceFacebookPagesFacebookPagesEnum(str, Enum):
+class SourceFacebookPagesFacebookPages(str, Enum):
FACEBOOK_PAGES = 'facebook-pages'
@@ -19,5 +19,5 @@ class SourceFacebookPages:
r"""Facebook Page Access Token"""
page_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('page_id') }})
r"""Page ID"""
- source_type: SourceFacebookPagesFacebookPagesEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceFacebookPagesFacebookPages = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_faker.py b/src/airbyte/models/shared/source_faker.py
index aa51239a..a53862ff 100755
--- a/src/airbyte/models/shared/source_faker.py
+++ b/src/airbyte/models/shared/source_faker.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class SourceFakerFakerEnum(str, Enum):
+class SourceFakerFaker(str, Enum):
FAKER = 'faker'
@@ -18,7 +18,7 @@ class SourceFaker:
count: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('count') }})
r"""How many users should be generated in total. This setting does not apply to the purchases or products stream."""
- source_type: SourceFakerFakerEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceFakerFaker = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
parallelism: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('parallelism'), 'exclude': lambda f: f is None }})
r"""How many parallel workers should we use to generate fake data? Choose a value equal to the number of CPUs you will allocate to this source."""
records_per_slice: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('records_per_slice'), 'exclude': lambda f: f is None }})
diff --git a/src/airbyte/models/shared/source_fauna.py b/src/airbyte/models/shared/source_fauna.py
index 3deaa9cd..4b0dfcd8 100755
--- a/src/airbyte/models/shared/source_fauna.py
+++ b/src/airbyte/models/shared/source_fauna.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Any, Optional
-class SourceFaunaCollectionDeletionsEnabledDeletionModeEnum(str, Enum):
+class SourceFaunaCollectionDeletionsEnabledDeletionMode(str, Enum):
DELETED_FIELD = 'deleted_field'
@@ -22,9 +22,9 @@ class SourceFaunaCollectionDeletionsEnabled:
column: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('column') }})
r"""Name of the \\"deleted at\\" column."""
- deletion_mode: SourceFaunaCollectionDeletionsEnabledDeletionModeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('deletion_mode') }})
+ deletion_mode: SourceFaunaCollectionDeletionsEnabledDeletionMode = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('deletion_mode') }})
-class SourceFaunaCollectionDeletionsDisabledDeletionModeEnum(str, Enum):
+class SourceFaunaCollectionDeletionsDisabledDeletionMode(str, Enum):
IGNORE = 'ignore'
@@ -37,7 +37,7 @@ class SourceFaunaCollectionDeletionsDisabled:
Enabled - Enables this feature. When a document is deleted, the connector exports a record with a \"deleted at\" column containing the time that the document was deleted.
"""
- deletion_mode: SourceFaunaCollectionDeletionsDisabledDeletionModeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('deletion_mode') }})
+ deletion_mode: SourceFaunaCollectionDeletionsDisabledDeletionMode = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('deletion_mode') }})
@dataclass_json(undefined=Undefined.EXCLUDE)
@@ -57,7 +57,7 @@ class SourceFaunaCollection:
See the docs.
"""
-class SourceFaunaFaunaEnum(str, Enum):
+class SourceFaunaFauna(str, Enum):
FAUNA = 'fauna'
@@ -74,7 +74,7 @@ class SourceFauna:
r"""URL scheme."""
secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('secret') }})
r"""Fauna secret, used when authenticating with the database."""
- source_type: SourceFaunaFaunaEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceFaunaFauna = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
collection: Optional[SourceFaunaCollection] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('collection'), 'exclude': lambda f: f is None }})
r"""Settings for the Fauna Collection."""
\ No newline at end of file
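The two deletion-mode variants keep separate classes after the rename. A sketch of the enabled variant, which exports a "deleted at" column:

from airbyte.models import shared

deletions = shared.SourceFaunaCollectionDeletionsEnabled(
    column='deleted_at',  # name of the "deleted at" column
    deletion_mode=shared.SourceFaunaCollectionDeletionsEnabledDeletionMode.DELETED_FIELD,
)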
diff --git a/src/airbyte/models/shared/source_file_secure.py b/src/airbyte/models/shared/source_file_secure.py
index a7e87adc..d67e8ee5 100755
--- a/src/airbyte/models/shared/source_file_secure.py
+++ b/src/airbyte/models/shared/source_file_secure.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Any, Optional
-class SourceFileSecureFileFormatEnum(str, Enum):
+class SourceFileSecureFileFormat(str, Enum):
r"""The Format of the file which should be replicated (Warning: some formats may be experimental, please refer to the docs)."""
CSV = 'csv'
JSON = 'json'
@@ -18,7 +18,7 @@ class SourceFileSecureFileFormatEnum(str, Enum):
PARQUET = 'parquet'
YAML = 'yaml'
-class SourceFileSecureProviderSFTPSecureFileTransferProtocolStorageEnum(str, Enum):
+class SourceFileSecureProviderSFTPSecureFileTransferProtocolStorage(str, Enum):
SFTP = 'SFTP'
@@ -28,12 +28,12 @@ class SourceFileSecureProviderSFTPSecureFileTransferProtocol:
r"""The storage Provider or Location of the file(s) which should be replicated."""
host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('host') }})
- storage: SourceFileSecureProviderSFTPSecureFileTransferProtocolStorageEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('storage') }})
+ storage: SourceFileSecureProviderSFTPSecureFileTransferProtocolStorage = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('storage') }})
user: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('user') }})
password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password'), 'exclude': lambda f: f is None }})
port: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('port'), 'exclude': lambda f: f is None }})
-class SourceFileSecureProviderSCPSecureCopyProtocolStorageEnum(str, Enum):
+class SourceFileSecureProviderSCPSecureCopyProtocolStorage(str, Enum):
SCP = 'SCP'
@@ -43,12 +43,12 @@ class SourceFileSecureProviderSCPSecureCopyProtocol:
r"""The storage Provider or Location of the file(s) which should be replicated."""
host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('host') }})
- storage: SourceFileSecureProviderSCPSecureCopyProtocolStorageEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('storage') }})
+ storage: SourceFileSecureProviderSCPSecureCopyProtocolStorage = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('storage') }})
user: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('user') }})
password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password'), 'exclude': lambda f: f is None }})
port: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('port'), 'exclude': lambda f: f is None }})
-class SourceFileSecureProviderSSHSecureShellStorageEnum(str, Enum):
+class SourceFileSecureProviderSSHSecureShellStorage(str, Enum):
SSH = 'SSH'
@@ -58,12 +58,12 @@ class SourceFileSecureProviderSSHSecureShell:
r"""The storage Provider or Location of the file(s) which should be replicated."""
host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('host') }})
- storage: SourceFileSecureProviderSSHSecureShellStorageEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('storage') }})
+ storage: SourceFileSecureProviderSSHSecureShellStorage = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('storage') }})
user: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('user') }})
password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password'), 'exclude': lambda f: f is None }})
port: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('port'), 'exclude': lambda f: f is None }})
-class SourceFileSecureProviderAzBlobAzureBlobStorageStorageEnum(str, Enum):
+class SourceFileSecureProviderAzBlobAzureBlobStorageStorage(str, Enum):
AZ_BLOB = 'AzBlob'
@@ -72,7 +72,7 @@ class SourceFileSecureProviderAzBlobAzureBlobStorageStorageEnum(str, Enum):
class SourceFileSecureProviderAzBlobAzureBlobStorage:
r"""The storage Provider or Location of the file(s) which should be replicated."""
- storage: SourceFileSecureProviderAzBlobAzureBlobStorageStorageEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('storage') }})
+ storage: SourceFileSecureProviderAzBlobAzureBlobStorageStorage = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('storage') }})
storage_account: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('storage_account') }})
r"""The globally unique name of the storage account that the desired blob sits within. See here for more details."""
sas_token: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sas_token'), 'exclude': lambda f: f is None }})
@@ -80,7 +80,7 @@ class SourceFileSecureProviderAzBlobAzureBlobStorage:
shared_key: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('shared_key'), 'exclude': lambda f: f is None }})
r"""To access Azure Blob Storage, this connector would need credentials with the proper permissions. One option is a storage account shared key (aka account key or access key). If accessing publicly available data, this field is not necessary."""
-class SourceFileSecureProviderS3AmazonWebServicesStorageEnum(str, Enum):
+class SourceFileSecureProviderS3AmazonWebServicesStorage(str, Enum):
S3 = 'S3'
@@ -89,13 +89,13 @@ class SourceFileSecureProviderS3AmazonWebServicesStorageEnum(str, Enum):
class SourceFileSecureProviderS3AmazonWebServices:
r"""The storage Provider or Location of the file(s) which should be replicated."""
- storage: SourceFileSecureProviderS3AmazonWebServicesStorageEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('storage') }})
+ storage: SourceFileSecureProviderS3AmazonWebServicesStorage = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('storage') }})
aws_access_key_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('aws_access_key_id'), 'exclude': lambda f: f is None }})
r"""In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary."""
aws_secret_access_key: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('aws_secret_access_key'), 'exclude': lambda f: f is None }})
r"""In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary."""
-class SourceFileSecureProviderGCSGoogleCloudStorageStorageEnum(str, Enum):
+class SourceFileSecureProviderGCSGoogleCloudStorageStorage(str, Enum):
GCS = 'GCS'
@@ -104,11 +104,11 @@ class SourceFileSecureProviderGCSGoogleCloudStorageStorageEnum(str, Enum):
class SourceFileSecureProviderGCSGoogleCloudStorage:
r"""The storage Provider or Location of the file(s) which should be replicated."""
- storage: SourceFileSecureProviderGCSGoogleCloudStorageStorageEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('storage') }})
+ storage: SourceFileSecureProviderGCSGoogleCloudStorageStorage = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('storage') }})
service_account_json: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('service_account_json'), 'exclude': lambda f: f is None }})
r"""In order to access private Buckets stored on Google Cloud, this connector would need a service account json credentials with the proper permissions as described here. Please generate the credentials.json file and copy/paste its content to this field (expecting JSON formats). If accessing publicly available data, this field is not necessary."""
-class SourceFileSecureProviderHTTPSPublicWebStorageEnum(str, Enum):
+class SourceFileSecureProviderHTTPSPublicWebStorage(str, Enum):
HTTPS = 'HTTPS'
@@ -117,11 +117,11 @@ class SourceFileSecureProviderHTTPSPublicWebStorageEnum(str, Enum):
class SourceFileSecureProviderHTTPSPublicWeb:
r"""The storage Provider or Location of the file(s) which should be replicated."""
- storage: SourceFileSecureProviderHTTPSPublicWebStorageEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('storage') }})
+ storage: SourceFileSecureProviderHTTPSPublicWebStorage = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('storage') }})
user_agent: Optional[bool] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('user_agent'), 'exclude': lambda f: f is None }})
r"""Add User-Agent to request"""
-class SourceFileSecureFileSecureEnum(str, Enum):
+class SourceFileSecureFileSecure(str, Enum):
FILE_SECURE = 'file-secure'
@@ -132,11 +132,11 @@ class SourceFileSecure:
dataset_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dataset_name') }})
r"""The Name of the final table to replicate this file into (should include letters, numbers dash and underscores only)."""
- format: SourceFileSecureFileFormatEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format') }})
+ format: SourceFileSecureFileFormat = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format') }})
r"""The Format of the file which should be replicated (Warning: some formats may be experimental, please refer to the docs)."""
provider: Any = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('provider') }})
r"""The storage Provider or Location of the file(s) which should be replicated."""
- source_type: SourceFileSecureFileSecureEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceFileSecureFileSecure = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
url: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('url') }})
r"""The URL path to access the file which should be replicated."""
reader_options: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('reader_options'), 'exclude': lambda f: f is None }})
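`provider` is typed `Any` and accepts any of the storage variants above. A sketch wiring in the HTTPS provider (URL and dataset name are placeholders):

from airbyte.models import shared

provider = shared.SourceFileSecureProviderHTTPSPublicWeb(
    storage=shared.SourceFileSecureProviderHTTPSPublicWebStorage.HTTPS,
    user_agent=True,  # optional: add a User-Agent to the request
)
source = shared.SourceFileSecure(
    dataset_name='my_dataset',
    format=shared.SourceFileSecureFileFormat.CSV,
    provider=provider,
    source_type=shared.SourceFileSecureFileSecure.FILE_SECURE,
    url='https://example.com/data.csv',
)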
diff --git a/src/airbyte/models/shared/source_firebolt.py b/src/airbyte/models/shared/source_firebolt.py
index c2788580..930eff5b 100755
--- a/src/airbyte/models/shared/source_firebolt.py
+++ b/src/airbyte/models/shared/source_firebolt.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class SourceFireboltFireboltEnum(str, Enum):
+class SourceFireboltFirebolt(str, Enum):
FIREBOLT = 'firebolt'
@@ -20,7 +20,7 @@ class SourceFirebolt:
r"""The database to connect to."""
password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password') }})
r"""Firebolt password."""
- source_type: SourceFireboltFireboltEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceFireboltFirebolt = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }})
r"""Firebolt email address you use to login."""
account: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('account'), 'exclude': lambda f: f is None }})
diff --git a/src/airbyte/models/shared/source_freshcaller.py b/src/airbyte/models/shared/source_freshcaller.py
index 06d45836..d5948bb3 100755
--- a/src/airbyte/models/shared/source_freshcaller.py
+++ b/src/airbyte/models/shared/source_freshcaller.py
@@ -2,12 +2,15 @@
from __future__ import annotations
import dataclasses
+import dateutil.parser
from airbyte import utils
from dataclasses_json import Undefined, dataclass_json
+from datetime import datetime
from enum import Enum
-from typing import Any, Optional
+from marshmallow import fields
+from typing import Optional
-class SourceFreshcallerFreshcallerEnum(str, Enum):
+class SourceFreshcallerFreshcaller(str, Enum):
FRESHCALLER = 'freshcaller'
@@ -17,11 +20,11 @@ class SourceFreshcaller:
r"""The values required to configure the source."""
api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }})
- r"""Freshcaller API Key. See the docs for more information on how to obtain this key."""
+ r"""Freshcaller API Key. See the docs for more information on how to obtain this key."""
domain: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('domain') }})
r"""Used to construct Base URL for the Freshcaller APIs"""
- source_type: SourceFreshcallerFreshcallerEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
- start_date: Any = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date') }})
+ source_type: SourceFreshcallerFreshcaller = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso') }})
r"""UTC date and time. Any data created after this date will be replicated."""
requests_per_minute: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('requests_per_minute'), 'exclude': lambda f: f is None }})
r"""The number of requests per minute that this source allowed to use. There is a rate limit of 50 requests per minute per app per account."""
diff --git a/src/airbyte/models/shared/source_freshdesk.py b/src/airbyte/models/shared/source_freshdesk.py
index 05ad6eda..7f2f69f0 100755
--- a/src/airbyte/models/shared/source_freshdesk.py
+++ b/src/airbyte/models/shared/source_freshdesk.py
@@ -10,7 +10,7 @@
from marshmallow import fields
from typing import Optional
-class SourceFreshdeskFreshdeskEnum(str, Enum):
+class SourceFreshdeskFreshdesk(str, Enum):
FRESHDESK = 'freshdesk'
@@ -23,7 +23,7 @@ class SourceFreshdesk:
r"""Freshdesk API Key. See the docs for more information on how to obtain this key."""
domain: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('domain') }})
r"""Freshdesk domain"""
- source_type: SourceFreshdeskFreshdeskEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceFreshdeskFreshdesk = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
requests_per_minute: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('requests_per_minute'), 'exclude': lambda f: f is None }})
r"""The number of requests per minute that this source allowed to use. There is a rate limit of 50 requests per minute per app per account."""
start_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso'), 'exclude': lambda f: f is None }})
diff --git a/src/airbyte/models/shared/source_freshsales.py b/src/airbyte/models/shared/source_freshsales.py
index 3004fb9a..429af1e6 100755
--- a/src/airbyte/models/shared/source_freshsales.py
+++ b/src/airbyte/models/shared/source_freshsales.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceFreshsalesFreshsalesEnum(str, Enum):
+class SourceFreshsalesFreshsales(str, Enum):
FRESHSALES = 'freshsales'
@@ -19,5 +19,5 @@ class SourceFreshsales:
r"""Freshsales API Key. See here. The key is case sensitive."""
domain_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('domain_name') }})
r"""The Name of your Freshsales domain"""
- source_type: SourceFreshsalesFreshsalesEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceFreshsalesFreshsales = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_gcs.py b/src/airbyte/models/shared/source_gcs.py
index 480ab196..bbf93747 100755
--- a/src/airbyte/models/shared/source_gcs.py
+++ b/src/airbyte/models/shared/source_gcs.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceGcsGcsEnum(str, Enum):
+class SourceGcsGcs(str, Enum):
GCS = 'gcs'
@@ -21,5 +21,5 @@ class SourceGcs:
r"""GCS path to data"""
service_account: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('service_account') }})
r"""Enter your Google Cloud service account key in JSON format"""
- source_type: SourceGcsGcsEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceGcsGcs = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_getlago.py b/src/airbyte/models/shared/source_getlago.py
index d851d9e1..38f532b7 100755
--- a/src/airbyte/models/shared/source_getlago.py
+++ b/src/airbyte/models/shared/source_getlago.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceGetlagoGetlagoEnum(str, Enum):
+class SourceGetlagoGetlago(str, Enum):
GETLAGO = 'getlago'
@@ -17,5 +17,5 @@ class SourceGetlago:
api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }})
r"""Your API Key. See here."""
- source_type: SourceGetlagoGetlagoEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceGetlagoGetlago = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_github.py b/src/airbyte/models/shared/source_github.py
index fb8f8793..4379e4db 100755
--- a/src/airbyte/models/shared/source_github.py
+++ b/src/airbyte/models/shared/source_github.py
@@ -10,7 +10,7 @@
from marshmallow import fields
from typing import Any, Optional
-class SourceGithubCredentialsPersonalAccessTokenOptionTitleEnum(str, Enum):
+class SourceGithubCredentialsPersonalAccessTokenOptionTitle(str, Enum):
PAT_CREDENTIALS = 'PAT Credentials'
@@ -21,9 +21,9 @@ class SourceGithubCredentialsPersonalAccessToken:
personal_access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('personal_access_token') }})
r"""Log into GitHub and then generate a personal access token. To load balance your API quota consumption across multiple API tokens, input multiple tokens separated with \\",\\" """
- option_title: Optional[SourceGithubCredentialsPersonalAccessTokenOptionTitleEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('option_title'), 'exclude': lambda f: f is None }})
+ option_title: Optional[SourceGithubCredentialsPersonalAccessTokenOptionTitle] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('option_title'), 'exclude': lambda f: f is None }})
-class SourceGithubCredentialsOAuthOptionTitleEnum(str, Enum):
+class SourceGithubCredentialsOAuthOptionTitle(str, Enum):
O_AUTH_CREDENTIALS = 'OAuth Credentials'
@@ -34,9 +34,9 @@ class SourceGithubCredentialsOAuth:
access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }})
r"""OAuth access token"""
- option_title: Optional[SourceGithubCredentialsOAuthOptionTitleEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('option_title'), 'exclude': lambda f: f is None }})
+ option_title: Optional[SourceGithubCredentialsOAuthOptionTitle] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('option_title'), 'exclude': lambda f: f is None }})
-class SourceGithubGithubEnum(str, Enum):
+class SourceGithubGithub(str, Enum):
GITHUB = 'github'
@@ -47,7 +47,7 @@ class SourceGithub:
repository: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('repository') }})
r"""Space-delimited list of GitHub organizations/repositories, e.g. `airbytehq/airbyte` for single repository, `airbytehq/*` for get all repositories from organization and `airbytehq/airbyte airbytehq/another-repo` for multiple repositories."""
- source_type: SourceGithubGithubEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceGithubGithub = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso') }})
r"""The date from which you'd like to replicate data from GitHub in the format YYYY-MM-DDT00:00:00Z. For the streams which support this configuration, only data generated on or after the start date will be replicated. This field doesn't apply to all streams, see the docs for more info"""
branch: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('branch'), 'exclude': lambda f: f is None }})
@@ -56,4 +56,6 @@ class SourceGithub:
r"""Choose how to authenticate to GitHub"""
page_size_for_large_streams: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('page_size_for_large_streams'), 'exclude': lambda f: f is None }})
r"""The Github connector contains several streams with a large amount of data. The page size of such streams depends on the size of your repository. We recommended that you specify values between 10 and 30."""
+ requests_per_hour: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('requests_per_hour'), 'exclude': lambda f: f is None }})
+ r"""The GitHub API allows for a maximum of 5000 requests per hour (15000 for Github Enterprise). You can specify a lower value to limit your use of the API quota."""
\ No newline at end of file
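Alongside the renames, this diff adds the optional `requests_per_hour` throttle. A sketch (dates and values are placeholders; the optional credentials object is omitted):

import dateutil.parser
from airbyte.models import shared

source = shared.SourceGithub(
    repository='airbytehq/airbyte',
    source_type=shared.SourceGithubGithub.GITHUB,
    start_date=dateutil.parser.isoparse('2021-01-01T00:00:00Z'),
    requests_per_hour=2500,  # new field: stay below the 5000/hour API cap
)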
diff --git a/src/airbyte/models/shared/source_gitlab.py b/src/airbyte/models/shared/source_gitlab.py
index 45d6d124..c839ffd6 100755
--- a/src/airbyte/models/shared/source_gitlab.py
+++ b/src/airbyte/models/shared/source_gitlab.py
@@ -10,7 +10,7 @@
from marshmallow import fields
from typing import Any, Optional
-class SourceGitlabCredentialsPrivateTokenAuthTypeEnum(str, Enum):
+class SourceGitlabCredentialsPrivateTokenAuthType(str, Enum):
ACCESS_TOKEN = 'access_token'
@@ -20,9 +20,9 @@ class SourceGitlabCredentialsPrivateToken:
access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }})
r"""Log into your Gitlab account and then generate a personal Access Token."""
- auth_type: Optional[SourceGitlabCredentialsPrivateTokenAuthTypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
+ auth_type: Optional[SourceGitlabCredentialsPrivateTokenAuthType] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
-class SourceGitlabCredentialsOAuth20AuthTypeEnum(str, Enum):
+class SourceGitlabCredentialsOAuth20AuthType(str, Enum):
OAUTH2_0 = 'oauth2.0'
@@ -40,9 +40,9 @@ class SourceGitlabCredentialsOAuth20:
r"""The key to refresh the expired access_token."""
token_expiry_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('token_expiry_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso') }})
r"""The date-time when the access token should be refreshed."""
- auth_type: Optional[SourceGitlabCredentialsOAuth20AuthTypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
+ auth_type: Optional[SourceGitlabCredentialsOAuth20AuthType] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
-class SourceGitlabGitlabEnum(str, Enum):
+class SourceGitlabGitlab(str, Enum):
GITLAB = 'gitlab'
@@ -54,7 +54,7 @@ class SourceGitlab:
api_url: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_url') }})
r"""Please enter your basic URL from GitLab instance."""
credentials: Any = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials') }})
- source_type: SourceGitlabGitlabEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceGitlabGitlab = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso') }})
r"""The date from which you'd like to replicate data for GitLab API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated."""
groups: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('groups'), 'exclude': lambda f: f is None }})
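`credentials` remains typed `Any`, taking either the private-token or the OAuth variant. A sketch using the private-token variant (token is a placeholder):

import dateutil.parser
from airbyte.models import shared

creds = shared.SourceGitlabCredentialsPrivateToken(
    access_token='<gitlab-personal-access-token>',
    auth_type=shared.SourceGitlabCredentialsPrivateTokenAuthType.ACCESS_TOKEN,
)
source = shared.SourceGitlab(
    api_url='https://gitlab.com',
    credentials=creds,
    source_type=shared.SourceGitlabGitlab.GITLAB,
    start_date=dateutil.parser.isoparse('2021-03-01T00:00:00Z'),
)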
diff --git a/src/airbyte/models/shared/source_glassfrog.py b/src/airbyte/models/shared/source_glassfrog.py
index 19cbc19e..1f6acfa9 100755
--- a/src/airbyte/models/shared/source_glassfrog.py
+++ b/src/airbyte/models/shared/source_glassfrog.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceGlassfrogGlassfrogEnum(str, Enum):
+class SourceGlassfrogGlassfrog(str, Enum):
GLASSFROG = 'glassfrog'
@@ -17,5 +17,5 @@ class SourceGlassfrog:
api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }})
r"""API key provided by Glassfrog"""
- source_type: SourceGlassfrogGlassfrogEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceGlassfrogGlassfrog = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_gnews.py b/src/airbyte/models/shared/source_gnews.py
index ff5ae5b0..2a57ce39 100755
--- a/src/airbyte/models/shared/source_gnews.py
+++ b/src/airbyte/models/shared/source_gnews.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class SourceGnewsCountryEnum(str, Enum):
+class SourceGnewsCountry(str, Enum):
r"""This parameter allows you to specify the country where the news articles returned by the API were published, the contents of the articles are not necessarily related to the specified country. You have to set as value the 2 letters code of the country you want to filter."""
AU = 'au'
BR = 'br'
@@ -40,12 +40,12 @@ class SourceGnewsCountryEnum(str, Enum):
GB = 'gb'
US = 'us'
-class SourceGnewsInEnum(str, Enum):
+class SourceGnewsIn(str, Enum):
TITLE = 'title'
DESCRIPTION = 'description'
CONTENT = 'content'
-class SourceGnewsLanguageEnum(str, Enum):
+class SourceGnewsLanguage(str, Enum):
AR = 'ar'
ZH = 'zh'
NL = 'nl'
@@ -69,12 +69,12 @@ class SourceGnewsLanguageEnum(str, Enum):
TE = 'te'
UK = 'uk'
-class SourceGnewsNullableEnum(str, Enum):
+class SourceGnewsNullable(str, Enum):
TITLE = 'title'
DESCRIPTION = 'description'
CONTENT = 'content'
-class SourceGnewsSortByEnum(str, Enum):
+class SourceGnewsSortBy(str, Enum):
r"""This parameter allows you to choose with which type of sorting the articles should be returned. Two values are possible:
- publishedAt = sort by publication date, the articles with the most recent publication date are returned first
- relevance = sort by best match to keywords, the articles with the best match are returned first
@@ -82,10 +82,10 @@ class SourceGnewsSortByEnum(str, Enum):
PUBLISHED_AT = 'publishedAt'
RELEVANCE = 'relevance'
-class SourceGnewsGnewsEnum(str, Enum):
+class SourceGnewsGnews(str, Enum):
GNEWS = 'gnews'
-class SourceGnewsTopHeadlinesTopicEnum(str, Enum):
+class SourceGnewsTopHeadlinesTopic(str, Enum):
r"""This parameter allows you to change the category for the request."""
BREAKING_NEWS = 'breaking-news'
WORLD = 'world'
@@ -121,17 +121,17 @@ class SourceGnews:
For example the query: Apple NOT iPhone will return all articles matching the keyword Apple but not the keyword
iPhone
"""
- source_type: SourceGnewsGnewsEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
- country: Optional[SourceGnewsCountryEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('country'), 'exclude': lambda f: f is None }})
+ source_type: SourceGnewsGnews = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ country: Optional[SourceGnewsCountry] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('country'), 'exclude': lambda f: f is None }})
r"""This parameter allows you to specify the country where the news articles returned by the API were published, the contents of the articles are not necessarily related to the specified country. You have to set as value the 2 letters code of the country you want to filter."""
end_date: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('end_date'), 'exclude': lambda f: f is None }})
r"""This parameter allows you to filter the articles that have a publication date smaller than or equal to the specified value. The date must respect the following format: YYYY-MM-DD hh:mm:ss (in UTC)"""
- in_: Optional[list[SourceGnewsInEnum]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('in'), 'exclude': lambda f: f is None }})
+ in_: Optional[list[SourceGnewsIn]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('in'), 'exclude': lambda f: f is None }})
r"""This parameter allows you to choose in which attributes the keywords are searched. The attributes that can be set are title, description and content. It is possible to combine several attributes."""
- language: Optional[SourceGnewsLanguageEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('language'), 'exclude': lambda f: f is None }})
- nullable: Optional[list[SourceGnewsNullableEnum]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('nullable'), 'exclude': lambda f: f is None }})
+ language: Optional[SourceGnewsLanguage] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('language'), 'exclude': lambda f: f is None }})
+ nullable: Optional[list[SourceGnewsNullable]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('nullable'), 'exclude': lambda f: f is None }})
r"""This parameter allows you to specify the attributes that you allow to return null values. The attributes that can be set are title, description and content. It is possible to combine several attributes"""
- sortby: Optional[SourceGnewsSortByEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sortby'), 'exclude': lambda f: f is None }})
+ sortby: Optional[SourceGnewsSortBy] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sortby'), 'exclude': lambda f: f is None }})
r"""This parameter allows you to choose with which type of sorting the articles should be returned. Two values are possible:
- publishedAt = sort by publication date, the articles with the most recent publication date are returned first
- relevance = sort by best match to keywords, the articles with the best match are returned first
@@ -154,6 +154,6 @@ class SourceGnews:
For example the query: Apple NOT iPhone will return all articles matching the keyword Apple but not the keyword
iPhone
"""
- top_headlines_topic: Optional[SourceGnewsTopHeadlinesTopicEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('top_headlines_topic'), 'exclude': lambda f: f is None }})
+ top_headlines_topic: Optional[SourceGnewsTopHeadlinesTopic] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('top_headlines_topic'), 'exclude': lambda f: f is None }})
r"""This parameter allows you to change the category for the request."""
\ No newline at end of file
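Several list-valued fields here take the renamed enums. A sketch; note the `query` field name is inferred from its docstring and is an assumption, since the field line sits outside this hunk:

from airbyte.models import shared

source = shared.SourceGnews(
    query='Apple NOT iPhone',  # assumed field name, per the keyword docstring above
    source_type=shared.SourceGnewsGnews.GNEWS,
    country=shared.SourceGnewsCountry.US,
    in_=[shared.SourceGnewsIn.TITLE, shared.SourceGnewsIn.DESCRIPTION],
    sortby=shared.SourceGnewsSortBy.PUBLISHED_AT,
)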
diff --git a/src/airbyte/models/shared/source_google_ads.py b/src/airbyte/models/shared/source_google_ads.py
index 761e977a..4d963135 100755
--- a/src/airbyte/models/shared/source_google_ads.py
+++ b/src/airbyte/models/shared/source_google_ads.py
@@ -35,7 +35,7 @@ class SourceGoogleAdsCustomQueries:
table_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('table_name') }})
r"""The table name in your destination database for choosen query."""
-class SourceGoogleAdsGoogleAdsEnum(str, Enum):
+class SourceGoogleAdsGoogleAds(str, Enum):
GOOGLE_ADS = 'google-ads'
@@ -47,7 +47,7 @@ class SourceGoogleAds:
credentials: SourceGoogleAdsGoogleCredentials = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials') }})
customer_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('customer_id') }})
r"""Comma separated list of (client) customer IDs. Each customer ID must be specified as a 10-digit number without dashes. More instruction on how to find this value in our docs. Metrics streams like AdGroupAdReport cannot be requested for a manager account."""
- source_type: SourceGoogleAdsGoogleAdsEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceGoogleAdsGoogleAds = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: date = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.dateisoformat(False), 'decoder': utils.datefromisoformat, 'mm_field': fields.DateTime(format='iso') }})
r"""UTC date and time in the format 2017-01-25. Any data before this date will not be replicated."""
conversion_window_days: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('conversion_window_days'), 'exclude': lambda f: f is None }})
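A sketch of the updated model; the credentials object's fields sit outside this hunk, so it is left as a placeholder:

from datetime import date
from airbyte.models import shared

creds = ...  # build a shared.SourceGoogleAdsGoogleCredentials here (fields not shown in this diff)
source = shared.SourceGoogleAds(
    credentials=creds,
    customer_id='1234567890',  # 10-digit ID, no dashes
    source_type=shared.SourceGoogleAdsGoogleAds.GOOGLE_ADS,
    start_date=date(2022, 1, 1),
    conversion_window_days=14,  # optional
)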
diff --git a/src/airbyte/models/shared/source_google_analytics_data_api.py b/src/airbyte/models/shared/source_google_analytics_data_api.py
index 675ce975..9768fe70 100755
--- a/src/airbyte/models/shared/source_google_analytics_data_api.py
+++ b/src/airbyte/models/shared/source_google_analytics_data_api.py
@@ -9,7 +9,7 @@
from marshmallow import fields
from typing import Any, Optional
-class SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthenticationAuthTypeEnum(str, Enum):
+class SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthenticationAuthType(str, Enum):
SERVICE = 'Service'
@@ -20,9 +20,9 @@ class SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication:
credentials_json: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials_json') }})
r"""The JSON key of the service account to use for authorization"""
- auth_type: Optional[SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthenticationAuthTypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
+ auth_type: Optional[SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthenticationAuthType] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
-class SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauthAuthTypeEnum(str, Enum):
+class SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauthAuthType(str, Enum):
CLIENT = 'Client'
@@ -39,9 +39,9 @@ class SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth:
r"""The token for obtaining a new access token."""
access_token: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token'), 'exclude': lambda f: f is None }})
r"""Access Token for making authenticated requests."""
- auth_type: Optional[SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauthAuthTypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
+ auth_type: Optional[SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauthAuthType] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
-class SourceGoogleAnalyticsDataAPIGoogleAnalyticsDataAPIEnum(str, Enum):
+class SourceGoogleAnalyticsDataAPIGoogleAnalyticsDataAPI(str, Enum):
GOOGLE_ANALYTICS_DATA_API = 'google-analytics-data-api'
@@ -54,7 +54,7 @@ class SourceGoogleAnalyticsDataAPI:
r"""The start date from which to replicate report data in the format YYYY-MM-DD. Data generated before this date will not be included in the report. Not applied to custom Cohort reports."""
property_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('property_id') }})
r"""A Google Analytics GA4 property identifier whose events are tracked. Specified in the URL path and not the body such as \\"123...\\". See the docs for more details."""
- source_type: SourceGoogleAnalyticsDataAPIGoogleAnalyticsDataAPIEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceGoogleAnalyticsDataAPIGoogleAnalyticsDataAPI = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
credentials: Optional[Any] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials'), 'exclude': lambda f: f is None }})
r"""Credentials for the service"""
custom_reports: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('custom_reports'), 'exclude': lambda f: f is None }})
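
After this rename, configuration code references the auth-type enum without the `Enum` suffix. A minimal sketch of constructing the service-account credentials above, assuming the models are re-exported through `airbyte.models.shared` as in the README usage; the key value is a placeholder:

```python
from airbyte.models import shared

# Sketch only: the JSON below is a placeholder, not a real service account key.
credentials = shared.SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication(
    credentials_json='{"type": "service_account"}',
    auth_type=shared.SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthenticationAuthType.SERVICE,
)
```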
diff --git a/src/airbyte/models/shared/source_google_analytics_v4.py b/src/airbyte/models/shared/source_google_analytics_v4.py
index 9a893257..d753fde2 100755
--- a/src/airbyte/models/shared/source_google_analytics_v4.py
+++ b/src/airbyte/models/shared/source_google_analytics_v4.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Any, Optional
-class SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthenticationAuthTypeEnum(str, Enum):
+class SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthenticationAuthType(str, Enum):
SERVICE = 'Service'
@@ -18,9 +18,9 @@ class SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication:
credentials_json: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials_json') }})
r"""The JSON key of the service account to use for authorization"""
- auth_type: Optional[SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthenticationAuthTypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
+ auth_type: Optional[SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthenticationAuthType] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
-class SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauthAuthTypeEnum(str, Enum):
+class SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauthAuthType(str, Enum):
CLIENT = 'Client'
@@ -37,9 +37,9 @@ class SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth:
r"""The token for obtaining a new access token."""
access_token: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token'), 'exclude': lambda f: f is None }})
r"""Access Token for making authenticated requests."""
- auth_type: Optional[SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauthAuthTypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
+ auth_type: Optional[SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauthAuthType] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
-class SourceGoogleAnalyticsV4GoogleAnalyticsV4Enum(str, Enum):
+class SourceGoogleAnalyticsV4GoogleAnalyticsV4(str, Enum):
GOOGLE_ANALYTICS_V4 = 'google-analytics-v4'
@@ -48,7 +48,7 @@ class SourceGoogleAnalyticsV4GoogleAnalyticsV4Enum(str, Enum):
class SourceGoogleAnalyticsV4:
r"""The values required to configure the source."""
- source_type: SourceGoogleAnalyticsV4GoogleAnalyticsV4Enum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceGoogleAnalyticsV4GoogleAnalyticsV4 = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date') }})
r"""The date in the format YYYY-MM-DD. Any data before this date will not be replicated."""
view_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('view_id') }})
diff --git a/src/airbyte/models/shared/source_google_directory.py b/src/airbyte/models/shared/source_google_directory.py
index e430fb8c..74c3f2d0 100755
--- a/src/airbyte/models/shared/source_google_directory.py
+++ b/src/airbyte/models/shared/source_google_directory.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceGoogleDirectoryGoogleDirectoryEnum(str, Enum):
+class SourceGoogleDirectoryGoogleDirectory(str, Enum):
GOOGLE_DIRECTORY = 'google-directory'
@@ -19,5 +19,5 @@ class SourceGoogleDirectory:
r"""The contents of the JSON service account key. See the docs for more information on how to generate this key."""
email: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('email') }})
r"""The email of the user, which has permissions to access the Google Workspace Admin APIs."""
- source_type: SourceGoogleDirectoryGoogleDirectoryEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceGoogleDirectoryGoogleDirectory = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_google_search_console.py b/src/airbyte/models/shared/source_google_search_console.py
index f9c3b93b..bf26614c 100755
--- a/src/airbyte/models/shared/source_google_search_console.py
+++ b/src/airbyte/models/shared/source_google_search_console.py
@@ -9,7 +9,7 @@
from marshmallow import fields
from typing import Any, Optional
-class SourceGoogleSearchConsoleAuthorizationServiceAccountKeyAuthenticationAuthTypeEnum(str, Enum):
+class SourceGoogleSearchConsoleAuthorizationServiceAccountKeyAuthenticationAuthType(str, Enum):
SERVICE = 'Service'
@@ -17,13 +17,13 @@ class SourceGoogleSearchConsoleAuthorizationServiceAccountKeyAuthenticationAuthT
@dataclasses.dataclass
class SourceGoogleSearchConsoleAuthorizationServiceAccountKeyAuthentication:
- auth_type: SourceGoogleSearchConsoleAuthorizationServiceAccountKeyAuthenticationAuthTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }})
+ auth_type: SourceGoogleSearchConsoleAuthorizationServiceAccountKeyAuthenticationAuthType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }})
email: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('email') }})
r"""The email of the user which has permissions to access the Google Workspace Admin APIs."""
service_account_info: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('service_account_info') }})
r"""The JSON key of the service account to use for authorization. Read more here."""
-class SourceGoogleSearchConsoleAuthorizationOAuthAuthTypeEnum(str, Enum):
+class SourceGoogleSearchConsoleAuthorizationOAuthAuthType(str, Enum):
CLIENT = 'Client'
@@ -31,7 +31,7 @@ class SourceGoogleSearchConsoleAuthorizationOAuthAuthTypeEnum(str, Enum):
@dataclasses.dataclass
class SourceGoogleSearchConsoleAuthorizationOAuth:
- auth_type: SourceGoogleSearchConsoleAuthorizationOAuthAuthTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }})
+ auth_type: SourceGoogleSearchConsoleAuthorizationOAuthAuthType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }})
client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }})
r"""The client ID of your Google Search Console developer application. Read more here."""
client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }})
@@ -41,7 +41,7 @@ class SourceGoogleSearchConsoleAuthorizationOAuth:
access_token: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token'), 'exclude': lambda f: f is None }})
r"""Access token for making authenticated requests. Read more here."""
-class SourceGoogleSearchConsoleGoogleSearchConsoleEnum(str, Enum):
+class SourceGoogleSearchConsoleGoogleSearchConsole(str, Enum):
GOOGLE_SEARCH_CONSOLE = 'google-search-console'
@@ -53,7 +53,7 @@ class SourceGoogleSearchConsole:
authorization: Any = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('authorization') }})
site_urls: list[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('site_urls') }})
r"""The URLs of the website property attached to your GSC account. Read more here."""
- source_type: SourceGoogleSearchConsoleGoogleSearchConsoleEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceGoogleSearchConsoleGoogleSearchConsole = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: date = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.dateisoformat(False), 'decoder': utils.datefromisoformat, 'mm_field': fields.DateTime(format='iso') }})
r"""UTC date in the format 2017-01-25. Any data before this date will not be replicated."""
custom_reports: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('custom_reports'), 'exclude': lambda f: f is None }})
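
Unlike the optional discriminators in most other sources, the Search Console auth classes require `auth_type`. A hedged sketch of the service-account variant, with placeholder values and the `airbyte.models.shared` re-export assumed:

```python
from airbyte.models import shared

auth = shared.SourceGoogleSearchConsoleAuthorizationServiceAccountKeyAuthentication(
    auth_type=shared.SourceGoogleSearchConsoleAuthorizationServiceAccountKeyAuthenticationAuthType.SERVICE,
    email='user@example.com',                             # placeholder account email
    service_account_info='{"type": "service_account"}',   # placeholder key JSON
)
```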
diff --git a/src/airbyte/models/shared/source_google_sheets.py b/src/airbyte/models/shared/source_google_sheets.py
index 37e64eee..48d2b0c2 100755
--- a/src/airbyte/models/shared/source_google_sheets.py
+++ b/src/airbyte/models/shared/source_google_sheets.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Any, Optional
-class SourceGoogleSheetsCredentialsServiceAccountKeyAuthenticationAuthTypeEnum(str, Enum):
+class SourceGoogleSheetsCredentialsServiceAccountKeyAuthenticationAuthType(str, Enum):
SERVICE = 'Service'
@@ -16,11 +16,11 @@ class SourceGoogleSheetsCredentialsServiceAccountKeyAuthenticationAuthTypeEnum(s
class SourceGoogleSheetsCredentialsServiceAccountKeyAuthentication:
r"""Credentials for connecting to the Google Sheets API"""
- auth_type: SourceGoogleSheetsCredentialsServiceAccountKeyAuthenticationAuthTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }})
+ auth_type: SourceGoogleSheetsCredentialsServiceAccountKeyAuthenticationAuthType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }})
service_account_info: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('service_account_info') }})
r"""Enter your Google Cloud service account key in JSON format"""
-class SourceGoogleSheetsCredentialsAuthenticateViaGoogleOAuthAuthTypeEnum(str, Enum):
+class SourceGoogleSheetsCredentialsAuthenticateViaGoogleOAuthAuthType(str, Enum):
CLIENT = 'Client'
@@ -29,7 +29,7 @@ class SourceGoogleSheetsCredentialsAuthenticateViaGoogleOAuthAuthTypeEnum(str, E
class SourceGoogleSheetsCredentialsAuthenticateViaGoogleOAuth:
r"""Credentials for connecting to the Google Sheets API"""
- auth_type: SourceGoogleSheetsCredentialsAuthenticateViaGoogleOAuthAuthTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }})
+ auth_type: SourceGoogleSheetsCredentialsAuthenticateViaGoogleOAuthAuthType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }})
client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }})
r"""Enter your Google application's Client ID"""
client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }})
@@ -37,7 +37,7 @@ class SourceGoogleSheetsCredentialsAuthenticateViaGoogleOAuth:
refresh_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('refresh_token') }})
r"""Enter your Google application's refresh token"""
-class SourceGoogleSheetsGoogleSheetsEnum(str, Enum):
+class SourceGoogleSheetsGoogleSheets(str, Enum):
GOOGLE_SHEETS = 'google-sheets'
@@ -48,7 +48,7 @@ class SourceGoogleSheets:
credentials: Any = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials') }})
r"""Credentials for connecting to the Google Sheets API"""
- source_type: SourceGoogleSheetsGoogleSheetsEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceGoogleSheetsGoogleSheets = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
spreadsheet_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('spreadsheet_id') }})
r"""Enter the link to the Google spreadsheet you want to sync"""
row_batch_size: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('row_batch_size'), 'exclude': lambda f: f is None }})
diff --git a/src/airbyte/models/shared/source_google_webfonts.py b/src/airbyte/models/shared/source_google_webfonts.py
index 8f4e975a..29f2e5d2 100755
--- a/src/airbyte/models/shared/source_google_webfonts.py
+++ b/src/airbyte/models/shared/source_google_webfonts.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class SourceGoogleWebfontsGoogleWebfontsEnum(str, Enum):
+class SourceGoogleWebfontsGoogleWebfonts(str, Enum):
GOOGLE_WEBFONTS = 'google-webfonts'
@@ -18,7 +18,7 @@ class SourceGoogleWebfonts:
api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }})
r"""API key is required to access google apis, For getting your's goto google console and generate api key for Webfonts"""
- source_type: SourceGoogleWebfontsGoogleWebfontsEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceGoogleWebfontsGoogleWebfonts = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
alt: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('alt'), 'exclude': lambda f: f is None }})
r"""Optional, Available params- json, media, proto"""
pretty_print: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('prettyPrint'), 'exclude': lambda f: f is None }})
diff --git a/src/airbyte/models/shared/source_google_workspace_admin_reports.py b/src/airbyte/models/shared/source_google_workspace_admin_reports.py
index 040d5014..d8e098bf 100755
--- a/src/airbyte/models/shared/source_google_workspace_admin_reports.py
+++ b/src/airbyte/models/shared/source_google_workspace_admin_reports.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class SourceGoogleWorkspaceAdminReportsGoogleWorkspaceAdminReportsEnum(str, Enum):
+class SourceGoogleWorkspaceAdminReportsGoogleWorkspaceAdminReports(str, Enum):
GOOGLE_WORKSPACE_ADMIN_REPORTS = 'google-workspace-admin-reports'
@@ -20,7 +20,7 @@ class SourceGoogleWorkspaceAdminReports:
r"""The contents of the JSON service account key. See the docs for more information on how to generate this key."""
email: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('email') }})
r"""The email of the user, which has permissions to access the Google Workspace Admin APIs."""
- source_type: SourceGoogleWorkspaceAdminReportsGoogleWorkspaceAdminReportsEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceGoogleWorkspaceAdminReportsGoogleWorkspaceAdminReports = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
lookback: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('lookback'), 'exclude': lambda f: f is None }})
r"""Sets the range of time shown in the report. Reports API allows from up to 180 days ago."""
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_greenhouse.py b/src/airbyte/models/shared/source_greenhouse.py
index e6d94d34..5574bbc6 100755
--- a/src/airbyte/models/shared/source_greenhouse.py
+++ b/src/airbyte/models/shared/source_greenhouse.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceGreenhouseGreenhouseEnum(str, Enum):
+class SourceGreenhouseGreenhouse(str, Enum):
GREENHOUSE = 'greenhouse'
@@ -17,5 +17,5 @@ class SourceGreenhouse:
api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }})
r"""Greenhouse API Key. See the docs for more information on how to generate this key."""
- source_type: SourceGreenhouseGreenhouseEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceGreenhouseGreenhouse = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
\ No newline at end of file
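
For the simplest connectors the rename only changes the `source_type` annotation. A hedged sketch of configuring the Greenhouse source with the renamed enum (placeholder key, models assumed re-exported via `airbyte.models.shared`):

```python
from airbyte.models import shared

config = shared.SourceGreenhouse(
    api_key='<greenhouse-api-key>',  # placeholder; see the Greenhouse docs
    source_type=shared.SourceGreenhouseGreenhouse.GREENHOUSE,
)
```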
diff --git a/src/airbyte/models/shared/source_gridly.py b/src/airbyte/models/shared/source_gridly.py
index b3cf222e..9e16e6ee 100755
--- a/src/airbyte/models/shared/source_gridly.py
+++ b/src/airbyte/models/shared/source_gridly.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceGridlyGridlyEnum(str, Enum):
+class SourceGridlyGridly(str, Enum):
GRIDLY = 'gridly'
@@ -18,5 +18,5 @@ class SourceGridly:
api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }})
grid_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('grid_id') }})
r"""ID of a grid, or can be ID of a branch"""
- source_type: SourceGridlyGridlyEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceGridlyGridly = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_harvest.py b/src/airbyte/models/shared/source_harvest.py
index 49b974a9..fc6523c1 100755
--- a/src/airbyte/models/shared/source_harvest.py
+++ b/src/airbyte/models/shared/source_harvest.py
@@ -10,7 +10,7 @@
from marshmallow import fields
from typing import Any, Optional
-class SourceHarvestHarvestEnum(str, Enum):
+class SourceHarvestHarvest(str, Enum):
HARVEST = 'harvest'
@@ -23,7 +23,7 @@ class SourceHarvest:
r"""Harvest account ID. Required for all Harvest requests in pair with Personal Access Token"""
replication_start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('replication_start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso') }})
r"""UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated."""
- source_type: SourceHarvestHarvestEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceHarvestHarvest = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
credentials: Optional[Any] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials'), 'exclude': lambda f: f is None }})
r"""Choose how to authenticate to Harvest."""
replication_end_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('replication_end_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso'), 'exclude': lambda f: f is None }})
diff --git a/src/airbyte/models/shared/source_hubplanner.py b/src/airbyte/models/shared/source_hubplanner.py
index 9d9112c4..6d4b899b 100755
--- a/src/airbyte/models/shared/source_hubplanner.py
+++ b/src/airbyte/models/shared/source_hubplanner.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceHubplannerHubplannerEnum(str, Enum):
+class SourceHubplannerHubplanner(str, Enum):
HUBPLANNER = 'hubplanner'
@@ -17,5 +17,5 @@ class SourceHubplanner:
api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }})
r"""Hubplanner API key. See https://github.com/hubplanner/API#authentication for more details."""
- source_type: SourceHubplannerHubplannerEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceHubplannerHubplanner = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_hubspot.py b/src/airbyte/models/shared/source_hubspot.py
index 6a5653c9..f11ad666 100755
--- a/src/airbyte/models/shared/source_hubspot.py
+++ b/src/airbyte/models/shared/source_hubspot.py
@@ -10,7 +10,7 @@
from marshmallow import fields
from typing import Any
-class SourceHubspotCredentialsPrivateAppCredentialsEnum(str, Enum):
+class SourceHubspotCredentialsPrivateAppCredentials(str, Enum):
r"""Name of the credentials set"""
PRIVATE_APP_CREDENTIALS = 'Private App Credentials'
@@ -22,10 +22,10 @@ class SourceHubspotCredentialsPrivateApp:
access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }})
r"""HubSpot Access token. See the Hubspot docs if you need help finding this token."""
- credentials_title: SourceHubspotCredentialsPrivateAppCredentialsEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials_title') }})
+ credentials_title: SourceHubspotCredentialsPrivateAppCredentials = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials_title') }})
r"""Name of the credentials set"""
-class SourceHubspotCredentialsOAuthCredentialsEnum(str, Enum):
+class SourceHubspotCredentialsOAuthCredentials(str, Enum):
r"""Name of the credentials"""
O_AUTH_CREDENTIALS = 'OAuth Credentials'
@@ -39,12 +39,12 @@ class SourceHubspotCredentialsOAuth:
r"""The Client ID of your HubSpot developer application. See the Hubspot docs if you need help finding this ID."""
client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }})
r"""The client secret for your HubSpot developer application. See the Hubspot docs if you need help finding this secret."""
- credentials_title: SourceHubspotCredentialsOAuthCredentialsEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials_title') }})
+ credentials_title: SourceHubspotCredentialsOAuthCredentials = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials_title') }})
r"""Name of the credentials"""
refresh_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('refresh_token') }})
r"""Refresh token to renew an expired access token. See the Hubspot docs if you need help finding this token."""
-class SourceHubspotHubspotEnum(str, Enum):
+class SourceHubspotHubspot(str, Enum):
HUBSPOT = 'hubspot'
@@ -55,7 +55,7 @@ class SourceHubspot:
credentials: Any = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials') }})
r"""Choose how to authenticate to HubSpot."""
- source_type: SourceHubspotHubspotEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceHubspotHubspot = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso') }})
r"""UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated."""
\ No newline at end of file
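
The HubSpot model pairs a credentials union with a `credentials_title` discriminator. A minimal sketch using the private-app variant, assuming the `airbyte.models.shared` re-export; the token and date are placeholders:

```python
import dateutil.parser

from airbyte.models import shared

creds = shared.SourceHubspotCredentialsPrivateApp(
    access_token='<hubspot-access-token>',  # placeholder token
    credentials_title=shared.SourceHubspotCredentialsPrivateAppCredentials.PRIVATE_APP_CREDENTIALS,
)
config = shared.SourceHubspot(
    credentials=creds,
    source_type=shared.SourceHubspotHubspot.HUBSPOT,
    # isoparse matches the decoder declared on the start_date field above
    start_date=dateutil.parser.isoparse('2017-01-25T00:00:00Z'),
)
```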
diff --git a/src/airbyte/models/shared/source_insightly.py b/src/airbyte/models/shared/source_insightly.py
index f877656b..84719a41 100755
--- a/src/airbyte/models/shared/source_insightly.py
+++ b/src/airbyte/models/shared/source_insightly.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceInsightlyInsightlyEnum(str, Enum):
+class SourceInsightlyInsightly(str, Enum):
INSIGHTLY = 'insightly'
@@ -15,7 +15,7 @@ class SourceInsightlyInsightlyEnum(str, Enum):
class SourceInsightly:
r"""The values required to configure the source."""
- source_type: SourceInsightlyInsightlyEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceInsightlyInsightly = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date') }})
r"""The date from which you'd like to replicate data for Insightly in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated. Note that it will be used only for incremental streams."""
token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('token') }})
diff --git a/src/airbyte/models/shared/source_instagram.py b/src/airbyte/models/shared/source_instagram.py
index 25cdbf87..bd73374e 100755
--- a/src/airbyte/models/shared/source_instagram.py
+++ b/src/airbyte/models/shared/source_instagram.py
@@ -9,7 +9,7 @@
from enum import Enum
from marshmallow import fields
-class SourceInstagramInstagramEnum(str, Enum):
+class SourceInstagramInstagram(str, Enum):
INSTAGRAM = 'instagram'
@@ -20,7 +20,7 @@ class SourceInstagram:
access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }})
r"""The value of the access token generated with instagram_basic, instagram_manage_insights, pages_show_list, pages_read_engagement, Instagram Public Content Access permissions. See the docs for more information"""
- source_type: SourceInstagramInstagramEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceInstagramInstagram = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso') }})
r"""The date from which you'd like to replicate data for User Insights, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated."""
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_instatus.py b/src/airbyte/models/shared/source_instatus.py
index 943b386a..deb87e6c 100755
--- a/src/airbyte/models/shared/source_instatus.py
+++ b/src/airbyte/models/shared/source_instatus.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceInstatusInstatusEnum(str, Enum):
+class SourceInstatusInstatus(str, Enum):
INSTATUS = 'instatus'
@@ -17,5 +17,5 @@ class SourceInstatus:
api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }})
r"""Instatus REST API key"""
- source_type: SourceInstatusInstatusEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceInstatusInstatus = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_intercom.py b/src/airbyte/models/shared/source_intercom.py
index 3e4deed9..00c45451 100755
--- a/src/airbyte/models/shared/source_intercom.py
+++ b/src/airbyte/models/shared/source_intercom.py
@@ -9,7 +9,7 @@
from enum import Enum
from marshmallow import fields
-class SourceIntercomIntercomEnum(str, Enum):
+class SourceIntercomIntercom(str, Enum):
INTERCOM = 'intercom'
@@ -20,7 +20,7 @@ class SourceIntercom:
access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }})
r"""Access token for making authenticated requests. See the Intercom docs for more information."""
- source_type: SourceIntercomIntercomEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceIntercomIntercom = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso') }})
r"""UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated."""
\ No newline at end of file
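
Datetime fields such as `start_date` also accept ordinary timezone-aware `datetime` objects; the field's encoder serializes them to the ISO format the docstring describes. A hedged sketch with a placeholder token, models assumed under `airbyte.models.shared`:

```python
from datetime import datetime, timezone

from airbyte.models import shared

config = shared.SourceIntercom(
    access_token='<intercom-access-token>',  # placeholder
    source_type=shared.SourceIntercomIntercom.INTERCOM,
    start_date=datetime(2017, 1, 25, tzinfo=timezone.utc),
)
```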
diff --git a/src/airbyte/models/shared/source_ip2whois.py b/src/airbyte/models/shared/source_ip2whois.py
index 5b5163aa..c0cc0260 100755
--- a/src/airbyte/models/shared/source_ip2whois.py
+++ b/src/airbyte/models/shared/source_ip2whois.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class SourceIp2whoisIp2whoisEnum(str, Enum):
+class SourceIp2whoisIp2whois(str, Enum):
IP2WHOIS = 'ip2whois'
@@ -16,7 +16,7 @@ class SourceIp2whoisIp2whoisEnum(str, Enum):
class SourceIp2whois:
r"""The values required to configure the source."""
- source_type: SourceIp2whoisIp2whoisEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceIp2whoisIp2whois = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
api_key: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key'), 'exclude': lambda f: f is None }})
r"""Your API Key. See here."""
domain: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('domain'), 'exclude': lambda f: f is None }})
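
Optional fields carry `default=None` and are excluded from the serialized payload when unset, so a sketch like the following only sends what it sets (placeholder values, `airbyte.models.shared` assumed):

```python
from airbyte.models import shared

config = shared.SourceIp2whois(
    source_type=shared.SourceIp2whoisIp2whois.IP2WHOIS,
    api_key='<ip2whois-api-key>',  # placeholder
    domain='example.com',          # placeholder domain to look up
)
```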
diff --git a/src/airbyte/models/shared/source_iterable.py b/src/airbyte/models/shared/source_iterable.py
index fb867d17..6da57b0e 100755
--- a/src/airbyte/models/shared/source_iterable.py
+++ b/src/airbyte/models/shared/source_iterable.py
@@ -9,7 +9,7 @@
from enum import Enum
from marshmallow import fields
-class SourceIterableIterableEnum(str, Enum):
+class SourceIterableIterable(str, Enum):
ITERABLE = 'iterable'
@@ -20,7 +20,7 @@ class SourceIterable:
api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }})
r"""Iterable API Key. See the docs for more information on how to obtain this key."""
- source_type: SourceIterableIterableEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceIterableIterable = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso') }})
r"""The date from which you'd like to replicate data for Iterable, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated."""
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_jira.py b/src/airbyte/models/shared/source_jira.py
index 4dbe5958..f507dc49 100755
--- a/src/airbyte/models/shared/source_jira.py
+++ b/src/airbyte/models/shared/source_jira.py
@@ -10,7 +10,7 @@
from marshmallow import fields
from typing import Optional
-class SourceJiraJiraEnum(str, Enum):
+class SourceJiraJira(str, Enum):
JIRA = 'jira'
@@ -20,20 +20,20 @@ class SourceJira:
r"""The values required to configure the source."""
api_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_token') }})
- r"""Jira API Token. See the docs for more information on how to generate this key."""
+ r"""Jira API Token. See the docs for more information on how to generate this key. API Token is used for Authorization to your account by BasicAuth."""
domain: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('domain') }})
r"""The Domain for your Jira account, e.g. airbyteio.atlassian.net, airbyteio.jira.com, jira.your-domain.com"""
email: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('email') }})
- r"""The user email for your Jira account."""
- source_type: SourceJiraJiraEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ r"""The user email for your Jira account which you used to generate the API token. This field is used for Authorization to your account by BasicAuth."""
+ source_type: SourceJiraJira = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
enable_experimental_streams: Optional[bool] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('enable_experimental_streams'), 'exclude': lambda f: f is None }})
r"""Allow the use of experimental streams which rely on undocumented Jira API endpoints. See https://docs.airbyte.com/integrations/sources/jira#experimental-tables for more info."""
expand_issue_changelog: Optional[bool] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('expand_issue_changelog'), 'exclude': lambda f: f is None }})
r"""Expand the changelog when replicating issues."""
projects: Optional[list[str]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('projects'), 'exclude': lambda f: f is None }})
- r"""List of Jira project keys to replicate data for."""
+ r"""List of Jira project keys to replicate data for, or leave it empty if you want to replicate data for all projects."""
render_fields: Optional[bool] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('render_fields'), 'exclude': lambda f: f is None }})
r"""Render issue fields in HTML format in addition to Jira JSON-like format."""
start_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso'), 'exclude': lambda f: f is None }})
- r"""The date from which you want to replicate data from Jira, use the format YYYY-MM-DDT00:00:00Z. Note that this field only applies to certain streams, and only data generated on or after the start date will be replicated. For more information, refer to the documentation."""
+ r"""The date from which you want to replicate data from Jira, use the format YYYY-MM-DDT00:00:00Z. Note that this field only applies to certain streams, and only data generated on or after the start date will be replicated. Or leave it empty if you want to replicate all data. For more information, refer to the documentation."""
\ No newline at end of file
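
Per the updated docstrings, omitting `projects` and `start_date` replicates data for all projects and all time. A hedged sketch of the basic-auth configuration (placeholder credentials, models assumed under `airbyte.models.shared`):

```python
from airbyte.models import shared

config = shared.SourceJira(
    api_token='<jira-api-token>',        # placeholder; used for BasicAuth
    domain='your-domain.atlassian.net',  # placeholder domain
    email='user@example.com',            # placeholder; the token's account email
    source_type=shared.SourceJiraJira.JIRA,
    projects=['PROJ'],  # omit to replicate data for all projects
)
```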
diff --git a/src/airbyte/models/shared/source_k6_cloud.py b/src/airbyte/models/shared/source_k6_cloud.py
index 42a12a71..fcfd066a 100755
--- a/src/airbyte/models/shared/source_k6_cloud.py
+++ b/src/airbyte/models/shared/source_k6_cloud.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceK6CloudK6CloudEnum(str, Enum):
+class SourceK6CloudK6Cloud(str, Enum):
K6_CLOUD = 'k6-cloud'
@@ -17,5 +17,5 @@ class SourceK6Cloud:
api_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_token') }})
r"""Your API Token. See here. The key is case sensitive."""
- source_type: SourceK6CloudK6CloudEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceK6CloudK6Cloud = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_klarna.py b/src/airbyte/models/shared/source_klarna.py
index 45190a7c..b94ed51d 100755
--- a/src/airbyte/models/shared/source_klarna.py
+++ b/src/airbyte/models/shared/source_klarna.py
@@ -6,13 +6,13 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceKlarnaRegionEnum(str, Enum):
+class SourceKlarnaRegion(str, Enum):
r"""Base url region (For playground eu https://docs.klarna.com/klarna-payments/api/payments-api/#tag/API-URLs). Supported 'eu', 'us', 'oc'"""
EU = 'eu'
US = 'us'
OC = 'oc'
-class SourceKlarnaKlarnaEnum(str, Enum):
+class SourceKlarnaKlarna(str, Enum):
KLARNA = 'klarna'
@@ -25,9 +25,9 @@ class SourceKlarna:
r"""A string which is associated with your Merchant ID and is used to authorize use of Klarna's APIs (https://developers.klarna.com/api/#authentication)"""
playground: bool = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('playground') }})
r"""Propertie defining if connector is used against playground or production environment"""
- region: SourceKlarnaRegionEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('region') }})
+ region: SourceKlarnaRegion = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('region') }})
r"""Base url region (For playground eu https://docs.klarna.com/klarna-payments/api/payments-api/#tag/API-URLs). Supported 'eu', 'us', 'oc'"""
- source_type: SourceKlarnaKlarnaEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceKlarnaKlarna = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }})
r"""Consists of your Merchant ID (eid) - a unique number that identifies your e-store, combined with a random string (https://developers.klarna.com/api/#authentication)"""
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_klaviyo.py b/src/airbyte/models/shared/source_klaviyo.py
index 19a16fd0..d1e8e294 100755
--- a/src/airbyte/models/shared/source_klaviyo.py
+++ b/src/airbyte/models/shared/source_klaviyo.py
@@ -9,7 +9,7 @@
from enum import Enum
from marshmallow import fields
-class SourceKlaviyoKlaviyoEnum(str, Enum):
+class SourceKlaviyoKlaviyo(str, Enum):
KLAVIYO = 'klaviyo'
@@ -20,7 +20,7 @@ class SourceKlaviyo:
api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }})
r"""Klaviyo API Key. See our docs if you need help finding this key."""
- source_type: SourceKlaviyoKlaviyoEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceKlaviyoKlaviyo = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso') }})
r"""UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated."""
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_kustomer_singer.py b/src/airbyte/models/shared/source_kustomer_singer.py
index bfc2ffda..3b1a021f 100755
--- a/src/airbyte/models/shared/source_kustomer_singer.py
+++ b/src/airbyte/models/shared/source_kustomer_singer.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceKustomerSingerKustomerSingerEnum(str, Enum):
+class SourceKustomerSingerKustomerSinger(str, Enum):
KUSTOMER_SINGER = 'kustomer-singer'
@@ -17,7 +17,7 @@ class SourceKustomerSinger:
api_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_token') }})
r"""Kustomer API Token. See the docs on how to obtain this"""
- source_type: SourceKustomerSingerKustomerSingerEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceKustomerSingerKustomerSinger = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date') }})
r"""The date from which you'd like to replicate the data"""
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_launchdarkly.py b/src/airbyte/models/shared/source_launchdarkly.py
index ba175da5..3f5926e1 100755
--- a/src/airbyte/models/shared/source_launchdarkly.py
+++ b/src/airbyte/models/shared/source_launchdarkly.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceLaunchdarklyLaunchdarklyEnum(str, Enum):
+class SourceLaunchdarklyLaunchdarkly(str, Enum):
LAUNCHDARKLY = 'launchdarkly'
@@ -17,5 +17,5 @@ class SourceLaunchdarkly:
access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }})
r"""Your Access token. See here."""
- source_type: SourceLaunchdarklyLaunchdarklyEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceLaunchdarklyLaunchdarkly = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_lemlist.py b/src/airbyte/models/shared/source_lemlist.py
index 3ec988d5..9cd2296a 100755
--- a/src/airbyte/models/shared/source_lemlist.py
+++ b/src/airbyte/models/shared/source_lemlist.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceLemlistLemlistEnum(str, Enum):
+class SourceLemlistLemlist(str, Enum):
LEMLIST = 'lemlist'
@@ -17,5 +17,5 @@ class SourceLemlist:
api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }})
r"""Lemlist API key."""
- source_type: SourceLemlistLemlistEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceLemlistLemlist = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_linkedin_ads.py b/src/airbyte/models/shared/source_linkedin_ads.py
index 079d742b..02a3a070 100755
--- a/src/airbyte/models/shared/source_linkedin_ads.py
+++ b/src/airbyte/models/shared/source_linkedin_ads.py
@@ -9,7 +9,7 @@
from marshmallow import fields
from typing import Any, Optional
-class SourceLinkedinAdsCredentialsAccessTokenAuthMethodEnum(str, Enum):
+class SourceLinkedinAdsCredentialsAccessTokenAuthMethod(str, Enum):
ACCESS_TOKEN = 'access_token'
@@ -19,9 +19,9 @@ class SourceLinkedinAdsCredentialsAccessToken:
access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }})
r"""The token value generated using the authentication code. See the docs to obtain yours."""
- auth_method: Optional[SourceLinkedinAdsCredentialsAccessTokenAuthMethodEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_method'), 'exclude': lambda f: f is None }})
+ auth_method: Optional[SourceLinkedinAdsCredentialsAccessTokenAuthMethod] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_method'), 'exclude': lambda f: f is None }})
-class SourceLinkedinAdsCredentialsOAuth20AuthMethodEnum(str, Enum):
+class SourceLinkedinAdsCredentialsOAuth20AuthMethod(str, Enum):
O_AUTH2_0 = 'oAuth2.0'
@@ -35,9 +35,9 @@ class SourceLinkedinAdsCredentialsOAuth20:
r"""The client secret the LinkedIn Ads developer application."""
refresh_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('refresh_token') }})
r"""The key to refresh the expired access token."""
- auth_method: Optional[SourceLinkedinAdsCredentialsOAuth20AuthMethodEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_method'), 'exclude': lambda f: f is None }})
+ auth_method: Optional[SourceLinkedinAdsCredentialsOAuth20AuthMethod] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_method'), 'exclude': lambda f: f is None }})
-class SourceLinkedinAdsLinkedinAdsEnum(str, Enum):
+class SourceLinkedinAdsLinkedinAds(str, Enum):
LINKEDIN_ADS = 'linkedin-ads'
@@ -46,7 +46,7 @@ class SourceLinkedinAdsLinkedinAdsEnum(str, Enum):
class SourceLinkedinAds:
r"""The values required to configure the source."""
- source_type: SourceLinkedinAdsLinkedinAdsEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceLinkedinAdsLinkedinAds = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: date = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.dateisoformat(False), 'decoder': utils.datefromisoformat, 'mm_field': fields.DateTime(format='iso') }})
r"""UTC date in the format 2020-09-17. Any data before this date will not be replicated."""
account_ids: Optional[list[int]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('account_ids'), 'exclude': lambda f: f is None }})
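
The access-token credentials variant leaves `auth_method` optional; setting it pins the discriminator explicitly. A minimal sketch with a placeholder token, `airbyte.models.shared` assumed:

```python
from airbyte.models import shared

creds = shared.SourceLinkedinAdsCredentialsAccessToken(
    access_token='<linkedin-access-token>',  # placeholder
    auth_method=shared.SourceLinkedinAdsCredentialsAccessTokenAuthMethod.ACCESS_TOKEN,
)
```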
diff --git a/src/airbyte/models/shared/source_linkedin_pages.py b/src/airbyte/models/shared/source_linkedin_pages.py
index afae5eb6..334dbd55 100755
--- a/src/airbyte/models/shared/source_linkedin_pages.py
+++ b/src/airbyte/models/shared/source_linkedin_pages.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Any, Optional
-class SourceLinkedinPagesCredentialsAccessTokenAuthMethodEnum(str, Enum):
+class SourceLinkedinPagesCredentialsAccessTokenAuthMethod(str, Enum):
ACCESS_TOKEN = 'access_token'
@@ -17,9 +17,9 @@ class SourceLinkedinPagesCredentialsAccessToken:
access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }})
r"""The token value generated using the LinkedIn Developers OAuth Token Tools. See the docs to obtain yours."""
- auth_method: Optional[SourceLinkedinPagesCredentialsAccessTokenAuthMethodEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_method'), 'exclude': lambda f: f is None }})
+ auth_method: Optional[SourceLinkedinPagesCredentialsAccessTokenAuthMethod] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_method'), 'exclude': lambda f: f is None }})
-class SourceLinkedinPagesCredentialsOAuth20AuthMethodEnum(str, Enum):
+class SourceLinkedinPagesCredentialsOAuth20AuthMethod(str, Enum):
O_AUTH2_0 = 'oAuth2.0'
@@ -33,9 +33,9 @@ class SourceLinkedinPagesCredentialsOAuth20:
r"""The client secret of the LinkedIn developer application."""
refresh_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('refresh_token') }})
r"""The token value generated using the LinkedIn Developers OAuth Token Tools. See the docs to obtain yours."""
- auth_method: Optional[SourceLinkedinPagesCredentialsOAuth20AuthMethodEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_method'), 'exclude': lambda f: f is None }})
+ auth_method: Optional[SourceLinkedinPagesCredentialsOAuth20AuthMethod] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_method'), 'exclude': lambda f: f is None }})
-class SourceLinkedinPagesLinkedinPagesEnum(str, Enum):
+class SourceLinkedinPagesLinkedinPages(str, Enum):
LINKEDIN_PAGES = 'linkedin-pages'
@@ -46,6 +46,6 @@ class SourceLinkedinPages:
org_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('org_id') }})
r"""Specify the Organization ID"""
- source_type: SourceLinkedinPagesLinkedinPagesEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceLinkedinPagesLinkedinPages = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
credentials: Optional[Any] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials'), 'exclude': lambda f: f is None }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_linnworks.py b/src/airbyte/models/shared/source_linnworks.py
index 1b750d5f..b27a7072 100755
--- a/src/airbyte/models/shared/source_linnworks.py
+++ b/src/airbyte/models/shared/source_linnworks.py
@@ -9,7 +9,7 @@
from enum import Enum
from marshmallow import fields
-class SourceLinnworksLinnworksEnum(str, Enum):
+class SourceLinnworksLinnworks(str, Enum):
LINNWORKS = 'linnworks'
@@ -22,7 +22,7 @@ class SourceLinnworks:
r"""Linnworks Application ID"""
application_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('application_secret') }})
r"""Linnworks Application Secret"""
- source_type: SourceLinnworksLinnworksEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceLinnworksLinnworks = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso') }})
r"""UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated."""
token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('token') }})
diff --git a/src/airbyte/models/shared/source_lokalise.py b/src/airbyte/models/shared/source_lokalise.py
index 051510f1..d1534ded 100755
--- a/src/airbyte/models/shared/source_lokalise.py
+++ b/src/airbyte/models/shared/source_lokalise.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceLokaliseLokaliseEnum(str, Enum):
+class SourceLokaliseLokalise(str, Enum):
LOKALISE = 'lokalise'
@@ -19,5 +19,5 @@ class SourceLokalise:
r"""Lokalise API Key with read-access. Available at Profile settings > API tokens. See here."""
project_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('project_id') }})
r"""Lokalise project ID. Available at Project Settings > General."""
- source_type: SourceLokaliseLokaliseEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceLokaliseLokalise = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_mailchimp.py b/src/airbyte/models/shared/source_mailchimp.py
index 08acb6b7..0d07f4d0 100755
--- a/src/airbyte/models/shared/source_mailchimp.py
+++ b/src/airbyte/models/shared/source_mailchimp.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Any, Optional
-class SourceMailchimpCredentialsAPIKeyAuthTypeEnum(str, Enum):
+class SourceMailchimpCredentialsAPIKeyAuthType(str, Enum):
APIKEY = 'apikey'
@@ -17,9 +17,9 @@ class SourceMailchimpCredentialsAPIKey:
apikey: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('apikey') }})
r"""Mailchimp API Key. See the docs for information on how to generate this key."""
- auth_type: SourceMailchimpCredentialsAPIKeyAuthTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }})
+ auth_type: SourceMailchimpCredentialsAPIKeyAuthType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }})
-class SourceMailchimpCredentialsOAuth20AuthTypeEnum(str, Enum):
+class SourceMailchimpCredentialsOAuth20AuthType(str, Enum):
OAUTH2_0 = 'oauth2.0'
@@ -29,13 +29,13 @@ class SourceMailchimpCredentialsOAuth20:
access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }})
r"""An access token generated using the above client ID and secret."""
- auth_type: SourceMailchimpCredentialsOAuth20AuthTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }})
+ auth_type: SourceMailchimpCredentialsOAuth20AuthType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }})
client_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id'), 'exclude': lambda f: f is None }})
r"""The Client ID of your OAuth application."""
client_secret: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret'), 'exclude': lambda f: f is None }})
r"""The Client Secret of your OAuth application."""
-class SourceMailchimpMailchimpEnum(str, Enum):
+class SourceMailchimpMailchimp(str, Enum):
MAILCHIMP = 'mailchimp'
@@ -44,7 +44,7 @@ class SourceMailchimpMailchimpEnum(str, Enum):
class SourceMailchimp:
r"""The values required to configure the source."""
- source_type: SourceMailchimpMailchimpEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceMailchimpMailchimp = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
campaign_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('campaign_id'), 'exclude': lambda f: f is None }})
credentials: Optional[Any] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials'), 'exclude': lambda f: f is None }})
\ No newline at end of file
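# --- Hypothetical usage sketch (not part of the diff above): how the renamed
# Mailchimp classes fit together after this change. The key value is a
# placeholder; only the class and field names come from source_mailchimp.py.
from airbyte.models import shared

source = shared.SourceMailchimp(
    source_type=shared.SourceMailchimpMailchimp.MAILCHIMP,
    credentials=shared.SourceMailchimpCredentialsAPIKey(
        apikey='<MAILCHIMP_API_KEY>',
        auth_type=shared.SourceMailchimpCredentialsAPIKeyAuthType.APIKEY,
    ),
)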
diff --git a/src/airbyte/models/shared/source_mailgun.py b/src/airbyte/models/shared/source_mailgun.py
index f3577ab7..d9d9f818 100755
--- a/src/airbyte/models/shared/source_mailgun.py
+++ b/src/airbyte/models/shared/source_mailgun.py
@@ -10,7 +10,7 @@
from marshmallow import fields
from typing import Optional
-class SourceMailgunMailgunEnum(str, Enum):
+class SourceMailgunMailgun(str, Enum):
MAILGUN = 'mailgun'
@@ -21,7 +21,7 @@ class SourceMailgun:
private_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('private_key') }})
r"""Primary account API key to access your Mailgun data."""
- source_type: SourceMailgunMailgunEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceMailgunMailgun = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
domain_region: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('domain_region'), 'exclude': lambda f: f is None }})
r"""Domain region code. 'EU' or 'US' are possible values. The default is 'US'."""
start_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso'), 'exclude': lambda f: f is None }})
diff --git a/src/airbyte/models/shared/source_mailjet_sms.py b/src/airbyte/models/shared/source_mailjet_sms.py
index 75c61507..d7de844d 100755
--- a/src/airbyte/models/shared/source_mailjet_sms.py
+++ b/src/airbyte/models/shared/source_mailjet_sms.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class SourceMailjetSmsMailjetSmsEnum(str, Enum):
+class SourceMailjetSmsMailjetSms(str, Enum):
MAILJET_SMS = 'mailjet-sms'
@@ -16,7 +16,7 @@ class SourceMailjetSmsMailjetSmsEnum(str, Enum):
class SourceMailjetSms:
r"""The values required to configure the source."""
- source_type: SourceMailjetSmsMailjetSmsEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceMailjetSmsMailjetSms = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('token') }})
r"""Your access token. See here."""
end_date: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('end_date'), 'exclude': lambda f: f is None }})
diff --git a/src/airbyte/models/shared/source_marketo.py b/src/airbyte/models/shared/source_marketo.py
index aa1122e9..8bb5ac3b 100755
--- a/src/airbyte/models/shared/source_marketo.py
+++ b/src/airbyte/models/shared/source_marketo.py
@@ -9,7 +9,7 @@
from enum import Enum
from marshmallow import fields
-class SourceMarketoMarketoEnum(str, Enum):
+class SourceMarketoMarketo(str, Enum):
MARKETO = 'marketo'
@@ -24,7 +24,7 @@ class SourceMarketo:
r"""The Client Secret of your Marketo developer application. See the docs for info on how to obtain this."""
domain_url: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('domain_url') }})
r"""Your Marketo Base URL. See the docs for info on how to obtain this."""
- source_type: SourceMarketoMarketoEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceMarketoMarketo = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso') }})
r"""UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated."""
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_metabase.py b/src/airbyte/models/shared/source_metabase.py
index e3765a2d..f777299d 100755
--- a/src/airbyte/models/shared/source_metabase.py
+++ b/src/airbyte/models/shared/source_metabase.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class SourceMetabaseMetabaseEnum(str, Enum):
+class SourceMetabaseMetabase(str, Enum):
METABASE = 'metabase'
@@ -18,7 +18,7 @@ class SourceMetabase:
instance_api_url: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('instance_api_url') }})
r"""URL to your metabase instance API"""
- source_type: SourceMetabaseMetabaseEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceMetabaseMetabase = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password'), 'exclude': lambda f: f is None }})
session_token: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('session_token'), 'exclude': lambda f: f is None }})
r"""To generate your session token, you need to run the following command: ``` curl -X POST \
diff --git a/src/airbyte/models/shared/source_microsoft_teams.py b/src/airbyte/models/shared/source_microsoft_teams.py
index 4191040b..9b4c1353 100755
--- a/src/airbyte/models/shared/source_microsoft_teams.py
+++ b/src/airbyte/models/shared/source_microsoft_teams.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Any, Optional
-class SourceMicrosoftTeamsCredentialsAuthenticateViaMicrosoftAuthTypeEnum(str, Enum):
+class SourceMicrosoftTeamsCredentialsAuthenticateViaMicrosoftAuthType(str, Enum):
TOKEN = 'Token'
@@ -22,9 +22,9 @@ class SourceMicrosoftTeamsCredentialsAuthenticateViaMicrosoft:
r"""The Client Secret of your Microsoft Teams developer application."""
tenant_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tenant_id') }})
r"""A globally unique identifier (GUID) that is different than your organization name or domain. Follow these steps to obtain: open one of the Teams where you belong inside the Teams Application -> Click on the … next to the Team title -> Click on Get link to team -> Copy the link to the team and grab the tenant ID form the URL"""
- auth_type: Optional[SourceMicrosoftTeamsCredentialsAuthenticateViaMicrosoftAuthTypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
+ auth_type: Optional[SourceMicrosoftTeamsCredentialsAuthenticateViaMicrosoftAuthType] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
-class SourceMicrosoftTeamsCredentialsAuthenticateViaMicrosoftOAuth20AuthTypeEnum(str, Enum):
+class SourceMicrosoftTeamsCredentialsAuthenticateViaMicrosoftOAuth20AuthType(str, Enum):
CLIENT = 'Client'
@@ -41,9 +41,9 @@ class SourceMicrosoftTeamsCredentialsAuthenticateViaMicrosoftOAuth20:
r"""A Refresh Token to renew the expired Access Token."""
tenant_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tenant_id') }})
r"""A globally unique identifier (GUID) that is different than your organization name or domain. Follow these steps to obtain: open one of the Teams where you belong inside the Teams Application -> Click on the … next to the Team title -> Click on Get link to team -> Copy the link to the team and grab the tenant ID form the URL"""
- auth_type: Optional[SourceMicrosoftTeamsCredentialsAuthenticateViaMicrosoftOAuth20AuthTypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
+ auth_type: Optional[SourceMicrosoftTeamsCredentialsAuthenticateViaMicrosoftOAuth20AuthType] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
-class SourceMicrosoftTeamsMicrosoftTeamsEnum(str, Enum):
+class SourceMicrosoftTeamsMicrosoftTeams(str, Enum):
MICROSOFT_TEAMS = 'microsoft-teams'
@@ -54,7 +54,7 @@ class SourceMicrosoftTeams:
period: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('period') }})
r"""Specifies the length of time over which the Team Device Report stream is aggregated. The supported values are: D7, D30, D90, and D180."""
- source_type: SourceMicrosoftTeamsMicrosoftTeamsEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceMicrosoftTeamsMicrosoftTeams = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
credentials: Optional[Any] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials'), 'exclude': lambda f: f is None }})
r"""Choose how to authenticate to Microsoft"""
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_mixpanel.py b/src/airbyte/models/shared/source_mixpanel.py
index d1b7e15c..9c4ca5f0 100755
--- a/src/airbyte/models/shared/source_mixpanel.py
+++ b/src/airbyte/models/shared/source_mixpanel.py
@@ -10,7 +10,7 @@
from marshmallow import fields
from typing import Any, Optional
-class SourceMixpanelCredentialsProjectSecretOptionTitleEnum(str, Enum):
+class SourceMixpanelCredentialsProjectSecretOptionTitle(str, Enum):
PROJECT_SECRET = 'Project Secret'
@@ -21,9 +21,9 @@ class SourceMixpanelCredentialsProjectSecret:
api_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_secret') }})
r"""Mixpanel project secret. See the docs for more information on how to obtain this."""
- option_title: Optional[SourceMixpanelCredentialsProjectSecretOptionTitleEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('option_title'), 'exclude': lambda f: f is None }})
+ option_title: Optional[SourceMixpanelCredentialsProjectSecretOptionTitle] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('option_title'), 'exclude': lambda f: f is None }})
-class SourceMixpanelCredentialsServiceAccountOptionTitleEnum(str, Enum):
+class SourceMixpanelCredentialsServiceAccountOptionTitle(str, Enum):
SERVICE_ACCOUNT = 'Service Account'
@@ -36,14 +36,14 @@ class SourceMixpanelCredentialsServiceAccount:
r"""Mixpanel Service Account Secret. See the docs for more information on how to obtain this."""
username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }})
r"""Mixpanel Service Account Username. See the docs for more information on how to obtain this."""
- option_title: Optional[SourceMixpanelCredentialsServiceAccountOptionTitleEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('option_title'), 'exclude': lambda f: f is None }})
+ option_title: Optional[SourceMixpanelCredentialsServiceAccountOptionTitle] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('option_title'), 'exclude': lambda f: f is None }})
-class SourceMixpanelRegionEnum(str, Enum):
+class SourceMixpanelRegion(str, Enum):
r"""The region of mixpanel domain instance either US or EU."""
US = 'US'
EU = 'EU'
-class SourceMixpanelMixpanelEnum(str, Enum):
+class SourceMixpanelMixpanel(str, Enum):
MIXPANEL = 'mixpanel'
@@ -52,7 +52,7 @@ class SourceMixpanelMixpanelEnum(str, Enum):
class SourceMixpanel:
r"""The values required to configure the source."""
- source_type: SourceMixpanelMixpanelEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceMixpanelMixpanel = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
attribution_window: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('attribution_window'), 'exclude': lambda f: f is None }})
r"""A period of time for attributing results to ads and the lookback period after those actions occur during which ad results are counted. Default attribution window is 5 days."""
credentials: Optional[Any] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials'), 'exclude': lambda f: f is None }})
@@ -65,7 +65,7 @@ class SourceMixpanel:
r"""Your project ID number. See the docs for more information on how to obtain this."""
project_timezone: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('project_timezone'), 'exclude': lambda f: f is None }})
r"""Time zone in which integer date times are stored. The project timezone may be found in the project settings in the Mixpanel console."""
- region: Optional[SourceMixpanelRegionEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('region'), 'exclude': lambda f: f is None }})
+ region: Optional[SourceMixpanelRegion] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('region'), 'exclude': lambda f: f is None }})
r"""The region of mixpanel domain instance either US or EU."""
select_properties_by_default: Optional[bool] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('select_properties_by_default'), 'exclude': lambda f: f is None }})
r"""Setting this config parameter to TRUE ensures that new properties on events and engage records are captured. Otherwise new properties will be ignored."""
diff --git a/src/airbyte/models/shared/source_monday.py b/src/airbyte/models/shared/source_monday.py
index d6dce474..f2beb1bd 100755
--- a/src/airbyte/models/shared/source_monday.py
+++ b/src/airbyte/models/shared/source_monday.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Any, Optional
-class SourceMondayCredentialsAPITokenAuthTypeEnum(str, Enum):
+class SourceMondayCredentialsAPITokenAuthType(str, Enum):
API_TOKEN = 'api_token'
@@ -17,9 +17,9 @@ class SourceMondayCredentialsAPIToken:
api_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_token') }})
r"""API Token for making authenticated requests."""
- auth_type: SourceMondayCredentialsAPITokenAuthTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }})
+ auth_type: SourceMondayCredentialsAPITokenAuthType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }})
-class SourceMondayCredentialsOAuth20AuthTypeEnum(str, Enum):
+class SourceMondayCredentialsOAuth20AuthType(str, Enum):
OAUTH2_0 = 'oauth2.0'
@@ -29,7 +29,7 @@ class SourceMondayCredentialsOAuth20:
access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }})
r"""Access Token for making authenticated requests."""
- auth_type: SourceMondayCredentialsOAuth20AuthTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }})
+ auth_type: SourceMondayCredentialsOAuth20AuthType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }})
client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }})
r"""The Client ID of your OAuth application."""
client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }})
@@ -37,7 +37,7 @@ class SourceMondayCredentialsOAuth20:
subdomain: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('subdomain'), 'exclude': lambda f: f is None }})
r"""Slug/subdomain of the account, or the first part of the URL that comes before .monday.com"""
-class SourceMondayMondayEnum(str, Enum):
+class SourceMondayMonday(str, Enum):
MONDAY = 'monday'
@@ -46,6 +46,6 @@ class SourceMondayMondayEnum(str, Enum):
class SourceMonday:
r"""The values required to configure the source."""
- source_type: SourceMondayMondayEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceMondayMonday = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
credentials: Optional[Any] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials'), 'exclude': lambda f: f is None }})
\ No newline at end of file
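# --- Hypothetical usage sketch (not part of the diff above): the same rename
# pattern applied to the Monday source, here with API-token credentials. The
# token value is a placeholder; class and field names come from source_monday.py.
from airbyte.models import shared

source = shared.SourceMonday(
    source_type=shared.SourceMondayMonday.MONDAY,
    credentials=shared.SourceMondayCredentialsAPIToken(
        api_token='<MONDAY_API_TOKEN>',
        auth_type=shared.SourceMondayCredentialsAPITokenAuthType.API_TOKEN,
    ),
)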
diff --git a/src/airbyte/models/shared/source_mongodb.py b/src/airbyte/models/shared/source_mongodb.py
index fa3e78d4..bf8cdfe9 100755
--- a/src/airbyte/models/shared/source_mongodb.py
+++ b/src/airbyte/models/shared/source_mongodb.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Any, Optional
-class SourceMongodbInstanceTypeMongoDBAtlasInstanceEnum(str, Enum):
+class SourceMongodbInstanceTypeMongoDBAtlasInstance(str, Enum):
ATLAS = 'atlas'
@@ -18,9 +18,9 @@ class SourceMongodbInstanceTypeMongoDBAtlas:
cluster_url: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('cluster_url') }})
r"""The URL of a cluster to connect to."""
- instance: SourceMongodbInstanceTypeMongoDBAtlasInstanceEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('instance') }})
+ instance: SourceMongodbInstanceTypeMongoDBAtlasInstance = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('instance') }})
-class SourceMongodbInstanceTypeReplicaSetInstanceEnum(str, Enum):
+class SourceMongodbInstanceTypeReplicaSetInstance(str, Enum):
REPLICA = 'replica'
@@ -29,13 +29,13 @@ class SourceMongodbInstanceTypeReplicaSetInstanceEnum(str, Enum):
class SourceMongodbInstanceTypeReplicaSet:
r"""The MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default."""
- instance: SourceMongodbInstanceTypeReplicaSetInstanceEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('instance') }})
+ instance: SourceMongodbInstanceTypeReplicaSetInstance = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('instance') }})
server_addresses: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('server_addresses') }})
r"""The members of a replica set. Please specify `host`:`port` of each member separated by comma."""
replica_set: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('replica_set'), 'exclude': lambda f: f is None }})
r"""A replica set in MongoDB is a group of mongod processes that maintain the same data set."""
-class SourceMongodbInstanceTypeStandaloneMongoDbInstanceInstanceEnum(str, Enum):
+class SourceMongodbInstanceTypeStandaloneMongoDbInstanceInstance(str, Enum):
STANDALONE = 'standalone'
@@ -46,11 +46,11 @@ class SourceMongodbInstanceTypeStandaloneMongoDbInstance:
host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('host') }})
r"""The host name of the Mongo database."""
- instance: SourceMongodbInstanceTypeStandaloneMongoDbInstanceInstanceEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('instance') }})
+ instance: SourceMongodbInstanceTypeStandaloneMongoDbInstanceInstance = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('instance') }})
port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('port') }})
r"""The port of the Mongo database."""
-class SourceMongodbMongodbEnum(str, Enum):
+class SourceMongodbMongodb(str, Enum):
MONGODB = 'mongodb'
@@ -61,7 +61,7 @@ class SourceMongodb:
database: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('database') }})
r"""The database you want to replicate."""
- source_type: SourceMongodbMongodbEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceMongodbMongodb = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
auth_source: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_source'), 'exclude': lambda f: f is None }})
r"""The authentication source where the user information is stored."""
instance_type: Optional[Any] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('instance_type'), 'exclude': lambda f: f is None }})
diff --git a/src/airbyte/models/shared/source_mssql.py b/src/airbyte/models/shared/source_mssql.py
index 3b16ea78..4e401df3 100755
--- a/src/airbyte/models/shared/source_mssql.py
+++ b/src/airbyte/models/shared/source_mssql.py
@@ -7,15 +7,15 @@
from enum import Enum
from typing import Any, Optional
-class SourceMssqlReplicationMethodLogicalReplicationCDCDataToSyncEnum(str, Enum):
+class SourceMssqlReplicationMethodLogicalReplicationCDCDataToSync(str, Enum):
r"""What data should be synced under the CDC. \\"Existing and New\\" will read existing data as a snapshot, and sync new changes through CDC. \\"New Changes Only\\" will skip the initial snapshot, and only sync new changes through CDC."""
EXISTING_AND_NEW = 'Existing and New'
NEW_CHANGES_ONLY = 'New Changes Only'
-class SourceMssqlReplicationMethodLogicalReplicationCDCMethodEnum(str, Enum):
+class SourceMssqlReplicationMethodLogicalReplicationCDCMethod(str, Enum):
CDC = 'CDC'
-class SourceMssqlReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevelEnum(str, Enum):
+class SourceMssqlReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel(str, Enum):
r"""Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. If you choose the \\"Snapshot\\" level, you must enable the snapshot isolation mode on the database."""
SNAPSHOT = 'Snapshot'
READ_COMMITTED = 'Read Committed'
@@ -26,15 +26,15 @@ class SourceMssqlReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationL
class SourceMssqlReplicationMethodLogicalReplicationCDC:
r"""CDC uses {TBC} to detect inserts, updates, and deletes. This needs to be configured on the source database itself."""
- method: SourceMssqlReplicationMethodLogicalReplicationCDCMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
- data_to_sync: Optional[SourceMssqlReplicationMethodLogicalReplicationCDCDataToSyncEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('data_to_sync'), 'exclude': lambda f: f is None }})
+ method: SourceMssqlReplicationMethodLogicalReplicationCDCMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
+ data_to_sync: Optional[SourceMssqlReplicationMethodLogicalReplicationCDCDataToSync] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('data_to_sync'), 'exclude': lambda f: f is None }})
r"""What data should be synced under the CDC. \\"Existing and New\\" will read existing data as a snapshot, and sync new changes through CDC. \\"New Changes Only\\" will skip the initial snapshot, and only sync new changes through CDC."""
initial_waiting_seconds: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('initial_waiting_seconds'), 'exclude': lambda f: f is None }})
r"""The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time."""
- snapshot_isolation: Optional[SourceMssqlReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevelEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('snapshot_isolation'), 'exclude': lambda f: f is None }})
+ snapshot_isolation: Optional[SourceMssqlReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('snapshot_isolation'), 'exclude': lambda f: f is None }})
r"""Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. If you choose the \\"Snapshot\\" level, you must enable the snapshot isolation mode on the database."""
-class SourceMssqlReplicationMethodStandardMethodEnum(str, Enum):
+class SourceMssqlReplicationMethodStandardMethod(str, Enum):
STANDARD = 'STANDARD'
@@ -43,12 +43,12 @@ class SourceMssqlReplicationMethodStandardMethodEnum(str, Enum):
class SourceMssqlReplicationMethodStandard:
r"""Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally."""
- method: SourceMssqlReplicationMethodStandardMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
+ method: SourceMssqlReplicationMethodStandardMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
-class SourceMssqlMssqlEnum(str, Enum):
+class SourceMssqlMssql(str, Enum):
MSSQL = 'mssql'
-class SourceMssqlSslMethodEncryptedVerifyCertificateSslMethodEnum(str, Enum):
+class SourceMssqlSslMethodEncryptedVerifyCertificateSslMethod(str, Enum):
ENCRYPTED_VERIFY_CERTIFICATE = 'encrypted_verify_certificate'
@@ -57,11 +57,11 @@ class SourceMssqlSslMethodEncryptedVerifyCertificateSslMethodEnum(str, Enum):
class SourceMssqlSslMethodEncryptedVerifyCertificate:
r"""Verify and use the certificate provided by the server."""
- ssl_method: SourceMssqlSslMethodEncryptedVerifyCertificateSslMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl_method') }})
+ ssl_method: SourceMssqlSslMethodEncryptedVerifyCertificateSslMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl_method') }})
host_name_in_certificate: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('hostNameInCertificate'), 'exclude': lambda f: f is None }})
r"""Specifies the host name of the server. The value of this property must match the subject property of the certificate."""
-class SourceMssqlSslMethodEncryptedTrustServerCertificateSslMethodEnum(str, Enum):
+class SourceMssqlSslMethodEncryptedTrustServerCertificateSslMethod(str, Enum):
ENCRYPTED_TRUST_SERVER_CERTIFICATE = 'encrypted_trust_server_certificate'
@@ -70,9 +70,9 @@ class SourceMssqlSslMethodEncryptedTrustServerCertificateSslMethodEnum(str, Enum
class SourceMssqlSslMethodEncryptedTrustServerCertificate:
r"""Use the certificate provided by the server without verification. (For testing purposes only!)"""
- ssl_method: SourceMssqlSslMethodEncryptedTrustServerCertificateSslMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl_method') }})
+ ssl_method: SourceMssqlSslMethodEncryptedTrustServerCertificateSslMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl_method') }})
-class SourceMssqlTunnelMethodPasswordAuthenticationTunnelMethodEnum(str, Enum):
+class SourceMssqlTunnelMethodPasswordAuthenticationTunnelMethod(str, Enum):
r"""Connect through a jump server tunnel host using username and password authentication"""
SSH_PASSWORD_AUTH = 'SSH_PASSWORD_AUTH'
@@ -84,7 +84,7 @@ class SourceMssqlTunnelMethodPasswordAuthentication:
tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }})
r"""Hostname of the jump server host that allows inbound ssh tunnel."""
- tunnel_method: SourceMssqlTunnelMethodPasswordAuthenticationTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: SourceMssqlTunnelMethodPasswordAuthenticationTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""Connect through a jump server tunnel host using username and password authentication"""
tunnel_port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port') }})
r"""Port on the proxy/jump server that accepts inbound ssh connections."""
@@ -93,7 +93,7 @@ class SourceMssqlTunnelMethodPasswordAuthentication:
tunnel_user_password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user_password') }})
r"""OS-level password for logging into the jump server host"""
-class SourceMssqlTunnelMethodSSHKeyAuthenticationTunnelMethodEnum(str, Enum):
+class SourceMssqlTunnelMethodSSHKeyAuthenticationTunnelMethod(str, Enum):
r"""Connect through a jump server tunnel host using username and ssh key"""
SSH_KEY_AUTH = 'SSH_KEY_AUTH'
@@ -107,14 +107,14 @@ class SourceMssqlTunnelMethodSSHKeyAuthentication:
r"""OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )"""
tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }})
r"""Hostname of the jump server host that allows inbound ssh tunnel."""
- tunnel_method: SourceMssqlTunnelMethodSSHKeyAuthenticationTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: SourceMssqlTunnelMethodSSHKeyAuthenticationTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""Connect through a jump server tunnel host using username and ssh key"""
tunnel_port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port') }})
r"""Port on the proxy/jump server that accepts inbound ssh connections."""
tunnel_user: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user') }})
r"""OS-level username for logging into the jump server host."""
-class SourceMssqlTunnelMethodNoTunnelTunnelMethodEnum(str, Enum):
+class SourceMssqlTunnelMethodNoTunnelTunnelMethod(str, Enum):
r"""No ssh tunnel needed to connect to database"""
NO_TUNNEL = 'NO_TUNNEL'
@@ -124,7 +124,7 @@ class SourceMssqlTunnelMethodNoTunnelTunnelMethodEnum(str, Enum):
class SourceMssqlTunnelMethodNoTunnel:
r"""Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use."""
- tunnel_method: SourceMssqlTunnelMethodNoTunnelTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: SourceMssqlTunnelMethodNoTunnelTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""No ssh tunnel needed to connect to database"""
@@ -139,7 +139,7 @@ class SourceMssql:
r"""The hostname of the database."""
port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('port') }})
r"""The port of the database."""
- source_type: SourceMssqlMssqlEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceMssqlMssql = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }})
r"""The username which is used to access the database."""
jdbc_url_params: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('jdbc_url_params'), 'exclude': lambda f: f is None }})
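# --- Hypothetical usage sketch (not part of the diff above): the nested MSSQL
# option objects whose enums were renamed in this file. All class and field
# names come from source_mssql.py; the particular combination shown is an
# assumption, not a recommendation.
from airbyte.models import shared

cdc = shared.SourceMssqlReplicationMethodLogicalReplicationCDC(
    method=shared.SourceMssqlReplicationMethodLogicalReplicationCDCMethod.CDC,
    data_to_sync=shared.SourceMssqlReplicationMethodLogicalReplicationCDCDataToSync.EXISTING_AND_NEW,
    initial_waiting_seconds=300,
)
tunnel = shared.SourceMssqlTunnelMethodNoTunnel(
    tunnel_method=shared.SourceMssqlTunnelMethodNoTunnelTunnelMethod.NO_TUNNEL,
)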
diff --git a/src/airbyte/models/shared/source_my_hours.py b/src/airbyte/models/shared/source_my_hours.py
index 1d5c6a16..73412b8e 100755
--- a/src/airbyte/models/shared/source_my_hours.py
+++ b/src/airbyte/models/shared/source_my_hours.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class SourceMyHoursMyHoursEnum(str, Enum):
+class SourceMyHoursMyHours(str, Enum):
MY_HOURS = 'my-hours'
@@ -20,7 +20,7 @@ class SourceMyHours:
r"""Your My Hours username"""
password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password') }})
r"""The password associated to the username"""
- source_type: SourceMyHoursMyHoursEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceMyHoursMyHours = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date') }})
r"""Start date for collecting time logs"""
logs_batch_size: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('logs_batch_size'), 'exclude': lambda f: f is None }})
diff --git a/src/airbyte/models/shared/source_mysql.py b/src/airbyte/models/shared/source_mysql.py
index 4d1b5204..126b5f23 100755
--- a/src/airbyte/models/shared/source_mysql.py
+++ b/src/airbyte/models/shared/source_mysql.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Any, Optional
-class SourceMysqlReplicationMethodLogicalReplicationCDCMethodEnum(str, Enum):
+class SourceMysqlReplicationMethodLogicalReplicationCDCMethod(str, Enum):
CDC = 'CDC'
@@ -16,13 +16,13 @@ class SourceMysqlReplicationMethodLogicalReplicationCDCMethodEnum(str, Enum):
class SourceMysqlReplicationMethodLogicalReplicationCDC:
r"""CDC uses the Binlog to detect inserts, updates, and deletes. This needs to be configured on the source database itself."""
- method: SourceMysqlReplicationMethodLogicalReplicationCDCMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
+ method: SourceMysqlReplicationMethodLogicalReplicationCDCMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
initial_waiting_seconds: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('initial_waiting_seconds'), 'exclude': lambda f: f is None }})
r"""The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time."""
server_time_zone: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('server_time_zone'), 'exclude': lambda f: f is None }})
r"""Enter the configured MySQL server timezone. This should only be done if the configured timezone in your MySQL instance does not conform to IANNA standard."""
-class SourceMysqlReplicationMethodStandardMethodEnum(str, Enum):
+class SourceMysqlReplicationMethodStandardMethod(str, Enum):
STANDARD = 'STANDARD'
@@ -31,12 +31,12 @@ class SourceMysqlReplicationMethodStandardMethodEnum(str, Enum):
class SourceMysqlReplicationMethodStandard:
r"""Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally."""
- method: SourceMysqlReplicationMethodStandardMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
+ method: SourceMysqlReplicationMethodStandardMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
-class SourceMysqlMysqlEnum(str, Enum):
+class SourceMysqlMysql(str, Enum):
MYSQL = 'mysql'
-class SourceMysqlSslModeVerifyIdentityModeEnum(str, Enum):
+class SourceMysqlSslModeVerifyIdentityMode(str, Enum):
VERIFY_IDENTITY = 'verify_identity'
@@ -47,7 +47,7 @@ class SourceMysqlSslModeVerifyIdentity:
ca_certificate: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ca_certificate') }})
r"""CA certificate"""
- mode: SourceMysqlSslModeVerifyIdentityModeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode') }})
+ mode: SourceMysqlSslModeVerifyIdentityMode = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode') }})
client_certificate: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_certificate'), 'exclude': lambda f: f is None }})
r"""Client certificate (this is not a required field, but if you want to use it, you will need to add the Client key as well)"""
client_key: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_key'), 'exclude': lambda f: f is None }})
@@ -55,7 +55,7 @@ class SourceMysqlSslModeVerifyIdentity:
client_key_password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_key_password'), 'exclude': lambda f: f is None }})
r"""Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically."""
-class SourceMysqlSslModeVerifyCAModeEnum(str, Enum):
+class SourceMysqlSslModeVerifyCAMode(str, Enum):
VERIFY_CA = 'verify_ca'
@@ -66,7 +66,7 @@ class SourceMysqlSslModeVerifyCA:
ca_certificate: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ca_certificate') }})
r"""CA certificate"""
- mode: SourceMysqlSslModeVerifyCAModeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode') }})
+ mode: SourceMysqlSslModeVerifyCAMode = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode') }})
client_certificate: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_certificate'), 'exclude': lambda f: f is None }})
r"""Client certificate (this is not a required field, but if you want to use it, you will need to add the Client key as well)"""
client_key: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_key'), 'exclude': lambda f: f is None }})
@@ -74,7 +74,7 @@ class SourceMysqlSslModeVerifyCA:
client_key_password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_key_password'), 'exclude': lambda f: f is None }})
r"""Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically."""
-class SourceMysqlSslModeRequiredModeEnum(str, Enum):
+class SourceMysqlSslModeRequiredMode(str, Enum):
REQUIRED = 'required'
@@ -83,9 +83,9 @@ class SourceMysqlSslModeRequiredModeEnum(str, Enum):
class SourceMysqlSslModeRequired:
r"""Always connect with SSL. If the MySQL server doesn’t support SSL, the connection will not be established. Certificate Authority (CA) and Hostname are not verified."""
- mode: SourceMysqlSslModeRequiredModeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode') }})
+ mode: SourceMysqlSslModeRequiredMode = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode') }})
-class SourceMysqlSslModePreferredModeEnum(str, Enum):
+class SourceMysqlSslModePreferredMode(str, Enum):
PREFERRED = 'preferred'
@@ -94,9 +94,9 @@ class SourceMysqlSslModePreferredModeEnum(str, Enum):
class SourceMysqlSslModePreferred:
r"""Automatically attempt SSL connection. If the MySQL server does not support SSL, continue with a regular connection."""
- mode: SourceMysqlSslModePreferredModeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode') }})
+ mode: SourceMysqlSslModePreferredMode = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode') }})
-class SourceMysqlTunnelMethodPasswordAuthenticationTunnelMethodEnum(str, Enum):
+class SourceMysqlTunnelMethodPasswordAuthenticationTunnelMethod(str, Enum):
r"""Connect through a jump server tunnel host using username and password authentication"""
SSH_PASSWORD_AUTH = 'SSH_PASSWORD_AUTH'
@@ -108,7 +108,7 @@ class SourceMysqlTunnelMethodPasswordAuthentication:
tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }})
r"""Hostname of the jump server host that allows inbound ssh tunnel."""
- tunnel_method: SourceMysqlTunnelMethodPasswordAuthenticationTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: SourceMysqlTunnelMethodPasswordAuthenticationTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""Connect through a jump server tunnel host using username and password authentication"""
tunnel_port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port') }})
r"""Port on the proxy/jump server that accepts inbound ssh connections."""
@@ -117,7 +117,7 @@ class SourceMysqlTunnelMethodPasswordAuthentication:
tunnel_user_password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user_password') }})
r"""OS-level password for logging into the jump server host"""
-class SourceMysqlTunnelMethodSSHKeyAuthenticationTunnelMethodEnum(str, Enum):
+class SourceMysqlTunnelMethodSSHKeyAuthenticationTunnelMethod(str, Enum):
r"""Connect through a jump server tunnel host using username and ssh key"""
SSH_KEY_AUTH = 'SSH_KEY_AUTH'
@@ -131,14 +131,14 @@ class SourceMysqlTunnelMethodSSHKeyAuthentication:
r"""OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )"""
tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }})
r"""Hostname of the jump server host that allows inbound ssh tunnel."""
- tunnel_method: SourceMysqlTunnelMethodSSHKeyAuthenticationTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: SourceMysqlTunnelMethodSSHKeyAuthenticationTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""Connect through a jump server tunnel host using username and ssh key"""
tunnel_port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port') }})
r"""Port on the proxy/jump server that accepts inbound ssh connections."""
tunnel_user: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user') }})
r"""OS-level username for logging into the jump server host."""
-class SourceMysqlTunnelMethodNoTunnelTunnelMethodEnum(str, Enum):
+class SourceMysqlTunnelMethodNoTunnelTunnelMethod(str, Enum):
r"""No ssh tunnel needed to connect to database"""
NO_TUNNEL = 'NO_TUNNEL'
@@ -148,7 +148,7 @@ class SourceMysqlTunnelMethodNoTunnelTunnelMethodEnum(str, Enum):
class SourceMysqlTunnelMethodNoTunnel:
r"""Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use."""
- tunnel_method: SourceMysqlTunnelMethodNoTunnelTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: SourceMysqlTunnelMethodNoTunnelTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""No ssh tunnel needed to connect to database"""
@@ -165,7 +165,7 @@ class SourceMysql:
r"""The port to connect to."""
replication_method: Any = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('replication_method') }})
r"""Replication method to use for extracting data from the database."""
- source_type: SourceMysqlMysqlEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceMysqlMysql = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }})
r"""The username which is used to access the database."""
jdbc_url_params: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('jdbc_url_params'), 'exclude': lambda f: f is None }})
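# --- Hypothetical usage sketch (not part of the diff above): MySQL CDC
# replication plus an SSL mode, using the renamed enum classes. The timezone
# value is a placeholder; class and field names come from source_mysql.py.
from airbyte.models import shared

replication = shared.SourceMysqlReplicationMethodLogicalReplicationCDC(
    method=shared.SourceMysqlReplicationMethodLogicalReplicationCDCMethod.CDC,
    initial_waiting_seconds=300,
    server_time_zone='UTC',
)
ssl_mode = shared.SourceMysqlSslModeRequired(
    mode=shared.SourceMysqlSslModeRequiredMode.REQUIRED,
)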
diff --git a/src/airbyte/models/shared/source_netsuite.py b/src/airbyte/models/shared/source_netsuite.py
index 4b1e7673..812ad6c8 100755
--- a/src/airbyte/models/shared/source_netsuite.py
+++ b/src/airbyte/models/shared/source_netsuite.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class SourceNetsuiteNetsuiteEnum(str, Enum):
+class SourceNetsuiteNetsuite(str, Enum):
NETSUITE = 'netsuite'
@@ -22,7 +22,7 @@ class SourceNetsuite:
r"""Consumer secret associated with your integration"""
realm: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('realm') }})
r"""Netsuite realm e.g. 2344535, as for `production` or 2344535_SB1, as for the `sandbox`"""
- source_type: SourceNetsuiteNetsuiteEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceNetsuiteNetsuite = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_datetime: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_datetime') }})
r"""Starting point for your data replication, in format of \\"YYYY-MM-DDTHH:mm:ssZ\\" """
token_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('token_key') }})
diff --git a/src/airbyte/models/shared/source_notion.py b/src/airbyte/models/shared/source_notion.py
index 63561f68..6d7c70d3 100755
--- a/src/airbyte/models/shared/source_notion.py
+++ b/src/airbyte/models/shared/source_notion.py
@@ -10,7 +10,7 @@
from marshmallow import fields
from typing import Any, Optional
-class SourceNotionCredentialsAccessTokenAuthTypeEnum(str, Enum):
+class SourceNotionCredentialsAccessTokenAuthType(str, Enum):
TOKEN = 'token'
@@ -19,11 +19,11 @@ class SourceNotionCredentialsAccessTokenAuthTypeEnum(str, Enum):
class SourceNotionCredentialsAccessToken:
r"""Pick an authentication method."""
- auth_type: SourceNotionCredentialsAccessTokenAuthTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }})
+ auth_type: SourceNotionCredentialsAccessTokenAuthType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }})
token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('token') }})
r"""Notion API access token, see the docs for more information on how to obtain this token."""
-class SourceNotionCredentialsOAuth20AuthTypeEnum(str, Enum):
+class SourceNotionCredentialsOAuth20AuthType(str, Enum):
O_AUTH2_0 = 'OAuth2.0'
@@ -34,13 +34,13 @@ class SourceNotionCredentialsOAuth20:
access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }})
r"""Access Token is a token you received by complete the OauthWebFlow of Notion."""
- auth_type: SourceNotionCredentialsOAuth20AuthTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }})
+ auth_type: SourceNotionCredentialsOAuth20AuthType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }})
client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }})
r"""The ClientID of your Notion integration."""
client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }})
r"""The ClientSecret of your Notion integration."""
-class SourceNotionNotionEnum(str, Enum):
+class SourceNotionNotion(str, Enum):
NOTION = 'notion'
@@ -49,7 +49,7 @@ class SourceNotionNotionEnum(str, Enum):
class SourceNotion:
r"""The values required to configure the source."""
- source_type: SourceNotionNotionEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceNotionNotion = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso') }})
r"""UTC date and time in the format 2017-01-25T00:00:00.000Z. Any data before this date will not be replicated."""
credentials: Optional[Any] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials'), 'exclude': lambda f: f is None }})
diff --git a/src/airbyte/models/shared/source_nytimes.py b/src/airbyte/models/shared/source_nytimes.py
index d3328b2a..1608f2e5 100755
--- a/src/airbyte/models/shared/source_nytimes.py
+++ b/src/airbyte/models/shared/source_nytimes.py
@@ -9,17 +9,17 @@
from marshmallow import fields
from typing import Optional
-class SourceNytimesPeriodUsedForMostPopularStreamsEnum(int, Enum):
+class SourceNytimesPeriodUsedForMostPopularStreams(int, Enum):
r"""Period of time (in days)"""
ONE = 1
SEVEN = 7
THIRTY = 30
-class SourceNytimesShareTypeUsedForMostPopularSharedStreamEnum(str, Enum):
+class SourceNytimesShareTypeUsedForMostPopularSharedStream(str, Enum):
r"""Share Type"""
FACEBOOK = 'facebook'
-class SourceNytimesNytimesEnum(str, Enum):
+class SourceNytimesNytimes(str, Enum):
NYTIMES = 'nytimes'
@@ -30,13 +30,13 @@ class SourceNytimes:
api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }})
r"""API Key"""
- period: SourceNytimesPeriodUsedForMostPopularStreamsEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('period') }})
+ period: SourceNytimesPeriodUsedForMostPopularStreams = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('period') }})
r"""Period of time (in days)"""
- source_type: SourceNytimesNytimesEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceNytimesNytimes = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: date = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.dateisoformat(False), 'decoder': utils.datefromisoformat, 'mm_field': fields.DateTime(format='iso') }})
r"""Start date to begin the article retrieval (format YYYY-MM)"""
end_date: Optional[date] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('end_date'), 'encoder': utils.dateisoformat(True), 'decoder': utils.datefromisoformat, 'mm_field': fields.DateTime(format='iso'), 'exclude': lambda f: f is None }})
r"""End date to stop the article retrieval (format YYYY-MM)"""
- share_type: Optional[SourceNytimesShareTypeUsedForMostPopularSharedStreamEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('share_type'), 'exclude': lambda f: f is None }})
+ share_type: Optional[SourceNytimesShareTypeUsedForMostPopularSharedStream] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('share_type'), 'exclude': lambda f: f is None }})
r"""Share Type"""
\ No newline at end of file
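# --- Hypothetical usage sketch (not part of the diff above): the Nytimes
# source uses an int-valued enum for `period`. The key and dates are
# placeholders; class and field names come from source_nytimes.py.
from datetime import date
from airbyte.models import shared

source = shared.SourceNytimes(
    api_key='<NYT_API_KEY>',
    period=shared.SourceNytimesPeriodUsedForMostPopularStreams.SEVEN,
    source_type=shared.SourceNytimesNytimes.NYTIMES,
    start_date=date(2022, 1, 1),
    share_type=shared.SourceNytimesShareTypeUsedForMostPopularSharedStream.FACEBOOK,
)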
diff --git a/src/airbyte/models/shared/source_okta.py b/src/airbyte/models/shared/source_okta.py
index 27ea8c59..5f343215 100755
--- a/src/airbyte/models/shared/source_okta.py
+++ b/src/airbyte/models/shared/source_okta.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Any, Optional
-class SourceOktaCredentialsAPITokenAuthTypeEnum(str, Enum):
+class SourceOktaCredentialsAPITokenAuthType(str, Enum):
API_TOKEN = 'api_token'
@@ -17,9 +17,9 @@ class SourceOktaCredentialsAPIToken:
api_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_token') }})
r"""An Okta token. See the docs for instructions on how to generate it."""
- auth_type: SourceOktaCredentialsAPITokenAuthTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }})
+ auth_type: SourceOktaCredentialsAPITokenAuthType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }})
-class SourceOktaCredentialsOAuth20AuthTypeEnum(str, Enum):
+class SourceOktaCredentialsOAuth20AuthType(str, Enum):
OAUTH2_0 = 'oauth2.0'
@@ -27,7 +27,7 @@ class SourceOktaCredentialsOAuth20AuthTypeEnum(str, Enum):
@dataclasses.dataclass
class SourceOktaCredentialsOAuth20:
- auth_type: SourceOktaCredentialsOAuth20AuthTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }})
+ auth_type: SourceOktaCredentialsOAuth20AuthType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }})
client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }})
r"""The Client ID of your OAuth application."""
client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }})
@@ -35,7 +35,7 @@ class SourceOktaCredentialsOAuth20:
refresh_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('refresh_token') }})
r"""Refresh Token to obtain new Access Token, when it's expired."""
-class SourceOktaOktaEnum(str, Enum):
+class SourceOktaOkta(str, Enum):
OKTA = 'okta'
@@ -44,7 +44,7 @@ class SourceOktaOktaEnum(str, Enum):
class SourceOkta:
r"""The values required to configure the source."""
- source_type: SourceOktaOktaEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceOktaOkta = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
credentials: Optional[Any] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials'), 'exclude': lambda f: f is None }})
domain: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('domain'), 'exclude': lambda f: f is None }})
r"""The Okta domain. See the docs for instructions on how to find it."""
diff --git a/src/airbyte/models/shared/source_omnisend.py b/src/airbyte/models/shared/source_omnisend.py
index b26b0020..0b98bc0a 100755
--- a/src/airbyte/models/shared/source_omnisend.py
+++ b/src/airbyte/models/shared/source_omnisend.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceOmnisendOmnisendEnum(str, Enum):
+class SourceOmnisendOmnisend(str, Enum):
OMNISEND = 'omnisend'
@@ -17,5 +17,5 @@ class SourceOmnisend:
api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }})
r"""API Key"""
- source_type: SourceOmnisendOmnisendEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceOmnisendOmnisend = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_onesignal.py b/src/airbyte/models/shared/source_onesignal.py
index 52b910a3..6bf241f1 100755
--- a/src/airbyte/models/shared/source_onesignal.py
+++ b/src/airbyte/models/shared/source_onesignal.py
@@ -19,7 +19,7 @@ class SourceOnesignalApplications:
app_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('app_id') }})
app_name: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('app_name'), 'exclude': lambda f: f is None }})
-class SourceOnesignalOnesignalEnum(str, Enum):
+class SourceOnesignalOnesignal(str, Enum):
ONESIGNAL = 'onesignal'
@@ -32,7 +32,7 @@ class SourceOnesignal:
r"""Applications keys, see the docs for more information on how to obtain this data"""
outcome_names: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('outcome_names') }})
r"""Comma-separated list of names and the value (sum/count) for the returned outcome data. See the docs for more details"""
- source_type: SourceOnesignalOnesignalEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceOnesignalOnesignal = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso') }})
r"""The date from which you'd like to replicate data for OneSignal API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated."""
user_auth_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('user_auth_key') }})
diff --git a/src/airbyte/models/shared/source_openweather.py b/src/airbyte/models/shared/source_openweather.py
index 85a89b03..4fa83718 100755
--- a/src/airbyte/models/shared/source_openweather.py
+++ b/src/airbyte/models/shared/source_openweather.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class SourceOpenweatherLanguageEnum(str, Enum):
+class SourceOpenweatherLanguage(str, Enum):
r"""You can use lang parameter to get the output in your language. The contents of the description field will be translated. See here for the list of supported languages."""
AF = 'af'
AL = 'al'
@@ -59,10 +59,10 @@ class SourceOpenweatherLanguageEnum(str, Enum):
ZH_TW = 'zh_tw'
ZU = 'zu'
-class SourceOpenweatherOpenweatherEnum(str, Enum):
+class SourceOpenweatherOpenweather(str, Enum):
OPENWEATHER = 'openweather'
-class SourceOpenweatherUnitsEnum(str, Enum):
+class SourceOpenweatherUnits(str, Enum):
r"""Units of measurement. standard, metric and imperial units are available. If you do not use the units parameter, standard units will be applied by default."""
STANDARD = 'standard'
METRIC = 'metric'
@@ -80,9 +80,9 @@ class SourceOpenweather:
r"""Latitude for which you want to get weather condition from. (min -90, max 90)"""
lon: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('lon') }})
r"""Longitude for which you want to get weather condition from. (min -180, max 180)"""
- source_type: SourceOpenweatherOpenweatherEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
- lang: Optional[SourceOpenweatherLanguageEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('lang'), 'exclude': lambda f: f is None }})
+ source_type: SourceOpenweatherOpenweather = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ lang: Optional[SourceOpenweatherLanguage] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('lang'), 'exclude': lambda f: f is None }})
r"""You can use lang parameter to get the output in your language. The contents of the description field will be translated. See here for the list of supported languages."""
- units: Optional[SourceOpenweatherUnitsEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('units'), 'exclude': lambda f: f is None }})
+ units: Optional[SourceOpenweatherUnits] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('units'), 'exclude': lambda f: f is None }})
r"""Units of measurement. standard, metric and imperial units are available. If you do not use the units parameter, standard units will be applied by default."""
\ No newline at end of file
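The renamed language and units enums stay optional on the model. A partial sketch using only the fields visible in this diff (the API-key field required by the full spec is elided):

```python
from airbyte.models import shared

# Partial sketch: the API-key field required by the full spec is elided here.
source = shared.SourceOpenweather(
    lat='45.7603',
    lon='4.835659',
    source_type=shared.SourceOpenweatherOpenweather.OPENWEATHER,
    lang=shared.SourceOpenweatherLanguage.AF,
    units=shared.SourceOpenweatherUnits.METRIC,
)
```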
diff --git a/src/airbyte/models/shared/source_oracle.py b/src/airbyte/models/shared/source_oracle.py
index 6dd63339..fd9ce387 100755
--- a/src/airbyte/models/shared/source_oracle.py
+++ b/src/airbyte/models/shared/source_oracle.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Any, Optional
-class SourceOracleConnectionDataSystemIDSIDConnectionTypeEnum(str, Enum):
+class SourceOracleConnectionDataSystemIDSIDConnectionType(str, Enum):
SID = 'sid'
@@ -17,9 +17,9 @@ class SourceOracleConnectionDataSystemIDSID:
r"""Use SID (Oracle System Identifier)"""
sid: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sid') }})
- connection_type: Optional[SourceOracleConnectionDataSystemIDSIDConnectionTypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('connection_type'), 'exclude': lambda f: f is None }})
+ connection_type: Optional[SourceOracleConnectionDataSystemIDSIDConnectionType] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('connection_type'), 'exclude': lambda f: f is None }})
-class SourceOracleConnectionDataServiceNameConnectionTypeEnum(str, Enum):
+class SourceOracleConnectionDataServiceNameConnectionType(str, Enum):
SERVICE_NAME = 'service_name'
@@ -29,9 +29,9 @@ class SourceOracleConnectionDataServiceName:
r"""Use service name"""
service_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('service_name') }})
- connection_type: Optional[SourceOracleConnectionDataServiceNameConnectionTypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('connection_type'), 'exclude': lambda f: f is None }})
+ connection_type: Optional[SourceOracleConnectionDataServiceNameConnectionType] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('connection_type'), 'exclude': lambda f: f is None }})
-class SourceOracleEncryptionTLSEncryptedVerifyCertificateEncryptionMethodEnum(str, Enum):
+class SourceOracleEncryptionTLSEncryptedVerifyCertificateEncryptionMethod(str, Enum):
ENCRYPTED_VERIFY_CERTIFICATE = 'encrypted_verify_certificate'
@@ -40,17 +40,17 @@ class SourceOracleEncryptionTLSEncryptedVerifyCertificateEncryptionMethodEnum(st
class SourceOracleEncryptionTLSEncryptedVerifyCertificate:
r"""Verify and use the certificate provided by the server."""
- encryption_method: SourceOracleEncryptionTLSEncryptedVerifyCertificateEncryptionMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption_method') }})
+ encryption_method: SourceOracleEncryptionTLSEncryptedVerifyCertificateEncryptionMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption_method') }})
ssl_certificate: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl_certificate') }})
r"""Privacy Enhanced Mail (PEM) files are concatenated certificate containers frequently used in certificate installations."""
-class SourceOracleEncryptionNativeNetworkEncryptionNNEEncryptionAlgorithmEnum(str, Enum):
+class SourceOracleEncryptionNativeNetworkEncryptionNNEEncryptionAlgorithm(str, Enum):
r"""This parameter defines what encryption algorithm is used."""
AES256 = 'AES256'
RC4_56 = 'RC4_56'
THREE_DES168 = '3DES168'
-class SourceOracleEncryptionNativeNetworkEncryptionNNEEncryptionMethodEnum(str, Enum):
+class SourceOracleEncryptionNativeNetworkEncryptionNNEEncryptionMethod(str, Enum):
CLIENT_NNE = 'client_nne'
@@ -59,14 +59,14 @@ class SourceOracleEncryptionNativeNetworkEncryptionNNEEncryptionMethodEnum(str,
class SourceOracleEncryptionNativeNetworkEncryptionNNE:
r"""The native network encryption gives you the ability to encrypt database connections, without the configuration overhead of TCP/IP and SSL/TLS and without the need to open and listen on different ports."""
- encryption_method: SourceOracleEncryptionNativeNetworkEncryptionNNEEncryptionMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption_method') }})
- encryption_algorithm: Optional[SourceOracleEncryptionNativeNetworkEncryptionNNEEncryptionAlgorithmEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption_algorithm'), 'exclude': lambda f: f is None }})
+ encryption_method: SourceOracleEncryptionNativeNetworkEncryptionNNEEncryptionMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption_method') }})
+ encryption_algorithm: Optional[SourceOracleEncryptionNativeNetworkEncryptionNNEEncryptionAlgorithm] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption_algorithm'), 'exclude': lambda f: f is None }})
r"""This parameter defines what encryption algorithm is used."""
-class SourceOracleOracleEnum(str, Enum):
+class SourceOracleOracle(str, Enum):
ORACLE = 'oracle'
-class SourceOracleTunnelMethodPasswordAuthenticationTunnelMethodEnum(str, Enum):
+class SourceOracleTunnelMethodPasswordAuthenticationTunnelMethod(str, Enum):
r"""Connect through a jump server tunnel host using username and password authentication"""
SSH_PASSWORD_AUTH = 'SSH_PASSWORD_AUTH'
@@ -78,7 +78,7 @@ class SourceOracleTunnelMethodPasswordAuthentication:
tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }})
r"""Hostname of the jump server host that allows inbound ssh tunnel."""
- tunnel_method: SourceOracleTunnelMethodPasswordAuthenticationTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: SourceOracleTunnelMethodPasswordAuthenticationTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""Connect through a jump server tunnel host using username and password authentication"""
tunnel_port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port') }})
r"""Port on the proxy/jump server that accepts inbound ssh connections."""
@@ -87,7 +87,7 @@ class SourceOracleTunnelMethodPasswordAuthentication:
tunnel_user_password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user_password') }})
r"""OS-level password for logging into the jump server host"""
-class SourceOracleTunnelMethodSSHKeyAuthenticationTunnelMethodEnum(str, Enum):
+class SourceOracleTunnelMethodSSHKeyAuthenticationTunnelMethod(str, Enum):
r"""Connect through a jump server tunnel host using username and ssh key"""
SSH_KEY_AUTH = 'SSH_KEY_AUTH'
@@ -101,14 +101,14 @@ class SourceOracleTunnelMethodSSHKeyAuthentication:
r"""OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )"""
tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }})
r"""Hostname of the jump server host that allows inbound ssh tunnel."""
- tunnel_method: SourceOracleTunnelMethodSSHKeyAuthenticationTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: SourceOracleTunnelMethodSSHKeyAuthenticationTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""Connect through a jump server tunnel host using username and ssh key"""
tunnel_port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port') }})
r"""Port on the proxy/jump server that accepts inbound ssh connections."""
tunnel_user: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user') }})
r"""OS-level username for logging into the jump server host."""
-class SourceOracleTunnelMethodNoTunnelTunnelMethodEnum(str, Enum):
+class SourceOracleTunnelMethodNoTunnelTunnelMethod(str, Enum):
r"""No ssh tunnel needed to connect to database"""
NO_TUNNEL = 'NO_TUNNEL'
@@ -118,7 +118,7 @@ class SourceOracleTunnelMethodNoTunnelTunnelMethodEnum(str, Enum):
class SourceOracleTunnelMethodNoTunnel:
r"""Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use."""
- tunnel_method: SourceOracleTunnelMethodNoTunnelTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: SourceOracleTunnelMethodNoTunnelTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""No ssh tunnel needed to connect to database"""
@@ -137,7 +137,7 @@ class SourceOracle:
1521 - Default listening port for client connections to the listener.
2484 - Recommended and officially registered listening port for client connections to the listener using TCP/IP with SSL
"""
- source_type: SourceOracleOracleEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceOracleOracle = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }})
r"""The username which is used to access the database."""
connection_data: Optional[Any] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('connection_data'), 'exclude': lambda f: f is None }})
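The Oracle source composes several of these renamed option types. The sub-objects below use only fields and members visible in this diff and plug into the corresponding `SourceOracle` fields (`connection_data` is typed `Optional[Any]`):

```python
from airbyte.models import shared

connection_data = shared.SourceOracleConnectionDataSystemIDSID(
    sid='ORCL',
    connection_type=shared.SourceOracleConnectionDataSystemIDSIDConnectionType.SID,
)
encryption = shared.SourceOracleEncryptionNativeNetworkEncryptionNNE(
    encryption_method=shared.SourceOracleEncryptionNativeNetworkEncryptionNNEEncryptionMethod.CLIENT_NNE,
    encryption_algorithm=shared.SourceOracleEncryptionNativeNetworkEncryptionNNEEncryptionAlgorithm.AES256,
)
source_type = shared.SourceOracleOracle.ORACLE
```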
diff --git a/src/airbyte/models/shared/source_orb.py b/src/airbyte/models/shared/source_orb.py
index 0c119066..0b7148d2 100755
--- a/src/airbyte/models/shared/source_orb.py
+++ b/src/airbyte/models/shared/source_orb.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class SourceOrbOrbEnum(str, Enum):
+class SourceOrbOrb(str, Enum):
ORB = 'orb'
@@ -18,7 +18,7 @@ class SourceOrb:
api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }})
r"""Orb API Key, issued from the Orb admin console."""
- source_type: SourceOrbOrbEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceOrbOrb = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date') }})
r"""UTC date and time in the format 2022-03-01T00:00:00Z. Any data with created_at before this data will not be synced. For Subscription Usage, this becomes the `timeframe_start` API parameter."""
lookback_window_days: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('lookback_window_days'), 'exclude': lambda f: f is None }})
diff --git a/src/airbyte/models/shared/source_orbit.py b/src/airbyte/models/shared/source_orbit.py
index 087d142a..d43f8c11 100755
--- a/src/airbyte/models/shared/source_orbit.py
+++ b/src/airbyte/models/shared/source_orbit.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class SourceOrbitOrbitEnum(str, Enum):
+class SourceOrbitOrbit(str, Enum):
ORBIT = 'orbit'
@@ -18,7 +18,7 @@ class SourceOrbit:
api_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_token') }})
r"""Authorizes you to work with Orbit workspaces associated with the token."""
- source_type: SourceOrbitOrbitEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceOrbitOrbit = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
workspace: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('workspace') }})
r"""The unique name of the workspace that your API token is associated with."""
start_date: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'exclude': lambda f: f is None }})
diff --git a/src/airbyte/models/shared/source_outreach.py b/src/airbyte/models/shared/source_outreach.py
index 15cbf24f..fe9b099c 100755
--- a/src/airbyte/models/shared/source_outreach.py
+++ b/src/airbyte/models/shared/source_outreach.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceOutreachOutreachEnum(str, Enum):
+class SourceOutreachOutreach(str, Enum):
OUTREACH = 'outreach'
@@ -23,7 +23,7 @@ class SourceOutreach:
r"""A Redirect URI is the location where the authorization server sends the user once the app has been successfully authorized and granted an authorization code or access token."""
refresh_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('refresh_token') }})
r"""The token for obtaining the new access token."""
- source_type: SourceOutreachOutreachEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceOutreachOutreach = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date') }})
r"""The date from which you'd like to replicate data for Outreach API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated."""
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_paypal_transaction.py b/src/airbyte/models/shared/source_paypal_transaction.py
index 14cfc2ea..b71bf7f8 100755
--- a/src/airbyte/models/shared/source_paypal_transaction.py
+++ b/src/airbyte/models/shared/source_paypal_transaction.py
@@ -10,7 +10,7 @@
from marshmallow import fields
from typing import Optional
-class SourcePaypalTransactionPaypalTransactionEnum(str, Enum):
+class SourcePaypalTransactionPaypalTransaction(str, Enum):
PAYPAL_TRANSACTION = 'paypal-transaction'
@@ -21,7 +21,7 @@ class SourcePaypalTransaction:
is_sandbox: bool = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('is_sandbox') }})
r"""Determines whether to use the sandbox or production environment."""
- source_type: SourcePaypalTransactionPaypalTransactionEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourcePaypalTransactionPaypalTransaction = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso') }})
r"""Start Date for data extraction in ISO format. Date must be in range from 3 years till 12 hrs before present time."""
client_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id'), 'exclude': lambda f: f is None }})
diff --git a/src/airbyte/models/shared/source_paystack.py b/src/airbyte/models/shared/source_paystack.py
index 82d24d69..461fb4c2 100755
--- a/src/airbyte/models/shared/source_paystack.py
+++ b/src/airbyte/models/shared/source_paystack.py
@@ -10,7 +10,7 @@
from marshmallow import fields
from typing import Optional
-class SourcePaystackPaystackEnum(str, Enum):
+class SourcePaystackPaystack(str, Enum):
PAYSTACK = 'paystack'
@@ -21,7 +21,7 @@ class SourcePaystack:
secret_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('secret_key') }})
r"""The Paystack API key (usually starts with 'sk_live_'; find yours here)."""
- source_type: SourcePaystackPaystackEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourcePaystackPaystack = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso') }})
r"""UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated."""
lookback_window_days: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('lookback_window_days'), 'exclude': lambda f: f is None }})
diff --git a/src/airbyte/models/shared/source_pendo.py b/src/airbyte/models/shared/source_pendo.py
index f25d89a1..f34486f8 100755
--- a/src/airbyte/models/shared/source_pendo.py
+++ b/src/airbyte/models/shared/source_pendo.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourcePendoPendoEnum(str, Enum):
+class SourcePendoPendo(str, Enum):
PENDO = 'pendo'
@@ -16,5 +16,5 @@ class SourcePendo:
r"""The values required to configure the source."""
api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }})
- source_type: SourcePendoPendoEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourcePendoPendo = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_persistiq.py b/src/airbyte/models/shared/source_persistiq.py
index 10676aca..689d49c1 100755
--- a/src/airbyte/models/shared/source_persistiq.py
+++ b/src/airbyte/models/shared/source_persistiq.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourcePersistiqPersistiqEnum(str, Enum):
+class SourcePersistiqPersistiq(str, Enum):
PERSISTIQ = 'persistiq'
@@ -17,5 +17,5 @@ class SourcePersistiq:
api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }})
r"""PersistIq API Key. See the docs for more information on where to find that key."""
- source_type: SourcePersistiqPersistiqEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourcePersistiqPersistiq = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_pexels_api.py b/src/airbyte/models/shared/source_pexels_api.py
index 8d111845..29f487c6 100755
--- a/src/airbyte/models/shared/source_pexels_api.py
+++ b/src/airbyte/models/shared/source_pexels_api.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class SourcePexelsAPIPexelsAPIEnum(str, Enum):
+class SourcePexelsAPIPexelsAPI(str, Enum):
PEXELS_API = 'pexels-api'
@@ -20,7 +20,7 @@ class SourcePexelsAPI:
r"""API key is required to access pexels api, For getting your's goto https://www.pexels.com/api/documentation and create account for free."""
query: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('query') }})
r"""Optional, the search query, Example Ocean, Tigers, Pears, etc."""
- source_type: SourcePexelsAPIPexelsAPIEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourcePexelsAPIPexelsAPI = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
color: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('color'), 'exclude': lambda f: f is None }})
r"""Optional, Desired photo color. Supported colors red, orange, yellow, green, turquoise, blue, violet, pink, brown, black, gray, white or any hexidecimal color code."""
locale: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('locale'), 'exclude': lambda f: f is None }})
diff --git a/src/airbyte/models/shared/source_pinterest.py b/src/airbyte/models/shared/source_pinterest.py
index 6e175766..c6030013 100755
--- a/src/airbyte/models/shared/source_pinterest.py
+++ b/src/airbyte/models/shared/source_pinterest.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Any, Optional
-class SourcePinterestCredentialsAccessTokenAuthMethodEnum(str, Enum):
+class SourcePinterestCredentialsAccessTokenAuthMethod(str, Enum):
ACCESS_TOKEN = 'access_token'
@@ -17,9 +17,9 @@ class SourcePinterestCredentialsAccessToken:
access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }})
r"""The Access Token to make authenticated requests."""
- auth_method: SourcePinterestCredentialsAccessTokenAuthMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_method') }})
+ auth_method: SourcePinterestCredentialsAccessTokenAuthMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_method') }})
-class SourcePinterestCredentialsOAuth20AuthMethodEnum(str, Enum):
+class SourcePinterestCredentialsOAuth20AuthMethod(str, Enum):
OAUTH2_0 = 'oauth2.0'
@@ -27,7 +27,7 @@ class SourcePinterestCredentialsOAuth20AuthMethodEnum(str, Enum):
@dataclasses.dataclass
class SourcePinterestCredentialsOAuth20:
- auth_method: SourcePinterestCredentialsOAuth20AuthMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_method') }})
+ auth_method: SourcePinterestCredentialsOAuth20AuthMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_method') }})
refresh_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('refresh_token') }})
r"""Refresh Token to obtain new Access Token, when it's expired."""
client_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id'), 'exclude': lambda f: f is None }})
@@ -35,10 +35,10 @@ class SourcePinterestCredentialsOAuth20:
client_secret: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret'), 'exclude': lambda f: f is None }})
r"""The Client Secret of your OAuth application."""
-class SourcePinterestPinterestEnum(str, Enum):
+class SourcePinterestPinterest(str, Enum):
PINTEREST = 'pinterest'
-class SourcePinterestStatusEnum(str, Enum):
+class SourcePinterestStatus(str, Enum):
ACTIVE = 'ACTIVE'
PAUSED = 'PAUSED'
ARCHIVED = 'ARCHIVED'
@@ -49,10 +49,10 @@ class SourcePinterestStatusEnum(str, Enum):
class SourcePinterest:
r"""The values required to configure the source."""
- source_type: SourcePinterestPinterestEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourcePinterestPinterest = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date') }})
r"""A date in the format YYYY-MM-DD. If you have not set a date, it would be defaulted to latest allowed date by api (89 days from today)."""
credentials: Optional[Any] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials'), 'exclude': lambda f: f is None }})
- status: Optional[list[SourcePinterestStatusEnum]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('status'), 'exclude': lambda f: f is None }})
+ status: Optional[list[SourcePinterestStatus]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('status'), 'exclude': lambda f: f is None }})
r"""Entity statuses based off of campaigns, ad_groups, and ads. If you do not have a status set, it will be ignored completely."""
\ No newline at end of file
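A sketch of the Pinterest model after the rename, showing the OAuth credentials variant and the `status` list of enum members (all names below appear in this diff):

```python
from airbyte.models import shared

creds = shared.SourcePinterestCredentialsOAuth20(
    auth_method=shared.SourcePinterestCredentialsOAuth20AuthMethod.OAUTH2_0,
    refresh_token='<refresh-token>',
)
source = shared.SourcePinterest(
    source_type=shared.SourcePinterestPinterest.PINTEREST,
    start_date='2022-07-28',
    credentials=creds,
    status=[shared.SourcePinterestStatus.ACTIVE, shared.SourcePinterestStatus.PAUSED],
)
```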
diff --git a/src/airbyte/models/shared/source_pipedrive.py b/src/airbyte/models/shared/source_pipedrive.py
index 89c999e5..81f8400a 100755
--- a/src/airbyte/models/shared/source_pipedrive.py
+++ b/src/airbyte/models/shared/source_pipedrive.py
@@ -10,7 +10,7 @@
from marshmallow import fields
from typing import Optional
-class SourcePipedriveAPIKeyAuthenticationAuthTypeEnum(str, Enum):
+class SourcePipedriveAPIKeyAuthenticationAuthType(str, Enum):
TOKEN = 'Token'
@@ -20,9 +20,9 @@ class SourcePipedriveAPIKeyAuthentication:
api_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_token') }})
r"""The Pipedrive API Token."""
- auth_type: SourcePipedriveAPIKeyAuthenticationAuthTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }})
+ auth_type: SourcePipedriveAPIKeyAuthenticationAuthType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }})
-class SourcePipedrivePipedriveEnum(str, Enum):
+class SourcePipedrivePipedrive(str, Enum):
PIPEDRIVE = 'pipedrive'
@@ -33,6 +33,6 @@ class SourcePipedrive:
replication_start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('replication_start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso') }})
r"""UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. When specified and not None, then stream will behave as incremental"""
- source_type: SourcePipedrivePipedriveEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourcePipedrivePipedrive = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
authorization: Optional[SourcePipedriveAPIKeyAuthentication] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('authorization'), 'exclude': lambda f: f is None }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_pocket.py b/src/airbyte/models/shared/source_pocket.py
index 8e0f5c0e..e6ec4fdf 100755
--- a/src/airbyte/models/shared/source_pocket.py
+++ b/src/airbyte/models/shared/source_pocket.py
@@ -7,28 +7,28 @@
from enum import Enum
from typing import Optional
-class SourcePocketContentTypeEnum(str, Enum):
+class SourcePocketContentType(str, Enum):
r"""Select the content type of the items to retrieve."""
ARTICLE = 'article'
VIDEO = 'video'
IMAGE = 'image'
-class SourcePocketDetailTypeEnum(str, Enum):
+class SourcePocketDetailType(str, Enum):
r"""Select the granularity of the information about each item."""
SIMPLE = 'simple'
COMPLETE = 'complete'
-class SourcePocketSortByEnum(str, Enum):
+class SourcePocketSortBy(str, Enum):
r"""Sort retrieved items by the given criteria."""
NEWEST = 'newest'
OLDEST = 'oldest'
TITLE = 'title'
SITE = 'site'
-class SourcePocketPocketEnum(str, Enum):
+class SourcePocketPocket(str, Enum):
POCKET = 'pocket'
-class SourcePocketStateEnum(str, Enum):
+class SourcePocketState(str, Enum):
r"""Select the state of the items to retrieve."""
UNREAD = 'unread'
ARCHIVE = 'archive'
@@ -44,10 +44,10 @@ class SourcePocket:
r"""The user's Pocket access token."""
consumer_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('consumer_key') }})
r"""Your application's Consumer Key."""
- source_type: SourcePocketPocketEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
- content_type: Optional[SourcePocketContentTypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('content_type'), 'exclude': lambda f: f is None }})
+ source_type: SourcePocketPocket = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ content_type: Optional[SourcePocketContentType] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('content_type'), 'exclude': lambda f: f is None }})
r"""Select the content type of the items to retrieve."""
- detail_type: Optional[SourcePocketDetailTypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('detail_type'), 'exclude': lambda f: f is None }})
+ detail_type: Optional[SourcePocketDetailType] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('detail_type'), 'exclude': lambda f: f is None }})
r"""Select the granularity of the information about each item."""
domain: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('domain'), 'exclude': lambda f: f is None }})
r"""Only return items from a particular `domain`."""
@@ -57,9 +57,9 @@ class SourcePocket:
r"""Only return items whose title or url contain the `search` string."""
since: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('since'), 'exclude': lambda f: f is None }})
r"""Only return items modified since the given timestamp."""
- sort: Optional[SourcePocketSortByEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sort'), 'exclude': lambda f: f is None }})
+ sort: Optional[SourcePocketSortBy] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sort'), 'exclude': lambda f: f is None }})
r"""Sort retrieved items by the given criteria."""
- state: Optional[SourcePocketStateEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('state'), 'exclude': lambda f: f is None }})
+ state: Optional[SourcePocketState] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('state'), 'exclude': lambda f: f is None }})
r"""Select the state of the items to retrieve."""
tag: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tag'), 'exclude': lambda f: f is None }})
r"""Return only items tagged with this tag name. Use _untagged_ for retrieving only untagged items."""
diff --git a/src/airbyte/models/shared/source_pokeapi.py b/src/airbyte/models/shared/source_pokeapi.py
index 59720a50..8269d43f 100755
--- a/src/airbyte/models/shared/source_pokeapi.py
+++ b/src/airbyte/models/shared/source_pokeapi.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourcePokeapiPokeapiEnum(str, Enum):
+class SourcePokeapiPokeapi(str, Enum):
POKEAPI = 'pokeapi'
@@ -17,5 +17,5 @@ class SourcePokeapi:
pokemon_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('pokemon_name') }})
r"""Pokemon requested from the API."""
- source_type: SourcePokeapiPokeapiEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourcePokeapiPokeapi = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_polygon_stock_api.py b/src/airbyte/models/shared/source_polygon_stock_api.py
index 7df39bfe..9226e9b3 100755
--- a/src/airbyte/models/shared/source_polygon_stock_api.py
+++ b/src/airbyte/models/shared/source_polygon_stock_api.py
@@ -9,7 +9,7 @@
from marshmallow import fields
from typing import Optional
-class SourcePolygonStockAPIPolygonStockAPIEnum(str, Enum):
+class SourcePolygonStockAPIPolygonStockAPI(str, Enum):
POLYGON_STOCK_API = 'polygon-stock-api'
@@ -24,7 +24,7 @@ class SourcePolygonStockAPI:
r"""The target date for the aggregate window."""
multiplier: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('multiplier') }})
r"""The size of the timespan multiplier."""
- source_type: SourcePolygonStockAPIPolygonStockAPIEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourcePolygonStockAPIPolygonStockAPI = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: date = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.dateisoformat(False), 'decoder': utils.datefromisoformat, 'mm_field': fields.DateTime(format='iso') }})
r"""The beginning date for the aggregate window."""
stocks_ticker: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('stocksTicker') }})
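A partial sketch with only the fields visible in this diff; the remaining required fields (API key, timespan, end date, ...) follow the same construction pattern:

```python
from datetime import date

from airbyte.models import shared

kwargs = dict(
    multiplier=1,
    source_type=shared.SourcePolygonStockAPIPolygonStockAPI.POLYGON_STOCK_API,
    start_date=date(2023, 1, 3),  # encoded via utils.dateisoformat
    stocks_ticker='IBM',
)
# source = shared.SourcePolygonStockAPI(**kwargs, ...)  # plus the elided fields
```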
diff --git a/src/airbyte/models/shared/source_postgres.py b/src/airbyte/models/shared/source_postgres.py
index f137ce1b..7a9be88b 100755
--- a/src/airbyte/models/shared/source_postgres.py
+++ b/src/airbyte/models/shared/source_postgres.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Any, Optional
-class SourcePostgresReplicationMethodStandardMethodEnum(str, Enum):
+class SourcePostgresReplicationMethodStandardMethod(str, Enum):
STANDARD = 'Standard'
@@ -16,12 +16,12 @@ class SourcePostgresReplicationMethodStandardMethodEnum(str, Enum):
class SourcePostgresReplicationMethodStandard:
r"""Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally."""
- method: SourcePostgresReplicationMethodStandardMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
+ method: SourcePostgresReplicationMethodStandardMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
-class SourcePostgresPostgresEnum(str, Enum):
+class SourcePostgresPostgres(str, Enum):
POSTGRES = 'postgres'
-class SourcePostgresTunnelMethodPasswordAuthenticationTunnelMethodEnum(str, Enum):
+class SourcePostgresTunnelMethodPasswordAuthenticationTunnelMethod(str, Enum):
r"""Connect through a jump server tunnel host using username and password authentication"""
SSH_PASSWORD_AUTH = 'SSH_PASSWORD_AUTH'
@@ -33,7 +33,7 @@ class SourcePostgresTunnelMethodPasswordAuthentication:
tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }})
r"""Hostname of the jump server host that allows inbound ssh tunnel."""
- tunnel_method: SourcePostgresTunnelMethodPasswordAuthenticationTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: SourcePostgresTunnelMethodPasswordAuthenticationTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""Connect through a jump server tunnel host using username and password authentication"""
tunnel_port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port') }})
r"""Port on the proxy/jump server that accepts inbound ssh connections."""
@@ -42,7 +42,7 @@ class SourcePostgresTunnelMethodPasswordAuthentication:
tunnel_user_password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user_password') }})
r"""OS-level password for logging into the jump server host"""
-class SourcePostgresTunnelMethodSSHKeyAuthenticationTunnelMethodEnum(str, Enum):
+class SourcePostgresTunnelMethodSSHKeyAuthenticationTunnelMethod(str, Enum):
r"""Connect through a jump server tunnel host using username and ssh key"""
SSH_KEY_AUTH = 'SSH_KEY_AUTH'
@@ -56,14 +56,14 @@ class SourcePostgresTunnelMethodSSHKeyAuthentication:
r"""OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )"""
tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }})
r"""Hostname of the jump server host that allows inbound ssh tunnel."""
- tunnel_method: SourcePostgresTunnelMethodSSHKeyAuthenticationTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: SourcePostgresTunnelMethodSSHKeyAuthenticationTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""Connect through a jump server tunnel host using username and ssh key"""
tunnel_port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port') }})
r"""Port on the proxy/jump server that accepts inbound ssh connections."""
tunnel_user: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user') }})
r"""OS-level username for logging into the jump server host."""
-class SourcePostgresTunnelMethodNoTunnelTunnelMethodEnum(str, Enum):
+class SourcePostgresTunnelMethodNoTunnelTunnelMethod(str, Enum):
r"""No ssh tunnel needed to connect to database"""
NO_TUNNEL = 'NO_TUNNEL'
@@ -73,7 +73,7 @@ class SourcePostgresTunnelMethodNoTunnelTunnelMethodEnum(str, Enum):
class SourcePostgresTunnelMethodNoTunnel:
r"""Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use."""
- tunnel_method: SourcePostgresTunnelMethodNoTunnelTunnelMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+ tunnel_method: SourcePostgresTunnelMethodNoTunnelTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
r"""No ssh tunnel needed to connect to database"""
@@ -88,7 +88,7 @@ class SourcePostgres:
r"""Hostname of the database."""
port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('port') }})
r"""Port of the database."""
- source_type: SourcePostgresPostgresEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourcePostgresPostgres = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }})
r"""Username to access the database."""
jdbc_url_params: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('jdbc_url_params'), 'exclude': lambda f: f is None }})
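The standard replication method and the no-tunnel option are fully constructible from the fields shown here; both objects plug into the corresponding `SourcePostgres` fields:

```python
from airbyte.models import shared

replication = shared.SourcePostgresReplicationMethodStandard(
    method=shared.SourcePostgresReplicationMethodStandardMethod.STANDARD,
)
tunnel = shared.SourcePostgresTunnelMethodNoTunnel(
    tunnel_method=shared.SourcePostgresTunnelMethodNoTunnelTunnelMethod.NO_TUNNEL,
)
```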
diff --git a/src/airbyte/models/shared/source_posthog.py b/src/airbyte/models/shared/source_posthog.py
index 2d4466e7..eefd3121 100755
--- a/src/airbyte/models/shared/source_posthog.py
+++ b/src/airbyte/models/shared/source_posthog.py
@@ -10,7 +10,7 @@
from marshmallow import fields
from typing import Optional
-class SourcePosthogPosthogEnum(str, Enum):
+class SourcePosthogPosthog(str, Enum):
POSTHOG = 'posthog'
@@ -21,7 +21,7 @@ class SourcePosthog:
api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }})
r"""API Key. See the docs for information on how to generate this key."""
- source_type: SourcePosthogPosthogEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourcePosthogPosthog = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso') }})
r"""The date from which you'd like to replicate the data. Any data before this date will not be replicated."""
base_url: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('base_url'), 'exclude': lambda f: f is None }})
diff --git a/src/airbyte/models/shared/source_postmarkapp.py b/src/airbyte/models/shared/source_postmarkapp.py
index 6d613ab6..493602e3 100755
--- a/src/airbyte/models/shared/source_postmarkapp.py
+++ b/src/airbyte/models/shared/source_postmarkapp.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourcePostmarkappPostmarkappEnum(str, Enum):
+class SourcePostmarkappPostmarkapp(str, Enum):
POSTMARKAPP = 'postmarkapp'
@@ -15,7 +15,7 @@ class SourcePostmarkappPostmarkappEnum(str, Enum):
class SourcePostmarkapp:
r"""The values required to configure the source."""
- source_type: SourcePostmarkappPostmarkappEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourcePostmarkappPostmarkapp = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
x_postmark_account_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('X-Postmark-Account-Token') }})
r"""API Key for account"""
x_postmark_server_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('X-Postmark-Server-Token') }})
diff --git a/src/airbyte/models/shared/source_prestashop.py b/src/airbyte/models/shared/source_prestashop.py
index 3258e7d3..86b3c0ff 100755
--- a/src/airbyte/models/shared/source_prestashop.py
+++ b/src/airbyte/models/shared/source_prestashop.py
@@ -8,7 +8,7 @@
from enum import Enum
from marshmallow import fields
-class SourcePrestashopPrestashopEnum(str, Enum):
+class SourcePrestashopPrestashop(str, Enum):
PRESTASHOP = 'prestashop'
@@ -19,7 +19,7 @@ class SourcePrestashop:
access_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_key') }})
r"""Your PrestaShop access key. See the docs for info on how to obtain this."""
- source_type: SourcePrestashopPrestashopEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourcePrestashopPrestashop = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: date = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.dateisoformat(False), 'decoder': utils.datefromisoformat, 'mm_field': fields.DateTime(format='iso') }})
r"""The Start date in the format YYYY-MM-DD."""
url: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('url') }})
diff --git a/src/airbyte/models/shared/source_public_apis.py b/src/airbyte/models/shared/source_public_apis.py
index 2fb0bd08..032e4c2e 100755
--- a/src/airbyte/models/shared/source_public_apis.py
+++ b/src/airbyte/models/shared/source_public_apis.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourcePublicApisPublicApisEnum(str, Enum):
+class SourcePublicApisPublicApis(str, Enum):
PUBLIC_APIS = 'public-apis'
@@ -15,5 +15,5 @@ class SourcePublicApisPublicApisEnum(str, Enum):
class SourcePublicApis:
r"""The values required to configure the source."""
- source_type: SourcePublicApisPublicApisEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourcePublicApisPublicApis = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_punk_api.py b/src/airbyte/models/shared/source_punk_api.py
index 223e2cbe..2d0e32e0 100755
--- a/src/airbyte/models/shared/source_punk_api.py
+++ b/src/airbyte/models/shared/source_punk_api.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class SourcePunkAPIPunkAPIEnum(str, Enum):
+class SourcePunkAPIPunkAPI(str, Enum):
PUNK_API = 'punk-api'
@@ -20,7 +20,7 @@ class SourcePunkAPI:
r"""To extract specific data with Unique ID"""
brewed_before: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('brewed_before') }})
r"""To extract specific data with Unique ID"""
- source_type: SourcePunkAPIPunkAPIEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourcePunkAPIPunkAPI = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('id'), 'exclude': lambda f: f is None }})
r"""To extract specific data with Unique ID"""
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_pypi.py b/src/airbyte/models/shared/source_pypi.py
index 08e0495d..5119c1fe 100755
--- a/src/airbyte/models/shared/source_pypi.py
+++ b/src/airbyte/models/shared/source_pypi.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class SourcePypiPypiEnum(str, Enum):
+class SourcePypiPypi(str, Enum):
PYPI = 'pypi'
@@ -18,7 +18,7 @@ class SourcePypi:
project_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('project_name') }})
r"""Name of the project/package. Can only be in lowercase with hyphen. This is the name used using pip command for installing the package."""
- source_type: SourcePypiPypiEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourcePypiPypi = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
version: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('version'), 'exclude': lambda f: f is None }})
r"""Version of the project/package. Use it to find a particular release instead of all releases."""
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_qualaroo.py b/src/airbyte/models/shared/source_qualaroo.py
index 3db5dba5..31807184 100755
--- a/src/airbyte/models/shared/source_qualaroo.py
+++ b/src/airbyte/models/shared/source_qualaroo.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class SourceQualarooQualarooEnum(str, Enum):
+class SourceQualarooQualaroo(str, Enum):
QUALAROO = 'qualaroo'
@@ -18,7 +18,7 @@ class SourceQualaroo:
key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('key') }})
r"""A Qualaroo token. See the docs for instructions on how to generate it."""
- source_type: SourceQualarooQualarooEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceQualarooQualaroo = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date') }})
r"""UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated."""
token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('token') }})
diff --git a/src/airbyte/models/shared/source_quickbooks.py b/src/airbyte/models/shared/source_quickbooks.py
index 8ec4e9aa..05dc382b 100755
--- a/src/airbyte/models/shared/source_quickbooks.py
+++ b/src/airbyte/models/shared/source_quickbooks.py
@@ -10,7 +10,7 @@
from marshmallow import fields
from typing import Any, Optional
-class SourceQuickbooksCredentialsOAuth20AuthTypeEnum(str, Enum):
+class SourceQuickbooksCredentialsOAuth20AuthType(str, Enum):
OAUTH2_0 = 'oauth2.0'
@@ -30,9 +30,9 @@ class SourceQuickbooksCredentialsOAuth20:
r"""A token used when refreshing the access token."""
token_expiry_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('token_expiry_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso') }})
r"""The date-time when the access token should be refreshed."""
- auth_type: Optional[SourceQuickbooksCredentialsOAuth20AuthTypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
+ auth_type: Optional[SourceQuickbooksCredentialsOAuth20AuthType] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
-class SourceQuickbooksQuickbooksEnum(str, Enum):
+class SourceQuickbooksQuickbooks(str, Enum):
QUICKBOOKS = 'quickbooks'
@@ -44,7 +44,7 @@ class SourceQuickbooks:
credentials: Any = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials') }})
sandbox: bool = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sandbox') }})
r"""Determines whether to use the sandbox or production environment."""
- source_type: SourceQuickbooksQuickbooksEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceQuickbooksQuickbooks = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso') }})
r"""The default value to use if no bookmark exists for an endpoint (rfc3339 date string). E.g, 2021-03-20T00:00:00+00:00. Any data before this date will not be replicated."""
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_railz.py b/src/airbyte/models/shared/source_railz.py
index 903766f5..ca9b0ecb 100755
--- a/src/airbyte/models/shared/source_railz.py
+++ b/src/airbyte/models/shared/source_railz.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceRailzRailzEnum(str, Enum):
+class SourceRailzRailz(str, Enum):
RAILZ = 'railz'
@@ -19,7 +19,7 @@ class SourceRailz:
r"""Client ID (client_id)"""
secret_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('secret_key') }})
r"""Secret key (secret_key)"""
- source_type: SourceRailzRailzEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceRailzRailz = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date') }})
r"""Start date"""
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_recharge.py b/src/airbyte/models/shared/source_recharge.py
index 9d9003fa..77fb2551 100755
--- a/src/airbyte/models/shared/source_recharge.py
+++ b/src/airbyte/models/shared/source_recharge.py
@@ -9,7 +9,7 @@
from enum import Enum
from marshmallow import fields
-class SourceRechargeRechargeEnum(str, Enum):
+class SourceRechargeRecharge(str, Enum):
RECHARGE = 'recharge'
@@ -20,7 +20,7 @@ class SourceRecharge:
access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }})
r"""The value of the Access Token generated. See the docs for more information."""
- source_type: SourceRechargeRechargeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceRechargeRecharge = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso') }})
r"""The date from which you'd like to replicate data for Recharge API, in the format YYYY-MM-DDT00:00:00Z. Any data before this date will not be replicated."""
\ No newline at end of file
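
For models whose fields are fully visible in the hunk, like this one, the enum class name is the only thing callers need to update; values and members are unchanged. A hedged sketch with a placeholder token:

```python
from datetime import datetime, timezone

from airbyte.models import shared

source = shared.SourceRecharge(
    access_token='<RECHARGE_ACCESS_TOKEN>',  # placeholder credential
    source_type=shared.SourceRechargeRecharge.RECHARGE,  # was SourceRechargeRechargeEnum
    # start_date is a datetime; the field metadata serializes it in ISO format
    start_date=datetime(2023, 1, 1, tzinfo=timezone.utc),
)
```
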
diff --git a/src/airbyte/models/shared/source_recreation.py b/src/airbyte/models/shared/source_recreation.py
index c35ee7c3..17b65542 100755
--- a/src/airbyte/models/shared/source_recreation.py
+++ b/src/airbyte/models/shared/source_recreation.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class SourceRecreationRecreationEnum(str, Enum):
+class SourceRecreationRecreation(str, Enum):
RECREATION = 'recreation'
@@ -18,6 +18,6 @@ class SourceRecreation:
apikey: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('apikey') }})
r"""API Key"""
- source_type: SourceRecreationRecreationEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceRecreationRecreation = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
query_campsites: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('query_campsites'), 'exclude': lambda f: f is None }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_recruitee.py b/src/airbyte/models/shared/source_recruitee.py
index 8a6f2543..b57a3cba 100755
--- a/src/airbyte/models/shared/source_recruitee.py
+++ b/src/airbyte/models/shared/source_recruitee.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceRecruiteeRecruiteeEnum(str, Enum):
+class SourceRecruiteeRecruitee(str, Enum):
RECRUITEE = 'recruitee'
@@ -19,5 +19,5 @@ class SourceRecruitee:
r"""Recruitee API Key. See here."""
company_id: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('company_id') }})
r"""Recruitee Company ID. You can also find this ID on the Recruitee API tokens page."""
- source_type: SourceRecruiteeRecruiteeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceRecruiteeRecruitee = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_recurly.py b/src/airbyte/models/shared/source_recurly.py
index e3a672c0..13672260 100755
--- a/src/airbyte/models/shared/source_recurly.py
+++ b/src/airbyte/models/shared/source_recurly.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class SourceRecurlyRecurlyEnum(str, Enum):
+class SourceRecurlyRecurly(str, Enum):
RECURLY = 'recurly'
@@ -18,7 +18,7 @@ class SourceRecurly:
api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }})
r"""Recurly API Key. See the docs for more information on how to generate this key."""
- source_type: SourceRecurlyRecurlyEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceRecurlyRecurly = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
begin_time: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('begin_time'), 'exclude': lambda f: f is None }})
r"""ISO8601 timestamp from which the replication from Recurly API will start from."""
end_time: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('end_time'), 'exclude': lambda f: f is None }})
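
The optional `begin_time`/`end_time` fields above are plain ISO8601 strings and carry `'exclude': lambda f: f is None`, so None-valued fields are dropped from the serialized payload. A sketch under the same import-path assumption, with a placeholder key:

```python
from airbyte.models import shared

source = shared.SourceRecurly(
    api_key='<RECURLY_API_KEY>',  # placeholder credential
    source_type=shared.SourceRecurlyRecurly.RECURLY,  # was SourceRecurlyRecurlyEnum
    begin_time='2023-01-01T00:00:00Z',  # ISO8601 replication start
    end_time=None,  # omitted from the request body because it is None
)
```
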
diff --git a/src/airbyte/models/shared/source_redshift.py b/src/airbyte/models/shared/source_redshift.py
index bc6cd786..55ca637a 100755
--- a/src/airbyte/models/shared/source_redshift.py
+++ b/src/airbyte/models/shared/source_redshift.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class SourceRedshiftRedshiftEnum(str, Enum):
+class SourceRedshiftRedshift(str, Enum):
REDSHIFT = 'redshift'
@@ -24,7 +24,7 @@ class SourceRedshift:
r"""Password associated with the username."""
port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('port') }})
r"""Port of the database."""
- source_type: SourceRedshiftRedshiftEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceRedshiftRedshift = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }})
r"""Username to use to access the database."""
jdbc_url_params: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('jdbc_url_params'), 'exclude': lambda f: f is None }})
diff --git a/src/airbyte/models/shared/source_retently.py b/src/airbyte/models/shared/source_retently.py
index 43c71a56..1ce8219b 100755
--- a/src/airbyte/models/shared/source_retently.py
+++ b/src/airbyte/models/shared/source_retently.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Any, Optional
-class SourceRetentlyRetentlyEnum(str, Enum):
+class SourceRetentlyRetently(str, Enum):
RETENTLY = 'retently'
@@ -16,7 +16,7 @@ class SourceRetentlyRetentlyEnum(str, Enum):
class SourceRetently:
r"""The values required to configure the source."""
- source_type: SourceRetentlyRetentlyEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceRetentlyRetently = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
credentials: Optional[Any] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials'), 'exclude': lambda f: f is None }})
r"""Choose how to authenticate to Retently"""
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_rki_covid.py b/src/airbyte/models/shared/source_rki_covid.py
index 94b3f111..70706dc9 100755
--- a/src/airbyte/models/shared/source_rki_covid.py
+++ b/src/airbyte/models/shared/source_rki_covid.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceRkiCovidRkiCovidEnum(str, Enum):
+class SourceRkiCovidRkiCovid(str, Enum):
RKI_COVID = 'rki-covid'
@@ -15,7 +15,7 @@ class SourceRkiCovidRkiCovidEnum(str, Enum):
class SourceRkiCovid:
r"""The values required to configure the source."""
- source_type: SourceRkiCovidRkiCovidEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceRkiCovidRkiCovid = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date') }})
r"""UTC date in the format 2017-01-25. Any data before this date will not be replicated."""
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_rss.py b/src/airbyte/models/shared/source_rss.py
index 7fe7908e..5da174a2 100755
--- a/src/airbyte/models/shared/source_rss.py
+++ b/src/airbyte/models/shared/source_rss.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceRssRssEnum(str, Enum):
+class SourceRssRss(str, Enum):
RSS = 'rss'
@@ -15,7 +15,7 @@ class SourceRssRssEnum(str, Enum):
class SourceRss:
r"""The values required to configure the source."""
- source_type: SourceRssRssEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceRssRss = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
url: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('url') }})
r"""RSS Feed URL"""
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_s3.py b/src/airbyte/models/shared/source_s3.py
index 353cef10..69a3ec7d 100755
--- a/src/airbyte/models/shared/source_s3.py
+++ b/src/airbyte/models/shared/source_s3.py
@@ -10,10 +10,10 @@
from marshmallow import fields
from typing import Any, Optional
-class SourceS3FormatJsonlFiletypeEnum(str, Enum):
+class SourceS3FormatJsonlFiletype(str, Enum):
JSONL = 'jsonl'
-class SourceS3FormatJsonlUnexpectedFieldBehaviorEnum(str, Enum):
+class SourceS3FormatJsonlUnexpectedFieldBehavior(str, Enum):
r"""How JSON fields outside of explicit_schema (if given) are treated. Check PyArrow documentation for details"""
IGNORE = 'ignore'
INFER = 'infer'
@@ -27,13 +27,13 @@ class SourceS3FormatJsonl:
block_size: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('block_size'), 'exclude': lambda f: f is None }})
r"""The chunk size in bytes to process at a time in memory from each file. If your data is particularly wide and failing during schema detection, increasing this should solve it. Beware of raising this too high as you could hit OOM errors."""
- filetype: Optional[SourceS3FormatJsonlFiletypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filetype'), 'exclude': lambda f: f is None }})
+ filetype: Optional[SourceS3FormatJsonlFiletype] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filetype'), 'exclude': lambda f: f is None }})
newlines_in_values: Optional[bool] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('newlines_in_values'), 'exclude': lambda f: f is None }})
r"""Whether newline characters are allowed in JSON values. Turning this on may affect performance. Leave blank to default to False."""
- unexpected_field_behavior: Optional[SourceS3FormatJsonlUnexpectedFieldBehaviorEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('unexpected_field_behavior'), 'exclude': lambda f: f is None }})
+ unexpected_field_behavior: Optional[SourceS3FormatJsonlUnexpectedFieldBehavior] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('unexpected_field_behavior'), 'exclude': lambda f: f is None }})
r"""How JSON fields outside of explicit_schema (if given) are treated. Check PyArrow documentation for details"""
-class SourceS3FormatAvroFiletypeEnum(str, Enum):
+class SourceS3FormatAvroFiletype(str, Enum):
AVRO = 'avro'
@@ -42,9 +42,9 @@ class SourceS3FormatAvroFiletypeEnum(str, Enum):
class SourceS3FormatAvro:
r"""This connector utilises fastavro for Avro parsing."""
- filetype: Optional[SourceS3FormatAvroFiletypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filetype'), 'exclude': lambda f: f is None }})
+ filetype: Optional[SourceS3FormatAvroFiletype] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filetype'), 'exclude': lambda f: f is None }})
-class SourceS3FormatParquetFiletypeEnum(str, Enum):
+class SourceS3FormatParquetFiletype(str, Enum):
PARQUET = 'parquet'
@@ -59,9 +59,9 @@ class SourceS3FormatParquet:
r"""Perform read buffering when deserializing individual column chunks. By default every group column will be loaded fully to memory. This option can help avoid out-of-memory errors if your data is particularly wide."""
columns: Optional[list[str]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('columns'), 'exclude': lambda f: f is None }})
r"""If you only want to sync a subset of the columns from the file(s), add the columns you want here as a comma-delimited list. Leave it empty to sync all columns."""
- filetype: Optional[SourceS3FormatParquetFiletypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filetype'), 'exclude': lambda f: f is None }})
+ filetype: Optional[SourceS3FormatParquetFiletype] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filetype'), 'exclude': lambda f: f is None }})
-class SourceS3FormatCSVFiletypeEnum(str, Enum):
+class SourceS3FormatCSVFiletype(str, Enum):
CSV = 'csv'
@@ -84,7 +84,7 @@ class SourceS3FormatCSV:
r"""The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options."""
escape_char: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('escape_char'), 'exclude': lambda f: f is None }})
r"""The character used for escaping special characters. To disallow escaping, leave this field blank."""
- filetype: Optional[SourceS3FormatCSVFiletypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filetype'), 'exclude': lambda f: f is None }})
+ filetype: Optional[SourceS3FormatCSVFiletype] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filetype'), 'exclude': lambda f: f is None }})
infer_datatypes: Optional[bool] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('infer_datatypes'), 'exclude': lambda f: f is None }})
r"""Configures whether a schema for the source should be inferred from the current data or not. If set to false and a custom schema is set, then the manually enforced schema is used. If a schema is not manually set, and this is set to false, then all fields will be read as strings"""
newlines_in_values: Optional[bool] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('newlines_in_values'), 'exclude': lambda f: f is None }})
@@ -111,7 +111,7 @@ class SourceS3S3AmazonWebServices:
start_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso'), 'exclude': lambda f: f is None }})
r"""UTC date and time in the format 2017-01-25T00:00:00Z. Any file modified before this date will not be replicated."""
-class SourceS3S3Enum(str, Enum):
+class SourceS3S3(str, Enum):
S3 = 's3'
@@ -126,7 +126,7 @@ class SourceS3:
r"""A regular expression which tells the connector which files to replicate. All files which match this pattern will be replicated. Use | to separate multiple patterns. See this page to understand pattern syntax (GLOBSTAR and SPLIT flags are enabled). Use pattern ** to pick up all files."""
provider: SourceS3S3AmazonWebServices = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('provider') }})
r"""Use this to load files from S3 or S3-compatible services"""
- source_type: SourceS3S3Enum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceS3S3 = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
format: Optional[Any] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format'), 'exclude': lambda f: f is None }})
r"""The format of the files you'd like to replicate"""
schema: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('schema'), 'exclude': lambda f: f is None }})
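
The S3 source defines one filetype enum per format dataclass, and `SourceS3.format` is typed `Any`, so one of these format objects is passed through as-is. A sketch of the JSONL variant, whose four fields are all optional per the hunk above:

```python
from airbyte.models import shared

# None-valued fields are excluded on serialization, so only what you set is sent.
jsonl_format = shared.SourceS3FormatJsonl(
    block_size=10000,  # bytes processed per chunk during schema detection
    filetype=shared.SourceS3FormatJsonlFiletype.JSONL,  # was ...FiletypeEnum
    newlines_in_values=False,  # allowing newlines in values can cost performance
    unexpected_field_behavior=shared.SourceS3FormatJsonlUnexpectedFieldBehavior.INFER,
)
```
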
diff --git a/src/airbyte/models/shared/source_salesforce.py b/src/airbyte/models/shared/source_salesforce.py
index a6374534..4ba0d05c 100755
--- a/src/airbyte/models/shared/source_salesforce.py
+++ b/src/airbyte/models/shared/source_salesforce.py
@@ -10,13 +10,13 @@
from marshmallow import fields
from typing import Optional
-class SourceSalesforceAuthTypeEnum(str, Enum):
+class SourceSalesforceAuthType(str, Enum):
CLIENT = 'Client'
-class SourceSalesforceSalesforceEnum(str, Enum):
+class SourceSalesforceSalesforce(str, Enum):
SALESFORCE = 'salesforce'
-class SourceSalesforceStreamsCriteriaSearchCriteriaEnum(str, Enum):
+class SourceSalesforceStreamsCriteriaSearchCriteria(str, Enum):
STARTS_WITH = 'starts with'
ENDS_WITH = 'ends with'
CONTAINS = 'contains'
@@ -31,7 +31,7 @@ class SourceSalesforceStreamsCriteriaSearchCriteriaEnum(str, Enum):
@dataclasses.dataclass
class SourceSalesforceStreamsCriteria:
- criteria: SourceSalesforceStreamsCriteriaSearchCriteriaEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('criteria') }})
+ criteria: SourceSalesforceStreamsCriteriaSearchCriteria = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('criteria') }})
value: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('value') }})
@@ -46,8 +46,8 @@ class SourceSalesforce:
r"""Enter your Salesforce developer application's Client secret"""
refresh_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('refresh_token') }})
r"""Enter your application's Salesforce Refresh Token used for Airbyte to access your Salesforce account."""
- source_type: SourceSalesforceSalesforceEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
- auth_type: Optional[SourceSalesforceAuthTypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
+ source_type: SourceSalesforceSalesforce = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ auth_type: Optional[SourceSalesforceAuthType] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
is_sandbox: Optional[bool] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('is_sandbox'), 'exclude': lambda f: f is None }})
r"""Toggle if you're using a Salesforce Sandbox"""
start_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso'), 'exclude': lambda f: f is None }})
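
The `SourceSalesforceStreamsCriteria` pair above is the piece most likely to appear in user code, since it filters which streams sync. A small sketch of the renamed search-criteria enum in use:

```python
from airbyte.models import shared

# Restrict the sync to streams whose names start with 'Account'; `criteria`
# now takes the shorter SourceSalesforceStreamsCriteriaSearchCriteria type.
criteria = shared.SourceSalesforceStreamsCriteria(
    criteria=shared.SourceSalesforceStreamsCriteriaSearchCriteria.STARTS_WITH,
    value='Account',
)
```
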
diff --git a/src/airbyte/models/shared/source_salesforce_singer.py b/src/airbyte/models/shared/source_salesforce_singer.py
index 6cc92a7b..8c1f3786 100755
--- a/src/airbyte/models/shared/source_salesforce_singer.py
+++ b/src/airbyte/models/shared/source_salesforce_singer.py
@@ -7,12 +7,12 @@
from enum import Enum
from typing import Optional
-class SourceSalesforceSingerAPITypeEnum(str, Enum):
+class SourceSalesforceSingerAPIType(str, Enum):
r"""Unless you know that you are transferring a very small amount of data, prefer using the BULK API. This will help avoid using up all of your API call quota with Salesforce. Valid values are BULK or REST."""
BULK = 'BULK'
REST = 'REST'
-class SourceSalesforceSingerSalesforceSingerEnum(str, Enum):
+class SourceSalesforceSingerSalesforceSinger(str, Enum):
SALESFORCE_SINGER = 'salesforce-singer'
@@ -21,7 +21,7 @@ class SourceSalesforceSingerSalesforceSingerEnum(str, Enum):
class SourceSalesforceSinger:
r"""The values required to configure the source."""
- api_type: SourceSalesforceSingerAPITypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_type') }})
+ api_type: SourceSalesforceSingerAPIType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_type') }})
r"""Unless you know that you are transferring a very small amount of data, prefer using the BULK API. This will help avoid using up all of your API call quota with Salesforce. Valid values are BULK or REST."""
client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }})
r"""The Consumer Key that can be found when viewing your app in Salesforce"""
@@ -29,7 +29,7 @@ class SourceSalesforceSinger:
r"""The Consumer Secret that can be found when viewing your app in Salesforce"""
refresh_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('refresh_token') }})
r"""Salesforce Refresh Token used for Airbyte to access your Salesforce account. If you don't know what this is, follow this guide to retrieve it."""
- source_type: SourceSalesforceSingerSalesforceSingerEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceSalesforceSingerSalesforceSinger = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date') }})
r"""UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated."""
is_sandbox: Optional[bool] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('is_sandbox'), 'exclude': lambda f: f is None }})
diff --git a/src/airbyte/models/shared/source_salesloft.py b/src/airbyte/models/shared/source_salesloft.py
index a0118b61..4734aed5 100755
--- a/src/airbyte/models/shared/source_salesloft.py
+++ b/src/airbyte/models/shared/source_salesloft.py
@@ -10,7 +10,7 @@
from marshmallow import fields
from typing import Any
-class SourceSalesloftCredentialsAuthenticateViaAPIKeyAuthTypeEnum(str, Enum):
+class SourceSalesloftCredentialsAuthenticateViaAPIKeyAuthType(str, Enum):
API_KEY = 'api_key'
@@ -20,9 +20,9 @@ class SourceSalesloftCredentialsAuthenticateViaAPIKey:
api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }})
r"""API Key for making authenticated requests. More instruction on how to find this value in our docs"""
- auth_type: SourceSalesloftCredentialsAuthenticateViaAPIKeyAuthTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }})
+ auth_type: SourceSalesloftCredentialsAuthenticateViaAPIKeyAuthType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }})
-class SourceSalesloftCredentialsAuthenticateViaOAuthAuthTypeEnum(str, Enum):
+class SourceSalesloftCredentialsAuthenticateViaOAuthAuthType(str, Enum):
OAUTH2_0 = 'oauth2.0'
@@ -32,7 +32,7 @@ class SourceSalesloftCredentialsAuthenticateViaOAuth:
access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }})
r"""Access Token for making authenticated requests."""
- auth_type: SourceSalesloftCredentialsAuthenticateViaOAuthAuthTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }})
+ auth_type: SourceSalesloftCredentialsAuthenticateViaOAuthAuthType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }})
client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }})
r"""The Client ID of your Salesloft developer application."""
client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }})
@@ -42,7 +42,7 @@ class SourceSalesloftCredentialsAuthenticateViaOAuth:
token_expiry_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('token_expiry_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso') }})
r"""The date-time when the access token should be refreshed."""
-class SourceSalesloftSalesloftEnum(str, Enum):
+class SourceSalesloftSalesloft(str, Enum):
SALESLOFT = 'salesloft'
@@ -52,7 +52,7 @@ class SourceSalesloft:
r"""The values required to configure the source."""
credentials: Any = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials') }})
- source_type: SourceSalesloftSalesloftEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceSalesloftSalesloft = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso') }})
r"""The date from which you'd like to replicate data for Salesloft API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated."""
\ No newline at end of file
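
Because `SourceSalesloft.credentials` is typed `Any`, either auth variant can be passed; the API-key variant is the simpler of the two. A sketch with placeholder values:

```python
from datetime import datetime, timezone

from airbyte.models import shared

creds = shared.SourceSalesloftCredentialsAuthenticateViaAPIKey(
    api_key='<SALESLOFT_API_KEY>',  # placeholder credential
    auth_type=shared.SourceSalesloftCredentialsAuthenticateViaAPIKeyAuthType.API_KEY,
)

source = shared.SourceSalesloft(
    credentials=creds,  # `credentials` is typed Any, so either variant fits
    source_type=shared.SourceSalesloftSalesloft.SALESLOFT,
    start_date=datetime(2023, 1, 1, tzinfo=timezone.utc),
)
```
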
diff --git a/src/airbyte/models/shared/source_sap_fieldglass.py b/src/airbyte/models/shared/source_sap_fieldglass.py
index ef44397e..bb4eaa19 100755
--- a/src/airbyte/models/shared/source_sap_fieldglass.py
+++ b/src/airbyte/models/shared/source_sap_fieldglass.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceSapFieldglassSapFieldglassEnum(str, Enum):
+class SourceSapFieldglassSapFieldglass(str, Enum):
SAP_FIELDGLASS = 'sap-fieldglass'
@@ -17,5 +17,5 @@ class SourceSapFieldglass:
api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }})
r"""API Key"""
- source_type: SourceSapFieldglassSapFieldglassEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceSapFieldglassSapFieldglass = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_secoda.py b/src/airbyte/models/shared/source_secoda.py
index 2790c7b1..c96867cc 100755
--- a/src/airbyte/models/shared/source_secoda.py
+++ b/src/airbyte/models/shared/source_secoda.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceSecodaSecodaEnum(str, Enum):
+class SourceSecodaSecoda(str, Enum):
SECODA = 'secoda'
@@ -17,5 +17,5 @@ class SourceSecoda:
api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }})
r"""Your API Access Key. See here. The key is case sensitive."""
- source_type: SourceSecodaSecodaEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceSecodaSecoda = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_sendgrid.py b/src/airbyte/models/shared/source_sendgrid.py
index ed1d65a1..46e09df9 100755
--- a/src/airbyte/models/shared/source_sendgrid.py
+++ b/src/airbyte/models/shared/source_sendgrid.py
@@ -10,7 +10,7 @@
from marshmallow import fields
from typing import Optional
-class SourceSendgridSendgridEnum(str, Enum):
+class SourceSendgridSendgrid(str, Enum):
SENDGRID = 'sendgrid'
@@ -21,7 +21,7 @@ class SourceSendgrid:
apikey: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('apikey') }})
r"""API Key, use admin to generate this key."""
- source_type: SourceSendgridSendgridEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceSendgridSendgrid = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_time: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_time'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso'), 'exclude': lambda f: f is None }})
r"""Start time in ISO8601 format. Any data before this time point will not be replicated."""
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_sendinblue.py b/src/airbyte/models/shared/source_sendinblue.py
index d6402dc2..38b8faec 100755
--- a/src/airbyte/models/shared/source_sendinblue.py
+++ b/src/airbyte/models/shared/source_sendinblue.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceSendinblueSendinblueEnum(str, Enum):
+class SourceSendinblueSendinblue(str, Enum):
SENDINBLUE = 'sendinblue'
@@ -17,5 +17,5 @@ class SourceSendinblue:
api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }})
r"""Your API Key. See here."""
- source_type: SourceSendinblueSendinblueEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceSendinblueSendinblue = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_senseforce.py b/src/airbyte/models/shared/source_senseforce.py
index ad814354..365d4f24 100755
--- a/src/airbyte/models/shared/source_senseforce.py
+++ b/src/airbyte/models/shared/source_senseforce.py
@@ -9,7 +9,7 @@
from marshmallow import fields
from typing import Optional
-class SourceSenseforceSenseforceEnum(str, Enum):
+class SourceSenseforceSenseforce(str, Enum):
SENSEFORCE = 'senseforce'
@@ -24,7 +24,7 @@ class SourceSenseforce:
r"""Your Senseforce API backend URL. This is the URL shown during the Login screen. See here for more details. (Note: Most Senseforce backend APIs have the term 'galaxy' in their ULR)"""
dataset_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dataset_id') }})
r"""The ID of the dataset you want to synchronize. The ID can be found in the URL when opening the dataset. See here for more details. (Note: As the Senseforce API only allows to synchronize a specific dataset, each dataset you want to synchronize needs to be implemented as a separate airbyte source)."""
- source_type: SourceSenseforceSenseforceEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceSenseforceSenseforce = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: date = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.dateisoformat(False), 'decoder': utils.datefromisoformat, 'mm_field': fields.DateTime(format='iso') }})
r"""UTC date and time in the format 2017-01-25. Only data with \\"Timestamp\\" after this date will be replicated. Important note: This start date must be set to the first day of where your dataset provides data. If your dataset has data from 2020-10-10 10:21:10, set the start_date to 2020-10-10 or later"""
slice_range: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('slice_range'), 'exclude': lambda f: f is None }})
diff --git a/src/airbyte/models/shared/source_sentry.py b/src/airbyte/models/shared/source_sentry.py
index 8889e959..b4cc726e 100755
--- a/src/airbyte/models/shared/source_sentry.py
+++ b/src/airbyte/models/shared/source_sentry.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Any, Optional
-class SourceSentrySentryEnum(str, Enum):
+class SourceSentrySentry(str, Enum):
SENTRY = 'sentry'
@@ -22,7 +22,7 @@ class SourceSentry:
r"""The slug of the organization the groups belong to."""
project: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('project') }})
r"""The name (slug) of the Project you want to sync."""
- source_type: SourceSentrySentryEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceSentrySentry = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
discover_fields: Optional[list[Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('discover_fields'), 'exclude': lambda f: f is None }})
r"""Fields to retrieve when fetching discover events"""
hostname: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('hostname'), 'exclude': lambda f: f is None }})
diff --git a/src/airbyte/models/shared/source_sftp.py b/src/airbyte/models/shared/source_sftp.py
index 15e59ecc..2f98380b 100755
--- a/src/airbyte/models/shared/source_sftp.py
+++ b/src/airbyte/models/shared/source_sftp.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Any, Optional
-class SourceSftpCredentialsSSHKeyAuthenticationAuthMethodEnum(str, Enum):
+class SourceSftpCredentialsSSHKeyAuthenticationAuthMethod(str, Enum):
r"""Connect through ssh key"""
SSH_KEY_AUTH = 'SSH_KEY_AUTH'
@@ -17,12 +17,12 @@ class SourceSftpCredentialsSSHKeyAuthenticationAuthMethodEnum(str, Enum):
class SourceSftpCredentialsSSHKeyAuthentication:
r"""The server authentication method"""
- auth_method: SourceSftpCredentialsSSHKeyAuthenticationAuthMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_method') }})
+ auth_method: SourceSftpCredentialsSSHKeyAuthenticationAuthMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_method') }})
r"""Connect through ssh key"""
auth_ssh_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_ssh_key') }})
r"""OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )"""
-class SourceSftpCredentialsPasswordAuthenticationAuthMethodEnum(str, Enum):
+class SourceSftpCredentialsPasswordAuthenticationAuthMethod(str, Enum):
r"""Connect through password authentication"""
SSH_PASSWORD_AUTH = 'SSH_PASSWORD_AUTH'
@@ -32,12 +32,12 @@ class SourceSftpCredentialsPasswordAuthenticationAuthMethodEnum(str, Enum):
class SourceSftpCredentialsPasswordAuthentication:
r"""The server authentication method"""
- auth_method: SourceSftpCredentialsPasswordAuthenticationAuthMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_method') }})
+ auth_method: SourceSftpCredentialsPasswordAuthenticationAuthMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_method') }})
r"""Connect through password authentication"""
auth_user_password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_user_password') }})
r"""OS-level password for logging into the jump server host"""
-class SourceSftpSftpEnum(str, Enum):
+class SourceSftpSftp(str, Enum):
SFTP = 'sftp'
@@ -50,7 +50,7 @@ class SourceSftp:
r"""The server host address"""
port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('port') }})
r"""The server port"""
- source_type: SourceSftpSftpEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceSftpSftp = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
user: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('user') }})
r"""The server user"""
credentials: Optional[Any] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials'), 'exclude': lambda f: f is None }})
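
The SFTP source shows the pattern for discriminated auth options: each variant carries its own `auth_method` enum, and the optional `credentials` field is typed `Any` to accept either. A sketch of the password variant, assuming `host`, `port`, and `user` are the remaining required fields as the surrounding context suggests, with placeholder values:

```python
from airbyte.models import shared

creds = shared.SourceSftpCredentialsPasswordAuthentication(
    auth_method=shared.SourceSftpCredentialsPasswordAuthenticationAuthMethod.SSH_PASSWORD_AUTH,
    auth_user_password='<SFTP_PASSWORD>',  # placeholder credential
)

source = shared.SourceSftp(
    host='files.example.com',  # hypothetical server address
    port=22,
    source_type=shared.SourceSftpSftp.SFTP,  # was SourceSftpSftpEnum
    user='airbyte',
    credentials=creds,  # optional; typed Any to accept either auth variant
)
```
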
diff --git a/src/airbyte/models/shared/source_sftp_bulk.py b/src/airbyte/models/shared/source_sftp_bulk.py
index b2564fb1..da67a4a8 100755
--- a/src/airbyte/models/shared/source_sftp_bulk.py
+++ b/src/airbyte/models/shared/source_sftp_bulk.py
@@ -10,12 +10,12 @@
from marshmallow import fields
from typing import Optional
-class SourceSftpBulkFileTypeEnum(str, Enum):
+class SourceSftpBulkFileType(str, Enum):
r"""The file type you want to sync. Currently only 'csv' and 'json' files are supported."""
CSV = 'csv'
JSON = 'json'
-class SourceSftpBulkSftpBulkEnum(str, Enum):
+class SourceSftpBulkSftpBulk(str, Enum):
SFTP_BULK = 'sftp-bulk'
@@ -30,7 +30,7 @@ class SourceSftpBulk:
r"""The server host address"""
port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('port') }})
r"""The server port"""
- source_type: SourceSftpBulkSftpBulkEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceSftpBulkSftpBulk = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso') }})
r"""The date from which you'd like to replicate data for all incremental streams, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated."""
stream_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('stream_name') }})
@@ -41,7 +41,7 @@ class SourceSftpBulk:
r"""Sync only the most recent file for the configured folder path and file pattern"""
file_pattern: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('file_pattern'), 'exclude': lambda f: f is None }})
r"""The regular expression to specify files for sync in a chosen Folder Path"""
- file_type: Optional[SourceSftpBulkFileTypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('file_type'), 'exclude': lambda f: f is None }})
+ file_type: Optional[SourceSftpBulkFileType] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('file_type'), 'exclude': lambda f: f is None }})
r"""The file type you want to sync. Currently only 'csv' and 'json' files are supported."""
password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password'), 'exclude': lambda f: f is None }})
r"""OS-level password for logging into the jump server host"""
diff --git a/src/airbyte/models/shared/source_shopify.py b/src/airbyte/models/shared/source_shopify.py
index 39887ecb..74f1b148 100755
--- a/src/airbyte/models/shared/source_shopify.py
+++ b/src/airbyte/models/shared/source_shopify.py
@@ -9,7 +9,7 @@
from marshmallow import fields
from typing import Any, Optional
-class SourceShopifyCredentialsOAuth20AuthMethodEnum(str, Enum):
+class SourceShopifyCredentialsOAuth20AuthMethod(str, Enum):
OAUTH2_0 = 'oauth2.0'
@@ -18,7 +18,7 @@ class SourceShopifyCredentialsOAuth20AuthMethodEnum(str, Enum):
class SourceShopifyCredentialsOAuth20:
r"""OAuth2.0"""
- auth_method: SourceShopifyCredentialsOAuth20AuthMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_method') }})
+ auth_method: SourceShopifyCredentialsOAuth20AuthMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_method') }})
access_token: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token'), 'exclude': lambda f: f is None }})
r"""The Access Token for making authenticated requests."""
client_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id'), 'exclude': lambda f: f is None }})
@@ -26,7 +26,7 @@ class SourceShopifyCredentialsOAuth20:
client_secret: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret'), 'exclude': lambda f: f is None }})
r"""The Client Secret of the Shopify developer application."""
-class SourceShopifyCredentialsAPIPasswordAuthMethodEnum(str, Enum):
+class SourceShopifyCredentialsAPIPasswordAuthMethod(str, Enum):
API_PASSWORD = 'api_password'
@@ -37,9 +37,9 @@ class SourceShopifyCredentialsAPIPassword:
api_password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_password') }})
r"""The API Password for your private application in the `Shopify` store."""
- auth_method: SourceShopifyCredentialsAPIPasswordAuthMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_method') }})
+ auth_method: SourceShopifyCredentialsAPIPasswordAuthMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_method') }})
-class SourceShopifyShopifyEnum(str, Enum):
+class SourceShopifyShopify(str, Enum):
SHOPIFY = 'shopify'
@@ -50,7 +50,7 @@ class SourceShopify:
shop: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('shop') }})
r"""The name of your Shopify store found in the URL. For example, if your URL was https://NAME.myshopify.com, then the name would be 'NAME'."""
- source_type: SourceShopifyShopifyEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceShopifyShopify = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: date = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.dateisoformat(False), 'decoder': utils.datefromisoformat, 'mm_field': fields.DateTime(format='iso') }})
r"""The date you would like to replicate data from. Format: YYYY-MM-DD. Any data before this date will not be replicated."""
credentials: Optional[Any] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials'), 'exclude': lambda f: f is None }})
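
Shopify's `start_date` is a `date` (serialized as YYYY-MM-DD) rather than a `datetime`, which is worth noting next to the rename. A sketch of the API-password variant with placeholder values:

```python
from datetime import date

from airbyte.models import shared

creds = shared.SourceShopifyCredentialsAPIPassword(
    api_password='<SHOPIFY_API_PASSWORD>',  # placeholder credential
    auth_method=shared.SourceShopifyCredentialsAPIPasswordAuthMethod.API_PASSWORD,
)

source = shared.SourceShopify(
    shop='NAME',  # the store name from https://NAME.myshopify.com
    source_type=shared.SourceShopifyShopify.SHOPIFY,
    start_date=date(2023, 1, 1),  # serialized as YYYY-MM-DD
    credentials=creds,  # optional; typed Any to accept either auth variant
)
```
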
diff --git a/src/airbyte/models/shared/source_shortio.py b/src/airbyte/models/shared/source_shortio.py
index 5d345ec7..119ae9be 100755
--- a/src/airbyte/models/shared/source_shortio.py
+++ b/src/airbyte/models/shared/source_shortio.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceShortioShortioEnum(str, Enum):
+class SourceShortioShortio(str, Enum):
SHORTIO = 'shortio'
@@ -18,7 +18,7 @@ class SourceShortio:
domain_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('domain_id') }})
secret_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('secret_key') }})
r"""Short.io Secret Key"""
- source_type: SourceShortioShortioEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceShortioShortio = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date') }})
r"""UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated."""
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_slack.py b/src/airbyte/models/shared/source_slack.py
index 4bc9446c..3b0f8e14 100755
--- a/src/airbyte/models/shared/source_slack.py
+++ b/src/airbyte/models/shared/source_slack.py
@@ -10,7 +10,7 @@
from marshmallow import fields
from typing import Any, Optional
-class SourceSlackCredentialsAPITokenOptionTitleEnum(str, Enum):
+class SourceSlackCredentialsAPITokenOptionTitle(str, Enum):
API_TOKEN_CREDENTIALS = 'API Token Credentials'
@@ -21,9 +21,9 @@ class SourceSlackCredentialsAPIToken:
api_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_token') }})
r"""A Slack bot token. See the docs for instructions on how to generate it."""
- option_title: SourceSlackCredentialsAPITokenOptionTitleEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('option_title') }})
+ option_title: SourceSlackCredentialsAPITokenOptionTitle = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('option_title') }})
-class SourceSlackCredentialsSignInViaSlackOAuthOptionTitleEnum(str, Enum):
+class SourceSlackCredentialsSignInViaSlackOAuthOptionTitle(str, Enum):
DEFAULT_O_AUTH2_0_AUTHORIZATION = 'Default OAuth2.0 authorization'
@@ -38,9 +38,9 @@ class SourceSlackCredentialsSignInViaSlackOAuth:
r"""Slack client_id. See our docs if you need help finding this id."""
client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }})
r"""Slack client_secret. See our docs if you need help finding this secret."""
- option_title: SourceSlackCredentialsSignInViaSlackOAuthOptionTitleEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('option_title') }})
+ option_title: SourceSlackCredentialsSignInViaSlackOAuthOptionTitle = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('option_title') }})
-class SourceSlackSlackEnum(str, Enum):
+class SourceSlackSlack(str, Enum):
SLACK = 'slack'
@@ -52,8 +52,8 @@ class SourceSlack:
join_channels: bool = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('join_channels') }})
r"""Whether to join all channels or to sync data only from channels the bot is already in. If false, you'll need to manually add the bot to all the channels from which you'd like to sync messages."""
lookback_window: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('lookback_window') }})
- r"""How far into the past to look for messages in threads."""
- source_type: SourceSlackSlackEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ r"""How far into the past to look for messages in threads, default is 0 days"""
+ source_type: SourceSlackSlack = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso') }})
r"""UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated."""
channel_filter: Optional[list[str]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('channel_filter'), 'exclude': lambda f: f is None }})
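
Slack uses an `option_title` discriminator instead of `auth_type`, but the rename follows the same rule. A sketch of the API-token variant and the source's visible fields; the token object would be supplied through the source's optional `credentials` field, which this hunk does not show, so it is constructed separately here:

```python
from datetime import datetime, timezone

from airbyte.models import shared

creds = shared.SourceSlackCredentialsAPIToken(
    api_token='<SLACK_BOT_TOKEN>',  # placeholder credential
    option_title=shared.SourceSlackCredentialsAPITokenOptionTitle.API_TOKEN_CREDENTIALS,
)

source = shared.SourceSlack(
    join_channels=True,  # let the bot join channels instead of adding it manually
    lookback_window=0,  # days of thread history to re-scan; 0 is the stated default
    source_type=shared.SourceSlackSlack.SLACK,  # was SourceSlackSlackEnum
    start_date=datetime(2023, 1, 1, tzinfo=timezone.utc),
    channel_filter=['general'],  # optional allow-list of channel names
)
```
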
diff --git a/src/airbyte/models/shared/source_smaily.py b/src/airbyte/models/shared/source_smaily.py
index a0ccef46..a1cf4b73 100755
--- a/src/airbyte/models/shared/source_smaily.py
+++ b/src/airbyte/models/shared/source_smaily.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceSmailySmailyEnum(str, Enum):
+class SourceSmailySmaily(str, Enum):
SMAILY = 'smaily'
@@ -21,5 +21,5 @@ class SourceSmaily:
r"""API Subdomain. See https://smaily.com/help/api/general/create-api-user/"""
api_username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_username') }})
r"""API user username. See https://smaily.com/help/api/general/create-api-user/"""
- source_type: SourceSmailySmailyEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceSmailySmaily = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_smartengage.py b/src/airbyte/models/shared/source_smartengage.py
index a04404eb..d29a5c15 100755
--- a/src/airbyte/models/shared/source_smartengage.py
+++ b/src/airbyte/models/shared/source_smartengage.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceSmartengageSmartengageEnum(str, Enum):
+class SourceSmartengageSmartengage(str, Enum):
SMARTENGAGE = 'smartengage'
@@ -17,5 +17,5 @@ class SourceSmartengage:
api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }})
r"""API Key"""
- source_type: SourceSmartengageSmartengageEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceSmartengageSmartengage = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_smartsheets.py b/src/airbyte/models/shared/source_smartsheets.py
index f0be04ef..e574d81d 100755
--- a/src/airbyte/models/shared/source_smartsheets.py
+++ b/src/airbyte/models/shared/source_smartsheets.py
@@ -10,7 +10,7 @@
from marshmallow import fields
from typing import Any, Optional
-class SourceSmartsheetsCredentialsAPIAccessTokenAuthTypeEnum(str, Enum):
+class SourceSmartsheetsCredentialsAPIAccessTokenAuthType(str, Enum):
ACCESS_TOKEN = 'access_token'
@@ -20,9 +20,9 @@ class SourceSmartsheetsCredentialsAPIAccessToken:
access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }})
r"""The access token to use for accessing your data from Smartsheets. This access token must be generated by a user with at least read access to the data you'd like to replicate. Generate an access token in the Smartsheets main menu by clicking Account > Apps & Integrations > API Access. See the setup guide for information on how to obtain this token."""
- auth_type: Optional[SourceSmartsheetsCredentialsAPIAccessTokenAuthTypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
+ auth_type: Optional[SourceSmartsheetsCredentialsAPIAccessTokenAuthType] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
-class SourceSmartsheetsCredentialsOAuth20AuthTypeEnum(str, Enum):
+class SourceSmartsheetsCredentialsOAuth20AuthType(str, Enum):
OAUTH2_0 = 'oauth2.0'
@@ -40,9 +40,9 @@ class SourceSmartsheetsCredentialsOAuth20:
r"""The key to refresh the expired access_token."""
token_expiry_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('token_expiry_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso') }})
r"""The date-time when the access token should be refreshed."""
- auth_type: Optional[SourceSmartsheetsCredentialsOAuth20AuthTypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
+ auth_type: Optional[SourceSmartsheetsCredentialsOAuth20AuthType] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
-class SourceSmartsheetsSmartsheetsEnum(str, Enum):
+class SourceSmartsheetsSmartsheets(str, Enum):
SMARTSHEETS = 'smartsheets'
@@ -52,7 +52,7 @@ class SourceSmartsheets:
r"""The values required to configure the source."""
credentials: Any = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials') }})
- source_type: SourceSmartsheetsSmartsheetsEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceSmartsheetsSmartsheets = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
spreadsheet_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('spreadsheet_id') }})
r"""The spreadsheet ID. Find it by opening the spreadsheet then navigating to File > Properties"""
start_datetime: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_datetime'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso'), 'exclude': lambda f: f is None }})
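
Smartsheets makes `auth_type` optional on the token variant (it defaults to None and is excluded when unset), so it can be passed or omitted. A sketch with placeholder values:

```python
from airbyte.models import shared

creds = shared.SourceSmartsheetsCredentialsAPIAccessToken(
    access_token='<SMARTSHEETS_ACCESS_TOKEN>',  # placeholder credential
    # auth_type is optional; setting it makes the discriminator explicit
    auth_type=shared.SourceSmartsheetsCredentialsAPIAccessTokenAuthType.ACCESS_TOKEN,
)

source = shared.SourceSmartsheets(
    credentials=creds,  # typed Any: either the token or the OAuth2.0 variant
    source_type=shared.SourceSmartsheetsSmartsheets.SMARTSHEETS,
    spreadsheet_id='<SPREADSHEET_ID>',  # find it under File > Properties
)
```
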
diff --git a/src/airbyte/models/shared/source_snapchat_marketing.py b/src/airbyte/models/shared/source_snapchat_marketing.py
index a9214148..e2d2ca90 100755
--- a/src/airbyte/models/shared/source_snapchat_marketing.py
+++ b/src/airbyte/models/shared/source_snapchat_marketing.py
@@ -9,7 +9,7 @@
from marshmallow import fields
from typing import Optional
-class SourceSnapchatMarketingSnapchatMarketingEnum(str, Enum):
+class SourceSnapchatMarketingSnapchatMarketing(str, Enum):
SNAPCHAT_MARKETING = 'snapchat-marketing'
@@ -24,7 +24,7 @@ class SourceSnapchatMarketing:
r"""The Client Secret of your Snapchat developer application."""
refresh_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('refresh_token') }})
r"""Refresh Token to renew the expired Access Token."""
- source_type: SourceSnapchatMarketingSnapchatMarketingEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceSnapchatMarketingSnapchatMarketing = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
end_date: Optional[date] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('end_date'), 'encoder': utils.dateisoformat(True), 'decoder': utils.datefromisoformat, 'mm_field': fields.DateTime(format='iso'), 'exclude': lambda f: f is None }})
r"""Date in the format 2017-01-25. Any data after this date will not be replicated."""
start_date: Optional[date] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.dateisoformat(True), 'decoder': utils.datefromisoformat, 'mm_field': fields.DateTime(format='iso'), 'exclude': lambda f: f is None }})
diff --git a/src/airbyte/models/shared/source_snowflake.py b/src/airbyte/models/shared/source_snowflake.py
index 4df8fbd4..a93b2193 100755
--- a/src/airbyte/models/shared/source_snowflake.py
+++ b/src/airbyte/models/shared/source_snowflake.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Any, Optional
-class SourceSnowflakeCredentialsUsernameAndPasswordAuthTypeEnum(str, Enum):
+class SourceSnowflakeCredentialsUsernameAndPasswordAuthType(str, Enum):
USERNAME_PASSWORD = 'username/password'
@@ -15,13 +15,13 @@ class SourceSnowflakeCredentialsUsernameAndPasswordAuthTypeEnum(str, Enum):
@dataclasses.dataclass
class SourceSnowflakeCredentialsUsernameAndPassword:
- auth_type: SourceSnowflakeCredentialsUsernameAndPasswordAuthTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }})
+ auth_type: SourceSnowflakeCredentialsUsernameAndPasswordAuthType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }})
password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password') }})
r"""The password associated with the username."""
username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }})
r"""The username you created to allow Airbyte to access the database."""
-class SourceSnowflakeCredentialsOAuth20AuthTypeEnum(str, Enum):
+class SourceSnowflakeCredentialsOAuth20AuthType(str, Enum):
O_AUTH = 'OAuth'
@@ -29,7 +29,7 @@ class SourceSnowflakeCredentialsOAuth20AuthTypeEnum(str, Enum):
@dataclasses.dataclass
class SourceSnowflakeCredentialsOAuth20:
- auth_type: SourceSnowflakeCredentialsOAuth20AuthTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }})
+ auth_type: SourceSnowflakeCredentialsOAuth20AuthType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }})
client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }})
r"""The Client ID of your Snowflake developer application."""
client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }})
@@ -39,7 +39,7 @@ class SourceSnowflakeCredentialsOAuth20:
refresh_token: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('refresh_token'), 'exclude': lambda f: f is None }})
r"""Refresh Token for making authenticated requests."""
-class SourceSnowflakeSnowflakeEnum(str, Enum):
+class SourceSnowflakeSnowflake(str, Enum):
SNOWFLAKE = 'snowflake'
@@ -54,7 +54,7 @@ class SourceSnowflake:
r"""The host domain of the snowflake instance (must include the account, region, cloud environment, and end with snowflakecomputing.com)."""
role: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('role') }})
r"""The role you created for Airbyte to access Snowflake."""
- source_type: SourceSnowflakeSnowflakeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceSnowflakeSnowflake = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
warehouse: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('warehouse') }})
r"""The warehouse you created for Airbyte to access data."""
credentials: Optional[Any] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials'), 'exclude': lambda f: f is None }})
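
The same rename pattern applies to the Snowflake credential variants. A sketch of the username/password variant under its new name, with placeholder values; the parent `SourceSnowflake` accepts it through its `credentials` field.

```python
from airbyte.models import shared

# Username/password credentials under the renamed enum class.
credentials = shared.SourceSnowflakeCredentialsUsernameAndPassword(
    auth_type=shared.SourceSnowflakeCredentialsUsernameAndPasswordAuthType.USERNAME_PASSWORD,
    username='AIRBYTE_USER',
    password='<password>',
)
```
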
diff --git a/src/airbyte/models/shared/source_sonar_cloud.py b/src/airbyte/models/shared/source_sonar_cloud.py
index 0cc30e95..412f9344 100755
--- a/src/airbyte/models/shared/source_sonar_cloud.py
+++ b/src/airbyte/models/shared/source_sonar_cloud.py
@@ -9,7 +9,7 @@
from marshmallow import fields
from typing import Any, Optional
-class SourceSonarCloudSonarCloudEnum(str, Enum):
+class SourceSonarCloudSonarCloud(str, Enum):
SONAR_CLOUD = 'sonar-cloud'
@@ -22,7 +22,7 @@ class SourceSonarCloud:
r"""Comma-separated list of component keys."""
organization: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('organization') }})
r"""Organization key. See here."""
- source_type: SourceSonarCloudSonarCloudEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceSonarCloudSonarCloud = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
user_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('user_token') }})
r"""Your User Token. See here. The token is case sensitive."""
end_date: Optional[date] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('end_date'), 'encoder': utils.dateisoformat(True), 'decoder': utils.datefromisoformat, 'mm_field': fields.DateTime(format='iso'), 'exclude': lambda f: f is None }})
diff --git a/src/airbyte/models/shared/source_spacex_api.py b/src/airbyte/models/shared/source_spacex_api.py
index 358f5a35..c1321f43 100755
--- a/src/airbyte/models/shared/source_spacex_api.py
+++ b/src/airbyte/models/shared/source_spacex_api.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class SourceSpacexAPISpacexAPIEnum(str, Enum):
+class SourceSpacexAPISpacexAPI(str, Enum):
SPACEX_API = 'spacex-api'
@@ -16,7 +16,7 @@ class SourceSpacexAPISpacexAPIEnum(str, Enum):
class SourceSpacexAPI:
r"""The values required to configure the source."""
- source_type: SourceSpacexAPISpacexAPIEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceSpacexAPISpacexAPI = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('id'), 'exclude': lambda f: f is None }})
options: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('options'), 'exclude': lambda f: f is None }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_square.py b/src/airbyte/models/shared/source_square.py
index 4677eee1..1ecb913c 100755
--- a/src/airbyte/models/shared/source_square.py
+++ b/src/airbyte/models/shared/source_square.py
@@ -9,7 +9,7 @@
from marshmallow import fields
from typing import Any, Optional
-class SourceSquareCredentialsAPIKeyAuthTypeEnum(str, Enum):
+class SourceSquareCredentialsAPIKeyAuthType(str, Enum):
API_KEY = 'API Key'
@@ -20,9 +20,9 @@ class SourceSquareCredentialsAPIKey:
api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }})
r"""The API key for a Square application"""
- auth_type: SourceSquareCredentialsAPIKeyAuthTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }})
+ auth_type: SourceSquareCredentialsAPIKeyAuthType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }})
-class SourceSquareCredentialsOauthAuthenticationAuthTypeEnum(str, Enum):
+class SourceSquareCredentialsOauthAuthenticationAuthType(str, Enum):
O_AUTH = 'OAuth'
@@ -31,7 +31,7 @@ class SourceSquareCredentialsOauthAuthenticationAuthTypeEnum(str, Enum):
class SourceSquareCredentialsOauthAuthentication:
r"""Choose how to authenticate to Square."""
- auth_type: SourceSquareCredentialsOauthAuthenticationAuthTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }})
+ auth_type: SourceSquareCredentialsOauthAuthenticationAuthType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }})
client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }})
r"""The Square-issued ID of your application"""
client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }})
@@ -39,7 +39,7 @@ class SourceSquareCredentialsOauthAuthentication:
refresh_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('refresh_token') }})
r"""A refresh token generated using the above client ID and secret"""
-class SourceSquareSquareEnum(str, Enum):
+class SourceSquareSquare(str, Enum):
SQUARE = 'square'
@@ -50,7 +50,7 @@ class SourceSquare:
is_sandbox: bool = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('is_sandbox') }})
r"""Determines whether to use the sandbox or production environment."""
- source_type: SourceSquareSquareEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceSquareSquare = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
credentials: Optional[Any] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials'), 'exclude': lambda f: f is None }})
r"""Choose how to authenticate to Square."""
include_deleted_objects: Optional[bool] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('include_deleted_objects'), 'exclude': lambda f: f is None }})
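
Illustrative only: the Square source with the API-key credentials variant, assuming no required fields beyond those visible in this hunk.

```python
from airbyte.models import shared

credentials = shared.SourceSquareCredentialsAPIKey(
    api_key='<square-api-key>',
    auth_type=shared.SourceSquareCredentialsAPIKeyAuthType.API_KEY,
)

source = shared.SourceSquare(
    is_sandbox=True,  # sync against the sandbox environment
    source_type=shared.SourceSquareSquare.SQUARE,
    credentials=credentials,
    include_deleted_objects=False,
)
```
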
diff --git a/src/airbyte/models/shared/source_strava.py b/src/airbyte/models/shared/source_strava.py
index d83da26b..c0b6f9a5 100755
--- a/src/airbyte/models/shared/source_strava.py
+++ b/src/airbyte/models/shared/source_strava.py
@@ -10,10 +10,10 @@
from marshmallow import fields
from typing import Optional
-class SourceStravaAuthTypeEnum(str, Enum):
+class SourceStravaAuthType(str, Enum):
CLIENT = 'Client'
-class SourceStravaStravaEnum(str, Enum):
+class SourceStravaStrava(str, Enum):
STRAVA = 'strava'
@@ -30,8 +30,8 @@ class SourceStrava:
r"""The Client Secret of your Strava developer application."""
refresh_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('refresh_token') }})
r"""The Refresh Token with the activity: read_all permissions."""
- source_type: SourceStravaStravaEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceStravaStrava = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso') }})
r"""UTC date and time. Any data before this date will not be replicated."""
- auth_type: Optional[SourceStravaAuthTypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
+ auth_type: Optional[SourceStravaAuthType] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_stripe.py b/src/airbyte/models/shared/source_stripe.py
index c14dde40..a5f45a92 100755
--- a/src/airbyte/models/shared/source_stripe.py
+++ b/src/airbyte/models/shared/source_stripe.py
@@ -10,7 +10,7 @@
from marshmallow import fields
from typing import Optional
-class SourceStripeStripeEnum(str, Enum):
+class SourceStripeStripe(str, Enum):
STRIPE = 'stripe'
@@ -23,7 +23,7 @@ class SourceStripe:
r"""Your Stripe account ID (starts with 'acct_', find yours here)."""
client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }})
r"""Stripe API key (usually starts with 'sk_live_'; find yours here)."""
- source_type: SourceStripeStripeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceStripeStripe = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso') }})
r"""UTC date and time in the format 2017-01-25T00:00:00Z. Only data generated after this date will be replicated."""
lookback_window_days: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('lookback_window_days'), 'exclude': lambda f: f is None }})
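
A hedged sketch of the Stripe source after the rename. `start_date` is a `datetime`; its field metadata encodes it as ISO 8601 on serialization. All values are placeholders.

```python
import dateutil.parser

from airbyte.models import shared

source = shared.SourceStripe(
    account_id='acct_<placeholder>',
    client_secret='sk_live_<placeholder>',
    source_type=shared.SourceStripeStripe.STRIPE,
    # The metadata encoder serializes this datetime as an ISO 8601 string.
    start_date=dateutil.parser.isoparse('2017-01-25T00:00:00Z'),
    lookback_window_days=7,
)
```
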
diff --git a/src/airbyte/models/shared/source_survey_sparrow.py b/src/airbyte/models/shared/source_survey_sparrow.py
index 8d3b6e4a..cc69e8e1 100755
--- a/src/airbyte/models/shared/source_survey_sparrow.py
+++ b/src/airbyte/models/shared/source_survey_sparrow.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Any, Optional
-class SourceSurveySparrowRegionGlobalAccountURLBaseEnum(str, Enum):
+class SourceSurveySparrowRegionGlobalAccountURLBase(str, Enum):
HTTPS_API_SURVEYSPARROW_COM_V3 = 'https://api.surveysparrow.com/v3'
@@ -16,9 +16,9 @@ class SourceSurveySparrowRegionGlobalAccountURLBaseEnum(str, Enum):
class SourceSurveySparrowRegionGlobalAccount:
r"""Is your account location is EU based? If yes, the base url to retrieve data will be different."""
- url_base: Optional[SourceSurveySparrowRegionGlobalAccountURLBaseEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('url_base'), 'exclude': lambda f: f is None }})
+ url_base: Optional[SourceSurveySparrowRegionGlobalAccountURLBase] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('url_base'), 'exclude': lambda f: f is None }})
-class SourceSurveySparrowRegionEUBasedAccountURLBaseEnum(str, Enum):
+class SourceSurveySparrowRegionEUBasedAccountURLBase(str, Enum):
HTTPS_EU_API_SURVEYSPARROW_COM_V3 = 'https://eu-api.surveysparrow.com/v3'
@@ -27,9 +27,9 @@ class SourceSurveySparrowRegionEUBasedAccountURLBaseEnum(str, Enum):
class SourceSurveySparrowRegionEUBasedAccount:
r"""Is your account location is EU based? If yes, the base url to retrieve data will be different."""
- url_base: Optional[SourceSurveySparrowRegionEUBasedAccountURLBaseEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('url_base'), 'exclude': lambda f: f is None }})
+ url_base: Optional[SourceSurveySparrowRegionEUBasedAccountURLBase] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('url_base'), 'exclude': lambda f: f is None }})
-class SourceSurveySparrowSurveySparrowEnum(str, Enum):
+class SourceSurveySparrowSurveySparrow(str, Enum):
SURVEY_SPARROW = 'survey-sparrow'
@@ -40,7 +40,7 @@ class SourceSurveySparrow:
access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }})
r"""Your access token. See here. The key is case sensitive."""
- source_type: SourceSurveySparrowSurveySparrowEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceSurveySparrowSurveySparrow = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
region: Optional[Any] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('region'), 'exclude': lambda f: f is None }})
r"""Is your account location is EU based? If yes, the base url to retrieve data will be different."""
survey_id: Optional[list[Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('survey_id'), 'exclude': lambda f: f is None }})
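
Because `region` is typed `Any`, either region variant can be passed. A sketch using the EU variant, with a placeholder token.

```python
from airbyte.models import shared

# EU-based accounts use the eu-api base URL variant.
region = shared.SourceSurveySparrowRegionEUBasedAccount(
    url_base=shared.SourceSurveySparrowRegionEUBasedAccountURLBase.HTTPS_EU_API_SURVEYSPARROW_COM_V3,
)

source = shared.SourceSurveySparrow(
    access_token='<survey-sparrow-token>',
    source_type=shared.SourceSurveySparrowSurveySparrow.SURVEY_SPARROW,
    region=region,
)
```
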
diff --git a/src/airbyte/models/shared/source_surveymonkey.py b/src/airbyte/models/shared/source_surveymonkey.py
index 9147addc..f4501d10 100755
--- a/src/airbyte/models/shared/source_surveymonkey.py
+++ b/src/airbyte/models/shared/source_surveymonkey.py
@@ -10,7 +10,7 @@
from marshmallow import fields
from typing import Optional
-class SourceSurveymonkeySurveyMonkeyAuthorizationMethodAuthMethodEnum(str, Enum):
+class SourceSurveymonkeySurveyMonkeyAuthorizationMethodAuthMethod(str, Enum):
OAUTH2_0 = 'oauth2.0'
@@ -21,19 +21,19 @@ class SourceSurveymonkeySurveyMonkeyAuthorizationMethod:
access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }})
r"""Access Token for making authenticated requests. See the docs for information on how to generate this key."""
- auth_method: SourceSurveymonkeySurveyMonkeyAuthorizationMethodAuthMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_method') }})
+ auth_method: SourceSurveymonkeySurveyMonkeyAuthorizationMethodAuthMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_method') }})
client_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id'), 'exclude': lambda f: f is None }})
r"""The Client ID of the SurveyMonkey developer application."""
client_secret: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret'), 'exclude': lambda f: f is None }})
r"""The Client Secret of the SurveyMonkey developer application."""
-class SourceSurveymonkeyOriginDatacenterOfTheSurveyMonkeyAccountEnum(str, Enum):
+class SourceSurveymonkeyOriginDatacenterOfTheSurveyMonkeyAccount(str, Enum):
r"""Depending on the originating datacenter of the SurveyMonkey account, the API access URL may be different."""
USA = 'USA'
EUROPE = 'Europe'
CANADA = 'Canada'
-class SourceSurveymonkeySurveymonkeyEnum(str, Enum):
+class SourceSurveymonkeySurveymonkey(str, Enum):
SURVEYMONKEY = 'surveymonkey'
@@ -42,12 +42,12 @@ class SourceSurveymonkeySurveymonkeyEnum(str, Enum):
class SourceSurveymonkey:
r"""The values required to configure the source."""
- source_type: SourceSurveymonkeySurveymonkeyEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceSurveymonkeySurveymonkey = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso') }})
r"""UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated."""
credentials: Optional[SourceSurveymonkeySurveyMonkeyAuthorizationMethod] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials'), 'exclude': lambda f: f is None }})
r"""The authorization method to use to retrieve data from SurveyMonkey"""
- origin: Optional[SourceSurveymonkeyOriginDatacenterOfTheSurveyMonkeyAccountEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('origin'), 'exclude': lambda f: f is None }})
+ origin: Optional[SourceSurveymonkeyOriginDatacenterOfTheSurveyMonkeyAccount] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('origin'), 'exclude': lambda f: f is None }})
r"""Depending on the originating datacenter of the SurveyMonkey account, the API access URL may be different."""
survey_ids: Optional[list[str]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('survey_ids'), 'exclude': lambda f: f is None }})
r"""IDs of the surveys from which you'd like to replicate data. If left empty, data from all boards to which you have access will be replicated."""
diff --git a/src/airbyte/models/shared/source_tempo.py b/src/airbyte/models/shared/source_tempo.py
index 95a52650..76d62255 100755
--- a/src/airbyte/models/shared/source_tempo.py
+++ b/src/airbyte/models/shared/source_tempo.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceTempoTempoEnum(str, Enum):
+class SourceTempoTempo(str, Enum):
TEMPO = 'tempo'
@@ -17,5 +17,5 @@ class SourceTempo:
api_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_token') }})
r"""Tempo API Token. Go to Tempo>Settings, scroll down to Data Access and select API integration."""
- source_type: SourceTempoTempoEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceTempoTempo = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_the_guardian_api.py b/src/airbyte/models/shared/source_the_guardian_api.py
index 611e7907..2f1c2170 100755
--- a/src/airbyte/models/shared/source_the_guardian_api.py
+++ b/src/airbyte/models/shared/source_the_guardian_api.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class SourceTheGuardianAPITheGuardianAPIEnum(str, Enum):
+class SourceTheGuardianAPITheGuardianAPI(str, Enum):
THE_GUARDIAN_API = 'the-guardian-api'
@@ -18,7 +18,7 @@ class SourceTheGuardianAPI:
api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }})
r"""Your API Key. See here. The key is case sensitive."""
- source_type: SourceTheGuardianAPITheGuardianAPIEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceTheGuardianAPITheGuardianAPI = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date') }})
r"""Use this to set the minimum date (YYYY-MM-DD) of the results. Results older than the start_date will not be shown."""
end_date: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('end_date'), 'exclude': lambda f: f is None }})
diff --git a/src/airbyte/models/shared/source_tiktok_marketing.py b/src/airbyte/models/shared/source_tiktok_marketing.py
index 24692d0b..b61a8182 100755
--- a/src/airbyte/models/shared/source_tiktok_marketing.py
+++ b/src/airbyte/models/shared/source_tiktok_marketing.py
@@ -9,7 +9,7 @@
from marshmallow import fields
from typing import Any, Optional
-class SourceTiktokMarketingCredentialsSandboxAccessTokenAuthTypeEnum(str, Enum):
+class SourceTiktokMarketingCredentialsSandboxAccessTokenAuthType(str, Enum):
SANDBOX_ACCESS_TOKEN = 'sandbox_access_token'
@@ -22,9 +22,9 @@ class SourceTiktokMarketingCredentialsSandboxAccessToken:
r"""The long-term authorized access token."""
advertiser_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('advertiser_id') }})
r"""The Advertiser ID which generated for the developer's Sandbox application."""
- auth_type: Optional[SourceTiktokMarketingCredentialsSandboxAccessTokenAuthTypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
+ auth_type: Optional[SourceTiktokMarketingCredentialsSandboxAccessTokenAuthType] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
-class SourceTiktokMarketingCredentialsOAuth20AuthTypeEnum(str, Enum):
+class SourceTiktokMarketingCredentialsOAuth20AuthType(str, Enum):
OAUTH2_0 = 'oauth2.0'
@@ -41,9 +41,9 @@ class SourceTiktokMarketingCredentialsOAuth20:
r"""The Developer Application Secret."""
advertiser_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('advertiser_id'), 'exclude': lambda f: f is None }})
r"""The Advertiser ID to filter reports and streams. Let this empty to retrieve all."""
- auth_type: Optional[SourceTiktokMarketingCredentialsOAuth20AuthTypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
+ auth_type: Optional[SourceTiktokMarketingCredentialsOAuth20AuthType] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
-class SourceTiktokMarketingTiktokMarketingEnum(str, Enum):
+class SourceTiktokMarketingTiktokMarketing(str, Enum):
TIKTOK_MARKETING = 'tiktok-marketing'
@@ -52,7 +52,7 @@ class SourceTiktokMarketingTiktokMarketingEnum(str, Enum):
class SourceTiktokMarketing:
r"""The values required to configure the source."""
- source_type: SourceTiktokMarketingTiktokMarketingEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceTiktokMarketingTiktokMarketing = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
attribution_window: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('attribution_window'), 'exclude': lambda f: f is None }})
r"""The attribution window in days."""
credentials: Optional[Any] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials'), 'exclude': lambda f: f is None }})
diff --git a/src/airbyte/models/shared/source_todoist.py b/src/airbyte/models/shared/source_todoist.py
index 84caee4f..d9acb4d8 100755
--- a/src/airbyte/models/shared/source_todoist.py
+++ b/src/airbyte/models/shared/source_todoist.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceTodoistTodoistEnum(str, Enum):
+class SourceTodoistTodoist(str, Enum):
TODOIST = 'todoist'
@@ -15,7 +15,7 @@ class SourceTodoistTodoistEnum(str, Enum):
class SourceTodoist:
r"""The values required to configure the source."""
- source_type: SourceTodoistTodoistEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceTodoistTodoist = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('token') }})
r"""Your API Token. See here. The token is case sensitive."""
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_trello.py b/src/airbyte/models/shared/source_trello.py
index c02a2836..878032a2 100755
--- a/src/airbyte/models/shared/source_trello.py
+++ b/src/airbyte/models/shared/source_trello.py
@@ -10,7 +10,7 @@
from marshmallow import fields
from typing import Optional
-class SourceTrelloTrelloEnum(str, Enum):
+class SourceTrelloTrello(str, Enum):
TRELLO = 'trello'
@@ -21,7 +21,7 @@ class SourceTrello:
key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('key') }})
r"""Trello API key. See the docs for instructions on how to generate it."""
- source_type: SourceTrelloTrelloEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceTrelloTrello = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso') }})
r"""UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated."""
token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('token') }})
diff --git a/src/airbyte/models/shared/source_trustpilot.py b/src/airbyte/models/shared/source_trustpilot.py
index 5b96f42c..a2054b7e 100755
--- a/src/airbyte/models/shared/source_trustpilot.py
+++ b/src/airbyte/models/shared/source_trustpilot.py
@@ -10,7 +10,7 @@
from marshmallow import fields
from typing import Any, Optional
-class SourceTrustpilotCredentialsAPIKeyAuthTypeEnum(str, Enum):
+class SourceTrustpilotCredentialsAPIKeyAuthType(str, Enum):
APIKEY = 'apikey'
@@ -21,9 +21,9 @@ class SourceTrustpilotCredentialsAPIKey:
client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }})
r"""The API key of the Trustpilot API application."""
- auth_type: Optional[SourceTrustpilotCredentialsAPIKeyAuthTypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
+ auth_type: Optional[SourceTrustpilotCredentialsAPIKeyAuthType] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
-class SourceTrustpilotCredentialsOAuth20AuthTypeEnum(str, Enum):
+class SourceTrustpilotCredentialsOAuth20AuthType(str, Enum):
OAUTH2_0 = 'oauth2.0'
@@ -41,9 +41,9 @@ class SourceTrustpilotCredentialsOAuth20:
r"""The key to refresh the expired access_token."""
token_expiry_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('token_expiry_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso') }})
r"""The date-time when the access token should be refreshed."""
- auth_type: Optional[SourceTrustpilotCredentialsOAuth20AuthTypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
+ auth_type: Optional[SourceTrustpilotCredentialsOAuth20AuthType] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }})
-class SourceTrustpilotTrustpilotEnum(str, Enum):
+class SourceTrustpilotTrustpilot(str, Enum):
TRUSTPILOT = 'trustpilot'
@@ -55,7 +55,7 @@ class SourceTrustpilot:
business_units: list[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('business_units') }})
r"""The names of business units which shall be synchronized. Some streams e.g. configured_business_units or private_reviews use this configuration."""
credentials: Any = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials') }})
- source_type: SourceTrustpilotTrustpilotEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceTrustpilotTrustpilot = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date') }})
r"""For streams with sync. method incremental the start date time to be used"""
\ No newline at end of file
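
Trustpilot combines a list field, an `Any`-typed credentials union, and a plain-string `start_date`. A sketch with the API-key variant; values are placeholders.

```python
from airbyte.models import shared

credentials = shared.SourceTrustpilotCredentialsAPIKey(
    client_id='<trustpilot-api-key>',
    auth_type=shared.SourceTrustpilotCredentialsAPIKeyAuthType.APIKEY,
)

source = shared.SourceTrustpilot(
    business_units=['mycompany.com'],
    credentials=credentials,
    source_type=shared.SourceTrustpilotTrustpilot.TRUSTPILOT,
    start_date='2023-01-01T00:00:00Z',  # plain string, per the model
)
```
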
diff --git a/src/airbyte/models/shared/source_tvmaze_schedule.py b/src/airbyte/models/shared/source_tvmaze_schedule.py
index 5aa26782..2069d613 100755
--- a/src/airbyte/models/shared/source_tvmaze_schedule.py
+++ b/src/airbyte/models/shared/source_tvmaze_schedule.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class SourceTvmazeScheduleTvmazeScheduleEnum(str, Enum):
+class SourceTvmazeScheduleTvmazeSchedule(str, Enum):
TVMAZE_SCHEDULE = 'tvmaze-schedule'
@@ -18,7 +18,7 @@ class SourceTvmazeSchedule:
domestic_schedule_country_code: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('domestic_schedule_country_code') }})
r"""Country code for domestic TV schedule retrieval."""
- source_type: SourceTvmazeScheduleTvmazeScheduleEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceTvmazeScheduleTvmazeSchedule = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date') }})
r"""Start date for TV schedule retrieval. May be in the future."""
end_date: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('end_date'), 'exclude': lambda f: f is None }})
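
For unauthenticated sources, the configuration is just a few plain strings. A sketch for the TVMaze schedule source, assuming no required fields outside this hunk.

```python
from airbyte.models import shared

source = shared.SourceTvmazeSchedule(
    domestic_schedule_country_code='US',
    source_type=shared.SourceTvmazeScheduleTvmazeSchedule.TVMAZE_SCHEDULE,
    start_date='2023-01-01',
    end_date='2023-12-31',
)
```
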
diff --git a/src/airbyte/models/shared/source_twilio.py b/src/airbyte/models/shared/source_twilio.py
index a9a91e37..8fb19fbc 100755
--- a/src/airbyte/models/shared/source_twilio.py
+++ b/src/airbyte/models/shared/source_twilio.py
@@ -10,7 +10,7 @@
from marshmallow import fields
from typing import Optional
-class SourceTwilioTwilioEnum(str, Enum):
+class SourceTwilioTwilio(str, Enum):
TWILIO = 'twilio'
@@ -23,7 +23,7 @@ class SourceTwilio:
r"""Twilio account SID"""
auth_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_token') }})
r"""Twilio Auth Token."""
- source_type: SourceTwilioTwilioEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceTwilioTwilio = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso') }})
r"""UTC date and time in the format 2020-10-01T00:00:00Z. Any data before this date will not be replicated."""
lookback_window: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('lookback_window'), 'exclude': lambda f: f is None }})
diff --git a/src/airbyte/models/shared/source_twilio_taskrouter.py b/src/airbyte/models/shared/source_twilio_taskrouter.py
index a43e1803..a7722d61 100755
--- a/src/airbyte/models/shared/source_twilio_taskrouter.py
+++ b/src/airbyte/models/shared/source_twilio_taskrouter.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceTwilioTaskrouterTwilioTaskrouterEnum(str, Enum):
+class SourceTwilioTaskrouterTwilioTaskrouter(str, Enum):
TWILIO_TASKROUTER = 'twilio-taskrouter'
@@ -19,5 +19,5 @@ class SourceTwilioTaskrouter:
r"""Twilio Account ID"""
auth_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_token') }})
r"""Twilio Auth Token"""
- source_type: SourceTwilioTaskrouterTwilioTaskrouterEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceTwilioTaskrouterTwilioTaskrouter = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_twitter.py b/src/airbyte/models/shared/source_twitter.py
index b443423d..b04dbe6b 100755
--- a/src/airbyte/models/shared/source_twitter.py
+++ b/src/airbyte/models/shared/source_twitter.py
@@ -10,7 +10,7 @@
from marshmallow import fields
from typing import Optional
-class SourceTwitterTwitterEnum(str, Enum):
+class SourceTwitterTwitter(str, Enum):
TWITTER = 'twitter'
@@ -23,7 +23,7 @@ class SourceTwitter:
r"""App only Bearer Token. See the docs for more information on how to obtain this token."""
query: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('query') }})
r"""Query for matching Tweets. You can learn how to build this query by reading build a query guide ."""
- source_type: SourceTwitterTwitterEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceTwitterTwitter = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
end_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('end_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso'), 'exclude': lambda f: f is None }})
r"""The end date for retrieving tweets must be a minimum of 10 seconds prior to the request time."""
start_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso'), 'exclude': lambda f: f is None }})
diff --git a/src/airbyte/models/shared/source_typeform.py b/src/airbyte/models/shared/source_typeform.py
index 13cb8b39..75807e25 100755
--- a/src/airbyte/models/shared/source_typeform.py
+++ b/src/airbyte/models/shared/source_typeform.py
@@ -10,7 +10,7 @@
from marshmallow import fields
from typing import Optional
-class SourceTypeformTypeformEnum(str, Enum):
+class SourceTypeformTypeform(str, Enum):
TYPEFORM = 'typeform'
@@ -19,7 +19,7 @@ class SourceTypeformTypeformEnum(str, Enum):
class SourceTypeform:
r"""The values required to configure the source."""
- source_type: SourceTypeformTypeformEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceTypeformTypeform = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso') }})
r"""UTC date and time in the format: YYYY-MM-DDTHH:mm:ss[Z]. Any data before this date will not be replicated."""
token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('token') }})
diff --git a/src/airbyte/models/shared/source_us_census.py b/src/airbyte/models/shared/source_us_census.py
index 0c1b585a..318c0c40 100755
--- a/src/airbyte/models/shared/source_us_census.py
+++ b/src/airbyte/models/shared/source_us_census.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class SourceUsCensusUsCensusEnum(str, Enum):
+class SourceUsCensusUsCensus(str, Enum):
US_CENSUS = 'us-census'
@@ -20,7 +20,7 @@ class SourceUsCensus:
r"""Your API Key. Get your key here."""
query_path: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('query_path') }})
r"""The path portion of the GET request"""
- source_type: SourceUsCensusUsCensusEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceUsCensusUsCensus = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
query_params: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('query_params'), 'exclude': lambda f: f is None }})
r"""The query parameters portion of the GET request, without the api key"""
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_vantage.py b/src/airbyte/models/shared/source_vantage.py
index 96b11d5c..26df0987 100755
--- a/src/airbyte/models/shared/source_vantage.py
+++ b/src/airbyte/models/shared/source_vantage.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceVantageVantageEnum(str, Enum):
+class SourceVantageVantage(str, Enum):
VANTAGE = 'vantage'
@@ -17,5 +17,5 @@ class SourceVantage:
access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }})
r"""Your API Access token. See here."""
- source_type: SourceVantageVantageEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceVantageVantage = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_webflow.py b/src/airbyte/models/shared/source_webflow.py
index d7881a7a..a0cfcfe7 100755
--- a/src/airbyte/models/shared/source_webflow.py
+++ b/src/airbyte/models/shared/source_webflow.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceWebflowWebflowEnum(str, Enum):
+class SourceWebflowWebflow(str, Enum):
WEBFLOW = 'webflow'
@@ -19,5 +19,5 @@ class SourceWebflow:
r"""The API token for authenticating to Webflow. See https://university.webflow.com/lesson/intro-to-the-webflow-api"""
site_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('site_id') }})
r"""The id of the Webflow site you are requesting data from. See https://developers.webflow.com/#sites"""
- source_type: SourceWebflowWebflowEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceWebflowWebflow = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_whisky_hunter.py b/src/airbyte/models/shared/source_whisky_hunter.py
index dcea46a8..ebbbf98c 100755
--- a/src/airbyte/models/shared/source_whisky_hunter.py
+++ b/src/airbyte/models/shared/source_whisky_hunter.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceWhiskyHunterWhiskyHunterEnum(str, Enum):
+class SourceWhiskyHunterWhiskyHunter(str, Enum):
WHISKY_HUNTER = 'whisky-hunter'
@@ -15,5 +15,5 @@ class SourceWhiskyHunterWhiskyHunterEnum(str, Enum):
class SourceWhiskyHunter:
r"""The values required to configure the source."""
- source_type: SourceWhiskyHunterWhiskyHunterEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceWhiskyHunterWhiskyHunter = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_wikipedia_pageviews.py b/src/airbyte/models/shared/source_wikipedia_pageviews.py
index 212e7046..99d6a062 100755
--- a/src/airbyte/models/shared/source_wikipedia_pageviews.py
+++ b/src/airbyte/models/shared/source_wikipedia_pageviews.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceWikipediaPageviewsWikipediaPageviewsEnum(str, Enum):
+class SourceWikipediaPageviewsWikipediaPageviews(str, Enum):
WIKIPEDIA_PAGEVIEWS = 'wikipedia-pageviews'
@@ -27,7 +27,7 @@ class SourceWikipediaPageviews:
r"""The date of the last day to include, in YYYYMMDD or YYYYMMDDHH format."""
project: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('project') }})
r"""If you want to filter by project, use the domain of any Wikimedia project."""
- source_type: SourceWikipediaPageviewsWikipediaPageviewsEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceWikipediaPageviewsWikipediaPageviews = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start') }})
r"""The date of the first day to include, in YYYYMMDD or YYYYMMDDHH format."""
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_woocommerce.py b/src/airbyte/models/shared/source_woocommerce.py
index eedf4220..70c84dd4 100755
--- a/src/airbyte/models/shared/source_woocommerce.py
+++ b/src/airbyte/models/shared/source_woocommerce.py
@@ -8,7 +8,7 @@
from enum import Enum
from marshmallow import fields
-class SourceWoocommerceWoocommerceEnum(str, Enum):
+class SourceWoocommerceWoocommerce(str, Enum):
WOOCOMMERCE = 'woocommerce'
@@ -23,7 +23,7 @@ class SourceWoocommerce:
r"""Customer Secret for API in WooCommerce shop"""
shop: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('shop') }})
r"""The name of the store. For https://EXAMPLE.com, the shop name is 'EXAMPLE.com'."""
- source_type: SourceWoocommerceWoocommerceEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceWoocommerceWoocommerce = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: date = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.dateisoformat(False), 'decoder': utils.datefromisoformat, 'mm_field': fields.DateTime(format='iso') }})
r"""The date you would like to replicate data from. Format: YYYY-MM-DD"""
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_xero.py b/src/airbyte/models/shared/source_xero.py
index f4403932..22dd15fd 100755
--- a/src/airbyte/models/shared/source_xero.py
+++ b/src/airbyte/models/shared/source_xero.py
@@ -25,7 +25,7 @@ class SourceXeroAuthenticateViaXeroOAuth:
token_expiry_date: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('token_expiry_date') }})
r"""The date-time when the access token should be refreshed"""
-class SourceXeroXeroEnum(str, Enum):
+class SourceXeroXero(str, Enum):
XERO = 'xero'
@@ -35,7 +35,7 @@ class SourceXero:
r"""The values required to configure the source."""
authentication: SourceXeroAuthenticateViaXeroOAuth = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('authentication') }})
- source_type: SourceXeroXeroEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceXeroXero = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso') }})
r"""UTC date and time in the format YYYY-MM-DDTHH:mm:ssZ. Any data with created_at before this data will not be synced."""
tenant_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tenant_id') }})
diff --git a/src/airbyte/models/shared/source_xkcd.py b/src/airbyte/models/shared/source_xkcd.py
index d7d344c3..2f1d6a93 100755
--- a/src/airbyte/models/shared/source_xkcd.py
+++ b/src/airbyte/models/shared/source_xkcd.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceXkcdXkcdEnum(str, Enum):
+class SourceXkcdXkcd(str, Enum):
XKCD = 'xkcd'
@@ -15,5 +15,5 @@ class SourceXkcdXkcdEnum(str, Enum):
class SourceXkcd:
r"""The values required to configure the source."""
- source_type: SourceXkcdXkcdEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceXkcdXkcd = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
\ No newline at end of file
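
At the minimal end of the spectrum, the xkcd model carries nothing but the `source_type` discriminator:

```python
from airbyte.models import shared

source = shared.SourceXkcd(source_type=shared.SourceXkcdXkcd.XKCD)
```
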
diff --git a/src/airbyte/models/shared/source_yandex_metrica.py b/src/airbyte/models/shared/source_yandex_metrica.py
index c540de53..3d8baa5a 100755
--- a/src/airbyte/models/shared/source_yandex_metrica.py
+++ b/src/airbyte/models/shared/source_yandex_metrica.py
@@ -9,7 +9,7 @@
from marshmallow import fields
from typing import Optional
-class SourceYandexMetricaYandexMetricaEnum(str, Enum):
+class SourceYandexMetricaYandexMetrica(str, Enum):
YANDEX_METRICA = 'yandex-metrica'
@@ -22,7 +22,7 @@ class SourceYandexMetrica:
r"""Your Yandex Metrica API access token"""
counter_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('counter_id') }})
r"""Counter ID"""
- source_type: SourceYandexMetricaYandexMetricaEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceYandexMetricaYandexMetrica = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: date = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.dateisoformat(False), 'decoder': utils.datefromisoformat, 'mm_field': fields.DateTime(format='iso') }})
r"""Starting point for your data replication, in format of \\"YYYY-MM-DD\\"."""
end_date: Optional[date] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('end_date'), 'encoder': utils.dateisoformat(True), 'decoder': utils.datefromisoformat, 'mm_field': fields.DateTime(format='iso'), 'exclude': lambda f: f is None }})
diff --git a/src/airbyte/models/shared/source_younium.py b/src/airbyte/models/shared/source_younium.py
index 49319879..6ccfc25a 100755
--- a/src/airbyte/models/shared/source_younium.py
+++ b/src/airbyte/models/shared/source_younium.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class SourceYouniumYouniumEnum(str, Enum):
+class SourceYouniumYounium(str, Enum):
YOUNIUM = 'younium'
@@ -20,7 +20,7 @@ class SourceYounium:
r"""Legal Entity that data should be pulled from"""
password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password') }})
r"""Account password for younium account API key"""
- source_type: SourceYouniumYouniumEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceYouniumYounium = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }})
r"""Username for Younium account"""
playground: Optional[bool] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('playground'), 'exclude': lambda f: f is None }})
diff --git a/src/airbyte/models/shared/source_youtube_analytics.py b/src/airbyte/models/shared/source_youtube_analytics.py
index 84ac268e..a73fa2a5 100755
--- a/src/airbyte/models/shared/source_youtube_analytics.py
+++ b/src/airbyte/models/shared/source_youtube_analytics.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Any
-class SourceYoutubeAnalyticsYoutubeAnalyticsEnum(str, Enum):
+class SourceYoutubeAnalyticsYoutubeAnalytics(str, Enum):
YOUTUBE_ANALYTICS = 'youtube-analytics'
@@ -17,5 +17,5 @@ class SourceYoutubeAnalytics:
r"""The values required to configure the source."""
credentials: dict[str, Any] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials') }})
- source_type: SourceYoutubeAnalyticsYoutubeAnalyticsEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceYoutubeAnalyticsYoutubeAnalytics = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_zendesk_chat.py b/src/airbyte/models/shared/source_zendesk_chat.py
index f15a4bdb..0fbca2e9 100755
--- a/src/airbyte/models/shared/source_zendesk_chat.py
+++ b/src/airbyte/models/shared/source_zendesk_chat.py
@@ -10,7 +10,7 @@
from marshmallow import fields
from typing import Any, Optional
-class SourceZendeskChatCredentialsAccessTokenCredentialsEnum(str, Enum):
+class SourceZendeskChatCredentialsAccessTokenCredentials(str, Enum):
ACCESS_TOKEN = 'access_token'
@@ -20,9 +20,9 @@ class SourceZendeskChatCredentialsAccessToken:
access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }})
r"""The Access Token to make authenticated requests."""
- credentials: SourceZendeskChatCredentialsAccessTokenCredentialsEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials') }})
+ credentials: SourceZendeskChatCredentialsAccessTokenCredentials = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials') }})
-class SourceZendeskChatCredentialsOAuth20CredentialsEnum(str, Enum):
+class SourceZendeskChatCredentialsOAuth20Credentials(str, Enum):
OAUTH2_0 = 'oauth2.0'
@@ -30,7 +30,7 @@ class SourceZendeskChatCredentialsOAuth20CredentialsEnum(str, Enum):
@dataclasses.dataclass
class SourceZendeskChatCredentialsOAuth20:
- credentials: SourceZendeskChatCredentialsOAuth20CredentialsEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials') }})
+ credentials: SourceZendeskChatCredentialsOAuth20Credentials = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials') }})
access_token: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token'), 'exclude': lambda f: f is None }})
r"""Access Token for making authenticated requests."""
client_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id'), 'exclude': lambda f: f is None }})
@@ -40,7 +40,7 @@ class SourceZendeskChatCredentialsOAuth20:
refresh_token: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('refresh_token'), 'exclude': lambda f: f is None }})
r"""Refresh Token to obtain new Access Token, when it's expired."""
-class SourceZendeskChatZendeskChatEnum(str, Enum):
+class SourceZendeskChatZendeskChat(str, Enum):
ZENDESK_CHAT = 'zendesk-chat'
@@ -49,7 +49,7 @@ class SourceZendeskChatZendeskChatEnum(str, Enum):
class SourceZendeskChat:
r"""The values required to configure the source."""
- source_type: SourceZendeskChatZendeskChatEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceZendeskChatZendeskChat = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso') }})
r"""The date from which you'd like to replicate data for Zendesk Chat API, in the format YYYY-MM-DDT00:00:00Z."""
credentials: Optional[Any] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials'), 'exclude': lambda f: f is None }})
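The same pattern applies to the nested credentials classes here. A sketch of wiring the renamed OAuth2.0 variant into the source model; the token value is a placeholder:

```python
from datetime import datetime

from airbyte.models import shared

creds = shared.SourceZendeskChatCredentialsOAuth20(
    credentials=shared.SourceZendeskChatCredentialsOAuth20Credentials.OAUTH2_0,
    access_token='...',  # placeholder access token
)
source = shared.SourceZendeskChat(
    source_type=shared.SourceZendeskChatZendeskChat.ZENDESK_CHAT,
    start_date=datetime(2021, 10, 17),  # serialized as YYYY-MM-DDT00:00:00Z
    credentials=creds,  # typed Optional[Any], so either credentials variant fits
)
```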
diff --git a/src/airbyte/models/shared/source_zendesk_sunshine.py b/src/airbyte/models/shared/source_zendesk_sunshine.py
index 2ab90592..1344dc7d 100755
--- a/src/airbyte/models/shared/source_zendesk_sunshine.py
+++ b/src/airbyte/models/shared/source_zendesk_sunshine.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Any, Optional
-class SourceZendeskSunshineCredentialsAPITokenAuthMethodEnum(str, Enum):
+class SourceZendeskSunshineCredentialsAPITokenAuthMethod(str, Enum):
API_TOKEN = 'api_token'
@@ -17,11 +17,11 @@ class SourceZendeskSunshineCredentialsAPIToken:
api_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_token') }})
r"""API Token. See the docs for information on how to generate this key."""
- auth_method: SourceZendeskSunshineCredentialsAPITokenAuthMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_method') }})
+ auth_method: SourceZendeskSunshineCredentialsAPITokenAuthMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_method') }})
email: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('email') }})
r"""The user email for your Zendesk account"""
-class SourceZendeskSunshineCredentialsOAuth20AuthMethodEnum(str, Enum):
+class SourceZendeskSunshineCredentialsOAuth20AuthMethod(str, Enum):
OAUTH2_0 = 'oauth2.0'
@@ -31,13 +31,13 @@ class SourceZendeskSunshineCredentialsOAuth20:
access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }})
r"""Long-term access Token for making authenticated requests."""
- auth_method: SourceZendeskSunshineCredentialsOAuth20AuthMethodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_method') }})
+ auth_method: SourceZendeskSunshineCredentialsOAuth20AuthMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_method') }})
client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }})
r"""The Client ID of your OAuth application."""
client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }})
r"""The Client Secret of your OAuth application."""
-class SourceZendeskSunshineZendeskSunshineEnum(str, Enum):
+class SourceZendeskSunshineZendeskSunshine(str, Enum):
ZENDESK_SUNSHINE = 'zendesk-sunshine'
@@ -46,7 +46,7 @@ class SourceZendeskSunshineZendeskSunshineEnum(str, Enum):
class SourceZendeskSunshine:
r"""The values required to configure the source."""
- source_type: SourceZendeskSunshineZendeskSunshineEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceZendeskSunshineZendeskSunshine = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date') }})
r"""The date from which you'd like to replicate data for Zendesk Sunshine API, in the format YYYY-MM-DDT00:00:00Z."""
subdomain: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('subdomain') }})
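For Zendesk Sunshine the API-token variant is the simpler of the two renamed credentials classes. A sketch with placeholder values; attaching `credentials` assumes the model exposes the same optional field as its Zendesk siblings, since that line falls outside this hunk:

```python
from airbyte.models import shared

creds = shared.SourceZendeskSunshineCredentialsAPIToken(
    api_token='...',  # placeholder
    auth_method=shared.SourceZendeskSunshineCredentialsAPITokenAuthMethod.API_TOKEN,
    email='user@example.com',
)
source = shared.SourceZendeskSunshine(
    source_type=shared.SourceZendeskSunshineZendeskSunshine.ZENDESK_SUNSHINE,
    start_date='2021-01-01T00:00:00Z',
    subdomain='my-subdomain',
    credentials=creds,  # assumed optional field, as in the other Zendesk models
)
```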
diff --git a/src/airbyte/models/shared/source_zendesk_support.py b/src/airbyte/models/shared/source_zendesk_support.py
index 7eb0f0e1..80231271 100755
--- a/src/airbyte/models/shared/source_zendesk_support.py
+++ b/src/airbyte/models/shared/source_zendesk_support.py
@@ -10,7 +10,7 @@
from marshmallow import fields
from typing import Any, Optional
-class SourceZendeskSupportZendeskSupportEnum(str, Enum):
+class SourceZendeskSupportZendeskSupport(str, Enum):
ZENDESK_SUPPORT = 'zendesk-support'
@@ -19,7 +19,7 @@ class SourceZendeskSupportZendeskSupportEnum(str, Enum):
class SourceZendeskSupport:
r"""The values required to configure the source."""
- source_type: SourceZendeskSupportZendeskSupportEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceZendeskSupportZendeskSupport = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso') }})
r"""The date from which you'd like to replicate data for Zendesk Support API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated."""
subdomain: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('subdomain') }})
diff --git a/src/airbyte/models/shared/source_zendesk_talk.py b/src/airbyte/models/shared/source_zendesk_talk.py
index bca12401..c22c2d65 100755
--- a/src/airbyte/models/shared/source_zendesk_talk.py
+++ b/src/airbyte/models/shared/source_zendesk_talk.py
@@ -10,7 +10,7 @@
from marshmallow import fields
from typing import Any, Optional
-class SourceZendeskTalkZendeskTalkEnum(str, Enum):
+class SourceZendeskTalkZendeskTalk(str, Enum):
ZENDESK_TALK = 'zendesk-talk'
@@ -19,7 +19,7 @@ class SourceZendeskTalkZendeskTalkEnum(str, Enum):
class SourceZendeskTalk:
r"""The values required to configure the source."""
- source_type: SourceZendeskTalkZendeskTalkEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceZendeskTalkZendeskTalk = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso') }})
r"""The date from which you'd like to replicate data for Zendesk Talk API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated."""
subdomain: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('subdomain') }})
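Zendesk Support and Zendesk Talk get the identical single-rename treatment, so one sketch covers both:

```python
from datetime import datetime

from airbyte.models import shared

support = shared.SourceZendeskSupport(
    source_type=shared.SourceZendeskSupportZendeskSupport.ZENDESK_SUPPORT,
    start_date=datetime(2021, 1, 1),
    subdomain='my-subdomain',
)
talk = shared.SourceZendeskTalk(
    source_type=shared.SourceZendeskTalkZendeskTalk.ZENDESK_TALK,
    start_date=datetime(2021, 1, 1),
    subdomain='my-subdomain',
)
```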
diff --git a/src/airbyte/models/shared/source_zenloop.py b/src/airbyte/models/shared/source_zenloop.py
index 07246c7d..627967d0 100755
--- a/src/airbyte/models/shared/source_zenloop.py
+++ b/src/airbyte/models/shared/source_zenloop.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Optional
-class SourceZenloopZenloopEnum(str, Enum):
+class SourceZenloopZenloop(str, Enum):
ZENLOOP = 'zenloop'
@@ -18,7 +18,7 @@ class SourceZenloop:
api_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_token') }})
r"""Zenloop API Token. You can get the API token in settings page here"""
- source_type: SourceZenloopZenloopEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceZenloopZenloop = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
date_from: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('date_from'), 'exclude': lambda f: f is None }})
r"""Zenloop date_from. Format: 2021-10-24T03:30:30Z or 2021-10-24. Leave empty if only data from current data should be synced"""
survey_group_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('survey_group_id'), 'exclude': lambda f: f is None }})
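A corresponding sketch for Zenloop; only `api_token` and `source_type` are required, the rest are optional filters:

```python
from airbyte.models import shared

source = shared.SourceZenloop(
    api_token='...',  # placeholder Zenloop API token
    source_type=shared.SourceZenloopZenloop.ZENLOOP,
    date_from='2021-10-24T03:30:30Z',  # optional; either timestamp or date format
)
```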
diff --git a/src/airbyte/models/shared/source_zoho_crm.py b/src/airbyte/models/shared/source_zoho_crm.py
index ec0dba0d..0862f780 100755
--- a/src/airbyte/models/shared/source_zoho_crm.py
+++ b/src/airbyte/models/shared/source_zoho_crm.py
@@ -10,7 +10,7 @@
from marshmallow import fields
from typing import Optional
-class SourceZohoCrmDataCenterLocationEnum(str, Enum):
+class SourceZohoCrmDataCenterLocation(str, Enum):
r"""Please choose the region of your Data Center location. More info by this Link"""
US = 'US'
AU = 'AU'
@@ -19,7 +19,7 @@ class SourceZohoCrmDataCenterLocationEnum(str, Enum):
CN = 'CN'
JP = 'JP'
-class SourceZohoCRMZohoCRMEditionEnum(str, Enum):
+class SourceZohoCRMZohoCRMEdition(str, Enum):
r"""Choose your Edition of Zoho CRM to determine API Concurrency Limits"""
FREE = 'Free'
STANDARD = 'Standard'
@@ -27,13 +27,13 @@ class SourceZohoCRMZohoCRMEditionEnum(str, Enum):
ENTERPRISE = 'Enterprise'
ULTIMATE = 'Ultimate'
-class SourceZohoCrmEnvironmentEnum(str, Enum):
+class SourceZohoCrmEnvironment(str, Enum):
r"""Please choose the environment"""
PRODUCTION = 'Production'
DEVELOPER = 'Developer'
SANDBOX = 'Sandbox'
-class SourceZohoCrmZohoCrmEnum(str, Enum):
+class SourceZohoCrmZohoCrm(str, Enum):
ZOHO_CRM = 'zoho-crm'
@@ -46,15 +46,15 @@ class SourceZohoCrm:
r"""OAuth2.0 Client ID"""
client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }})
r"""OAuth2.0 Client Secret"""
- dc_region: SourceZohoCrmDataCenterLocationEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dc_region') }})
+ dc_region: SourceZohoCrmDataCenterLocation = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dc_region') }})
r"""Please choose the region of your Data Center location. More info by this Link"""
- edition: SourceZohoCRMZohoCRMEditionEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('edition') }})
+ edition: SourceZohoCRMZohoCRMEdition = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('edition') }})
r"""Choose your Edition of Zoho CRM to determine API Concurrency Limits"""
- environment: SourceZohoCrmEnvironmentEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('environment') }})
+ environment: SourceZohoCrmEnvironment = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('environment') }})
r"""Please choose the environment"""
refresh_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('refresh_token') }})
r"""OAuth2.0 Refresh Token"""
- source_type: SourceZohoCrmZohoCrmEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceZohoCrmZohoCrm = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_datetime: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_datetime'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'mm_field': fields.DateTime(format='iso'), 'exclude': lambda f: f is None }})
r"""ISO 8601, for instance: `YYYY-MM-DD`, `YYYY-MM-DD HH:MM:SS+HH:MM`"""
\ No newline at end of file
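Zoho CRM is the busiest rename in this batch, with four enums feeding one model. A sketch with placeholder OAuth values:

```python
from airbyte.models import shared

source = shared.SourceZohoCrm(
    client_id='...',      # placeholder OAuth2.0 Client ID
    client_secret='...',  # placeholder OAuth2.0 Client Secret
    dc_region=shared.SourceZohoCrmDataCenterLocation.US,
    edition=shared.SourceZohoCRMZohoCRMEdition.FREE,
    environment=shared.SourceZohoCrmEnvironment.PRODUCTION,
    refresh_token='...',  # placeholder OAuth2.0 Refresh Token
    source_type=shared.SourceZohoCrmZohoCrm.ZOHO_CRM,
)
```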
diff --git a/src/airbyte/models/shared/source_zoom.py b/src/airbyte/models/shared/source_zoom.py
index 45338d1b..418f2ccd 100755
--- a/src/airbyte/models/shared/source_zoom.py
+++ b/src/airbyte/models/shared/source_zoom.py
@@ -6,7 +6,7 @@
from dataclasses_json import Undefined, dataclass_json
from enum import Enum
-class SourceZoomZoomEnum(str, Enum):
+class SourceZoomZoom(str, Enum):
ZOOM = 'zoom'
@@ -17,5 +17,5 @@ class SourceZoom:
jwt_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('jwt_token') }})
r"""JWT Token"""
- source_type: SourceZoomZoomEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceZoomZoom = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
\ No newline at end of file
diff --git a/src/airbyte/models/shared/source_zuora.py b/src/airbyte/models/shared/source_zuora.py
index a8d8adb0..c2d3335c 100755
--- a/src/airbyte/models/shared/source_zuora.py
+++ b/src/airbyte/models/shared/source_zuora.py
@@ -7,15 +7,15 @@
from enum import Enum
from typing import Optional
-class SourceZuoraDataQueryTypeEnum(str, Enum):
+class SourceZuoraDataQueryType(str, Enum):
r"""Choose between `Live`, or `Unlimited` - the optimized, replicated database at 12 hours freshness for high volume extraction Link"""
LIVE = 'Live'
UNLIMITED = 'Unlimited'
-class SourceZuoraZuoraEnum(str, Enum):
+class SourceZuoraZuora(str, Enum):
ZUORA = 'zuora'
-class SourceZuoraTenantEndpointLocationEnum(str, Enum):
+class SourceZuoraTenantEndpointLocation(str, Enum):
r"""Please choose the right endpoint where your Tenant is located. More info by this Link"""
US_PRODUCTION = 'US Production'
US_CLOUD_PRODUCTION = 'US Cloud Production'
@@ -37,12 +37,12 @@ class SourceZuora:
r"""Your OAuth user Client ID"""
client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }})
r"""Your OAuth user Client Secret"""
- data_query: SourceZuoraDataQueryTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('data_query') }})
+ data_query: SourceZuoraDataQueryType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('data_query') }})
r"""Choose between `Live`, or `Unlimited` - the optimized, replicated database at 12 hours freshness for high volume extraction Link"""
- source_type: SourceZuoraZuoraEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
+ source_type: SourceZuoraZuora = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
start_date: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date') }})
r"""Start Date in format: YYYY-MM-DD"""
- tenant_endpoint: SourceZuoraTenantEndpointLocationEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tenant_endpoint') }})
+ tenant_endpoint: SourceZuoraTenantEndpointLocation = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tenant_endpoint') }})
r"""Please choose the right endpoint where your Tenant is located. More info by this Link"""
window_in_days: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('window_in_days'), 'exclude': lambda f: f is None }})
r"""The amount of days for each data-chunk begining from start_date. Bigger the value - faster the fetch. (0.1 - as for couple of hours, 1 - as for a Day; 364 - as for a Year)."""
diff --git a/src/airbyte/models/shared/streamconfiguration.py b/src/airbyte/models/shared/streamconfiguration.py
index 8083f4cb..18bdde4d 100755
--- a/src/airbyte/models/shared/streamconfiguration.py
+++ b/src/airbyte/models/shared/streamconfiguration.py
@@ -2,7 +2,7 @@
from __future__ import annotations
import dataclasses
-from ..shared import connectionsyncmodeenum_enum as shared_connectionsyncmodeenum_enum
+from ..shared import connectionsyncmodeenum as shared_connectionsyncmodeenum
from airbyte import utils
from dataclasses_json import Undefined, dataclass_json
from typing import Optional
@@ -18,5 +18,5 @@ class StreamConfiguration:
r"""Path to the field that will be used to determine if a record is new or modified since the last sync. This field is REQUIRED if `sync_mode` is `incremental` unless there is a default."""
primary_key: Optional[list[list[str]]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('primaryKey'), 'exclude': lambda f: f is None }})
r"""Paths to the fields that will be used as primary key. This field is REQUIRED if `destination_sync_mode` is `*_dedup` unless it is already supplied by the source schema."""
- sync_mode: Optional[shared_connectionsyncmodeenum_enum.ConnectionSyncModeEnumEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('syncMode'), 'exclude': lambda f: f is None }})
+ sync_mode: Optional[shared_connectionsyncmodeenum.ConnectionSyncModeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('syncMode'), 'exclude': lambda f: f is None }})
\ No newline at end of file
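Here the module itself is renamed along with the class, so any direct `from airbyte.models.shared import connectionsyncmodeenum_enum` import breaks too. A sketch using the re-exported names; the `name` field is assumed from the broader model, since this hunk shows only the tail of the class:

```python
from airbyte.models import shared

stream = shared.StreamConfiguration(
    name='users',  # assumed required field; not visible in this hunk
    cursor_field=['updated_at'],
    primary_key=[['id']],
    sync_mode=shared.ConnectionSyncModeEnum.INCREMENTAL_DEDUPED_HISTORY,
)
```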
diff --git a/src/airbyte/models/shared/streamproperties.py b/src/airbyte/models/shared/streamproperties.py
index 6054a10a..d140a2e2 100755
--- a/src/airbyte/models/shared/streamproperties.py
+++ b/src/airbyte/models/shared/streamproperties.py
@@ -2,7 +2,7 @@
from __future__ import annotations
import dataclasses
-from ..shared import connectionsyncmodeenum_enum as shared_connectionsyncmodeenum_enum
+from ..shared import connectionsyncmodeenum as shared_connectionsyncmodeenum
from airbyte import utils
from dataclasses_json import Undefined, dataclass_json
from typing import Optional
@@ -18,5 +18,5 @@ class StreamProperties:
source_defined_cursor_field: Optional[bool] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceDefinedCursorField'), 'exclude': lambda f: f is None }})
source_defined_primary_key: Optional[list[list[str]]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceDefinedPrimaryKey'), 'exclude': lambda f: f is None }})
stream_name: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('streamName'), 'exclude': lambda f: f is None }})
- sync_modes: Optional[list[shared_connectionsyncmodeenum_enum.ConnectionSyncModeEnumEnum]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('syncModes'), 'exclude': lambda f: f is None }})
+ sync_modes: Optional[list[shared_connectionsyncmodeenum.ConnectionSyncModeEnum]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('syncModes'), 'exclude': lambda f: f is None }})
\ No newline at end of file
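On the read side, the renamed enum is what you compare against when inspecting a discovered stream's capabilities. A small helper sketch:

```python
from airbyte.models import shared

def supports_incremental_append(props: shared.StreamProperties) -> bool:
    # sync_modes is Optional, so guard before testing membership.
    return bool(props.sync_modes) and (
        shared.ConnectionSyncModeEnum.INCREMENTAL_APPEND in props.sync_modes
    )
```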
diff --git a/src/airbyte/models/shared/workspaceoauthcredentialsrequest.py b/src/airbyte/models/shared/workspaceoauthcredentialsrequest.py
index 0d791b64..726bf411 100755
--- a/src/airbyte/models/shared/workspaceoauthcredentialsrequest.py
+++ b/src/airbyte/models/shared/workspaceoauthcredentialsrequest.py
@@ -2,7 +2,7 @@
from __future__ import annotations
import dataclasses
-from ..shared import actortypeenum_enum as shared_actortypeenum_enum
+from ..shared import actortypeenum as shared_actortypeenum
from airbyte import utils
from dataclasses_json import Undefined, dataclass_json
from typing import Any
@@ -13,7 +13,7 @@
class WorkspaceOAuthCredentialsRequest:
r"""POST body for creating/updating workspace level OAuth credentials"""
- actor_type: shared_actortypeenum_enum.ActorTypeEnumEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('actorType') }})
+ actor_type: shared_actortypeenum.ActorTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('actorType') }})
r"""Whether you're setting this override for a source or destination"""
configuration: dict[str, Any] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('configuration') }})
r"""The configuration for this source/destination based on the OAuth section of the relevant specification."""
diff --git a/src/airbyte/models/shared/workspaceresponse.py b/src/airbyte/models/shared/workspaceresponse.py
index 49a91293..aa094e62 100755
--- a/src/airbyte/models/shared/workspaceresponse.py
+++ b/src/airbyte/models/shared/workspaceresponse.py
@@ -2,7 +2,7 @@
from __future__ import annotations
import dataclasses
-from ..shared import geographyenum_enum as shared_geographyenum_enum
+from ..shared import geographyenum as shared_geographyenum
from airbyte import utils
from dataclasses_json import Undefined, dataclass_json
@@ -12,7 +12,7 @@
class WorkspaceResponse:
r"""Provides details of a single workspace."""
- data_residency: shared_geographyenum_enum.GeographyEnumEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dataResidency') }})
+ data_residency: shared_geographyenum.GeographyEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dataResidency') }})
name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name') }})
workspace_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('workspaceId') }})
\ No newline at end of file
diff --git a/src/airbyte/sdk.py b/src/airbyte/sdk.py
index 9dd85d0a..5f5b65fd 100755
--- a/src/airbyte/sdk.py
+++ b/src/airbyte/sdk.py
@@ -28,8 +28,8 @@ class Airbyte:
_security_client: requests_http.Session
_server_url: str = SERVERS[0]
_language: str = "python"
- _sdk_version: str = "0.7.0"
- _gen_version: str = "2.29.0"
+ _sdk_version: str = "0.10.0"
+ _gen_version: str = "2.32.2"
def __init__(self,
security: shared.Security = None,
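The version bump is not cosmetic: both constants feed the user-agent header attached to every request, as seen in the operation files below. A quick check of the resulting string:

```python
_language, _sdk_version, _gen_version = 'python', '0.10.0', '2.32.2'
user_agent = f'speakeasy-sdk/{_language} {_sdk_version} {_gen_version}'
assert user_agent == 'speakeasy-sdk/python 0.10.0 2.32.2'
```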
diff --git a/src/airbyte/sources.py b/src/airbyte/sources.py
index cc85e9b7..6e2d60d6 100755
--- a/src/airbyte/sources.py
+++ b/src/airbyte/sources.py
@@ -29,11 +29,11 @@ def create_source(self, request: shared.SourceCreateRequest) -> operations.Creat
base_url = self._server_url
url = base_url.removesuffix('/') + '/sources'
-
headers = {}
req_content_type, data, form = utils.serialize_request_body(request, "request", 'json')
if req_content_type not in ('multipart/form-data', 'multipart/mixed'):
headers['content-type'] = req_content_type
+ headers['Accept'] = 'application/json'
headers['user-agent'] = f'speakeasy-sdk/{self._language} {self._sdk_version} {self._gen_version}'
client = self._security_client
@@ -58,8 +58,8 @@ def delete_source(self, request: operations.DeleteSourceRequest) -> operations.D
base_url = self._server_url
url = utils.generate_url(operations.DeleteSourceRequest, base_url, '/sources/{sourceId}', request)
-
headers = {}
+ headers['Accept'] = '*/*'
headers['user-agent'] = f'speakeasy-sdk/{self._language} {self._sdk_version} {self._gen_version}'
client = self._security_client
@@ -78,8 +78,8 @@ def get_source(self, request: operations.GetSourceRequest) -> operations.GetSour
base_url = self._server_url
url = utils.generate_url(operations.GetSourceRequest, base_url, '/sources/{sourceId}', request)
-
headers = {}
+ headers['Accept'] = 'application/json'
headers['user-agent'] = f'speakeasy-sdk/{self._language} {self._sdk_version} {self._gen_version}'
client = self._security_client
@@ -110,13 +110,13 @@ def initiate_o_auth(self, request: shared.InitiateOauthRequest) -> operations.In
base_url = self._server_url
url = base_url.removesuffix('/') + '/sources/initiateOAuth'
-
headers = {}
req_content_type, data, form = utils.serialize_request_body(request, "request", 'json')
if req_content_type not in ('multipart/form-data', 'multipart/mixed'):
headers['content-type'] = req_content_type
if data is None and form is None:
raise Exception('request body is required')
+ headers['Accept'] = '*/*'
headers['user-agent'] = f'speakeasy-sdk/{self._language} {self._sdk_version} {self._gen_version}'
client = self._security_client
@@ -135,9 +135,9 @@ def list_sources(self, request: operations.ListSourcesRequest) -> operations.Lis
base_url = self._server_url
url = base_url.removesuffix('/') + '/sources'
-
headers = {}
query_params = utils.get_query_params(operations.ListSourcesRequest, request)
+ headers['Accept'] = 'application/json'
headers['user-agent'] = f'speakeasy-sdk/{self._language} {self._sdk_version} {self._gen_version}'
client = self._security_client
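The behavioral change in this batch: every operation now sends an explicit Accept header, 'application/json' where a JSON body is documented and '*/*' where none is expected. A standalone sketch of the same pattern with plain requests; the base URL is an assumption for illustration, not taken from this diff:

```python
import requests

headers = {
    'Accept': 'application/json',  # '*/*' for operations with no response body
    'user-agent': 'speakeasy-sdk/python 0.10.0 2.32.2',
}
# Assumed base URL, for illustration only.
resp = requests.get('https://api.airbyte.com/v1/sources', headers=headers)
```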
diff --git a/src/airbyte/streams.py b/src/airbyte/streams.py
index f9588055..10f20936 100755
--- a/src/airbyte/streams.py
+++ b/src/airbyte/streams.py
@@ -27,9 +27,9 @@ def get_stream_properties(self, request: operations.GetStreamPropertiesRequest)
base_url = self._server_url
url = base_url.removesuffix('/') + '/streams'
-
headers = {}
query_params = utils.get_query_params(operations.GetStreamPropertiesRequest, request)
+ headers['Accept'] = 'application/json'
headers['user-agent'] = f'speakeasy-sdk/{self._language} {self._sdk_version} {self._gen_version}'
client = self._security_client
diff --git a/src/airbyte/workspaces.py b/src/airbyte/workspaces.py
index fe17382f..696150d3 100755
--- a/src/airbyte/workspaces.py
+++ b/src/airbyte/workspaces.py
@@ -30,13 +30,13 @@ def create_or_update_workspace_o_auth_credentials(self, request: operations.Crea
base_url = self._server_url
url = utils.generate_url(operations.CreateOrUpdateWorkspaceOAuthCredentialsRequest, base_url, '/workspaces/{workspaceId}/oauthCredentials', request)
-
headers = {}
req_content_type, data, form = utils.serialize_request_body(request, "workspace_o_auth_credentials_request", 'json')
if req_content_type not in ('multipart/form-data', 'multipart/mixed'):
headers['content-type'] = req_content_type
if data is None and form is None:
raise Exception('request body is required')
+ headers['Accept'] = '*/*'
headers['user-agent'] = f'speakeasy-sdk/{self._language} {self._sdk_version} {self._gen_version}'
client = self._security_client
@@ -55,13 +55,13 @@ def create_workspace(self, request: shared.WorkspaceCreateRequest) -> operations
base_url = self._server_url
url = base_url.removesuffix('/') + '/workspaces'
-
headers = {}
req_content_type, data, form = utils.serialize_request_body(request, "request", 'json')
if req_content_type not in ('multipart/form-data', 'multipart/mixed'):
headers['content-type'] = req_content_type
if data is None and form is None:
raise Exception('request body is required')
+ headers['Accept'] = 'application/json'
headers['user-agent'] = f'speakeasy-sdk/{self._language} {self._sdk_version} {self._gen_version}'
client = self._security_client
@@ -86,8 +86,8 @@ def delete_workspace(self, request: operations.DeleteWorkspaceRequest) -> operat
base_url = self._server_url
url = utils.generate_url(operations.DeleteWorkspaceRequest, base_url, '/workspaces/{workspaceId}', request)
-
headers = {}
+ headers['Accept'] = '*/*'
headers['user-agent'] = f'speakeasy-sdk/{self._language} {self._sdk_version} {self._gen_version}'
client = self._security_client
@@ -106,8 +106,8 @@ def get_workspace(self, request: operations.GetWorkspaceRequest) -> operations.G
base_url = self._server_url
url = utils.generate_url(operations.GetWorkspaceRequest, base_url, '/workspaces/{workspaceId}', request)
-
headers = {}
+ headers['Accept'] = 'application/json'
headers['user-agent'] = f'speakeasy-sdk/{self._language} {self._sdk_version} {self._gen_version}'
client = self._security_client
@@ -132,9 +132,9 @@ def list_workspaces(self, request: operations.ListWorkspacesRequest) -> operatio
base_url = self._server_url
url = base_url.removesuffix('/') + '/workspaces'
-
headers = {}
query_params = utils.get_query_params(operations.ListWorkspacesRequest, request)
+ headers['Accept'] = 'application/json'
headers['user-agent'] = f'speakeasy-sdk/{self._language} {self._sdk_version} {self._gen_version}'
client = self._security_client
@@ -159,13 +159,13 @@ def update_workspace(self, request: operations.UpdateWorkspaceRequest) -> operat
base_url = self._server_url
url = utils.generate_url(operations.UpdateWorkspaceRequest, base_url, '/workspaces/{workspaceId}', request)
-
headers = {}
req_content_type, data, form = utils.serialize_request_body(request, "workspace_update_request", 'json')
if req_content_type not in ('multipart/form-data', 'multipart/mixed'):
headers['content-type'] = req_content_type
if data is None and form is None:
raise Exception('request body is required')
+ headers['Accept'] = 'application/json'
headers['user-agent'] = f'speakeasy-sdk/{self._language} {self._sdk_version} {self._gen_version}'
client = self._security_client
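Putting the pieces together, a minimal end-to-end sketch against the workspaces surface. The `bearer_auth` field, the `workspaces` attribute, and the response's `status_code` are assumptions from common Speakeasy conventions, not shown in this diff:

```python
import airbyte
from airbyte.models import operations, shared

s = airbyte.Airbyte(
    security=shared.Security(
        bearer_auth='...',  # assumed field name; placeholder token
    ),
)

res = s.workspaces.list_workspaces(operations.ListWorkspacesRequest())
if res.status_code == 200:  # assumed attribute, per Speakeasy conventions
    print(res)
```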