diff --git a/src/datafactory/HISTORY.rst b/src/datafactory/HISTORY.rst index f4e5240e156..1c139576ba0 100644 --- a/src/datafactory/HISTORY.rst +++ b/src/datafactory/HISTORY.rst @@ -3,28 +3,6 @@ Release History =============== -0.4.0 -+++++ -* GA the whole module - -0.3.0 -+++++ -* [BREAKING CHANGE] Renamed command subgroup `az datafactory factory` to `az datafactory`. -* [BREAKING CHANGE] `az datafactory integration-runtime managed create`: `--type-properties-compute-properties` renamed to `--compute-properties`, - `--type-properties-ssis-properties` renamed to `--ssis-properties`. -* [BREAKING CHANGE] `az datafactory integration-runtime self-hosted create`: `--type-properties-linked-info` renamed to `--linked-info`. -* [BREAKING CHANGE] `az datafactory integration-runtime update`: `--properties` renamed to `--linked-service`. -* [BREAKING CHANGE] `az datafactory linked-service delete`: `--properties` renamed to `--dataset`. -* [BREAKING CHANGE] `az datafactory trigger list`: `--properties` renamed to `--trigger`. - -0.2.1 -+++++ -* az datafactory factory create: Enable managed identity by default - -0.2.0 -++++++ -* add update command for linked services and triggers and datasets - 0.1.0 ++++++ * Initial release. diff --git a/src/datafactory/azext_datafactory/azext_metadata.json b/src/datafactory/azext_datafactory/azext_metadata.json index 3695b0d7077..cfc30c747c7 100644 --- a/src/datafactory/azext_datafactory/azext_metadata.json +++ b/src/datafactory/azext_datafactory/azext_metadata.json @@ -1,3 +1,4 @@ { + "azext.isExperimental": true, "azext.minCliCoreVersion": "2.15.0" } \ No newline at end of file diff --git a/src/datafactory/azext_datafactory/generated/_client_factory.py b/src/datafactory/azext_datafactory/generated/_client_factory.py index 7db87b484da..837a01be1e4 100644 --- a/src/datafactory/azext_datafactory/generated/_client_factory.py +++ b/src/datafactory/azext_datafactory/generated/_client_factory.py @@ -54,3 +54,15 @@ def cf_trigger(cli_ctx, *_): def cf_trigger_run(cli_ctx, *_): return cf_datafactory_cl(cli_ctx).trigger_runs + + +def cf_private_end_point_connection(cli_ctx, *_): + return cf_datafactory_cl(cli_ctx).private_end_point_connections + + +def cf_private_endpoint_connection(cli_ctx, *_): + return cf_datafactory_cl(cli_ctx).private_endpoint_connection + + +def cf_private_link_resource(cli_ctx, *_): + return cf_datafactory_cl(cli_ctx).private_link_resources diff --git a/src/datafactory/azext_datafactory/generated/_help.py b/src/datafactory/azext_datafactory/generated/_help.py index fd2ab1dcd0e..202eeba1c85 100644 --- a/src/datafactory/azext_datafactory/generated/_help.py +++ b/src/datafactory/azext_datafactory/generated/_help.py @@ -42,33 +42,14 @@ type: command short-summary: "Create a factory." parameters: - - name: --factory-vsts-configuration - short-summary: "Factory's VSTS repo information." + - name: --identity + short-summary: "User assigned identity to use to authenticate to customer's key vault. If not provided Managed \ +Service Identity will be used." long-summary: | - Usage: --factory-vsts-configuration project-name=XX tenant-id=XX type=XX account-name=XX \ -repository-name=XX collaboration-branch=XX root-folder=XX last-commit-id=XX - - project-name: Required. VSTS project name. - tenant-id: VSTS tenant id. - type: Required. Type of repo configuration. - account-name: Required. Account name. - repository-name: Required. Repository name. - collaboration-branch: Required. Collaboration branch. - root-folder: Required. Root folder. 
- last-commit-id: Last commit id. - - name: --factory-git-hub-configuration - short-summary: "Factory's GitHub repo information." - long-summary: | - Usage: --factory-git-hub-configuration host-name=XX type=XX account-name=XX repository-name=XX \ -collaboration-branch=XX root-folder=XX last-commit-id=XX + Usage: --identity user-assigned-identity=XX - host-name: GitHub Enterprise host name. For example: https://github.mydomain.com - type: Required. Type of repo configuration. - account-name: Required. Account name. - repository-name: Required. Repository name. - collaboration-branch: Required. Collaboration branch. - root-folder: Required. Root folder. - last-commit-id: Last commit id. + user-assigned-identity: The resource id of the user assigned identity to authenticate to customer's key \ +vault. examples: - name: Factories_CreateOrUpdate text: |- @@ -98,41 +79,14 @@ helps['datafactory configure-factory-repo'] = """ type: command short-summary: "Updates a factory's repo information." - parameters: - - name: --factory-vsts-configuration - short-summary: "Factory's VSTS repo information." - long-summary: | - Usage: --factory-vsts-configuration project-name=XX tenant-id=XX type=XX account-name=XX \ -repository-name=XX collaboration-branch=XX root-folder=XX last-commit-id=XX - - project-name: Required. VSTS project name. - tenant-id: VSTS tenant id. - type: Required. Type of repo configuration. - account-name: Required. Account name. - repository-name: Required. Repository name. - collaboration-branch: Required. Collaboration branch. - root-folder: Required. Root folder. - last-commit-id: Last commit id. - - name: --factory-git-hub-configuration - short-summary: "Factory's GitHub repo information." - long-summary: | - Usage: --factory-git-hub-configuration host-name=XX type=XX account-name=XX repository-name=XX \ -collaboration-branch=XX root-folder=XX last-commit-id=XX - - host-name: GitHub Enterprise host name. For example: https://github.mydomain.com - type: Required. Type of repo configuration. - account-name: Required. Account name. - repository-name: Required. Repository name. - collaboration-branch: Required. Collaboration branch. - root-folder: Required. Root folder. - last-commit-id: Last commit id. examples: - name: Factories_ConfigureFactoryRepo text: |- az datafactory configure-factory-repo --factory-resource-id "/subscriptions/12345678-1234-1234-1234-1234\ 5678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName" \ ---factory-vsts-configuration account-name="ADF" collaboration-branch="master" last-commit-id="" project-name="project" \ -repository-name="repo" root-folder="/" tenant-id="" --location "East US" +--repo-configuration "{\\"type\\":\\"FactoryVSTSConfiguration\\",\\"accountName\\":\\"ADF\\",\\"collaborationBranch\\":\ +\\"master\\",\\"lastCommitId\\":\\"\\",\\"projectName\\":\\"project\\",\\"repositoryName\\":\\"repo\\",\\"rootFolder\\"\ +:\\"/\\",\\"tenantId\\":\\"\\"}" --location "East US" """ helps['datafactory get-data-plane-access'] = """ @@ -149,6 +103,14 @@ helps['datafactory get-git-hub-access-token'] = """ type: command short-summary: "Get GitHub Access Token." + parameters: + - name: --git-hub-client-secret + short-summary: "GitHub bring your own app client secret information." + long-summary: | + Usage: --git-hub-client-secret byoa-secret-akv-url=XX byoa-secret-name=XX + + byoa-secret-akv-url: Bring your own app client secret AKV URL. + byoa-secret-name: Bring your own app client secret name in AKV. 
examples: - name: Factories_GetGitHubAccessToken text: |- @@ -206,6 +168,13 @@ helps['datafactory integration-runtime managed create'] = """ type: command short-summary: "Create an integration runtime." + parameters: + - name: --managed-virtual-network + short-summary: "Managed Virtual Network reference." + long-summary: | + Usage: --managed-virtual-network reference-name=XX + + reference-name: Required. Reference ManagedVirtualNetwork name. """ helps['datafactory integration-runtime self-hosted'] = """ @@ -286,6 +255,16 @@ "exampleIntegrationRuntime" --resource-group "exampleResourceGroup" """ +helps['datafactory integration-runtime list-outbound-network-dependency-endpoint'] = """ + type: command + short-summary: "Gets the list of outbound network dependencies for a given Azure-SSIS integration runtime." + examples: + - name: IntegrationRuntimes_OutboundNetworkDependenciesEndpoints + text: |- + az datafactory integration-runtime list-outbound-network-dependency-endpoint --factory-name \ +"exampleFactoryName" --name "exampleIntegrationRuntime" --resource-group "exampleResourceGroup" +""" + helps['datafactory integration-runtime regenerate-auth-key'] = """ type: command short-summary: "Regenerates the authentication key for an integration runtime." @@ -591,7 +570,7 @@ taset\\"}],\\"typeProperties\\":{\\"dataIntegrationUnits\\":32,\\"sink\\":{\\"type\\":\\"BlobSink\\"},\\"source\\":{\\"\ type\\":\\"BlobSource\\"}}}],\\"isSequential\\":true,\\"items\\":{\\"type\\":\\"Expression\\",\\"value\\":\\"@pipeline(\ ).parameters.OutputBlobNameList\\"}}}]" --parameters "{\\"OutputBlobNameList\\":{\\"type\\":\\"Array\\"}}" --duration \ -"0.00:10:00" --name "examplePipeline" --resource-group "exampleResourceGroup" +"0.00:10:00" --pipeline-name "examplePipeline" --resource-group "exampleResourceGroup" """ helps['datafactory pipeline delete'] = """ @@ -902,3 +881,92 @@ az datafactory trigger-run rerun --factory-name "exampleFactoryName" --resource-group \ "exampleResourceGroup" --run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" --trigger-name "exampleTrigger" """ + +helps['datafactory private-end-point-connection'] = """ + type: group + short-summary: Manage private end point connection with datafactory +""" + +helps['datafactory private-end-point-connection list'] = """ + type: command + short-summary: "Lists Private endpoint connections." + examples: + - name: privateEndPointConnections_ListByFactory + text: |- + az datafactory private-end-point-connection list --factory-name "exampleFactoryName" --resource-group \ +"exampleResourceGroup" +""" + +helps['datafactory private-endpoint-connection'] = """ + type: group + short-summary: Manage private endpoint connection with datafactory +""" + +helps['datafactory private-endpoint-connection show'] = """ + type: command + short-summary: "Gets a private endpoint connection." + examples: + - name: Get a private endpoint connection for a datafactory. + text: |- + az datafactory private-endpoint-connection show --factory-name "exampleFactoryName" --name "connection" \ +--resource-group "exampleResourceGroup" +""" + +helps['datafactory private-endpoint-connection create'] = """ + type: command + short-summary: "Approves or rejects a private endpoint connection." 
+ parameters: + - name: --private-link-service-connection-state + short-summary: "The state of a private link connection" + long-summary: | + Usage: --private-link-service-connection-state status=XX description=XX actions-required=XX + + status: Status of a private link connection + description: Description of a private link connection + actions-required: ActionsRequired for a private link connection + examples: + - name: Approves or rejects a private endpoint connection for a factory. + text: |- + az datafactory private-endpoint-connection create --factory-name "exampleFactoryName" --name \ +"connection" --private-link-service-connection-state description="Approved by admin." actions-required="" \ +status="Approved" --resource-group "exampleResourceGroup" +""" + +helps['datafactory private-endpoint-connection update'] = """ + type: command + short-summary: "Approves or rejects a private endpoint connection." + parameters: + - name: --private-link-service-connection-state + short-summary: "The state of a private link connection" + long-summary: | + Usage: --private-link-service-connection-state status=XX description=XX actions-required=XX + + status: Status of a private link connection + description: Description of a private link connection + actions-required: ActionsRequired for a private link connection +""" + +helps['datafactory private-endpoint-connection delete'] = """ + type: command + short-summary: "Deletes a private endpoint connection." + examples: + - name: Delete a private endpoint connection for a datafactory. + text: |- + az datafactory private-endpoint-connection delete --factory-name "exampleFactoryName" --name \ +"connection" --resource-group "exampleResourceGroup" +""" + +helps['datafactory private-link-resource'] = """ + type: group + short-summary: Manage private link resource with datafactory +""" + +helps['datafactory private-link-resource show'] = """ + type: command + short-summary: "Gets the private link resources." + examples: + - name: Get private link resources of a site + text: |- + az datafactory private-link-resource show --factory-name "exampleFactoryName" --resource-group \ +"exampleResourceGroup" +""" diff --git a/src/datafactory/azext_datafactory/generated/_params.py b/src/datafactory/azext_datafactory/generated/_params.py index 2162b81c231..c5d149f29fe 100644 --- a/src/datafactory/azext_datafactory/generated/_params.py +++ b/src/datafactory/azext_datafactory/generated/_params.py @@ -22,11 +22,13 @@ validate_file_or_dict ) from azext_datafactory.action import ( - AddFactoryVstsConfiguration, - AddFactoryGitHubConfiguration, + AddIdentity, + AddGitHubClientSecret, + AddManagedVirtualNetwork, AddFolder, AddFilters, - AddOrderBy + AddOrderBy, + AddPrivateLinkServiceConnectionState ) @@ -51,18 +53,37 @@ def load_arguments(self, _): c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False, validator=get_default_location_from_resource_group) c.argument('tags', tags_type) - c.argument('factory_vsts_configuration', action=AddFactoryVstsConfiguration, nargs='+', help='Factory\'s VSTS ' - 'repo information.', arg_group='RepoConfiguration') - c.argument('factory_git_hub_configuration', action=AddFactoryGitHubConfiguration, nargs='+', help='Factory\'s ' - 'GitHub repo information.', arg_group='RepoConfiguration') + c.argument('repo_configuration', type=validate_file_or_dict, help='Git repo information of the factory. 
' + 'Expected value: json-string/@json-file.') c.argument('global_parameters', type=validate_file_or_dict, help='List of parameters for factory. Expected ' 'value: json-string/@json-file.') + c.argument('public_network_access', arg_type=get_enum_type(['Enabled', 'Disabled']), help='Whether or not ' + 'public network access is allowed for the data factory.') + c.argument('key_name', type=str, help='The name of the key in Azure Key Vault to use as Customer Managed Key.', + arg_group='Encryption') + c.argument('vault_base_url', type=str, help='The url of the Azure Key Vault used for CMK.', + arg_group='Encryption') + c.argument('key_version', type=str, help='The version of the key used for CMK. If not provided, latest version ' + 'will be used.', arg_group='Encryption') + c.argument('identity', action=AddIdentity, nargs='+', help='User assigned identity to use to authenticate to ' + 'customer\'s key vault. If not provided Managed Service Identity will be used.', + arg_group='Encryption') + c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned', 'UserAssigned', + 'SystemAssigned,UserAssigned']), + help='The identity type.', arg_group='Identity') + c.argument('user_assigned_identities', type=validate_file_or_dict, help='List of user assigned identities for ' + 'the factory. Expected value: json-string/@json-file.', arg_group='Identity') with self.argument_context('datafactory update') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('factory_name', options_list=['--name', '-n', '--factory-name'], type=str, help='The factory name.', id_part='name') c.argument('tags', tags_type) + c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned', 'UserAssigned', + 'SystemAssigned,UserAssigned']), + help='The identity type.', arg_group='Identity') + c.argument('user_assigned_identities', type=validate_file_or_dict, help='List of user assigned identities for ' + 'the factory. Expected value: json-string/@json-file.', arg_group='Identity') with self.argument_context('datafactory delete') as c: c.argument('resource_group_name', resource_group_name_type) @@ -72,10 +93,8 @@ def load_arguments(self, _): with self.argument_context('datafactory configure-factory-repo') as c: c.argument('location', arg_type=get_location_type(self.cli_ctx), id_part='name') c.argument('factory_resource_id', type=str, help='The factory resource id.') - c.argument('factory_vsts_configuration', action=AddFactoryVstsConfiguration, nargs='+', help='Factory\'s VSTS ' - 'repo information.', arg_group='RepoConfiguration') - c.argument('factory_git_hub_configuration', action=AddFactoryGitHubConfiguration, nargs='+', help='Factory\'s ' - 'GitHub repo information.', arg_group='RepoConfiguration') + c.argument('repo_configuration', type=validate_file_or_dict, help='Git repo information of the factory. 
' + 'Expected value: json-string/@json-file.') with self.argument_context('datafactory get-data-plane-access') as c: c.argument('resource_group_name', resource_group_name_type) @@ -98,6 +117,8 @@ def load_arguments(self, _): id_part='name') c.argument('git_hub_access_code', type=str, help='GitHub access code.') c.argument('git_hub_client_id', type=str, help='GitHub application client ID.') + c.argument('git_hub_client_secret', action=AddGitHubClientSecret, nargs='+', help='GitHub bring your own app ' + 'client secret information.') c.argument('git_hub_access_token_base_url', type=str, help='GitHub access token base URL.') with self.argument_context('datafactory integration-runtime list') as c: @@ -133,6 +154,8 @@ def load_arguments(self, _): c.argument('if_match', type=str, help='ETag of the integration runtime entity. Should only be specified for ' 'update, for which it should match existing entity or can be * for unconditional update.') c.argument('description', type=str, help='Integration runtime description.') + c.argument('managed_virtual_network', action=AddManagedVirtualNetwork, nargs='+', help='Managed Virtual ' + 'Network reference.') c.argument('compute_properties', type=validate_file_or_dict, help='The compute resource for managed ' 'integration runtime. Expected value: json-string/@json-file.', arg_group='Type Properties') c.argument('ssis_properties', type=validate_file_or_dict, help='SSIS properties for managed integration ' @@ -190,6 +213,12 @@ def load_arguments(self, _): c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str, help='The integration runtime name.') + with self.argument_context('datafactory integration-runtime list-outbound-network-dependency-endpoint') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', type=str, help='The factory name.') + c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str, + help='The integration runtime name.') + with self.argument_context('datafactory integration-runtime regenerate-auth-key') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('factory_name', type=str, help='The factory name.', id_part='name') @@ -386,8 +415,7 @@ def load_arguments(self, _): with self.argument_context('datafactory pipeline update') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('pipeline_name', options_list=['--name', '-n', '--pipeline-name'], type=str, help='The pipeline ' - 'name.', id_part='child_name_1') + c.argument('pipeline_name', type=str, help='The pipeline name.', id_part='child_name_1') c.argument('if_match', type=str, help='ETag of the pipeline entity. Should only be specified for update, for ' 'which it should match existing entity or can be * for unconditional update.') c.argument('description', type=str, help='The description of the pipeline.') @@ -404,8 +432,7 @@ def load_arguments(self, _): 'json-string/@json-file.') c.argument('duration', type=validate_file_or_dict, help='TimeSpan value, after which an Azure Monitoring ' 'Metric is fired. 
Expected value: json-string/@json-file.', arg_group='Policy Elapsed Time Metric') - c.argument('folder_name', type=str, help='The name of the folder that this Pipeline is in.', - arg_group='Folder') + c.argument('name', type=str, help='The name of the folder that this Pipeline is in.', arg_group='Folder') c.ignore('pipeline') with self.argument_context('datafactory pipeline delete') as c: @@ -578,3 +605,52 @@ def load_arguments(self, _): c.argument('factory_name', type=str, help='The factory name.', id_part='name') c.argument('trigger_name', type=str, help='The trigger name.', id_part='child_name_1') c.argument('run_id', type=str, help='The pipeline run identifier.', id_part='child_name_2') + + with self.argument_context('datafactory private-end-point-connection list') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', type=str, help='The factory name.') + + with self.argument_context('datafactory private-endpoint-connection show') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('private_endpoint_connection_name', options_list=['--name', '-n', '--private-endpoint-connection-nam' + 'e'], type=str, help='The private endpoint ' + 'connection name.', id_part='child_name_1') + c.argument('if_none_match', type=str, help='ETag of the private endpoint connection entity. Should only be ' + 'specified for get. If the ETag matches the existing entity tag, or if * was provided, then no ' + 'content will be returned.') + + with self.argument_context('datafactory private-endpoint-connection create') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', type=str, help='The factory name.') + c.argument('private_endpoint_connection_name', options_list=['--name', '-n', '--private-endpoint-connection-nam' + 'e'], type=str, help='The private endpoint ' + 'connection name.') + c.argument('if_match', type=str, help='ETag of the private endpoint connection entity. Should only be ' + 'specified for update, for which it should match existing entity or can be * for unconditional ' + 'update.') + c.argument('private_link_service_connection_state', action=AddPrivateLinkServiceConnectionState, nargs='+', + help='The state of a private link connection') + + with self.argument_context('datafactory private-endpoint-connection update') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('private_endpoint_connection_name', options_list=['--name', '-n', '--private-endpoint-connection-nam' + 'e'], type=str, help='The private endpoint ' + 'connection name.', id_part='child_name_1') + c.argument('if_match', type=str, help='ETag of the private endpoint connection entity. 
Should only be ' + 'specified for update, for which it should match existing entity or can be * for unconditional ' + 'update.') + c.argument('private_link_service_connection_state', action=AddPrivateLinkServiceConnectionState, nargs='+', + help='The state of a private link connection') + + with self.argument_context('datafactory private-endpoint-connection delete') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('private_endpoint_connection_name', options_list=['--name', '-n', '--private-endpoint-connection-nam' + 'e'], type=str, help='The private endpoint ' + 'connection name.', id_part='child_name_1') + + with self.argument_context('datafactory private-link-resource show') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', type=str, help='The factory name.', id_part='name') diff --git a/src/datafactory/azext_datafactory/generated/action.py b/src/datafactory/azext_datafactory/generated/action.py index f645d72981a..f6f712f317f 100644 --- a/src/datafactory/azext_datafactory/generated/action.py +++ b/src/datafactory/azext_datafactory/generated/action.py @@ -14,10 +14,10 @@ from knack.util import CLIError -class AddFactoryVstsConfiguration(argparse.Action): +class AddIdentity(argparse.Action): def __call__(self, parser, namespace, values, option_string=None): action = self.get_action(values, option_string) - namespace.factory_vsts_configuration = action + namespace.identity = action def get_action(self, values, option_string): # pylint: disable=no-self-use try: @@ -31,32 +31,18 @@ def get_action(self, values, option_string): # pylint: disable=no-self-use for k in properties: kl = k.lower() v = properties[k] - if kl == 'project-name': - d['project_name'] = v[0] - elif kl == 'tenant-id': - d['tenant_id'] = v[0] - elif kl == 'account-name': - d['account_name'] = v[0] - elif kl == 'repository-name': - d['repository_name'] = v[0] - elif kl == 'collaboration-branch': - d['collaboration_branch'] = v[0] - elif kl == 'root-folder': - d['root_folder'] = v[0] - elif kl == 'last-commit-id': - d['last_commit_id'] = v[0] + if kl == 'user-assigned-identity': + d['user_assigned_identity'] = v[0] else: - raise CLIError('Unsupported Key {} is provided for parameter factory_vsts_configuration. All possible ' - 'keys are: project-name, tenant-id, account-name, repository-name, ' - 'collaboration-branch, root-folder, last-commit-id'.format(k)) - d['type'] = 'FactoryVSTSConfiguration' + raise CLIError('Unsupported Key {} is provided for parameter identity. 
All possible keys are: ' + 'user-assigned-identity'.format(k)) return d -class AddFactoryGitHubConfiguration(argparse.Action): +class AddGitHubClientSecret(argparse.Action): def __call__(self, parser, namespace, values, option_string=None): action = self.get_action(values, option_string) - namespace.factory_git_hub_configuration = action + namespace.git_hub_client_secret = action def get_action(self, values, option_string): # pylint: disable=no-self-use try: @@ -70,23 +56,39 @@ def get_action(self, values, option_string): # pylint: disable=no-self-use for k in properties: kl = k.lower() v = properties[k] - if kl == 'host-name': - d['host_name'] = v[0] - elif kl == 'account-name': - d['account_name'] = v[0] - elif kl == 'repository-name': - d['repository_name'] = v[0] - elif kl == 'collaboration-branch': - d['collaboration_branch'] = v[0] - elif kl == 'root-folder': - d['root_folder'] = v[0] - elif kl == 'last-commit-id': - d['last_commit_id'] = v[0] + if kl == 'byoa-secret-akv-url': + d['byoa_secret_akv_url'] = v[0] + elif kl == 'byoa-secret-name': + d['byoa_secret_name'] = v[0] else: - raise CLIError('Unsupported Key {} is provided for parameter factory_git_hub_configuration. All ' - 'possible keys are: host-name, account-name, repository-name, collaboration-branch, ' - 'root-folder, last-commit-id'.format(k)) - d['type'] = 'FactoryGitHubConfiguration' + raise CLIError('Unsupported Key {} is provided for parameter git_hub_client_secret. All possible keys ' + 'are: byoa-secret-akv-url, byoa-secret-name'.format(k)) + return d + + +class AddManagedVirtualNetwork(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + action = self.get_action(values, option_string) + namespace.managed_virtual_network = action + + def get_action(self, values, option_string): # pylint: disable=no-self-use + try: + properties = defaultdict(list) + for (k, v) in (x.split('=', 1) for x in values): + properties[k].append(v) + properties = dict(properties) + except ValueError: + raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + d = {} + d['type'] = "ManagedVirtualNetworkReference" + for k in properties: + kl = k.lower() + v = properties[k] + if kl == 'reference-name': + d['reference_name'] = v[0] + else: + raise CLIError('Unsupported Key {} is provided for parameter managed_virtual_network. All possible ' + 'keys are: reference-name'.format(k)) return d @@ -111,7 +113,7 @@ def get_action(self, values, option_string): # pylint: disable=no-self-use d['name'] = v[0] else: raise CLIError('Unsupported Key {} is provided for parameter folder. All possible keys are: name'. - format(k)) + format(k)) return d @@ -169,3 +171,32 @@ def get_action(self, values, option_string): # pylint: disable=no-self-use raise CLIError('Unsupported Key {} is provided for parameter order_by. 
All possible keys are: ' 'order-by, order'.format(k)) return d + + +class AddPrivateLinkServiceConnectionState(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + action = self.get_action(values, option_string) + namespace.private_link_service_connection_state = action + + def get_action(self, values, option_string): # pylint: disable=no-self-use + try: + properties = defaultdict(list) + for (k, v) in (x.split('=', 1) for x in values): + properties[k].append(v) + properties = dict(properties) + except ValueError: + raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + d = {} + for k in properties: + kl = k.lower() + v = properties[k] + if kl == 'status': + d['status'] = v[0] + elif kl == 'description': + d['description'] = v[0] + elif kl == 'actions-required': + d['actions_required'] = v[0] + else: + raise CLIError('Unsupported Key {} is provided for parameter private_link_service_connection_state. ' + 'All possible keys are: status, description, actions-required'.format(k)) + return d diff --git a/src/datafactory/azext_datafactory/generated/commands.py b/src/datafactory/azext_datafactory/generated/commands.py index 83b7f9db34e..eb938fdd4eb 100644 --- a/src/datafactory/azext_datafactory/generated/commands.py +++ b/src/datafactory/azext_datafactory/generated/commands.py @@ -20,7 +20,7 @@ def load_command_table(self, _): operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._factories_operations#FactoriesOperatio' 'ns.{}', client_factory=cf_factory) - with self.command_group('datafactory', datafactory_factory, client_factory=cf_factory) as g: + with self.command_group('datafactory', datafactory_factory, client_factory=cf_factory, is_experimental=True) as g: g.custom_command('list', 'datafactory_list') g.custom_show_command('show', 'datafactory_show') g.custom_command('create', 'datafactory_create') @@ -49,6 +49,8 @@ def load_command_table(self, _): g.custom_command('get-monitoring-data', 'datafactory_integration_runtime_get_monitoring_data') g.custom_command('get-status', 'datafactory_integration_runtime_get_status') g.custom_command('list-auth-key', 'datafactory_integration_runtime_list_auth_key') + g.custom_command('list-outbound-network-dependency-endpoint', 'datafactory_integration_runtime_list_outbound_ne' + 'twork_dependency_endpoint') g.custom_command('regenerate-auth-key', 'datafactory_integration_runtime_regenerate_auth_key') g.custom_command('remove-link', 'datafactory_integration_runtime_remove_link') g.custom_command('start', 'datafactory_integration_runtime_start', supports_no_wait=True) @@ -157,3 +159,33 @@ def load_command_table(self, _): g.custom_command('cancel', 'datafactory_trigger_run_cancel') g.custom_command('query-by-factory', 'datafactory_trigger_run_query_by_factory') g.custom_command('rerun', 'datafactory_trigger_run_rerun') + + from azext_datafactory.generated._client_factory import cf_private_end_point_connection + datafactory_private_end_point_connection = CliCommandType( + operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._private_end_point_connections_operatio' + 'ns#PrivateEndPointConnectionsOperations.{}', + client_factory=cf_private_end_point_connection) + with self.command_group('datafactory private-end-point-connection', datafactory_private_end_point_connection, + client_factory=cf_private_end_point_connection) as g: + g.custom_command('list', 'datafactory_private_end_point_connection_list') + + from azext_datafactory.generated._client_factory import 
cf_private_endpoint_connection + datafactory_private_endpoint_connection = CliCommandType( + operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._private_endpoint_connection_operations' + '#PrivateEndpointConnectionOperations.{}', + client_factory=cf_private_endpoint_connection) + with self.command_group('datafactory private-endpoint-connection', datafactory_private_endpoint_connection, + client_factory=cf_private_endpoint_connection) as g: + g.custom_show_command('show', 'datafactory_private_endpoint_connection_show') + g.custom_command('create', 'datafactory_private_endpoint_connection_create') + g.custom_command('update', 'datafactory_private_endpoint_connection_update') + g.custom_command('delete', 'datafactory_private_endpoint_connection_delete', confirmation=True) + + from azext_datafactory.generated._client_factory import cf_private_link_resource + datafactory_private_link_resource = CliCommandType( + operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._private_link_resources_operations#Priv' + 'ateLinkResourcesOperations.{}', + client_factory=cf_private_link_resource) + with self.command_group('datafactory private-link-resource', datafactory_private_link_resource, + client_factory=cf_private_link_resource) as g: + g.custom_show_command('show', 'datafactory_private_link_resource_show') diff --git a/src/datafactory/azext_datafactory/generated/custom.py b/src/datafactory/azext_datafactory/generated/custom.py index c269c1999ff..0a16a161bf7 100644 --- a/src/datafactory/azext_datafactory/generated/custom.py +++ b/src/datafactory/azext_datafactory/generated/custom.py @@ -11,7 +11,6 @@ # pylint: disable=too-many-lines # pylint: disable=unused-argument -from knack.util import CLIError from azure.cli.core.util import sdk_no_wait @@ -37,25 +36,29 @@ def datafactory_create(client, if_match=None, location=None, tags=None, - factory_vsts_configuration=None, - factory_git_hub_configuration=None, - global_parameters=None): - all_repo_configuration = [] - if factory_vsts_configuration is not None: - all_repo_configuration.append(factory_vsts_configuration) - if factory_git_hub_configuration is not None: - all_repo_configuration.append(factory_git_hub_configuration) - if len(all_repo_configuration) > 1: - raise CLIError('at most one of factory_vsts_configuration, factory_git_hub_configuration is needed for ' - 'repo_configuration!') - repo_configuration = all_repo_configuration[0] if len(all_repo_configuration) == 1 else None + repo_configuration=None, + global_parameters=None, + public_network_access=None, + key_name=None, + vault_base_url=None, + key_version=None, + identity=None, + type_=None, + user_assigned_identities=None): factory = {} factory['location'] = location factory['tags'] = tags factory['repo_configuration'] = repo_configuration factory['global_parameters'] = global_parameters + factory['public_network_access'] = public_network_access factory['encryption'] = {} + factory['encryption']['key_name'] = key_name + factory['encryption']['vault_base_url'] = vault_base_url + factory['encryption']['key_version'] = key_version + factory['encryption']['identity'] = identity factory['identity'] = {} + factory['identity']['type'] = type_ + factory['identity']['user_assigned_identities'] = user_assigned_identities return client.create_or_update(resource_group_name=resource_group_name, factory_name=factory_name, if_match=if_match, @@ -65,10 +68,14 @@ def datafactory_create(client, def datafactory_update(client, resource_group_name, factory_name, - 
tags=None): + tags=None, + type_=None, + user_assigned_identities=None): factory_update_parameters = {} factory_update_parameters['tags'] = tags factory_update_parameters['identity'] = {} + factory_update_parameters['identity']['type'] = type_ + factory_update_parameters['identity']['user_assigned_identities'] = user_assigned_identities return client.update(resource_group_name=resource_group_name, factory_name=factory_name, factory_update_parameters=factory_update_parameters) @@ -84,17 +91,7 @@ def datafactory_delete(client, def datafactory_configure_factory_repo(client, location, factory_resource_id=None, - factory_vsts_configuration=None, - factory_git_hub_configuration=None): - all_repo_configuration = [] - if factory_vsts_configuration is not None: - all_repo_configuration.append(factory_vsts_configuration) - if factory_git_hub_configuration is not None: - all_repo_configuration.append(factory_git_hub_configuration) - if len(all_repo_configuration) > 1: - raise CLIError('at most one of factory_vsts_configuration, factory_git_hub_configuration is needed for ' - 'repo_configuration!') - repo_configuration = all_repo_configuration[0] if len(all_repo_configuration) == 1 else None + repo_configuration=None): factory_repo_update = {} factory_repo_update['factory_resource_id'] = factory_resource_id factory_repo_update['repo_configuration'] = repo_configuration @@ -126,10 +123,12 @@ def datafactory_get_git_hub_access_token(client, factory_name, git_hub_access_code, git_hub_access_token_base_url, - git_hub_client_id=None): + git_hub_client_id=None, + git_hub_client_secret=None): git_hub_access_token_request = {} git_hub_access_token_request['git_hub_access_code'] = git_hub_access_code git_hub_access_token_request['git_hub_client_id'] = git_hub_client_id + git_hub_access_token_request['git_hub_client_secret'] = git_hub_client_secret git_hub_access_token_request['git_hub_access_token_base_url'] = git_hub_access_token_base_url return client.get_git_hub_access_token(resource_group_name=resource_group_name, factory_name=factory_name, @@ -179,12 +178,14 @@ def datafactory_integration_runtime_managed_create(client, integration_runtime_name, if_match=None, description=None, + managed_virtual_network=None, compute_properties=None, ssis_properties=None): integration_runtime = {} integration_runtime['properties'] = {} integration_runtime['properties']['type'] = 'Managed' integration_runtime['properties']['description'] = description + integration_runtime['properties']['managed_virtual_network'] = managed_virtual_network integration_runtime['properties']['compute_properties'] = compute_properties integration_runtime['properties']['ssis_properties'] = ssis_properties return client.create_or_update(resource_group_name=resource_group_name, @@ -273,6 +274,15 @@ def datafactory_integration_runtime_list_auth_key(client, integration_runtime_name=integration_runtime_name) +def datafactory_integration_runtime_list_outbound_network_dependency_endpoint(client, + resource_group_name, + factory_name, + integration_runtime_name): + return client.list_outbound_network_dependencies_endpoints(resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name) + + def datafactory_integration_runtime_regenerate_auth_key(client, resource_group_name, factory_name, @@ -566,7 +576,7 @@ def datafactory_pipeline_update(instance, annotations=None, run_dimensions=None, duration=None, - folder_name=None): + name=None): if description is not None: instance.description = 
description if activities is not None: @@ -583,8 +593,8 @@ def datafactory_pipeline_update(instance, instance.run_dimensions = run_dimensions if duration is not None: instance.elapsed_time_metric.duration = duration - if folder_name is not None: - instance.folder.name = folder_name + if name is not None: + instance.folder.name = name return instance @@ -841,3 +851,69 @@ def datafactory_trigger_run_rerun(client, factory_name=factory_name, trigger_name=trigger_name, run_id=run_id) + + +def datafactory_private_end_point_connection_list(client, + resource_group_name, + factory_name): + return client.list_by_factory(resource_group_name=resource_group_name, + factory_name=factory_name) + + +def datafactory_private_endpoint_connection_show(client, + resource_group_name, + factory_name, + private_endpoint_connection_name, + if_none_match=None): + return client.get(resource_group_name=resource_group_name, + factory_name=factory_name, + private_endpoint_connection_name=private_endpoint_connection_name, + if_none_match=if_none_match) + + +def datafactory_private_endpoint_connection_create(client, + resource_group_name, + factory_name, + private_endpoint_connection_name, + if_match=None, + private_link_service_connection_state=None): + private_endpoint_wrapper = {} + private_endpoint_wrapper['properties'] = {} + private_endpoint_wrapper['properties']['private_link_service_connection_state'] = private_link_service_connection_state + return client.create_or_update(resource_group_name=resource_group_name, + factory_name=factory_name, + private_endpoint_connection_name=private_endpoint_connection_name, + if_match=if_match, + private_endpoint_wrapper=private_endpoint_wrapper) + + +def datafactory_private_endpoint_connection_update(client, + resource_group_name, + factory_name, + private_endpoint_connection_name, + if_match=None, + private_link_service_connection_state=None): + private_endpoint_wrapper = {} + private_endpoint_wrapper['properties'] = {} + private_endpoint_wrapper['properties']['private_link_service_connection_state'] = private_link_service_connection_state + return client.create_or_update(resource_group_name=resource_group_name, + factory_name=factory_name, + private_endpoint_connection_name=private_endpoint_connection_name, + if_match=if_match, + private_endpoint_wrapper=private_endpoint_wrapper) + + +def datafactory_private_endpoint_connection_delete(client, + resource_group_name, + factory_name, + private_endpoint_connection_name): + return client.delete(resource_group_name=resource_group_name, + factory_name=factory_name, + private_endpoint_connection_name=private_endpoint_connection_name) + + +def datafactory_private_link_resource_show(client, + resource_group_name, + factory_name): + return client.get(resource_group_name=resource_group_name, + factory_name=factory_name) diff --git a/src/datafactory/azext_datafactory/tests/latest/example_steps.py b/src/datafactory/azext_datafactory/tests/latest/example_steps.py index 42222d4e576..317e8bd0f72 100644 --- a/src/datafactory/azext_datafactory/tests/latest/example_steps.py +++ b/src/datafactory/azext_datafactory/tests/latest/example_steps.py @@ -77,8 +77,9 @@ def step_configure_factory_repo(test, rg, checks=None): test.cmd('az datafactory configure-factory-repo ' '--factory-resource-id "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.DataFacto' 'ry/factories/{myFactory}" ' - '--factory-vsts-configuration account-name="ADF" collaboration-branch="master" last-commit-id="" ' - 'project-name="project" 
repository-name="repo" root-folder="/" tenant-id="" ' + '--repo-configuration "{{\\"type\\":\\"FactoryVSTSConfiguration\\",\\"accountName\\":\\"ADF\\",\\"collabor' + 'ationBranch\\":\\"master\\",\\"lastCommitId\\":\\"\\",\\"projectName\\":\\"project\\",\\"repositoryName\\' + '":\\"repo\\",\\"rootFolder\\":\\"/\\",\\"tenantId\\":\\"\\"}}" ' '--location "East US"', checks=checks) @@ -232,6 +233,18 @@ def step_integration_runtime_list(test, rg, checks=None): checks=checks) +# EXAMPLE: /IntegrationRuntimes/get/IntegrationRuntimes_OutboundNetworkDependenciesEndpoints +@try_manual +def step_integration_runtime_list2(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory integration-runtime list-outbound-network-dependency-endpoint ' + '--factory-name "{myFactory}" ' + '--name "{myIntegrationRuntime}" ' + '--resource-group "{rg}"', + checks=checks) + + # EXAMPLE: /IntegrationRuntimes/patch/IntegrationRuntimes_Update @try_manual def step_integration_runtime_update(test, rg, checks=None): @@ -581,7 +594,7 @@ def step_pipeline_update(test, rg, checks=None): 'ipeline().parameters.OutputBlobNameList\\"}}}}}}]" ' '--parameters "{{\\"OutputBlobNameList\\":{{\\"type\\":\\"Array\\"}}}}" ' '--duration "0.00:10:00" ' - '--name "{myPipeline}" ' + '--pipeline-name "{myPipeline}" ' '--resource-group "{rg}"', checks=checks) @@ -634,6 +647,66 @@ def step_pipeline_delete(test, rg, checks=None): checks=checks) +# EXAMPLE: /privateEndPointConnections/get/privateEndPointConnections_ListByFactory +@try_manual +def step_private_end_point_connection_list(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory private-end-point-connection list ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /PrivateEndpointConnection/put/Approves or rejects a private endpoint connection for a factory. +@try_manual +def step_private_endpoint_connection_create(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory private-endpoint-connection create ' + '--factory-name "{myFactory}" ' + '--name "{myPrivateEndPointConnection}" ' + '--private-link-service-connection-state description="Approved by admin." actions-required="" ' + 'status="Approved" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /PrivateEndpointConnection/get/Get a private endpoint connection for a datafactory. +@try_manual +def step_private_endpoint_connection_show(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory private-endpoint-connection show ' + '--factory-name "{myFactory}" ' + '--name "{myPrivateEndPointConnection}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /PrivateEndpointConnection/delete/Delete a private endpoint connection for a datafactory. 
+@try_manual +def step_private_endpoint_connection_delete(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory private-endpoint-connection delete -y ' + '--factory-name "{myFactory}" ' + '--name "{myPrivateEndPointConnection}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /privateLinkResources/get/Get private link resources of a site +@try_manual +def step_private_link_resource_show(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory private-link-resource show ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}"', + checks=checks) + + # EXAMPLE: /Triggers/put/Triggers_Create @try_manual def step_trigger_create(test, rg, checks=None): diff --git a/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py b/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py index 517a35650f8..d6795dbdb8b 100644 --- a/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py +++ b/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py @@ -174,6 +174,7 @@ def __init__(self, *args, **kwargs): 'myDataset': self.create_random_name(prefix='exampleDataset'[:7], length=14), 'myPipeline': self.create_random_name(prefix='examplePipeline'[:7], length=15), 'myTrigger': self.create_random_name(prefix='exampleTrigger'[:7], length=14), + 'myPrivateEndPointConnection': 'connection', }) @ResourceGroupPreparer(name_prefix='clitestdatafactory_exampleResourceGroup'[:7], key='rg', parameter_name='rg') diff --git a/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario_coverage.md b/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario_coverage.md deleted file mode 100644 index b7eabe4528a..00000000000 --- a/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario_coverage.md +++ /dev/null @@ -1,48 +0,0 @@ -|Scenario|Result|ErrorMessage|ErrorStack|ErrorNormalized|StartDt|EndDt| -|step_create|successed||||2021-04-26 09:05:32.308913|2021-04-26 09:05:32.501033| -|step_update|successed||||2021-04-26 09:05:22.750754|2021-04-26 09:05:22.880707| -|step_linked_service_create|successed||||2021-04-26 09:05:22.880707|2021-04-26 09:05:23.009706| -|step_linked_service_update|successed||||2021-04-26 09:05:23.010706|2021-04-26 09:05:23.174579| -|step_dataset_create|successed||||2021-04-26 09:05:23.174579|2021-04-26 09:05:23.317043| -|step_dataset_update|successed||||2021-04-26 09:05:23.318045|2021-04-26 09:05:23.451047| -|step_pipeline_create|successed||||2021-04-26 09:05:23.452049|2021-04-26 09:05:23.575751| -|step_trigger_create|successed||||2021-04-26 09:05:23.703756|2021-04-26 09:05:23.871057| -|step_trigger_update|successed||||2021-04-26 09:05:23.871057|2021-04-26 09:05:24.019053| -|step_integration_runtime_self_hosted_create|successed||||2021-04-26 09:05:24.019053|2021-04-26 09:05:24.155099| -|step_integration_runtime_update|successed||||2021-04-26 09:05:24.155099|2021-04-26 09:05:24.285096| -|step_integration_runtime_show|successed||||2021-04-26 09:05:29.524820|2021-04-26 09:05:29.675815| -|step_linked_service_show|successed||||2021-04-26 09:05:24.582291|2021-04-26 09:05:24.718292| -|step_pipeline_show|successed||||2021-04-26 09:05:24.719291|2021-04-26 09:05:24.872517| -|step_dataset_show|successed||||2021-04-26 09:05:24.873517|2021-04-26 09:05:25.000030| -|step_trigger_show|successed||||2021-04-26 09:05:33.782136|2021-04-26 09:05:33.927138| -|step_integration_runtime_list|successed||||2021-04-26 
09:05:25.115003|2021-04-26 09:05:25.253055| -|step_linked_service_list|successed||||2021-04-26 09:05:25.254059|2021-04-26 09:05:25.409635| -|step_pipeline_list|successed||||2021-04-26 09:05:25.409635|2021-04-26 09:05:25.533704| -|step_trigger_list|successed||||2021-04-26 09:05:25.533704|2021-04-26 09:05:25.676865| -|step_dataset_list|successed||||2021-04-26 09:05:25.676865|2021-04-26 09:05:25.810871| -|step_show|successed||||2021-04-26 09:05:25.810871|2021-04-26 09:05:25.938042| -|step_list2|successed||||2021-04-26 09:05:25.938042|2021-04-26 09:05:26.060042| -|step_list|successed||||2021-04-26 09:05:26.060042|2021-04-26 09:05:26.183196| -|step_integration_runtime_regenerate_auth_key|successed||||2021-04-26 09:05:26.184194|2021-04-26 09:05:26.313194| -|step_integration_runtime_sync_credentials|successed||||2021-04-26 09:05:26.314192|2021-04-26 09:05:26.449307| -|step_integration_runtime_get_monitoring_data|successed||||2021-04-26 09:05:26.449307|2021-04-26 09:05:26.636000| -|step_integration_runtime_list_auth_key|successed||||2021-04-26 09:05:26.636000|2021-04-26 09:05:26.790002| -|step_integration_runtime_remove_link|successed||||2021-04-26 09:05:26.791005|2021-04-26 09:05:26.934513| -|step_integration_runtime_get_status|successed||||2021-04-26 09:05:26.935512|2021-04-26 09:05:27.069511| -|step_trigger_get_event_subscription_status|successed||||2021-04-26 09:05:27.069511|2021-04-26 09:05:27.211487| -|step_trigger_unsubscribe_from_event|successed||||2021-04-26 09:05:27.212492|2021-04-26 09:05:27.402802| -|step_trigger_subscribe_to_event|successed||||2021-04-26 09:05:27.402802|2021-04-26 09:05:27.532807| -|step_trigger_start|successed||||2021-04-26 09:05:33.632612|2021-04-26 09:05:33.782136| -|step_trigger_stop|successed||||2021-04-26 09:05:34.611518|2021-04-26 09:05:34.768873| -|step_get_data_plane_access|successed||||2021-04-26 09:05:27.837090|2021-04-26 09:05:27.977072| -|step_configure_factory_repo|successed||||2021-04-26 09:05:28.099075|2021-04-26 09:05:28.288426| -|step_integration_runtime_delete|successed||||2021-04-26 09:05:31.965947|2021-04-26 09:05:32.140944| -|step_trigger_delete|successed||||2021-04-26 09:05:34.768873|2021-04-26 09:05:34.900878| -|step_pipeline_delete|successed||||2021-04-26 09:05:34.900878|2021-04-26 09:05:35.030991| -|step_dataset_delete|successed||||2021-04-26 09:05:28.737334|2021-04-26 09:05:28.861337| -|step_linked_service_delete|successed||||2021-04-26 09:05:28.861337|2021-04-26 09:05:28.989612| -|step_delete|successed||||2021-04-26 09:05:35.031990|2021-04-26 09:05:35.197507| -|step_integration_runtime_start|successed||||2021-04-26 09:05:29.676815|2021-04-26 09:05:30.373119| -|step_integration_runtime_stop|successed||||2021-04-26 09:05:30.374118|2021-04-26 09:05:31.964925| -|step_activity_run_query_by_pipeline_run|successed||||2021-04-26 09:05:33.012581|2021-04-26 09:05:33.193579| -Coverage: 46/46 diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_data_factory_management_client.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_data_factory_management_client.py index f272437a3e9..9dfe04b82d0 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_data_factory_management_client.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_data_factory_management_client.py @@ -107,7 +107,6 @@ def __init__( client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) - self._serialize.client_side_validation = False self._deserialize 
= Deserializer(client_models) self.operations = Operations( diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration_async.py deleted file mode 100644 index 411d6c4a66e..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration_async.py +++ /dev/null @@ -1,67 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from typing import Any, TYPE_CHECKING - -from azure.core.configuration import Configuration -from azure.core.pipeline import policies -from azure.mgmt.core.policies import ARMHttpLoggingPolicy - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from azure.core.credentials_async import AsyncTokenCredential - -VERSION = "unknown" - -class DataFactoryManagementClientConfiguration(Configuration): - """Configuration for DataFactoryManagementClient. - - Note that all parameters used to create this instance are saved as instance - attributes. - - :param credential: Credential needed for the client to connect to Azure. - :type credential: ~azure.core.credentials_async.AsyncTokenCredential - :param subscription_id: The subscription identifier. - :type subscription_id: str - """ - - def __init__( - self, - credential: "AsyncTokenCredential", - subscription_id: str, - **kwargs: Any - ) -> None: - if credential is None: - raise ValueError("Parameter 'credential' must not be None.") - if subscription_id is None: - raise ValueError("Parameter 'subscription_id' must not be None.") - super(DataFactoryManagementClientConfiguration, self).__init__(**kwargs) - - self.credential = credential - self.subscription_id = subscription_id - self.api_version = "2018-06-01" - self.credential_scopes = ['https://management.azure.com/.default'] - self.credential_scopes.extend(kwargs.pop('credential_scopes', [])) - kwargs.setdefault('sdk_moniker', 'datafactorymanagementclient/{}'.format(VERSION)) - self._configure(**kwargs) - - def _configure( - self, - **kwargs: Any - ) -> None: - self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) - self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) - self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) - self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) - self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) - self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs) - self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) - self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs) - self.authentication_policy = kwargs.get('authentication_policy') - if self.credential and not self.authentication_policy: - self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) diff --git 
a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client.py index 255a1839c21..01497b56d61 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client.py @@ -104,7 +104,6 @@ def __init__( client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) - self._serialize.client_side_validation = False self._deserialize = Deserializer(client_models) self.operations = Operations( diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client_async.py deleted file mode 100644 index b2b322686b8..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client_async.py +++ /dev/null @@ -1,143 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from typing import Any, Optional, TYPE_CHECKING - -from azure.mgmt.core import AsyncARMPipelineClient -from msrest import Deserializer, Serializer - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from azure.core.credentials_async import AsyncTokenCredential - -from ._configuration_async import DataFactoryManagementClientConfiguration -from .operations_async import OperationOperations -from .operations_async import FactoryOperations -from .operations_async import ExposureControlOperations -from .operations_async import IntegrationRuntimeOperations -from .operations_async import IntegrationRuntimeObjectMetadataOperations -from .operations_async import IntegrationRuntimeNodeOperations -from .operations_async import LinkedServiceOperations -from .operations_async import DatasetOperations -from .operations_async import PipelineOperations -from .operations_async import PipelineRunOperations -from .operations_async import ActivityRunOperations -from .operations_async import TriggerOperations -from .operations_async import TriggerRunOperations -from .operations_async import DataFlowOperations -from .operations_async import DataFlowDebugSessionOperations -from .operations_async import ManagedVirtualNetworkOperations -from .operations_async import ManagedPrivateEndpointOperations -from .. import models - - -class DataFactoryManagementClient(object): - """The Azure Data Factory V2 management API provides a RESTful set of web services that interact with Azure Data Factory V2 services. 
- - :ivar operation: OperationOperations operations - :vartype operation: data_factory_management_client.aio.operations_async.OperationOperations - :ivar factory: FactoryOperations operations - :vartype factory: data_factory_management_client.aio.operations_async.FactoryOperations - :ivar exposure_control: ExposureControlOperations operations - :vartype exposure_control: data_factory_management_client.aio.operations_async.ExposureControlOperations - :ivar integration_runtime: IntegrationRuntimeOperations operations - :vartype integration_runtime: data_factory_management_client.aio.operations_async.IntegrationRuntimeOperations - :ivar integration_runtime_object_metadata: IntegrationRuntimeObjectMetadataOperations operations - :vartype integration_runtime_object_metadata: data_factory_management_client.aio.operations_async.IntegrationRuntimeObjectMetadataOperations - :ivar integration_runtime_node: IntegrationRuntimeNodeOperations operations - :vartype integration_runtime_node: data_factory_management_client.aio.operations_async.IntegrationRuntimeNodeOperations - :ivar linked_service: LinkedServiceOperations operations - :vartype linked_service: data_factory_management_client.aio.operations_async.LinkedServiceOperations - :ivar dataset: DatasetOperations operations - :vartype dataset: data_factory_management_client.aio.operations_async.DatasetOperations - :ivar pipeline: PipelineOperations operations - :vartype pipeline: data_factory_management_client.aio.operations_async.PipelineOperations - :ivar pipeline_run: PipelineRunOperations operations - :vartype pipeline_run: data_factory_management_client.aio.operations_async.PipelineRunOperations - :ivar activity_run: ActivityRunOperations operations - :vartype activity_run: data_factory_management_client.aio.operations_async.ActivityRunOperations - :ivar trigger: TriggerOperations operations - :vartype trigger: data_factory_management_client.aio.operations_async.TriggerOperations - :ivar trigger_run: TriggerRunOperations operations - :vartype trigger_run: data_factory_management_client.aio.operations_async.TriggerRunOperations - :ivar data_flow: DataFlowOperations operations - :vartype data_flow: data_factory_management_client.aio.operations_async.DataFlowOperations - :ivar data_flow_debug_session: DataFlowDebugSessionOperations operations - :vartype data_flow_debug_session: data_factory_management_client.aio.operations_async.DataFlowDebugSessionOperations - :ivar managed_virtual_network: ManagedVirtualNetworkOperations operations - :vartype managed_virtual_network: data_factory_management_client.aio.operations_async.ManagedVirtualNetworkOperations - :ivar managed_private_endpoint: ManagedPrivateEndpointOperations operations - :vartype managed_private_endpoint: data_factory_management_client.aio.operations_async.ManagedPrivateEndpointOperations - :param credential: Credential needed for the client to connect to Azure. - :type credential: ~azure.core.credentials_async.AsyncTokenCredential - :param subscription_id: The subscription identifier. - :type subscription_id: str - :param str base_url: Service URL - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
- """ - - def __init__( - self, - credential: "AsyncTokenCredential", - subscription_id: str, - base_url: Optional[str] = None, - **kwargs: Any - ) -> None: - if not base_url: - base_url = 'https://management.azure.com' - self._config = DataFactoryManagementClientConfiguration(credential, subscription_id, **kwargs) - self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) - - client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} - self._serialize = Serializer(client_models) - self._deserialize = Deserializer(client_models) - - self.operation = OperationOperations( - self._client, self._config, self._serialize, self._deserialize) - self.factory = FactoryOperations( - self._client, self._config, self._serialize, self._deserialize) - self.exposure_control = ExposureControlOperations( - self._client, self._config, self._serialize, self._deserialize) - self.integration_runtime = IntegrationRuntimeOperations( - self._client, self._config, self._serialize, self._deserialize) - self.integration_runtime_object_metadata = IntegrationRuntimeObjectMetadataOperations( - self._client, self._config, self._serialize, self._deserialize) - self.integration_runtime_node = IntegrationRuntimeNodeOperations( - self._client, self._config, self._serialize, self._deserialize) - self.linked_service = LinkedServiceOperations( - self._client, self._config, self._serialize, self._deserialize) - self.dataset = DatasetOperations( - self._client, self._config, self._serialize, self._deserialize) - self.pipeline = PipelineOperations( - self._client, self._config, self._serialize, self._deserialize) - self.pipeline_run = PipelineRunOperations( - self._client, self._config, self._serialize, self._deserialize) - self.activity_run = ActivityRunOperations( - self._client, self._config, self._serialize, self._deserialize) - self.trigger = TriggerOperations( - self._client, self._config, self._serialize, self._deserialize) - self.trigger_run = TriggerRunOperations( - self._client, self._config, self._serialize, self._deserialize) - self.data_flow = DataFlowOperations( - self._client, self._config, self._serialize, self._deserialize) - self.data_flow_debug_session = DataFlowDebugSessionOperations( - self._client, self._config, self._serialize, self._deserialize) - self.managed_virtual_network = ManagedVirtualNetworkOperations( - self._client, self._config, self._serialize, self._deserialize) - self.managed_private_endpoint = ManagedPrivateEndpointOperations( - self._client, self._config, self._serialize, self._deserialize) - - async def close(self) -> None: - await self._client.close() - - async def __aenter__(self) -> "DataFactoryManagementClient": - await self._client.__aenter__() - return self - - async def __aexit__(self, *exc_details) -> None: - await self._client.__aexit__(*exc_details) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtimes_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtimes_operations.py index 6b27efc1819..36c8e43bae0 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtimes_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtimes_operations.py @@ -455,6 +455,68 @@ async def get_status( return deserialized get_status.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getStatus'} # type: ignore + async def list_outbound_network_dependencies_endpoints( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + **kwargs + ) -> "models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse": + """Gets the list of outbound network dependencies for a given Azure-SSIS integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse, or the result of cls(response) + :rtype: ~data_factory_management_client.models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.list_outbound_network_dependencies_endpoints.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list_outbound_network_dependencies_endpoints.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/outboundNetworkDependenciesEndpoints'} # type: ignore + async def get_connection_info( self, 
resource_group_name: str, diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/__init__.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/__init__.py deleted file mode 100644 index 554e3ba9232..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/__init__.py +++ /dev/null @@ -1,45 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from ._operation_operations_async import OperationOperations -from ._factory_operations_async import FactoryOperations -from ._exposure_control_operations_async import ExposureControlOperations -from ._integration_runtime_operations_async import IntegrationRuntimeOperations -from ._integration_runtime_object_metadata_operations_async import IntegrationRuntimeObjectMetadataOperations -from ._integration_runtime_node_operations_async import IntegrationRuntimeNodeOperations -from ._linked_service_operations_async import LinkedServiceOperations -from ._dataset_operations_async import DatasetOperations -from ._pipeline_operations_async import PipelineOperations -from ._pipeline_run_operations_async import PipelineRunOperations -from ._activity_run_operations_async import ActivityRunOperations -from ._trigger_operations_async import TriggerOperations -from ._trigger_run_operations_async import TriggerRunOperations -from ._data_flow_operations_async import DataFlowOperations -from ._data_flow_debug_session_operations_async import DataFlowDebugSessionOperations -from ._managed_virtual_network_operations_async import ManagedVirtualNetworkOperations -from ._managed_private_endpoint_operations_async import ManagedPrivateEndpointOperations - -__all__ = [ - 'OperationOperations', - 'FactoryOperations', - 'ExposureControlOperations', - 'IntegrationRuntimeOperations', - 'IntegrationRuntimeObjectMetadataOperations', - 'IntegrationRuntimeNodeOperations', - 'LinkedServiceOperations', - 'DatasetOperations', - 'PipelineOperations', - 'PipelineRunOperations', - 'ActivityRunOperations', - 'TriggerOperations', - 'TriggerRunOperations', - 'DataFlowOperations', - 'DataFlowDebugSessionOperations', - 'ManagedVirtualNetworkOperations', - 'ManagedPrivateEndpointOperations', -] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_activity_run_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_activity_run_operations_async.py deleted file mode 100644 index 0d2e56be08b..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_activity_run_operations_async.py +++ /dev/null @@ -1,127 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
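# ---------------------------------------------------------------------------
# Editorial sketch (not part of the generated diff): how the new
# list_outbound_network_dependencies_endpoints operation added above might be
# called through the vendored async client. The operation-group attribute name
# (`integration_runtimes`), the import path, and all resource names below are
# assumptions for illustration, not confirmed by this diff.
# ---------------------------------------------------------------------------
import asyncio

from azure.identity.aio import DefaultAzureCredential
# Assumed export location within this extension's vendored SDK.
from azext_datafactory.vendored_sdks.datafactory.aio import DataFactoryManagementClient


async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with DataFactoryManagementClient(credential, "<subscription-id>") as client:
            # Returns an IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse;
            # its `value` list is assumed to hold one entry per dependency category.
            response = await client.integration_runtimes.list_outbound_network_dependencies_endpoints(
                resource_group_name="exampleResourceGroup",       # hypothetical
                factory_name="exampleFactoryName",                # hypothetical
                integration_runtime_name="exampleIntegrationRuntime",  # hypothetical
            )
            for category in response.value or []:
                print(category)


asyncio.run(main())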
-# -------------------------------------------------------------------------- -import datetime -from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class ActivityRunOperations: - """ActivityRunOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - async def query_by_pipeline_run( - self, - resource_group_name: str, - factory_name: str, - run_id: str, - last_updated_after: datetime.datetime, - last_updated_before: datetime.datetime, - continuation_token_parameter: Optional[str] = None, - filters: Optional[List["models.RunQueryFilter"]] = None, - order_by: Optional[List["models.RunQueryOrderBy"]] = None, - **kwargs - ) -> "models.ActivityRunsQueryResponse": - """Query activity runs based on input filter conditions. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param run_id: The pipeline run identifier. - :type run_id: str - :param last_updated_after: The time at or after which the run event was updated in 'ISO 8601' - format. - :type last_updated_after: ~datetime.datetime - :param last_updated_before: The time at or before which the run event was updated in 'ISO 8601' - format. - :type last_updated_before: ~datetime.datetime - :param continuation_token_parameter: The continuation token for getting the next page of - results. Null for first page. - :type continuation_token_parameter: str - :param filters: List of filters. - :type filters: list[~data_factory_management_client.models.RunQueryFilter] - :param order_by: List of OrderBy option. 
- :type order_by: list[~data_factory_management_client.models.RunQueryOrderBy] - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ActivityRunsQueryResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ActivityRunsQueryResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ActivityRunsQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.query_by_pipeline_run.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'runId': self._serialize.url("run_id", run_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ActivityRunsQueryResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - query_by_pipeline_run.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/queryActivityruns'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_debug_session_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_debug_session_operations_async.py deleted file mode 100644 index f1bf8ee8f73..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_debug_session_operations_async.py +++ /dev/null @@ -1,551 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. 
All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, List, Optional, TypeVar, Union -import warnings - -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.mgmt.core.exceptions import ARMErrorFormat -from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling - -from ... import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class DataFlowDebugSessionOperations: - """DataFlowDebugSessionOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - async def _create_initial( - self, - resource_group_name: str, - factory_name: str, - compute_type: Optional[str] = None, - core_count: Optional[int] = None, - time_to_live: Optional[int] = None, - name: Optional[str] = None, - properties: Optional["models.IntegrationRuntime"] = None, - **kwargs - ) -> Optional["models.CreateDataFlowDebugSessionResponse"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.CreateDataFlowDebugSessionResponse"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - request = models.CreateDataFlowDebugSessionRequest(compute_type=compute_type, core_count=core_count, time_to_live=time_to_live, name=name, properties=properties) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self._create_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # 
type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(request, 'CreateDataFlowDebugSessionRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - response_headers = {} - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('CreateDataFlowDebugSessionResponse', pipeline_response) - - if response.status_code == 202: - response_headers['location']=self._deserialize('str', response.headers.get('location')) - - if cls: - return cls(pipeline_response, deserialized, response_headers) - - return deserialized - _create_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/createDataFlowDebugSession'} # type: ignore - - async def begin_create( - self, - resource_group_name: str, - factory_name: str, - compute_type: Optional[str] = None, - core_count: Optional[int] = None, - time_to_live: Optional[int] = None, - name: Optional[str] = None, - properties: Optional["models.IntegrationRuntime"] = None, - **kwargs - ) -> AsyncLROPoller["models.CreateDataFlowDebugSessionResponse"]: - """Creates a data flow debug session. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param compute_type: Compute type of the cluster. The value will be overwritten by the same - setting in integration runtime if provided. - :type compute_type: str - :param core_count: Core count of the cluster. The value will be overwritten by the same setting - in integration runtime if provided. - :type core_count: int - :param time_to_live: Time to live setting of the cluster in minutes. - :type time_to_live: int - :param name: The resource name. - :type name: str - :param properties: Integration runtime properties. - :type properties: ~data_factory_management_client.models.IntegrationRuntime - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
- :return: An instance of AsyncLROPoller that returns either CreateDataFlowDebugSessionResponse or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.CreateDataFlowDebugSessionResponse] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.CreateDataFlowDebugSessionResponse"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = await self._create_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - compute_type=compute_type, - core_count=core_count, - time_to_live=time_to_live, - name=name, - properties=properties, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize('CreateDataFlowDebugSessionResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/createDataFlowDebugSession'} # type: ignore - - def query_by_factory( - self, - resource_group_name: str, - factory_name: str, - **kwargs - ) -> AsyncIterable["models.QueryDataFlowDebugSessionsResponse"]: - """Query all active data flow debug sessions. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
- :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either QueryDataFlowDebugSessionsResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.QueryDataFlowDebugSessionsResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.QueryDataFlowDebugSessionsResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.query_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.post(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - async def extract_data(pipeline_response): - deserialized = self._deserialize('QueryDataFlowDebugSessionsResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged( - get_next, extract_data - ) - query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryDataFlowDebugSessions'} # type: ignore - - async def add_data_flow( - self, - resource_group_name: str, - factory_name: str, - session_id: Optional[str] = None, - datasets: Optional[List["models.DatasetDebugResource"]] = None, - linked_services: Optional[List["models.LinkedServiceDebugResource"]] = None, - source_settings: Optional[List["models.DataFlowSourceSetting"]] = None, - parameters: Optional[Dict[str, object]] = None, - dataset_parameters: Optional[object] = None, - folder_path: Optional[object] = None, - reference_name: Optional[str] = None, - name: Optional[str] = None, - properties: Optional["models.DataFlow"] = None, - **kwargs - ) -> "models.AddDataFlowToDebugSessionResponse": - """Add a data flow into debug session. 
- - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param session_id: The ID of data flow debug session. - :type session_id: str - :param datasets: List of datasets. - :type datasets: list[~data_factory_management_client.models.DatasetDebugResource] - :param linked_services: List of linked services. - :type linked_services: list[~data_factory_management_client.models.LinkedServiceDebugResource] - :param source_settings: Source setting for data flow debug. - :type source_settings: list[~data_factory_management_client.models.DataFlowSourceSetting] - :param parameters: Data flow parameters. - :type parameters: dict[str, object] - :param dataset_parameters: Parameters for dataset. - :type dataset_parameters: object - :param folder_path: Folder path for staging blob. Type: string (or Expression with resultType - string). - :type folder_path: object - :param reference_name: Reference LinkedService name. - :type reference_name: str - :param name: The resource name. - :type name: str - :param properties: Data flow properties. - :type properties: ~data_factory_management_client.models.DataFlow - :keyword callable cls: A custom type or function that will be passed the direct response - :return: AddDataFlowToDebugSessionResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.AddDataFlowToDebugSessionResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.AddDataFlowToDebugSessionResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - request = models.DataFlowDebugPackage(session_id=session_id, datasets=datasets, linked_services=linked_services, source_settings=source_settings, parameters_debug_settings_parameters=parameters, dataset_parameters=dataset_parameters, folder_path=folder_path, reference_name=reference_name, name=name, properties=properties) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.add_data_flow.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(request, 'DataFlowDebugPackage') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not 
in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('AddDataFlowToDebugSessionResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - add_data_flow.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/addDataFlowToDebugSession'} # type: ignore - - async def delete( - self, - resource_group_name: str, - factory_name: str, - session_id: Optional[str] = None, - **kwargs - ) -> None: - """Deletes a data flow debug session. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param session_id: The ID of data flow debug session. - :type session_id: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - request = models.DeleteDataFlowDebugSessionRequest(session_id=session_id) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(request, 'DeleteDataFlowDebugSessionRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/deleteDataFlowDebugSession'} # type: ignore - - async def _execute_command_initial( - self, - resource_group_name: str, - factory_name: str, - session_id: Optional[str] = None, - command: Optional[Union[str, "models.DataFlowDebugCommandType"]] = None, - command_payload: 
Optional["models.DataFlowDebugCommandPayload"] = None, - **kwargs - ) -> Optional["models.DataFlowDebugCommandResponse"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DataFlowDebugCommandResponse"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - request = models.DataFlowDebugCommandRequest(session_id=session_id, command=command, command_payload=command_payload) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self._execute_command_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(request, 'DataFlowDebugCommandRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - response_headers = {} - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('DataFlowDebugCommandResponse', pipeline_response) - - if response.status_code == 202: - response_headers['location']=self._deserialize('str', response.headers.get('location')) - - if cls: - return cls(pipeline_response, deserialized, response_headers) - - return deserialized - _execute_command_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/executeDataFlowDebugCommand'} # type: ignore - - async def begin_execute_command( - self, - resource_group_name: str, - factory_name: str, - session_id: Optional[str] = None, - command: Optional[Union[str, "models.DataFlowDebugCommandType"]] = None, - command_payload: Optional["models.DataFlowDebugCommandPayload"] = None, - **kwargs - ) -> AsyncLROPoller["models.DataFlowDebugCommandResponse"]: - """Execute a data flow debug command. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param session_id: The ID of data flow debug session. - :type session_id: str - :param command: The command type. 
- :type command: str or ~data_factory_management_client.models.DataFlowDebugCommandType - :param command_payload: The command payload object. - :type command_payload: ~data_factory_management_client.models.DataFlowDebugCommandPayload - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of AsyncLROPoller that returns either DataFlowDebugCommandResponse or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.DataFlowDebugCommandResponse] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowDebugCommandResponse"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = await self._execute_command_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - session_id=session_id, - command=command, - command_payload=command_payload, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize('DataFlowDebugCommandResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_execute_command.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/executeDataFlowDebugCommand'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_operations_async.py deleted file mode 100644 index b5c2e5656ce..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_operations_async.py +++ /dev/null @@ -1,309 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
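# ---------------------------------------------------------------------------
# Editorial sketch (not part of the generated diff): the begin_* operations
# shown above follow the standard azure-core async LRO pattern — an initial
# POST, then an AsyncLROPoller whose result() resumes polling (honoring
# Retry-After) until the 202 flow completes. A minimal sketch assuming a
# `client` constructed as in the previous example; the operation-group name
# (`data_flow_debug_session`), the model constructor arguments, and the
# command/session values are assumptions for illustration.
# ---------------------------------------------------------------------------
from azext_datafactory.vendored_sdks.datafactory import models


async def run_debug_command(client) -> None:
    # Assumed model shape: stream_name is the source to preview, row_limits
    # caps the returned rows.
    payload = models.DataFlowDebugCommandPayload(stream_name="source1", row_limits=100)
    poller = await client.data_flow_debug_session.begin_execute_command(
        resource_group_name="exampleResourceGroup",   # hypothetical
        factory_name="exampleFactoryName",            # hypothetical
        session_id="<session-id>",                    # from begin_create's response
        command="executePreviewQuery",                # a DataFlowDebugCommandType value
        command_payload=payload,
    )
    # Await the poller's result to get the final DataFlowDebugCommandResponse.
    result = await poller.result()
    print(result.status)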
-# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar -import warnings - -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class DataFlowOperations: - """DataFlowOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - async def create_or_update( - self, - resource_group_name: str, - factory_name: str, - data_flow_name: str, - properties: "models.DataFlow", - if_match: Optional[str] = None, - **kwargs - ) -> "models.DataFlowResource": - """Creates or updates a data flow. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param data_flow_name: The data flow name. - :type data_flow_name: str - :param properties: Data flow properties. - :type properties: ~data_factory_management_client.models.DataFlow - :param if_match: ETag of the data flow entity. Should only be specified for update, for which - it should match existing entity or can be * for unconditional update. 
- :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: DataFlowResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.DataFlowResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - data_flow = models.DataFlowResource(properties=properties) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(data_flow, 'DataFlowResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('DataFlowResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'} # type: ignore - - async def get( - self, - resource_group_name: str, - factory_name: str, - data_flow_name: str, - if_none_match: Optional[str] = None, - **kwargs - ) -> "models.DataFlowResource": - """Gets a data flow. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param data_flow_name: The data flow name. - :type data_flow_name: str - :param if_none_match: ETag of the data flow entity. Should only be specified for get. If the - ETag matches the existing entity tag, or if * was provided, then no content will be returned. 
- :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: DataFlowResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.DataFlowResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('DataFlowResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'} # type: ignore - - async def delete( - self, - resource_group_name: str, - factory_name: str, - data_flow_name: str, - **kwargs - ) -> None: - """Deletes a data flow. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param data_flow_name: The data flow name. 
- :type data_flow_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'} # type: ignore - - def list_by_factory( - self, - resource_group_name: str, - factory_name: str, - **kwargs - ) -> AsyncIterable["models.DataFlowListResponse"]: - """Lists data flows. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
- :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either DataFlowListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.DataFlowListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - async def extract_data(pipeline_response): - deserialized = self._deserialize('DataFlowListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_dataset_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_dataset_operations_async.py deleted file mode 100644 index a8be0369365..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_dataset_operations_async.py +++ /dev/null @@ -1,311 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. 
-# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar -import warnings - -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class DatasetOperations: - """DatasetOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_factory( - self, - resource_group_name: str, - factory_name: str, - **kwargs - ) -> AsyncIterable["models.DatasetListResponse"]: - """Lists datasets. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
- :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either DatasetListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.DatasetListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - async def extract_data(pipeline_response): - deserialized = self._deserialize('DatasetListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets'} # type: ignore - - async def create_or_update( - self, - resource_group_name: str, - factory_name: str, - dataset_name: str, - properties: "models.Dataset", - if_match: Optional[str] = None, - **kwargs - ) -> "models.DatasetResource": - """Creates or updates a dataset. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param dataset_name: The dataset name. - :type dataset_name: str - :param properties: Dataset properties. - :type properties: ~data_factory_management_client.models.Dataset - :param if_match: ETag of the dataset entity. Should only be specified for update, for which it - should match existing entity or can be * for unconditional update. 
- :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: DatasetResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.DatasetResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - dataset = models.DatasetResource(properties=properties) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(dataset, 'DatasetResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('DatasetResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'} # type: ignore - - async def get( - self, - resource_group_name: str, - factory_name: str, - dataset_name: str, - if_none_match: Optional[str] = None, - **kwargs - ) -> Optional["models.DatasetResource"]: - """Gets a dataset. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param dataset_name: The dataset name. - :type dataset_name: str - :param if_none_match: ETag of the dataset entity. Should only be specified for get. If the ETag - matches the existing entity tag, or if * was provided, then no content will be returned. 
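# --- Illustrative usage sketch (editor's note; not part of the generated diff) ---
# create_or_update above wraps the caller's Dataset properties in a DatasetResource
# envelope and PUTs it; passing the ETag from a prior `get` as if_match turns the
# write into an optimistic-concurrency update (the service rejects it if the entity
# changed in between). The `datasets` attribute name on the client and the `etag`
# field on the returned resource are assumptions for illustration.
async def update_dataset_guarded(client, rg: str, factory: str, name: str, new_properties) -> None:
    current = await client.datasets.get(rg, factory, name)
    if current is not None:  # this get() returns None only for a 304 conditional read
        # `current.etag` assumed to carry the entity tag returned by the service.
        await client.datasets.create_or_update(rg, factory, name, new_properties, if_match=current.etag)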
- :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: DatasetResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.DatasetResource or None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DatasetResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 304]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('DatasetResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'} # type: ignore - - async def delete( - self, - resource_group_name: str, - factory_name: str, - dataset_name: str, - **kwargs - ) -> None: - """Deletes a dataset. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param dataset_name: The dataset name. 
- :type dataset_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_exposure_control_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_exposure_control_operations_async.py deleted file mode 100644 index b20acb1c3c8..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_exposure_control_operations_async.py +++ /dev/null @@ -1,241 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... 
import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class ExposureControlOperations: - """ExposureControlOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - async def get_feature_value( - self, - location_id: str, - feature_name: Optional[str] = None, - feature_type: Optional[str] = None, - **kwargs - ) -> "models.ExposureControlResponse": - """Get exposure control feature for specific location. - - :param location_id: The location identifier. - :type location_id: str - :param feature_name: The feature name. - :type feature_name: str - :param feature_type: The feature type. - :type feature_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ExposureControlResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ExposureControlResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ExposureControlResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - exposure_control_request = models.ExposureControlRequest(feature_name=feature_name, feature_type=feature_type) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.get_feature_value.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'locationId': self._serialize.url("location_id", location_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(exposure_control_request, 'ExposureControlRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ExposureControlResponse', pipeline_response) - - if cls: - return 
cls(pipeline_response, deserialized, {}) - - return deserialized - get_feature_value.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/locations/{locationId}/getFeatureValue'} # type: ignore - - async def get_feature_value_by_factory( - self, - resource_group_name: str, - factory_name: str, - feature_name: Optional[str] = None, - feature_type: Optional[str] = None, - **kwargs - ) -> "models.ExposureControlResponse": - """Get exposure control feature for specific factory. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param feature_name: The feature name. - :type feature_name: str - :param feature_type: The feature type. - :type feature_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ExposureControlResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ExposureControlResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ExposureControlResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - exposure_control_request = models.ExposureControlRequest(feature_name=feature_name, feature_type=feature_type) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.get_feature_value_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(exposure_control_request, 'ExposureControlRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ExposureControlResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get_feature_value_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getFeatureValue'} # type: ignore - - async def query_feature_value_by_factory( - self, - 
resource_group_name: str, - factory_name: str, - exposure_control_requests: List["models.ExposureControlRequest"], - **kwargs - ) -> "models.ExposureControlBatchResponse": - """Get list of exposure control features for specific factory. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param exposure_control_requests: List of exposure control features. - :type exposure_control_requests: list[~data_factory_management_client.models.ExposureControlRequest] - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ExposureControlBatchResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ExposureControlBatchResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ExposureControlBatchResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - exposure_control_batch_request = models.ExposureControlBatchRequest(exposure_control_requests=exposure_control_requests) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.query_feature_value_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(exposure_control_batch_request, 'ExposureControlBatchRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ExposureControlBatchResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - query_feature_value_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryFeaturesValue'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_factory_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_factory_operations_async.py deleted file mode 
100644 index 46f37c1a6f7..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_factory_operations_async.py +++ /dev/null @@ -1,658 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar -import warnings - -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class FactoryOperations: - """FactoryOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list( - self, - **kwargs - ) -> AsyncIterable["models.FactoryListResponse"]: - """Lists factories under the specified subscription. 
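# --- Illustrative usage sketch (editor's note; not part of the generated diff) ---
# `list` below returns an AsyncItemPaged wired from the prepare_request /
# extract_data / get_next trio; the pager follows FactoryListResponse.next_link
# transparently, so callers just `async for` over it. The `factories` attribute
# name on the client is an assumption for illustration.
async def print_factory_names(client) -> None:
    # Each iteration may trigger a further GET against the next_link page.
    async for factory in client.factories.list():
        print(factory.name)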
- - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either FactoryListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.FactoryListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.FactoryListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - async def extract_data(pipeline_response): - deserialized = self._deserialize('FactoryListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/factories'} # type: ignore - - async def configure_factory_repo( - self, - location_id: str, - factory_resource_id: Optional[str] = None, - repo_configuration: Optional["models.FactoryRepoConfiguration"] = None, - **kwargs - ) -> "models.Factory": - """Updates a factory's repo information. - - :param location_id: The location identifier. - :type location_id: str - :param factory_resource_id: The factory resource id. - :type factory_resource_id: str - :param repo_configuration: Git repo information of the factory. 
- :type repo_configuration: ~data_factory_management_client.models.FactoryRepoConfiguration - :keyword callable cls: A custom type or function that will be passed the direct response - :return: Factory, or the result of cls(response) - :rtype: ~data_factory_management_client.models.Factory - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - factory_repo_update = models.FactoryRepoUpdate(factory_resource_id=factory_resource_id, repo_configuration=repo_configuration) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.configure_factory_repo.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'locationId': self._serialize.url("location_id", location_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(factory_repo_update, 'FactoryRepoUpdate') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('Factory', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - configure_factory_repo.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/locations/{locationId}/configureFactoryRepo'} # type: ignore - - def list_by_resource_group( - self, - resource_group_name: str, - **kwargs - ) -> AsyncIterable["models.FactoryListResponse"]: - """Lists factories. - - :param resource_group_name: The resource group name. 
- :type resource_group_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either FactoryListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.FactoryListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.FactoryListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_resource_group.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - async def extract_data(pipeline_response): - deserialized = self._deserialize('FactoryListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged( - get_next, extract_data - ) - list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories'} # type: ignore - - async def create_or_update( - self, - resource_group_name: str, - factory_name: str, - if_match: Optional[str] = None, - location: Optional[str] = None, - tags: Optional[Dict[str, str]] = None, - identity: Optional["models.FactoryIdentity"] = None, - repo_configuration: Optional["models.FactoryRepoConfiguration"] = None, - global_parameters: Optional[Dict[str, "models.GlobalParameterSpecification"]] = None, - **kwargs - ) -> "models.Factory": - """Creates or updates a factory. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param if_match: ETag of the factory entity. Should only be specified for update, for which it - should match existing entity or can be * for unconditional update. - :type if_match: str - :param location: The resource location. - :type location: str - :param tags: The resource tags. 
- :type tags: dict[str, str] - :param identity: Managed service identity of the factory. - :type identity: ~data_factory_management_client.models.FactoryIdentity - :param repo_configuration: Git repo information of the factory. - :type repo_configuration: ~data_factory_management_client.models.FactoryRepoConfiguration - :param global_parameters: List of parameters for factory. - :type global_parameters: dict[str, ~data_factory_management_client.models.GlobalParameterSpecification] - :keyword callable cls: A custom type or function that will be passed the direct response - :return: Factory, or the result of cls(response) - :rtype: ~data_factory_management_client.models.Factory - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - factory = models.Factory(location=location, tags=tags, identity=identity, repo_configuration=repo_configuration, global_parameters=global_parameters) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(factory, 'Factory') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('Factory', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore - - async def update( - self, - resource_group_name: str, - factory_name: str, - tags: Optional[Dict[str, str]] = None, - identity: Optional["models.FactoryIdentity"] = None, - **kwargs - ) -> "models.Factory": - """Updates a factory. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
- :type factory_name: str - :param tags: The resource tags. - :type tags: dict[str, str] - :param identity: Managed service identity of the factory. - :type identity: ~data_factory_management_client.models.FactoryIdentity - :keyword callable cls: A custom type or function that will be passed the direct response - :return: Factory, or the result of cls(response) - :rtype: ~data_factory_management_client.models.Factory - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - factory_update_parameters = models.FactoryUpdateParameters(tags=tags, identity=identity) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(factory_update_parameters, 'FactoryUpdateParameters') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('Factory', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore - - async def get( - self, - resource_group_name: str, - factory_name: str, - if_none_match: Optional[str] = None, - **kwargs - ) -> Optional["models.Factory"]: - """Gets a factory. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param if_none_match: ETag of the factory entity. Should only be specified for get. If the ETag - matches the existing entity tag, or if * was provided, then no content will be returned. 
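# --- Illustrative usage sketch (editor's note; not part of the generated diff) ---
# Because `get` below treats 304 as a success status and only deserializes on 200,
# a conditional read returns None when the caller's cached copy is still current.
# The `factories` attribute and the `e_tag` field name are assumptions.
async def refresh_factory(client, rg: str, name: str, cached):
    fresh = await client.factories.get(rg, name, if_none_match=cached.e_tag)
    return cached if fresh is None else fresh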
- :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: Factory, or the result of cls(response) - :rtype: ~data_factory_management_client.models.Factory or None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.Factory"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 304]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('Factory', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore - - async def delete( - self, - resource_group_name: str, - factory_name: str, - **kwargs - ) -> None: - """Deletes a factory. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
- :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore - - async def get_git_hub_access_token( - self, - resource_group_name: str, - factory_name: str, - git_hub_access_code: str, - git_hub_access_token_base_url: str, - git_hub_client_id: Optional[str] = None, - **kwargs - ) -> "models.GitHubAccessTokenResponse": - """Get GitHub Access Token. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param git_hub_access_code: GitHub access code. - :type git_hub_access_code: str - :param git_hub_access_token_base_url: GitHub access token base URL. - :type git_hub_access_token_base_url: str - :param git_hub_client_id: GitHub application client ID. 
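# --- Illustrative usage sketch (editor's note; not part of the generated diff) ---
# get_git_hub_access_token below packs the flat parameters into a
# GitHubAccessTokenRequest and POSTs it to the factory-scoped endpoint, exchanging
# the OAuth code from GitHub's redirect for an access token. The `factories`
# attribute, the base URL value, and the response field name are assumptions.
async def exchange_github_code(client, rg: str, factory: str, code: str):
    resp = await client.factories.get_git_hub_access_token(
        rg, factory,
        git_hub_access_code=code,
        git_hub_access_token_base_url="https://github.com",  # assumed value for public GitHub
    )
    return resp.git_hub_access_token  # field name assumed on GitHubAccessTokenResponse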
- :type git_hub_client_id: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: GitHubAccessTokenResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.GitHubAccessTokenResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.GitHubAccessTokenResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - git_hub_access_token_request = models.GitHubAccessTokenRequest(git_hub_access_code=git_hub_access_code, git_hub_client_id=git_hub_client_id, git_hub_access_token_base_url=git_hub_access_token_base_url) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.get_git_hub_access_token.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(git_hub_access_token_request, 'GitHubAccessTokenRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('GitHubAccessTokenResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get_git_hub_access_token.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getGitHubAccessToken'} # type: ignore - - async def get_data_plane_access( - self, - resource_group_name: str, - factory_name: str, - permissions: Optional[str] = None, - access_resource_path: Optional[str] = None, - profile_name: Optional[str] = None, - start_time: Optional[str] = None, - expire_time: Optional[str] = None, - **kwargs - ) -> "models.AccessPolicyResponse": - """Get Data Plane access. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param permissions: The string with permissions for Data Plane access. Currently only 'r' is - supported which grants read only access. 
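# --- Illustrative usage sketch (editor's note; not part of the generated diff) ---
# get_data_plane_access below builds a UserAccessPolicy from the flat parameters
# and POSTs it; per the docstring only 'r' (read) permissions and the empty,
# factory-level resource path are currently supported. The `factories` attribute
# and the `access_token` response field are assumptions for illustration.
async def read_only_token(client, rg: str, factory: str):
    policy_response = await client.factories.get_data_plane_access(
        rg, factory,
        permissions="r",
        access_resource_path="",        # empty string = the factory resource itself
        profile_name="DefaultProfile",  # only the default profile is supported
    )
    return policy_response.access_token  # field name assumed on AccessPolicyResponse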
- :type permissions: str - :param access_resource_path: The resource path to get access relative to factory. Currently - only empty string is supported which corresponds to the factory resource. - :type access_resource_path: str - :param profile_name: The name of the profile. Currently only the default is supported. The - default value is DefaultProfile. - :type profile_name: str - :param start_time: Start time for the token. If not specified the current time will be used. - :type start_time: str - :param expire_time: Expiration time for the token. Maximum duration for the token is eight - hours and by default the token will expire in eight hours. - :type expire_time: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: AccessPolicyResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.AccessPolicyResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.AccessPolicyResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - policy = models.UserAccessPolicy(permissions=permissions, access_resource_path=access_resource_path, profile_name=profile_name, start_time=start_time, expire_time=expire_time) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.get_data_plane_access.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(policy, 'UserAccessPolicy') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('AccessPolicyResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get_data_plane_access.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getDataPlaneAccess'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_node_operations_async.py 
b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_node_operations_async.py deleted file mode 100644 index a6022196653..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_node_operations_async.py +++ /dev/null @@ -1,301 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Generic, Optional, TypeVar -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class IntegrationRuntimeNodeOperations: - """IntegrationRuntimeNodeOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - async def get( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - node_name: str, - **kwargs - ) -> "models.SelfHostedIntegrationRuntimeNode": - """Gets a self-hosted integration runtime node. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param node_name: The integration runtime node name. 
- :type node_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: SelfHostedIntegrationRuntimeNode, or the result of cls(response) - :rtype: ~data_factory_management_client.models.SelfHostedIntegrationRuntimeNode - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.SelfHostedIntegrationRuntimeNode"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('SelfHostedIntegrationRuntimeNode', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} # type: ignore - - async def delete( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - node_name: str, - **kwargs - ) -> None: - """Deletes a self-hosted integration runtime node. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param node_name: The integration runtime node name. 
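Reusing the hypothetical `client` from the earlier sketch, the node-level `get`/`delete` pair removed here could be exercised as follows; the `integration_runtime_node` attribute name is an assumption.

```python
async def inspect_and_remove_node(client) -> None:
    # Fetch one node of a self-hosted integration runtime.
    node = await client.integration_runtime_node.get(
        resource_group_name="exampleResourceGroup",
        factory_name="exampleFactoryName",
        integration_runtime_name="exampleIntegrationRuntime",
        node_name="Node_1",
    )
    print(node.node_name, node.status)

    # delete returns None; the service answers 200 or 204.
    await client.integration_runtime_node.delete(
        resource_group_name="exampleResourceGroup",
        factory_name="exampleFactoryName",
        integration_runtime_name="exampleIntegrationRuntime",
        node_name="Node_1",
    )
```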
- :type node_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} # type: ignore - - async def update( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - node_name: str, - concurrent_jobs_limit: Optional[int] = None, - **kwargs - ) -> "models.SelfHostedIntegrationRuntimeNode": - """Updates a self-hosted integration runtime node. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param node_name: The integration runtime node name. - :type node_name: str - :param concurrent_jobs_limit: The number of concurrent jobs permitted to run on the integration - runtime node. Values between 1 and maxConcurrentJobs(inclusive) are allowed. 
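The node `update` above flattens `UpdateIntegrationRuntimeNodeRequest` into a single keyword argument, so capping a node looks like this (same hypothetical `client`):

```python
async def throttle_node(client) -> None:
    # Allowed range is 1..maxConcurrentJobs (inclusive) for this node.
    node = await client.integration_runtime_node.update(
        resource_group_name="exampleResourceGroup",
        factory_name="exampleFactoryName",
        integration_runtime_name="exampleIntegrationRuntime",
        node_name="Node_1",
        concurrent_jobs_limit=2,
    )
    print(node.concurrent_jobs_limit)
```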
- :type concurrent_jobs_limit: int - :keyword callable cls: A custom type or function that will be passed the direct response - :return: SelfHostedIntegrationRuntimeNode, or the result of cls(response) - :rtype: ~data_factory_management_client.models.SelfHostedIntegrationRuntimeNode - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.SelfHostedIntegrationRuntimeNode"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - update_integration_runtime_node_request = models.UpdateIntegrationRuntimeNodeRequest(concurrent_jobs_limit=concurrent_jobs_limit) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(update_integration_runtime_node_request, 'UpdateIntegrationRuntimeNodeRequest') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('SelfHostedIntegrationRuntimeNode', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} # type: ignore - - async def get_ip_address( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - node_name: str, - **kwargs - ) -> "models.IntegrationRuntimeNodeIpAddress": - """Get the IP address of self-hosted integration runtime node. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
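`get_ip_address`, whose docstring begins here, is a POST that returns a single read-only address; a sketch under the same assumptions:

```python
async def print_node_ip(client) -> None:
    result = await client.integration_runtime_node.get_ip_address(
        resource_group_name="exampleResourceGroup",
        factory_name="exampleFactoryName",
        integration_runtime_name="exampleIntegrationRuntime",
        node_name="Node_1",
    )
    # IntegrationRuntimeNodeIpAddress exposes one read-only property.
    print(result.ip_address)
```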
- :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param node_name: The integration runtime node name. - :type node_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeNodeIpAddress, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeNodeIpAddress - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeNodeIpAddress"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get_ip_address.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeNodeIpAddress', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get_ip_address.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}/ipAddress'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_object_metadata_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_object_metadata_operations_async.py deleted file mode 100644 index 70df0716c21..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_object_metadata_operations_async.py +++ /dev/null @@ -1,230 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.mgmt.core.exceptions import ARMErrorFormat -from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling - -from ... import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class IntegrationRuntimeObjectMetadataOperations: - """IntegrationRuntimeObjectMetadataOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - async def _refresh_initial( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs - ) -> Optional["models.SsisObjectMetadataStatusResponse"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.SsisObjectMetadataStatusResponse"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self._refresh_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - 
map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('SsisObjectMetadataStatusResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - _refresh_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/refreshObjectMetadata'} # type: ignore - - async def begin_refresh( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs - ) -> AsyncLROPoller["models.SsisObjectMetadataStatusResponse"]: - """Refresh an SSIS integration runtime object metadata. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of AsyncLROPoller that returns either SsisObjectMetadataStatusResponse or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.SsisObjectMetadataStatusResponse] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.SsisObjectMetadataStatusResponse"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = await self._refresh_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - integration_runtime_name=integration_runtime_name, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize('SsisObjectMetadataStatusResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_refresh.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/refreshObjectMetadata'} # type: ignore - - async def get( -
self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - metadata_path: Optional[str] = None, - **kwargs - ) -> "models.SsisObjectMetadataListResponse": - """Get an SSIS integration runtime object metadata by the specified path. The response is a pageable - metadata list. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param metadata_path: Metadata path. - :type metadata_path: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: SsisObjectMetadataListResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.SsisObjectMetadataListResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.SsisObjectMetadataListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - get_metadata_request = models.GetSsisObjectMetadataRequest(metadata_path=metadata_path) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - if get_metadata_request is not None: - body_content = self._serialize.body(get_metadata_request, 'GetSsisObjectMetadataRequest') - else: - body_content = None - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('SsisObjectMetadataListResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getObjectMetadata'} # type:
ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_operations_async.py deleted file mode 100644 index 82b285c7a74..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_operations_async.py +++ /dev/null @@ -1,1176 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union -import warnings - -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.mgmt.core.exceptions import ARMErrorFormat -from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling - -from ... import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class IntegrationRuntimeOperations: - """IntegrationRuntimeOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_factory( - self, - resource_group_name: str, - factory_name: str, - **kwargs - ) -> AsyncIterable["models.IntegrationRuntimeListResponse"]: - """Lists integration runtimes. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
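Tying together the two object-metadata operations removed above: `begin_refresh` returns an `AsyncLROPoller`, after which `get` pages through metadata under a path. A sketch with the assumed `integration_runtime_object_metadata` attribute name:

```python
async def refresh_and_list_ssis_metadata(client) -> None:
    # Kick off the refresh and wait for the terminal status.
    poller = await client.integration_runtime_object_metadata.begin_refresh(
        resource_group_name="exampleResourceGroup",
        factory_name="exampleFactoryName",
        integration_runtime_name="exampleSsisIntegrationRuntime",
    )
    status = await poller.result()
    print(status.status)

    # Query metadata under a path (omit metadata_path for top-level objects).
    metadata = await client.integration_runtime_object_metadata.get(
        resource_group_name="exampleResourceGroup",
        factory_name="exampleFactoryName",
        integration_runtime_name="exampleSsisIntegrationRuntime",
        metadata_path="ssisFolders",
    )
    for item in metadata.value:
        print(item.name)
```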
- :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either IntegrationRuntimeListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.IntegrationRuntimeListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - async def extract_data(pipeline_response): - deserialized = self._deserialize('IntegrationRuntimeListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes'} # type: ignore - - async def create_or_update( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - properties: "models.IntegrationRuntime", - if_match: Optional[str] = None, - **kwargs - ) -> "models.IntegrationRuntimeResource": - """Creates or updates an integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param properties: Integration runtime properties. - :type properties: ~data_factory_management_client.models.IntegrationRuntime - :param if_match: ETag of the integration runtime entity. 
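`list_by_factory` above returns an `AsyncItemPaged` that yields the deserialized resources, so it is consumed with `async for` rather than awaited; a brief sketch:

```python
async def list_runtimes(client) -> None:
    # Note: list_by_factory is not awaited; iterating drives the paging.
    async for runtime in client.integration_runtime.list_by_factory(
        resource_group_name="exampleResourceGroup",
        factory_name="exampleFactoryName",
    ):
        print(runtime.name, runtime.properties.type)
```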
Should only be specified for update, - for which it should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - integration_runtime = models.IntegrationRuntimeResource(properties=properties) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(integration_runtime, 'IntegrationRuntimeResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore - - async def get( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - if_none_match: Optional[str] = None, - **kwargs - ) -> Optional["models.IntegrationRuntimeResource"]: - """Gets an integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
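`create_or_update` above wraps the supplied properties in an `IntegrationRuntimeResource` envelope itself, so the caller passes only the typed properties model. A sketch (model import path assumed from the package name in the docstrings):

```python
from data_factory_management_client import models  # import path assumed


async def create_self_hosted_ir(client) -> None:
    resource = await client.integration_runtime.create_or_update(
        resource_group_name="exampleResourceGroup",
        factory_name="exampleFactoryName",
        integration_runtime_name="exampleIntegrationRuntime",
        properties=models.SelfHostedIntegrationRuntime(
            description="A self-hosted integration runtime",
        ),
    )
    print(resource.etag)
```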
- :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param if_none_match: ETag of the integration runtime entity. Should only be specified for get. - If the ETag matches the existing entity tag, or if * was provided, then no content will be - returned. - :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource or None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.IntegrationRuntimeResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 304]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore - - async def update( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - auto_update: Optional[Union[str, "models.IntegrationRuntimeAutoUpdate"]] = None, - update_delay_offset: Optional[str] = None, - **kwargs - ) -> "models.IntegrationRuntimeResource": - """Updates an integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. 
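The `get` above honors `If-None-Match`: passing the last seen ETag yields a 304 with an empty body, which this generated code surfaces as `None`. A sketch:

```python
async def get_if_changed(client, last_etag: str) -> None:
    resource = await client.integration_runtime.get(
        resource_group_name="exampleResourceGroup",
        factory_name="exampleFactoryName",
        integration_runtime_name="exampleIntegrationRuntime",
        if_none_match=last_etag,
    )
    if resource is None:
        print("Not modified since", last_etag)
    else:
        print("Changed; new ETag:", resource.etag)
```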
- :type integration_runtime_name: str - :param auto_update: Enables or disables the auto-update feature of the self-hosted integration - runtime. See https://go.microsoft.com/fwlink/?linkid=854189. - :type auto_update: str or ~data_factory_management_client.models.IntegrationRuntimeAutoUpdate - :param update_delay_offset: The time offset (in hours) in the day, e.g., PT03H is 3 hours. The - integration runtime auto-update will happen at that time. - :type update_delay_offset: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - update_integration_runtime_request = models.UpdateIntegrationRuntimeRequest(auto_update=auto_update, update_delay_offset=update_delay_offset) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(update_integration_runtime_request, 'UpdateIntegrationRuntimeRequest') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore - - async def delete( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs - ) -> None: - """Deletes an
integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore - - async def get_status( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs - ) -> "models.IntegrationRuntimeStatusResponse": - """Gets detailed status information for an integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. 
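For the runtime-level `update` removed above, `UpdateIntegrationRuntimeRequest` is likewise flattened into two keywords; a sketch:

```python
async def tune_auto_update(client) -> None:
    resource = await client.integration_runtime.update(
        resource_group_name="exampleResourceGroup",
        factory_name="exampleFactoryName",
        integration_runtime_name="exampleIntegrationRuntime",
        auto_update="On",             # or models.IntegrationRuntimeAutoUpdate.ON
        update_delay_offset="PT03H",  # per the docstring, 03:00 in the day
    )
    print(resource.properties)
```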
- :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeStatusResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeStatusResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get_status.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getStatus'} # type: ignore - - async def get_connection_info( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs - ) -> "models.IntegrationRuntimeConnectionInfo": - """Gets the on-premises integration runtime connection information for encrypting the on-premises - data source credentials. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. 
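`get_status` above is the POST that reports live runtime state, as opposed to the ARM `get` of the resource definition; a sketch:

```python
async def show_runtime_state(client) -> None:
    status = await client.integration_runtime.get_status(
        resource_group_name="exampleResourceGroup",
        factory_name="exampleFactoryName",
        integration_runtime_name="exampleIntegrationRuntime",
    )
    # IntegrationRuntimeStatusResponse wraps the name plus a typed status blob.
    print(status.name, status.properties.state)
```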
- :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeConnectionInfo, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeConnectionInfo - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeConnectionInfo"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get_connection_info.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeConnectionInfo', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get_connection_info.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getConnectionInfo'} # type: ignore - - async def regenerate_auth_key( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - key_name: Optional[Union[str, "models.IntegrationRuntimeAuthKeyName"]] = None, - **kwargs - ) -> "models.IntegrationRuntimeAuthKeys": - """Regenerates the authentication key for an integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param key_name: The name of the authentication key to regenerate. 
- :type key_name: str or ~data_factory_management_client.models.IntegrationRuntimeAuthKeyName - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeAuthKeys, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeAuthKeys - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeAuthKeys"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - regenerate_key_parameters = models.IntegrationRuntimeRegenerateKeyParameters(key_name=key_name) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.regenerate_auth_key.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(regenerate_key_parameters, 'IntegrationRuntimeRegenerateKeyParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeAuthKeys', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - regenerate_auth_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/regenerateAuthKey'} # type: ignore - - async def list_auth_key( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs - ) -> "models.IntegrationRuntimeAuthKeys": - """Retrieves the authentication keys for an integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. 
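The key operations above rotate and read back the credentials used to register self-hosted nodes; `key_name` accepts the enum or the strings "authKey1"/"authKey2". A sketch:

```python
async def rotate_and_list_keys(client) -> None:
    keys = await client.integration_runtime.regenerate_auth_key(
        resource_group_name="exampleResourceGroup",
        factory_name="exampleFactoryName",
        integration_runtime_name="exampleIntegrationRuntime",
        key_name="authKey2",
    )
    print(keys.auth_key2)

    # list_auth_key (POST listAuthKeys) returns both current keys.
    keys = await client.integration_runtime.list_auth_key(
        resource_group_name="exampleResourceGroup",
        factory_name="exampleFactoryName",
        integration_runtime_name="exampleIntegrationRuntime",
    )
    print(keys.auth_key1, keys.auth_key2)
```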
- :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeAuthKeys, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeAuthKeys - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeAuthKeys"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.list_auth_key.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeAuthKeys', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - list_auth_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/listAuthKeys'} # type: ignore - - async def _start_initial( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs - ) -> Optional["models.IntegrationRuntimeStatusResponse"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.IntegrationRuntimeStatusResponse"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self._start_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, 
pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start'} # type: ignore - - async def begin_start( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs - ) -> AsyncLROPoller["models.IntegrationRuntimeStatusResponse"]: - """Starts a ManagedReserved type integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
- :return: An instance of AsyncLROPoller that returns either IntegrationRuntimeStatusResponse or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.IntegrationRuntimeStatusResponse] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = await self._start_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - integration_runtime_name=integration_runtime_name, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start'} # type: ignore - - async def _stop_initial( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs - ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self._stop_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - 
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop'} # type: ignore
-
- async def begin_stop(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- **kwargs
- ) -> AsyncLROPoller[None]:
- """Stops a ManagedReserved type integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: True for ARMPolling, False for no polling, or a
- polling object for personal polling strategy
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
- :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
- :rtype: ~azure.core.polling.AsyncLROPoller[None]
- :raises ~azure.core.exceptions.HttpResponseError:
- """
- polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- lro_delay = kwargs.pop(
- 'polling_interval',
- self._config.polling_interval
- )
- cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
- if cont_token is None:
- raw_result = await self._stop_initial(
- resource_group_name=resource_group_name,
- factory_name=factory_name,
- integration_runtime_name=integration_runtime_name,
- cls=lambda x,y,z: x,
- **kwargs
- )
-
- kwargs.pop('error_map', None)
- kwargs.pop('content_type', None)
-
- def get_long_running_output(pipeline_response):
- if cls:
- return cls(pipeline_response, None, {})
-
- if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
- elif polling is False: polling_method = AsyncNoPolling()
- else: polling_method = polling
- if cont_token:
- return AsyncLROPoller.from_continuation_token(
- polling_method=polling_method,
- continuation_token=cont_token,
- client=self._client,
- deserialization_callback=get_long_running_output
- )
- else:
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop'} # type: ignore
-
- async def sync_credentials(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- **kwargs
- ) -> None:
- """Force the integration runtime to synchronize credentials across integration runtime nodes, and
- this will override the credentials across all worker nodes with those available on the
- dispatcher node. If you already have the latest credential backup file, you should manually
- import it (preferred) on any self-hosted integration runtime node rather than using this API directly.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.sync_credentials.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - sync_credentials.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/syncCredentials'} # type: ignore - - async def get_monitoring_data( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs - ) -> "models.IntegrationRuntimeMonitoringData": - """Get the integration runtime monitoring data, which includes the monitor data for all the nodes - under this integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. 
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeMonitoringData, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeMonitoringData
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeMonitoringData"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get_monitoring_data.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('IntegrationRuntimeMonitoringData', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get_monitoring_data.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/monitoringData'} # type: ignore
-
- async def upgrade(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- **kwargs
- ) -> None:
- """Upgrade the self-hosted integration runtime to the latest version if one is available.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.upgrade.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- upgrade.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/upgrade'} # type: ignore
-
- async def remove_link(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- linked_factory_name: str,
- **kwargs
- ) -> None:
- """Remove all linked integration runtimes under a specific data factory in a self-hosted integration
- runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param linked_factory_name: The data factory name for linked integration runtime.
- :type linked_factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - linked_integration_runtime_request = models.LinkedIntegrationRuntimeRequest(linked_factory_name=linked_factory_name) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.remove_link.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(linked_integration_runtime_request, 'LinkedIntegrationRuntimeRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - remove_link.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/removeLinks'} # type: ignore - - async def create_linked_integration_runtime( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - name: Optional[str] = None, - subscription_id: Optional[str] = None, - data_factory_name: Optional[str] = None, - data_factory_location: Optional[str] = None, - **kwargs - ) -> "models.IntegrationRuntimeStatusResponse": - """Create a linked integration runtime entry in a shared integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param name: The name of the linked integration runtime. - :type name: str - :param subscription_id: The ID of the subscription that the linked integration runtime belongs - to. 
- :type subscription_id: str - :param data_factory_name: The name of the data factory that the linked integration runtime - belongs to. - :type data_factory_name: str - :param data_factory_location: The location of the data factory that the linked integration - runtime belongs to. - :type data_factory_location: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeStatusResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeStatusResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - create_linked_integration_runtime_request = models.CreateLinkedIntegrationRuntimeRequest(name=name, subscription_id=subscription_id, data_factory_name=data_factory_name, data_factory_location=data_factory_location) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_linked_integration_runtime.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(create_linked_integration_runtime_request, 'CreateLinkedIntegrationRuntimeRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_linked_integration_runtime.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/linkedIntegrationRuntime'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_linked_service_operations_async.py 
b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_linked_service_operations_async.py deleted file mode 100644 index 56e9e6f663a..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_linked_service_operations_async.py +++ /dev/null @@ -1,312 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar -import warnings - -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class LinkedServiceOperations: - """LinkedServiceOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_factory( - self, - resource_group_name: str, - factory_name: str, - **kwargs - ) -> AsyncIterable["models.LinkedServiceListResponse"]: - """Lists linked services. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
- :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either LinkedServiceListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.LinkedServiceListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - async def extract_data(pipeline_response): - deserialized = self._deserialize('LinkedServiceListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices'} # type: ignore - - async def create_or_update( - self, - resource_group_name: str, - factory_name: str, - linked_service_name: str, - properties: "models.LinkedService", - if_match: Optional[str] = None, - **kwargs - ) -> "models.LinkedServiceResource": - """Creates or updates a linked service. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param linked_service_name: The linked service name. - :type linked_service_name: str - :param properties: Properties of linked service. - :type properties: ~data_factory_management_client.models.LinkedService - :param if_match: ETag of the linkedService entity. 
Should only be specified for update, for - which it should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: LinkedServiceResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.LinkedServiceResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - linked_service = models.LinkedServiceResource(properties=properties) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(linked_service, 'LinkedServiceResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('LinkedServiceResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} # type: ignore - - async def get( - self, - resource_group_name: str, - factory_name: str, - linked_service_name: str, - if_none_match: Optional[str] = None, - **kwargs - ) -> Optional["models.LinkedServiceResource"]: - """Gets a linked service. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param linked_service_name: The linked service name. 
- :type linked_service_name: str - :param if_none_match: ETag of the linked service entity. Should only be specified for get. If - the ETag matches the existing entity tag, or if * was provided, then no content will be - returned. - :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: LinkedServiceResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.LinkedServiceResource or None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.LinkedServiceResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 304]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('LinkedServiceResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} # type: ignore - - async def delete( - self, - resource_group_name: str, - factory_name: str, - linked_service_name: str, - **kwargs - ) -> None: - """Deletes a linked service. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param linked_service_name: The linked service name. 
- :type linked_service_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_private_endpoint_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_private_endpoint_operations_async.py deleted file mode 100644 index 3a899779963..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_private_endpoint_operations_async.py +++ /dev/null @@ -1,336 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, List, Optional, TypeVar -import warnings - -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... 
import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class ManagedPrivateEndpointOperations: - """ManagedPrivateEndpointOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_factory( - self, - resource_group_name: str, - factory_name: str, - managed_virtual_network_name: str, - **kwargs - ) -> AsyncIterable["models.ManagedPrivateEndpointListResponse"]: - """Lists managed private endpoints. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. - :type managed_virtual_network_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ManagedPrivateEndpointListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.ManagedPrivateEndpointListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - async def extract_data(pipeline_response): - deserialized = 
self._deserialize('ManagedPrivateEndpointListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints'} # type: ignore - - async def create_or_update( - self, - resource_group_name: str, - factory_name: str, - managed_virtual_network_name: str, - managed_private_endpoint_name: str, - if_match: Optional[str] = None, - connection_state: Optional["models.ConnectionStateProperties"] = None, - fqdns: Optional[List[str]] = None, - group_id: Optional[str] = None, - private_link_resource_id: Optional[str] = None, - **kwargs - ) -> "models.ManagedPrivateEndpointResource": - """Creates or updates a managed private endpoint. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. - :type managed_virtual_network_name: str - :param managed_private_endpoint_name: Managed private endpoint name. - :type managed_private_endpoint_name: str - :param if_match: ETag of the managed private endpoint entity. Should only be specified for - update, for which it should match existing entity or can be * for unconditional update. - :type if_match: str - :param connection_state: The managed private endpoint connection state. - :type connection_state: ~data_factory_management_client.models.ConnectionStateProperties - :param fqdns: Fully qualified domain names. - :type fqdns: list[str] - :param group_id: The groupId to which the managed private endpoint is created. - :type group_id: str - :param private_link_resource_id: The ARM resource ID of the resource to which the managed - private endpoint is created. 
- :type private_link_resource_id: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ManagedPrivateEndpointResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ManagedPrivateEndpointResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - managed_private_endpoint = models.ManagedPrivateEndpointResource(connection_state=connection_state, fqdns=fqdns, group_id=group_id, private_link_resource_id=private_link_resource_id) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(managed_private_endpoint, 'ManagedPrivateEndpointResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ManagedPrivateEndpointResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore - - async def get( - self, - resource_group_name: str, - factory_name: str, - 
managed_virtual_network_name: str, - managed_private_endpoint_name: str, - if_none_match: Optional[str] = None, - **kwargs - ) -> "models.ManagedPrivateEndpointResource": - """Gets a managed private endpoint. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. - :type managed_virtual_network_name: str - :param managed_private_endpoint_name: Managed private endpoint name. - :type managed_private_endpoint_name: str - :param if_none_match: ETag of the managed private endpoint entity. Should only be specified for - get. If the ETag matches the existing entity tag, or if * was provided, then no content will be - returned. - :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ManagedPrivateEndpointResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ManagedPrivateEndpointResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ManagedPrivateEndpointResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore - - async def delete( - self, - resource_group_name: str, - factory_name: str, - managed_virtual_network_name: str, - managed_private_endpoint_name: str, - **kwargs - ) -> None: - """Deletes a managed private endpoint. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. - :type managed_virtual_network_name: str - :param managed_private_endpoint_name: Managed private endpoint name. - :type managed_private_endpoint_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_virtual_network_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_virtual_network_operations_async.py deleted file 
mode 100644 index 2152988d7ef..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_virtual_network_operations_async.py +++ /dev/null @@ -1,255 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar -import warnings - -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class ManagedVirtualNetworkOperations: - """ManagedVirtualNetworkOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_factory( - self, - resource_group_name: str, - factory_name: str, - **kwargs - ) -> AsyncIterable["models.ManagedVirtualNetworkListResponse"]: - """Lists managed Virtual Networks. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
- :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ManagedVirtualNetworkListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.ManagedVirtualNetworkListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - async def extract_data(pipeline_response): - deserialized = self._deserialize('ManagedVirtualNetworkListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks'} # type: ignore - - async def create_or_update( - self, - resource_group_name: str, - factory_name: str, - managed_virtual_network_name: str, - properties: "models.ManagedVirtualNetwork", - if_match: Optional[str] = None, - **kwargs - ) -> "models.ManagedVirtualNetworkResource": - """Creates or updates a managed Virtual Network. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. - :type managed_virtual_network_name: str - :param properties: Managed Virtual Network properties. 
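For orientation, the paging scaffolding being deleted above (prepare_request / extract_data / get_next wrapped in AsyncItemPaged) is consumed with a plain `async for`. A minimal sketch follows; the import path and client class name are assumptions about the vendored SDK layout, and the plural `managed_virtual_networks` attribute is inferred from the client factory's naming (e.g. `trigger_runs`), not confirmed by this diff:

    # Illustrative sketch only, not part of the diff. Import path, client
    # name, and operation-group attribute are assumptions.
    import asyncio

    from azure.identity.aio import DefaultAzureCredential
    from data_factory_management_client.aio import DataFactoryManagementClient  # hypothetical path

    async def list_managed_vnets() -> None:
        credential = DefaultAzureCredential()
        client = DataFactoryManagementClient(credential, subscription_id="<subscription-id>")
        # AsyncItemPaged yields individual resources and follows next_link
        # between pages transparently, via the extract_data/get_next pair.
        async for vnet in client.managed_virtual_networks.list_by_factory(
            resource_group_name="exampleResourceGroup",
            factory_name="exampleFactoryName",
        ):
            print(vnet.name)
        await credential.close()

    asyncio.run(list_managed_vnets())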
- :type properties: ~data_factory_management_client.models.ManagedVirtualNetwork - :param if_match: ETag of the managed Virtual Network entity. Should only be specified for - update, for which it should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ManagedVirtualNetworkResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ManagedVirtualNetworkResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - managed_virtual_network = models.ManagedVirtualNetworkResource(properties=properties) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(managed_virtual_network, 'ManagedVirtualNetworkResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ManagedVirtualNetworkResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}'} # type: ignore - - async def get( - self, - resource_group_name: str, - factory_name: str, - managed_virtual_network_name: str, - if_none_match: Optional[str] = None, - **kwargs - ) -> "models.ManagedVirtualNetworkResource": - """Gets a managed 
Virtual Network. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. - :type managed_virtual_network_name: str - :param if_none_match: ETag of the managed Virtual Network entity. Should only be specified for - get. If the ETag matches the existing entity tag, or if * was provided, then no content will be - returned. - :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ManagedVirtualNetworkResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ManagedVirtualNetworkResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ManagedVirtualNetworkResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_operation_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_operation_operations_async.py deleted file mode 100644 index 83206d77039..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_operation_operations_async.py +++ /dev/null @@ -1,101 +0,0 @@ -# coding=utf-8 -# 
-------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar -import warnings - -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class OperationOperations: - """OperationOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list( - self, - **kwargs - ) -> AsyncIterable["models.OperationListResponse"]: - """Lists the available Azure Data Factory API operations. 
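The same pager pattern applies to this provider-level operation listing; a one-line sketch, reusing the hypothetical `client` from the earlier example:

    # Illustrative sketch: enumerate the Microsoft.DataFactory REST operations.
    async for op in client.operations.list():
        print(op.name)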
- - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either OperationListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.OperationListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.OperationListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list.metadata['url'] # type: ignore - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - async def extract_data(pipeline_response): - deserialized = self._deserialize('OperationListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': '/providers/Microsoft.DataFactory/operations'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_operations_async.py deleted file mode 100644 index 34c7453f951..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_operations_async.py +++ /dev/null @@ -1,405 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar -import warnings - -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... 
import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class PipelineOperations: - """PipelineOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_factory( - self, - resource_group_name: str, - factory_name: str, - **kwargs - ) -> AsyncIterable["models.PipelineListResponse"]: - """Lists pipelines. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either PipelineListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.PipelineListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - async def extract_data(pipeline_response): - deserialized = self._deserialize('PipelineListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - 
map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines'} # type: ignore - - async def create_or_update( - self, - resource_group_name: str, - factory_name: str, - pipeline_name: str, - pipeline: "models.PipelineResource", - if_match: Optional[str] = None, - **kwargs - ) -> "models.PipelineResource": - """Creates or updates a pipeline. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param pipeline_name: The pipeline name. - :type pipeline_name: str - :param pipeline: Pipeline resource definition. - :type pipeline: ~data_factory_management_client.models.PipelineResource - :param if_match: ETag of the pipeline entity. Should only be specified for update, for which - it should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: PipelineResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.PipelineResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(pipeline, 'PipelineResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, 
error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('PipelineResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} # type: ignore - - async def get( - self, - resource_group_name: str, - factory_name: str, - pipeline_name: str, - if_none_match: Optional[str] = None, - **kwargs - ) -> Optional["models.PipelineResource"]: - """Gets a pipeline. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param pipeline_name: The pipeline name. - :type pipeline_name: str - :param if_none_match: ETag of the pipeline entity. Should only be specified for get. If the - ETag matches the existing entity tag, or if * was provided, then no content will be returned. - :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: PipelineResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.PipelineResource or None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.PipelineResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 304]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('PipelineResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} # type: ignore - - async def delete( - self, - resource_group_name: str, - factory_name: str, - pipeline_name: str, - **kwargs - ) -> None: - """Deletes a pipeline. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param pipeline_name: The pipeline name. - :type pipeline_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} # type: ignore - - async def create_run( - self, - resource_group_name: str, - factory_name: str, - pipeline_name: str, - reference_pipeline_run_id: Optional[str] = None, - is_recovery: Optional[bool] = None, - start_activity_name: Optional[str] = None, - start_from_failure: Optional[bool] = None, - parameters: Optional[Dict[str, object]] = None, - **kwargs - ) -> "models.CreateRunResponse": - """Creates a run of a pipeline. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param pipeline_name: The pipeline name. - :type pipeline_name: str - :param reference_pipeline_run_id: The pipeline run identifier. If run ID is specified the - parameters of the specified run will be used to create a new run. - :type reference_pipeline_run_id: str - :param is_recovery: Recovery mode flag. 
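The If-Match / If-None-Match plumbing in the pipeline get and create_or_update methods above is standard ETag optimistic concurrency. A sketch of a read-modify-write guarded by the entity tag; the `etag` attribute on the resource is assumed to be server-populated, and `pipelines` is the assumed operation-group attribute:

    # Illustrative sketch: conditional update of a pipeline. If a competing
    # writer changed the entity since we read it, the conditional PUT fails
    # and surfaces as HttpResponseError.
    async def update_pipeline(client, rg: str, factory: str, name: str) -> None:
        current = await client.pipelines.get(rg, factory, name)
        # ... mutate `current` as needed ...
        await client.pipelines.create_or_update(
            rg, factory, name, current,
            if_match=current.etag,  # assumed server-assigned entity tag
        )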
If recovery mode is set to true, the specified - referenced pipeline run and the new run will be grouped under the same groupId. - :type is_recovery: bool - :param start_activity_name: In recovery mode, the rerun will start from this activity. If not - specified, all activities will run. - :type start_activity_name: str - :param start_from_failure: In recovery mode, if set to true, the rerun will start from failed - activities. The property will be used only if startActivityName is not specified. - :type start_from_failure: bool - :param parameters: Parameters of the pipeline run. These parameters will be used only if the - runId is not specified. - :type parameters: dict[str, object] - :keyword callable cls: A custom type or function that will be passed the direct response - :return: CreateRunResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.CreateRunResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.CreateRunResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_run.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - if reference_pipeline_run_id is not None: - query_parameters['referencePipelineRunId'] = self._serialize.query("reference_pipeline_run_id", reference_pipeline_run_id, 'str') - if is_recovery is not None: - query_parameters['isRecovery'] = self._serialize.query("is_recovery", is_recovery, 'bool') - if start_activity_name is not None: - query_parameters['startActivityName'] = self._serialize.query("start_activity_name", start_activity_name, 'str') - if start_from_failure is not None: - query_parameters['startFromFailure'] = self._serialize.query("start_from_failure", start_from_failure, 'bool') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - if parameters is not None: - body_content = self._serialize.body(parameters, '{object}') - else: - body_content = None - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, 
error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('CreateRunResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_run.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}/createRun'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_run_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_run_operations_async.py deleted file mode 100644 index 5cdfd09fe01..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_run_operations_async.py +++ /dev/null @@ -1,243 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -import datetime -from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class PipelineRunOperations: - """PipelineRunOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - async def query_by_factory( - self, - resource_group_name: str, - factory_name: str, - last_updated_after: datetime.datetime, - last_updated_before: datetime.datetime, - continuation_token_parameter: Optional[str] = None, - filters: Optional[List["models.RunQueryFilter"]] = None, - order_by: Optional[List["models.RunQueryOrderBy"]] = None, - **kwargs - ) -> "models.PipelineRunsQueryResponse": - """Query pipeline runs in the factory based on input filter conditions. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param last_updated_after: The time at or after which the run event was updated in 'ISO 8601' - format. 
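A usage sketch for the create_run method deleted above: the `parameters` dict travels in the request body, while the recovery options are serialized as query parameters; per the docstring, a recovery rerun reuses the referenced run's parameters and is grouped under the same groupId (client and attribute names assumed as before):

    # Illustrative sketch: start a run, then rerun its failed activities.
    run = await client.pipelines.create_run(
        "exampleResourceGroup", "exampleFactoryName", "examplePipeline",
        parameters={"OutputBlobNameList": ["exampleoutput.csv"]},
    )
    print(run.run_id)

    # Recovery rerun: resumes from failed activities of the referenced run;
    # both runs share one group id.
    await client.pipelines.create_run(
        "exampleResourceGroup", "exampleFactoryName", "examplePipeline",
        reference_pipeline_run_id=run.run_id,
        is_recovery=True,
        start_from_failure=True,
    )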
- :type last_updated_after: ~datetime.datetime - :param last_updated_before: The time at or before which the run event was updated in 'ISO 8601' - format. - :type last_updated_before: ~datetime.datetime - :param continuation_token_parameter: The continuation token for getting the next page of - results. Null for first page. - :type continuation_token_parameter: str - :param filters: List of filters. - :type filters: list[~data_factory_management_client.models.RunQueryFilter] - :param order_by: List of OrderBy option. - :type order_by: list[~data_factory_management_client.models.RunQueryOrderBy] - :keyword callable cls: A custom type or function that will be passed the direct response - :return: PipelineRunsQueryResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.PipelineRunsQueryResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRunsQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.query_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('PipelineRunsQueryResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryPipelineRuns'} # type: ignore - - async def get( - self, - resource_group_name: str, - factory_name: str, - run_id: str, - **kwargs - ) -> "models.PipelineRun": - """Get a pipeline run by its run ID. 
- - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param run_id: The pipeline run identifier. - :type run_id: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: PipelineRun, or the result of cls(response) - :rtype: ~data_factory_management_client.models.PipelineRun - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRun"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'runId': self._serialize.url("run_id", run_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('PipelineRun', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}'} # type: ignore - - async def cancel( - self, - resource_group_name: str, - factory_name: str, - run_id: str, - is_recursive: Optional[bool] = None, - **kwargs - ) -> None: - """Cancel a pipeline run by its run ID. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param run_id: The pipeline run identifier. - :type run_id: str - :param is_recursive: If true, cancel all the Child pipelines that are triggered by the current - pipeline. 
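Putting query_by_factory and cancel together: a sketch that finds the last day's in-progress runs of one pipeline and cancels them recursively. The RunQueryFilter constructor arguments and the `pipeline_runs` attribute are assumptions consistent with the models referenced above:

    # Illustrative sketch: query recent pipeline runs, then cancel stragglers.
    import datetime

    from data_factory_management_client import models  # hypothetical path

    now = datetime.datetime.utcnow()
    result = await client.pipeline_runs.query_by_factory(
        "exampleResourceGroup", "exampleFactoryName",
        last_updated_after=now - datetime.timedelta(days=1),
        last_updated_before=now,
        filters=[models.RunQueryFilter(
            operand="PipelineName", operator="Equals", values=["examplePipeline"],
        )],
    )
    for run in result.value:
        if run.status == "InProgress":
            await client.pipeline_runs.cancel(
                "exampleResourceGroup", "exampleFactoryName", run.run_id,
                is_recursive=True,  # also cancel triggered child pipelines
            )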
- :type is_recursive: bool - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.cancel.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'runId': self._serialize.url("run_id", run_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if is_recursive is not None: - query_parameters['isRecursive'] = self._serialize.query("is_recursive", is_recursive, 'bool') - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/cancel'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_operations_async.py deleted file mode 100644 index f4669b45bc2..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_operations_async.py +++ /dev/null @@ -1,877 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union -import warnings - -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.mgmt.core.exceptions import ARMErrorFormat -from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling - -from ... import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class TriggerOperations: - """TriggerOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_factory( - self, - resource_group_name: str, - factory_name: str, - **kwargs - ) -> AsyncIterable["models.TriggerListResponse"]: - """Lists triggers. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
- :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either TriggerListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.TriggerListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - async def extract_data(pipeline_response): - deserialized = self._deserialize('TriggerListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers'} # type: ignore - - async def query_by_factory( - self, - resource_group_name: str, - factory_name: str, - continuation_token_parameter: Optional[str] = None, - parent_trigger_name: Optional[str] = None, - **kwargs - ) -> "models.TriggerQueryResponse": - """Query triggers. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param continuation_token_parameter: The continuation token for getting the next page of - results. Null for first page. - :type continuation_token_parameter: str - :param parent_trigger_name: The name of the parent TumblingWindowTrigger to get the child rerun - triggers. 
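Unlike the pager-based listings, this query_by_factory returns a single page per call and hands back a continuation token, so draining it is an explicit loop. A sketch; the `value` and `continuation_token` attributes on TriggerQueryResponse are assumed:

    # Illustrative sketch: thread the continuation token through repeated
    # calls until the service stops returning one.
    token = None
    while True:
        page = await client.triggers.query_by_factory(
            "exampleResourceGroup", "exampleFactoryName",
            continuation_token_parameter=token,
            parent_trigger_name="exampleTumblingWindowTrigger",
        )
        for trigger in page.value:
            print(trigger.name)
        token = page.continuation_token  # assumed response attribute
        if not token:
            break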
- :type parent_trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerQueryResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.TriggerQueryResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - filter_parameters = models.TriggerFilterParameters(continuation_token=continuation_token_parameter, parent_trigger_name=parent_trigger_name) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.query_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(filter_parameters, 'TriggerFilterParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('TriggerQueryResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/querytriggers'} # type: ignore - - async def create_or_update( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - properties: "models.Trigger", - if_match: Optional[str] = None, - **kwargs - ) -> "models.TriggerResource": - """Creates or updates a trigger. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :param properties: Properties of the trigger. - :type properties: ~data_factory_management_client.models.Trigger - :param if_match: ETag of the trigger entity. Should only be specified for update, for which it - should match existing entity or can be * for unconditional update. 
- :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.TriggerResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - trigger = models.TriggerResource(properties=properties) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(trigger, 'TriggerResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('TriggerResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore - - async def get( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - if_none_match: Optional[str] = None, - **kwargs - ) -> Optional["models.TriggerResource"]: - """Gets a trigger. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :param if_none_match: ETag of the trigger entity. Should only be specified for get. If the ETag - matches the existing entity tag, or if * was provided, then no content will be returned. 
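A hedged sketch of `create_or_update` with optimistic concurrency, assuming `models` is this SDK's models namespace, that `ScheduleTrigger`/`ScheduleTriggerRecurrence` are available there, and that the returned resource carries the usual read-only `etag`:

    # The first write creates the trigger; the second passes the returned
    # ETag as If-Match, so a concurrent modification fails instead of being
    # silently overwritten ("*" would force an unconditional update).
    async def upsert_trigger(client, models, rg, factory, trigger_name):
        properties = models.ScheduleTrigger(
            recurrence=models.ScheduleTriggerRecurrence(
                frequency="Minute", interval=15
            )
        )
        created = await client.triggers.create_or_update(
            rg, factory, trigger_name, properties
        )
        return await client.triggers.create_or_update(
            rg, factory, trigger_name, properties, if_match=created.etag
        )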
- :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.TriggerResource or None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 304]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('TriggerResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore - - async def delete( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs - ) -> None: - """Deletes a trigger. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. 
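The 304 path above deserializes nothing, so a conditional read is a cheap way to poll for changes; a small sketch under the same `client` assumption:

    # Pass the last ETag seen; if the entity is unchanged the service answers
    # 304 Not Modified and the call returns None instead of a TriggerResource.
    async def get_if_changed(client, rg, factory, trigger_name, last_etag):
        return await client.triggers.get(
            rg, factory, trigger_name, if_none_match=last_etag
        )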
- :type trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore - - async def _subscribe_to_event_initial( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs - ) -> Optional["models.TriggerSubscriptionOperationStatus"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self._subscribe_to_event_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - 
request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - _subscribe_to_event_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore - - async def begin_subscribe_to_event( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs - ) -> AsyncLROPoller["models.TriggerSubscriptionOperationStatus"]: - """Subscribe event trigger to events. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
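How the polling keywords just described are typically used, sketched under the same `client` assumption (`begin_unsubscribe_from_event` later in this file is symmetric):

    # polling=True (the default) selects AsyncARMPolling; polling_interval
    # only matters when the service omits a Retry-After header.
    async def subscribe(client, rg, factory, trigger_name):
        poller = await client.triggers.begin_subscribe_to_event(
            rg, factory, trigger_name, polling_interval=10
        )
        # result() waits for completion and yields the
        # TriggerSubscriptionOperationStatus from the final response.
        return await poller.result()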
- :return: An instance of AsyncLROPoller that returns either TriggerSubscriptionOperationStatus or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.TriggerSubscriptionOperationStatus] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = await self._subscribe_to_event_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_subscribe_to_event.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore - - async def get_event_subscription_status( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs - ) -> "models.TriggerSubscriptionOperationStatus": - """Get a trigger's event subscription status. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. 
- :type trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerSubscriptionOperationStatus, or the result of cls(response) - :rtype: ~data_factory_management_client.models.TriggerSubscriptionOperationStatus - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get_event_subscription_status.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get_event_subscription_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/getEventSubscriptionStatus'} # type: ignore - - async def _unsubscribe_from_event_initial( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs - ) -> Optional["models.TriggerSubscriptionOperationStatus"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self._unsubscribe_from_event_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, 
pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - _unsubscribe_from_event_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore - - async def begin_unsubscribe_from_event( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs - ) -> AsyncLROPoller["models.TriggerSubscriptionOperationStatus"]: - """Unsubscribe event trigger from events. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
- :return: An instance of AsyncLROPoller that returns either TriggerSubscriptionOperationStatus or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.TriggerSubscriptionOperationStatus] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = await self._unsubscribe_from_event_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_unsubscribe_from_event.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore - - async def _start_initial( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs - ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self._start_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise 
HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start'} # type: ignore - - async def begin_start( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs - ) -> AsyncLROPoller[None]: - """Starts a trigger. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = await self._start_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - if cls: - return cls(pipeline_response, None, {}) - - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start'} # type: ignore - - async def _stop_initial( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs - ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self._stop_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', 
max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop'} # type: ignore - - async def begin_stop( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs - ) -> AsyncLROPoller[None]: - """Stops a trigger. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
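A sketch of the continuation-token flow these start/stop pollers document, assuming `AsyncLROPoller.continuation_token()` from azure-core; `begin_start` works the same way:

    # Kick off the LRO, detach, and resume it later (possibly elsewhere).
    async def stop_and_detach(client, rg, factory, trigger_name):
        poller = await client.triggers.begin_stop(rg, factory, trigger_name)
        return poller.continuation_token()

    async def resume_stop(client, rg, factory, trigger_name, token):
        poller = await client.triggers.begin_stop(
            rg, factory, trigger_name, continuation_token=token
        )
        await poller.result()  # begin_stop's poller resolves to None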
- :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = await self._stop_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - if cls: - return cls(pipeline_response, None, {}) - - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_run_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_run_operations_async.py deleted file mode 100644 index 3401f9c95c1..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_run_operations_async.py +++ /dev/null @@ -1,241 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -import datetime -from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class TriggerRunOperations: - """TriggerRunOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. 
- :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - async def rerun( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - run_id: str, - **kwargs - ) -> None: - """Rerun single trigger instance by runId. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :param run_id: The pipeline run identifier. - :type run_id: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.rerun.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - 'runId': self._serialize.url("run_id", run_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - rerun.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/rerun'} # type: ignore - - async def cancel( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - run_id: str, - **kwargs - ) -> None: - """Cancel a single trigger instance by runId. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :param run_id: The pipeline run identifier. 
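A one-call sketch for the run-level operations above, assuming the operation group is exposed as `client.trigger_runs`:

    # Rerun a single trigger instance by its run id; cancel() is the mirror
    # image, and both return None on HTTP 200.
    async def rerun_trigger_run(client, rg, factory, trigger_name, run_id):
        await client.trigger_runs.rerun(rg, factory, trigger_name, run_id)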
- :type run_id: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.cancel.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - 'runId': self._serialize.url("run_id", run_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/cancel'} # type: ignore - - async def query_by_factory( - self, - resource_group_name: str, - factory_name: str, - last_updated_after: datetime.datetime, - last_updated_before: datetime.datetime, - continuation_token_parameter: Optional[str] = None, - filters: Optional[List["models.RunQueryFilter"]] = None, - order_by: Optional[List["models.RunQueryOrderBy"]] = None, - **kwargs - ) -> "models.TriggerRunsQueryResponse": - """Query trigger runs. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param last_updated_after: The time at or after which the run event was updated in 'ISO 8601' - format. - :type last_updated_after: ~datetime.datetime - :param last_updated_before: The time at or before which the run event was updated in 'ISO 8601' - format. - :type last_updated_before: ~datetime.datetime - :param continuation_token_parameter: The continuation token for getting the next page of - results. Null for first page. - :type continuation_token_parameter: str - :param filters: List of filters. - :type filters: list[~data_factory_management_client.models.RunQueryFilter] - :param order_by: List of OrderBy option. 
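A hedged sketch of the query documented above; the filter and order-by field values (`TriggerName`, `TriggerRunTimestamp`, `DESC`) are assumed from this SDK's `RunQueryFilter`/`RunQueryOrderBy` models:

    import datetime

    # Trigger runs from the last 24 hours for one trigger, newest first.
    async def recent_runs(client, models, rg, factory, trigger_name):
        now = datetime.datetime.utcnow()
        response = await client.trigger_runs.query_by_factory(
            rg, factory,
            last_updated_after=now - datetime.timedelta(days=1),
            last_updated_before=now,
            filters=[models.RunQueryFilter(
                operand="TriggerName", operator="Equals",
                values=[trigger_name])],
            order_by=[models.RunQueryOrderBy(
                order_by="TriggerRunTimestamp", order="DESC")],
        )
        return response.value  # one page; continuation_token fetches more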
- :type order_by: list[~data_factory_management_client.models.RunQueryOrderBy] - :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerRunsQueryResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.TriggerRunsQueryResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerRunsQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.query_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('TriggerRunsQueryResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryTriggerRuns'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py index 1f1ab102631..6e6d1c0e2cb 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py @@ -18,6 +18,10 @@ from ._models_py3 import AmazonMwsLinkedService from ._models_py3 import AmazonMwsObjectDataset from ._models_py3 import AmazonMwsSource + from ._models_py3 import AmazonRdsForOracleLinkedService + from ._models_py3 import AmazonRdsForOraclePartitionSettings + from ._models_py3 import AmazonRdsForOracleSource + from 
._models_py3 import AmazonRdsForOracleTableDataset from ._models_py3 import AmazonRedshiftLinkedService from ._models_py3 import AmazonRedshiftSource from ._models_py3 import AmazonRedshiftTableDataset @@ -157,6 +161,9 @@ from ._models_py3 import CreateDataFlowDebugSessionResponse from ._models_py3 import CreateLinkedIntegrationRuntimeRequest from ._models_py3 import CreateRunResponse + from ._models_py3 import Credential + from ._models_py3 import CredentialReference + from ._models_py3 import CredentialResource from ._models_py3 import CustomActivity from ._models_py3 import CustomActivityReferenceObject from ._models_py3 import CustomDataSourceLinkedService @@ -277,6 +284,7 @@ from ._models_py3 import GetSsisObjectMetadataRequest from ._models_py3 import GitHubAccessTokenRequest from ._models_py3 import GitHubAccessTokenResponse + from ._models_py3 import GitHubClientSecret from ._models_py3 import GlobalParameterSpecification from ._models_py3 import GoogleAdWordsLinkedService from ._models_py3 import GoogleAdWordsObjectDataset @@ -336,6 +344,10 @@ from ._models_py3 import IntegrationRuntimeMonitoringData from ._models_py3 import IntegrationRuntimeNodeIpAddress from ._models_py3 import IntegrationRuntimeNodeMonitoringData + from ._models_py3 import IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint + from ._models_py3 import IntegrationRuntimeOutboundNetworkDependenciesEndpoint + from ._models_py3 import IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails + from ._models_py3 import IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse from ._models_py3 import IntegrationRuntimeReference from ._models_py3 import IntegrationRuntimeRegenerateKeyParameters from ._models_py3 import IntegrationRuntimeResource @@ -371,6 +383,7 @@ from ._models_py3 import MagentoLinkedService from ._models_py3 import MagentoObjectDataset from ._models_py3 import MagentoSource + from ._models_py3 import ManagedIdentityCredential from ._models_py3 import ManagedIntegrationRuntime from ._models_py3 import ManagedIntegrationRuntimeError from ._models_py3 import ManagedIntegrationRuntimeNode @@ -390,12 +403,14 @@ from ._models_py3 import MarketoLinkedService from ._models_py3 import MarketoObjectDataset from ._models_py3 import MarketoSource + from ._models_py3 import MetadataItem from ._models_py3 import MicrosoftAccessLinkedService from ._models_py3 import MicrosoftAccessSink from ._models_py3 import MicrosoftAccessSource from ._models_py3 import MicrosoftAccessTableDataset from ._models_py3 import MongoDbAtlasCollectionDataset from ._models_py3 import MongoDbAtlasLinkedService + from ._models_py3 import MongoDbAtlasSink from ._models_py3 import MongoDbAtlasSource from ._models_py3 import MongoDbCollectionDataset from ._models_py3 import MongoDbCursorMethodsProperties @@ -403,6 +418,7 @@ from ._models_py3 import MongoDbSource from ._models_py3 import MongoDbV2CollectionDataset from ._models_py3 import MongoDbV2LinkedService + from ._models_py3 import MongoDbV2Sink from ._models_py3 import MongoDbV2Source from ._models_py3 import MultiplePipelineTrigger from ._models_py3 import MySqlLinkedService @@ -551,6 +567,7 @@ from ._models_py3 import ServiceNowLinkedService from ._models_py3 import ServiceNowObjectDataset from ._models_py3 import ServiceNowSource + from ._models_py3 import ServicePrincipalCredential from ._models_py3 import SetVariableActivity from ._models_py3 import SftpLocation from ._models_py3 import SftpReadSettings @@ -575,6 +592,7 @@ from ._models_py3 import 
SqlAlwaysEncryptedProperties from ._models_py3 import SqlDwSink from ._models_py3 import SqlDwSource + from ._models_py3 import SqlDwUpsertSettings from ._models_py3 import SqlMiSink from ._models_py3 import SqlMiSource from ._models_py3 import SqlPartitionSettings @@ -585,6 +603,7 @@ from ._models_py3 import SqlServerTableDataset from ._models_py3 import SqlSink from ._models_py3 import SqlSource + from ._models_py3 import SqlUpsertSettings from ._models_py3 import SquareLinkedService from ._models_py3 import SquareObjectDataset from ._models_py3 import SquareSource @@ -683,6 +702,10 @@ from ._models import AmazonMwsLinkedService # type: ignore from ._models import AmazonMwsObjectDataset # type: ignore from ._models import AmazonMwsSource # type: ignore + from ._models import AmazonRdsForOracleLinkedService # type: ignore + from ._models import AmazonRdsForOraclePartitionSettings # type: ignore + from ._models import AmazonRdsForOracleSource # type: ignore + from ._models import AmazonRdsForOracleTableDataset # type: ignore from ._models import AmazonRedshiftLinkedService # type: ignore from ._models import AmazonRedshiftSource # type: ignore from ._models import AmazonRedshiftTableDataset # type: ignore @@ -822,6 +845,9 @@ from ._models import CreateDataFlowDebugSessionResponse # type: ignore from ._models import CreateLinkedIntegrationRuntimeRequest # type: ignore from ._models import CreateRunResponse # type: ignore + from ._models import Credential # type: ignore + from ._models import CredentialReference # type: ignore + from ._models import CredentialResource # type: ignore from ._models import CustomActivity # type: ignore from ._models import CustomActivityReferenceObject # type: ignore from ._models import CustomDataSourceLinkedService # type: ignore @@ -942,6 +968,7 @@ from ._models import GetSsisObjectMetadataRequest # type: ignore from ._models import GitHubAccessTokenRequest # type: ignore from ._models import GitHubAccessTokenResponse # type: ignore + from ._models import GitHubClientSecret # type: ignore from ._models import GlobalParameterSpecification # type: ignore from ._models import GoogleAdWordsLinkedService # type: ignore from ._models import GoogleAdWordsObjectDataset # type: ignore @@ -1001,6 +1028,10 @@ from ._models import IntegrationRuntimeMonitoringData # type: ignore from ._models import IntegrationRuntimeNodeIpAddress # type: ignore from ._models import IntegrationRuntimeNodeMonitoringData # type: ignore + from ._models import IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint # type: ignore + from ._models import IntegrationRuntimeOutboundNetworkDependenciesEndpoint # type: ignore + from ._models import IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails # type: ignore + from ._models import IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse # type: ignore from ._models import IntegrationRuntimeReference # type: ignore from ._models import IntegrationRuntimeRegenerateKeyParameters # type: ignore from ._models import IntegrationRuntimeResource # type: ignore @@ -1036,6 +1067,7 @@ from ._models import MagentoLinkedService # type: ignore from ._models import MagentoObjectDataset # type: ignore from ._models import MagentoSource # type: ignore + from ._models import ManagedIdentityCredential # type: ignore from ._models import ManagedIntegrationRuntime # type: ignore from ._models import ManagedIntegrationRuntimeError # type: ignore from ._models import ManagedIntegrationRuntimeNode # type: ignore @@ -1055,12 +1087,14 @@ from 
._models import MarketoLinkedService # type: ignore from ._models import MarketoObjectDataset # type: ignore from ._models import MarketoSource # type: ignore + from ._models import MetadataItem # type: ignore from ._models import MicrosoftAccessLinkedService # type: ignore from ._models import MicrosoftAccessSink # type: ignore from ._models import MicrosoftAccessSource # type: ignore from ._models import MicrosoftAccessTableDataset # type: ignore from ._models import MongoDbAtlasCollectionDataset # type: ignore from ._models import MongoDbAtlasLinkedService # type: ignore + from ._models import MongoDbAtlasSink # type: ignore from ._models import MongoDbAtlasSource # type: ignore from ._models import MongoDbCollectionDataset # type: ignore from ._models import MongoDbCursorMethodsProperties # type: ignore @@ -1068,6 +1102,7 @@ from ._models import MongoDbSource # type: ignore from ._models import MongoDbV2CollectionDataset # type: ignore from ._models import MongoDbV2LinkedService # type: ignore + from ._models import MongoDbV2Sink # type: ignore from ._models import MongoDbV2Source # type: ignore from ._models import MultiplePipelineTrigger # type: ignore from ._models import MySqlLinkedService # type: ignore @@ -1216,6 +1251,7 @@ from ._models import ServiceNowLinkedService # type: ignore from ._models import ServiceNowObjectDataset # type: ignore from ._models import ServiceNowSource # type: ignore + from ._models import ServicePrincipalCredential # type: ignore from ._models import SetVariableActivity # type: ignore from ._models import SftpLocation # type: ignore from ._models import SftpReadSettings # type: ignore @@ -1240,6 +1276,7 @@ from ._models import SqlAlwaysEncryptedProperties # type: ignore from ._models import SqlDwSink # type: ignore from ._models import SqlDwSource # type: ignore + from ._models import SqlDwUpsertSettings # type: ignore from ._models import SqlMiSink # type: ignore from ._models import SqlMiSource # type: ignore from ._models import SqlPartitionSettings # type: ignore @@ -1250,6 +1287,7 @@ from ._models import SqlServerTableDataset # type: ignore from ._models import SqlSink # type: ignore from ._models import SqlSource # type: ignore + from ._models import SqlUpsertSettings # type: ignore from ._models import SquareLinkedService # type: ignore from ._models import SquareObjectDataset # type: ignore from ._models import SquareSource # type: ignore @@ -1338,6 +1376,7 @@ from ._models import ZohoSource # type: ignore from ._data_factory_management_client_enums import ( + AmazonRdsForOraclePartitionOption, AvroCompressionCodec, AzureFunctionActivityMethod, AzureSearchIndexWriteBehaviorType, @@ -1356,7 +1395,6 @@ DependencyCondition, DynamicsAuthenticationType, DynamicsDeploymentType, - DynamicsServicePrincipalCredentialType, DynamicsSinkWriteBehavior, EventSubscriptionStatus, FactoryIdentityType, @@ -1410,12 +1448,15 @@ SapTablePartitionOption, SelfHostedIntegrationRuntimeNodeStatus, ServiceNowAuthenticationType, + ServicePrincipalCredentialType, SftpAuthenticationType, SparkAuthenticationType, SparkServerType, SparkThriftTransportProtocol, SqlAlwaysEncryptedAkvAuthType, + SqlDwWriteBehaviorEnum, SqlPartitionOption, + SqlWriteBehaviorEnum, SsisLogLocationType, SsisObjectMetadataType, SsisPackageLocationType, @@ -1444,6 +1485,10 @@ 'AmazonMwsLinkedService', 'AmazonMwsObjectDataset', 'AmazonMwsSource', + 'AmazonRdsForOracleLinkedService', + 'AmazonRdsForOraclePartitionSettings', + 'AmazonRdsForOracleSource', + 'AmazonRdsForOracleTableDataset', 
'AmazonRedshiftLinkedService', 'AmazonRedshiftSource', 'AmazonRedshiftTableDataset', @@ -1583,6 +1628,9 @@ 'CreateDataFlowDebugSessionResponse', 'CreateLinkedIntegrationRuntimeRequest', 'CreateRunResponse', + 'Credential', + 'CredentialReference', + 'CredentialResource', 'CustomActivity', 'CustomActivityReferenceObject', 'CustomDataSourceLinkedService', @@ -1703,6 +1751,7 @@ 'GetSsisObjectMetadataRequest', 'GitHubAccessTokenRequest', 'GitHubAccessTokenResponse', + 'GitHubClientSecret', 'GlobalParameterSpecification', 'GoogleAdWordsLinkedService', 'GoogleAdWordsObjectDataset', @@ -1762,6 +1811,10 @@ 'IntegrationRuntimeMonitoringData', 'IntegrationRuntimeNodeIpAddress', 'IntegrationRuntimeNodeMonitoringData', + 'IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint', + 'IntegrationRuntimeOutboundNetworkDependenciesEndpoint', + 'IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails', + 'IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse', 'IntegrationRuntimeReference', 'IntegrationRuntimeRegenerateKeyParameters', 'IntegrationRuntimeResource', @@ -1797,6 +1850,7 @@ 'MagentoLinkedService', 'MagentoObjectDataset', 'MagentoSource', + 'ManagedIdentityCredential', 'ManagedIntegrationRuntime', 'ManagedIntegrationRuntimeError', 'ManagedIntegrationRuntimeNode', @@ -1816,12 +1870,14 @@ 'MarketoLinkedService', 'MarketoObjectDataset', 'MarketoSource', + 'MetadataItem', 'MicrosoftAccessLinkedService', 'MicrosoftAccessSink', 'MicrosoftAccessSource', 'MicrosoftAccessTableDataset', 'MongoDbAtlasCollectionDataset', 'MongoDbAtlasLinkedService', + 'MongoDbAtlasSink', 'MongoDbAtlasSource', 'MongoDbCollectionDataset', 'MongoDbCursorMethodsProperties', @@ -1829,6 +1885,7 @@ 'MongoDbSource', 'MongoDbV2CollectionDataset', 'MongoDbV2LinkedService', + 'MongoDbV2Sink', 'MongoDbV2Source', 'MultiplePipelineTrigger', 'MySqlLinkedService', @@ -1977,6 +2034,7 @@ 'ServiceNowLinkedService', 'ServiceNowObjectDataset', 'ServiceNowSource', + 'ServicePrincipalCredential', 'SetVariableActivity', 'SftpLocation', 'SftpReadSettings', @@ -2001,6 +2059,7 @@ 'SqlAlwaysEncryptedProperties', 'SqlDwSink', 'SqlDwSource', + 'SqlDwUpsertSettings', 'SqlMiSink', 'SqlMiSource', 'SqlPartitionSettings', @@ -2011,6 +2070,7 @@ 'SqlServerTableDataset', 'SqlSink', 'SqlSource', + 'SqlUpsertSettings', 'SquareLinkedService', 'SquareObjectDataset', 'SquareSource', @@ -2097,6 +2157,7 @@ 'ZohoLinkedService', 'ZohoObjectDataset', 'ZohoSource', + 'AmazonRdsForOraclePartitionOption', 'AvroCompressionCodec', 'AzureFunctionActivityMethod', 'AzureSearchIndexWriteBehaviorType', @@ -2115,7 +2176,6 @@ 'DependencyCondition', 'DynamicsAuthenticationType', 'DynamicsDeploymentType', - 'DynamicsServicePrincipalCredentialType', 'DynamicsSinkWriteBehavior', 'EventSubscriptionStatus', 'FactoryIdentityType', @@ -2169,12 +2229,15 @@ 'SapTablePartitionOption', 'SelfHostedIntegrationRuntimeNodeStatus', 'ServiceNowAuthenticationType', + 'ServicePrincipalCredentialType', 'SftpAuthenticationType', 'SparkAuthenticationType', 'SparkServerType', 'SparkThriftTransportProtocol', 'SqlAlwaysEncryptedAkvAuthType', + 'SqlDwWriteBehaviorEnum', 'SqlPartitionOption', + 'SqlWriteBehaviorEnum', 'SsisLogLocationType', 'SsisObjectMetadataType', 'SsisPackageLocationType', diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py index 1e1c0d92c7d..6935b71fb07 100644 --- 
a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py @@ -26,6 +26,12 @@ def __getattr__(cls, name): raise AttributeError(name) +class AmazonRdsForOraclePartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + NONE = "None" + PHYSICAL_PARTITIONS_OF_TABLE = "PhysicalPartitionsOfTable" + DYNAMIC_RANGE = "DynamicRange" + class AvroCompressionCodec(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): NONE = "none" @@ -77,14 +83,16 @@ class CassandraSourceReadConsistencyLevels(with_metaclass(_CaseInsensitiveEnumMe LOCAL_SERIAL = "LOCAL_SERIAL" class CompressionCodec(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """All available compressionCodec values. + """ NONE = "none" - GZIP = "gzip" - SNAPPY = "snappy" LZO = "lzo" BZIP2 = "bzip2" + GZIP = "gzip" DEFLATE = "deflate" ZIP_DEFLATE = "zipDeflate" + SNAPPY = "snappy" LZ4 = "lz4" TAR = "tar" TAR_G_ZIP = "tarGZip" @@ -174,9 +182,7 @@ class DependencyCondition(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): COMPLETED = "Completed" class DynamicsAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The authentication type to connect to Dynamics server. 'Office365' for online scenario, 'Ifd' - for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in - online scenario. Type: string (or Expression with resultType string). + """All available dynamicsAuthenticationType values. """ OFFICE365 = "Office365" @@ -184,23 +190,12 @@ class DynamicsAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, E AAD_SERVICE_PRINCIPAL = "AADServicePrincipal" class DynamicsDeploymentType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The deployment type of the Dynamics instance. 'Online' for Dynamics Online and - 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or Expression with - resultType string). + """All available dynamicsDeploymentType values. """ ONLINE = "Online" ON_PREMISES_WITH_IFD = "OnPremisesWithIfd" -class DynamicsServicePrincipalCredentialType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The service principal credential type to use in Server-To-Server authentication. - 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or - Expression with resultType string). - """ - - SERVICE_PRINCIPAL_KEY = "ServicePrincipalKey" - SERVICE_PRINCIPAL_CERT = "ServicePrincipalCert" - class DynamicsSinkWriteBehavior(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Defines values for DynamicsSinkWriteBehavior. """ @@ -267,7 +262,7 @@ class HBaseAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum BASIC = "Basic" class HdiNodeTypes(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The node types on which the script action should be executed. + """All available HdiNodeTypes values. """ HEADNODE = "Headnode" @@ -417,8 +412,7 @@ class JsonFormatFilePattern(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)) ARRAY_OF_OBJECTS = "arrayOfObjects" class JsonWriteFilePattern(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """File pattern of JSON. This setting controls the way a collection of JSON objects will be - treated. The default value is 'setOfObjects'. It is case-sensitive. + """All available filePatterns. 
""" SET_OF_OBJECTS = "setOfObjects" @@ -661,6 +655,13 @@ class ServiceNowAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, BASIC = "Basic" O_AUTH2 = "OAuth2" +class ServicePrincipalCredentialType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """All available servicePrincipalCredentialType values. + """ + + SERVICE_PRINCIPAL_KEY = "ServicePrincipalKey" + SERVICE_PRINCIPAL_CERT = "ServicePrincipalCert" + class SftpAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication type to be used to connect to the FTP server. """ @@ -702,6 +703,13 @@ class SqlAlwaysEncryptedAkvAuthType(with_metaclass(_CaseInsensitiveEnumMeta, str SERVICE_PRINCIPAL = "ServicePrincipal" MANAGED_IDENTITY = "ManagedIdentity" +class SqlDwWriteBehaviorEnum(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Specify the write behavior when copying data into sql dw. + """ + + INSERT = "Insert" + UPSERT = "Upsert" + class SqlPartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The partition mechanism that will be used for Sql read in parallel. """ @@ -710,6 +718,14 @@ class SqlPartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): PHYSICAL_PARTITIONS_OF_TABLE = "PhysicalPartitionsOfTable" DYNAMIC_RANGE = "DynamicRange" +class SqlWriteBehaviorEnum(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Specify the write behavior when copying data into sql. + """ + + INSERT = "Insert" + UPSERT = "Upsert" + STORED_PROCEDURE = "StoredProcedure" + class SsisLogLocationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The type of SSIS log location. """ diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py index e97fd0ab305..645cb484dfe 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py @@ -342,7 +342,7 @@ class LinkedService(msrest.serialization.Model): """The Azure Data Factory nested object which contains the information and credential which can be used to connect with related store or compute resource. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: AmazonMwsLinkedService, AmazonRedshiftLinkedService, AmazonS3LinkedService, AmazonS3CompatibleLinkedService, AzureBatchLinkedService, AzureBlobFsLinkedService, AzureBlobStorageLinkedService, AzureDataExplorerLinkedService, AzureDataLakeAnalyticsLinkedService, AzureDataLakeStoreLinkedService, AzureDatabricksLinkedService, AzureDatabricksDeltaLakeLinkedService, AzureFileStorageLinkedService, AzureFunctionLinkedService, AzureKeyVaultLinkedService, AzureMlLinkedService, AzureMlServiceLinkedService, AzureMariaDbLinkedService, AzureMySqlLinkedService, AzurePostgreSqlLinkedService, AzureSearchLinkedService, AzureSqlDwLinkedService, AzureSqlDatabaseLinkedService, AzureSqlMiLinkedService, AzureStorageLinkedService, AzureTableStorageLinkedService, CassandraLinkedService, CommonDataServiceForAppsLinkedService, ConcurLinkedService, CosmosDbLinkedService, CosmosDbMongoDbApiLinkedService, CouchbaseLinkedService, CustomDataSourceLinkedService, Db2LinkedService, DrillLinkedService, DynamicsLinkedService, DynamicsAxLinkedService, DynamicsCrmLinkedService, EloquaLinkedService, FileServerLinkedService, FtpServerLinkedService, GoogleAdWordsLinkedService, GoogleBigQueryLinkedService, GoogleCloudStorageLinkedService, GreenplumLinkedService, HBaseLinkedService, HdInsightLinkedService, HdInsightOnDemandLinkedService, HdfsLinkedService, HiveLinkedService, HttpLinkedService, HubspotLinkedService, ImpalaLinkedService, InformixLinkedService, JiraLinkedService, MagentoLinkedService, MariaDbLinkedService, MarketoLinkedService, MicrosoftAccessLinkedService, MongoDbLinkedService, MongoDbAtlasLinkedService, MongoDbV2LinkedService, MySqlLinkedService, NetezzaLinkedService, ODataLinkedService, OdbcLinkedService, Office365LinkedService, OracleLinkedService, OracleCloudStorageLinkedService, OracleServiceCloudLinkedService, PaypalLinkedService, PhoenixLinkedService, PostgreSqlLinkedService, PrestoLinkedService, QuickBooksLinkedService, ResponsysLinkedService, RestServiceLinkedService, SalesforceLinkedService, SalesforceMarketingCloudLinkedService, SalesforceServiceCloudLinkedService, SapBwLinkedService, SapCloudForCustomerLinkedService, SapEccLinkedService, SapHanaLinkedService, SapOpenHubLinkedService, SapTableLinkedService, ServiceNowLinkedService, SftpServerLinkedService, SharePointOnlineListLinkedService, ShopifyLinkedService, SnowflakeLinkedService, SparkLinkedService, SqlServerLinkedService, SquareLinkedService, SybaseLinkedService, TeradataLinkedService, VerticaLinkedService, WebLinkedService, XeroLinkedService, ZohoLinkedService. 
+ sub-classes are: AmazonMwsLinkedService, AmazonRdsForOracleLinkedService, AmazonRedshiftLinkedService, AmazonS3LinkedService, AmazonS3CompatibleLinkedService, AzureBatchLinkedService, AzureBlobFsLinkedService, AzureBlobStorageLinkedService, AzureDataExplorerLinkedService, AzureDataLakeAnalyticsLinkedService, AzureDataLakeStoreLinkedService, AzureDatabricksLinkedService, AzureDatabricksDeltaLakeLinkedService, AzureFileStorageLinkedService, AzureFunctionLinkedService, AzureKeyVaultLinkedService, AzureMlLinkedService, AzureMlServiceLinkedService, AzureMariaDbLinkedService, AzureMySqlLinkedService, AzurePostgreSqlLinkedService, AzureSearchLinkedService, AzureSqlDwLinkedService, AzureSqlDatabaseLinkedService, AzureSqlMiLinkedService, AzureStorageLinkedService, AzureTableStorageLinkedService, CassandraLinkedService, CommonDataServiceForAppsLinkedService, ConcurLinkedService, CosmosDbLinkedService, CosmosDbMongoDbApiLinkedService, CouchbaseLinkedService, CustomDataSourceLinkedService, Db2LinkedService, DrillLinkedService, DynamicsLinkedService, DynamicsAxLinkedService, DynamicsCrmLinkedService, EloquaLinkedService, FileServerLinkedService, FtpServerLinkedService, GoogleAdWordsLinkedService, GoogleBigQueryLinkedService, GoogleCloudStorageLinkedService, GreenplumLinkedService, HBaseLinkedService, HdInsightLinkedService, HdInsightOnDemandLinkedService, HdfsLinkedService, HiveLinkedService, HttpLinkedService, HubspotLinkedService, ImpalaLinkedService, InformixLinkedService, JiraLinkedService, MagentoLinkedService, MariaDbLinkedService, MarketoLinkedService, MicrosoftAccessLinkedService, MongoDbLinkedService, MongoDbAtlasLinkedService, MongoDbV2LinkedService, MySqlLinkedService, NetezzaLinkedService, ODataLinkedService, OdbcLinkedService, Office365LinkedService, OracleLinkedService, OracleCloudStorageLinkedService, OracleServiceCloudLinkedService, PaypalLinkedService, PhoenixLinkedService, PostgreSqlLinkedService, PrestoLinkedService, QuickBooksLinkedService, ResponsysLinkedService, RestServiceLinkedService, SalesforceLinkedService, SalesforceMarketingCloudLinkedService, SalesforceServiceCloudLinkedService, SapBwLinkedService, SapCloudForCustomerLinkedService, SapEccLinkedService, SapHanaLinkedService, SapOpenHubLinkedService, SapTableLinkedService, ServiceNowLinkedService, SftpServerLinkedService, SharePointOnlineListLinkedService, ShopifyLinkedService, SnowflakeLinkedService, SparkLinkedService, SqlServerLinkedService, SquareLinkedService, SybaseLinkedService, TeradataLinkedService, VerticaLinkedService, WebLinkedService, XeroLinkedService, ZohoLinkedService. All required parameters must be populated in order to send to Azure. 
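The hunk that follows updates LinkedService._subtype_map, and the new 'AmazonRdsForOracle' discriminator key is what routes polymorphic deserialization to AmazonRdsForOracleLinkedService (the class itself is added further down in this diff). A minimal sketch of that round trip, assuming the vendored package is importable as azext_datafactory.vendored_sdks.datafactory and relying on msrest's Model.from_dict helper; the connection string below is a placeholder, not a working value:

    from azext_datafactory.vendored_sdks.datafactory import models

    # 'type' is the discriminator key consulted via LinkedService._subtype_map.
    payload = {
        "type": "AmazonRdsForOracle",
        "typeProperties": {
            # Illustrative only; connectionString is the one required type
            # property (string, SecureString or AzureKeyVaultSecretReference).
            "connectionString": "host=example.com;port=1521;serviceName=ORCL;user id=scott",
        },
    }

    # msrest classifies the payload by its discriminator and returns the subclass.
    linked_service = models.LinkedService.from_dict(payload)
    assert isinstance(linked_service, models.AmazonRdsForOracleLinkedService)
    # The flattened key 'typeProperties.connectionString' lands on the attribute.
    assert linked_service.connection_string is not None

The same dispatch applies on serialization: because __init__ pins self.type to 'AmazonRdsForOracle', round-tripping the object back through serialize() reproduces the discriminator.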
@@ -375,7 +375,7 @@ class LinkedService(msrest.serialization.Model): } _subtype_map = { - 'type': {'AmazonMWS': 'AmazonMwsLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'AmazonS3Compatible': 'AmazonS3CompatibleLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureBlobFS': 'AzureBlobFsLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDatabricksDeltaLake': 'AzureDatabricksDeltaLakeLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'AzureFunction': 'AzureFunctionLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureML': 'AzureMlLinkedService', 'AzureMLService': 'AzureMlServiceLinkedService', 'AzureMariaDB': 'AzureMariaDbLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'AzureSqlDW': 'AzureSqlDwLinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'AzureSqlMI': 'AzureSqlMiLinkedService', 'AzureStorage': 'AzureStorageLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'Cassandra': 'CassandraLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'Concur': 'ConcurLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'Db2': 'Db2LinkedService', 'Drill': 'DrillLinkedService', 'Dynamics': 'DynamicsLinkedService', 'DynamicsAX': 'DynamicsAxLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Eloqua': 'EloquaLinkedService', 'FileServer': 'FileServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'Greenplum': 'GreenplumLinkedService', 'HBase': 'HBaseLinkedService', 'HDInsight': 'HdInsightLinkedService', 'HDInsightOnDemand': 'HdInsightOnDemandLinkedService', 'Hdfs': 'HdfsLinkedService', 'Hive': 'HiveLinkedService', 'HttpServer': 'HttpLinkedService', 'Hubspot': 'HubspotLinkedService', 'Impala': 'ImpalaLinkedService', 'Informix': 'InformixLinkedService', 'Jira': 'JiraLinkedService', 'Magento': 'MagentoLinkedService', 'MariaDB': 'MariaDbLinkedService', 'Marketo': 'MarketoLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'MongoDb': 'MongoDbLinkedService', 'MongoDbAtlas': 'MongoDbAtlasLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MySql': 'MySqlLinkedService', 'Netezza': 'NetezzaLinkedService', 'OData': 'ODataLinkedService', 'Odbc': 'OdbcLinkedService', 'Office365': 'Office365LinkedService', 'Oracle': 'OracleLinkedService', 'OracleCloudStorage': 'OracleCloudStorageLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'Paypal': 'PaypalLinkedService', 'Phoenix': 'PhoenixLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'Presto': 'PrestoLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Responsys': 'ResponsysLinkedService', 'RestService': 'RestServiceLinkedService', 'Salesforce': 'SalesforceLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 
'SapBW': 'SapBwLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapTable': 'SapTableLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'Sftp': 'SftpServerLinkedService', 'SharePointOnlineList': 'SharePointOnlineListLinkedService', 'Shopify': 'ShopifyLinkedService', 'Snowflake': 'SnowflakeLinkedService', 'Spark': 'SparkLinkedService', 'SqlServer': 'SqlServerLinkedService', 'Square': 'SquareLinkedService', 'Sybase': 'SybaseLinkedService', 'Teradata': 'TeradataLinkedService', 'Vertica': 'VerticaLinkedService', 'Web': 'WebLinkedService', 'Xero': 'XeroLinkedService', 'Zoho': 'ZohoLinkedService'} + 'type': {'AmazonMWS': 'AmazonMwsLinkedService', 'AmazonRdsForOracle': 'AmazonRdsForOracleLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'AmazonS3Compatible': 'AmazonS3CompatibleLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureBlobFS': 'AzureBlobFsLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDatabricksDeltaLake': 'AzureDatabricksDeltaLakeLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'AzureFunction': 'AzureFunctionLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureML': 'AzureMlLinkedService', 'AzureMLService': 'AzureMlServiceLinkedService', 'AzureMariaDB': 'AzureMariaDbLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'AzureSqlDW': 'AzureSqlDwLinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'AzureSqlMI': 'AzureSqlMiLinkedService', 'AzureStorage': 'AzureStorageLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'Cassandra': 'CassandraLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'Concur': 'ConcurLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'Db2': 'Db2LinkedService', 'Drill': 'DrillLinkedService', 'Dynamics': 'DynamicsLinkedService', 'DynamicsAX': 'DynamicsAxLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Eloqua': 'EloquaLinkedService', 'FileServer': 'FileServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'Greenplum': 'GreenplumLinkedService', 'HBase': 'HBaseLinkedService', 'HDInsight': 'HdInsightLinkedService', 'HDInsightOnDemand': 'HdInsightOnDemandLinkedService', 'Hdfs': 'HdfsLinkedService', 'Hive': 'HiveLinkedService', 'HttpServer': 'HttpLinkedService', 'Hubspot': 'HubspotLinkedService', 'Impala': 'ImpalaLinkedService', 'Informix': 'InformixLinkedService', 'Jira': 'JiraLinkedService', 'Magento': 'MagentoLinkedService', 'MariaDB': 'MariaDbLinkedService', 'Marketo': 'MarketoLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'MongoDb': 'MongoDbLinkedService', 'MongoDbAtlas': 'MongoDbAtlasLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MySql': 'MySqlLinkedService', 'Netezza': 
'NetezzaLinkedService', 'OData': 'ODataLinkedService', 'Odbc': 'OdbcLinkedService', 'Office365': 'Office365LinkedService', 'Oracle': 'OracleLinkedService', 'OracleCloudStorage': 'OracleCloudStorageLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'Paypal': 'PaypalLinkedService', 'Phoenix': 'PhoenixLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'Presto': 'PrestoLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Responsys': 'ResponsysLinkedService', 'RestService': 'RestServiceLinkedService', 'Salesforce': 'SalesforceLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'SapBW': 'SapBwLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapTable': 'SapTableLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'Sftp': 'SftpServerLinkedService', 'SharePointOnlineList': 'SharePointOnlineListLinkedService', 'Shopify': 'ShopifyLinkedService', 'Snowflake': 'SnowflakeLinkedService', 'Spark': 'SparkLinkedService', 'SqlServer': 'SqlServerLinkedService', 'Square': 'SquareLinkedService', 'Sybase': 'SybaseLinkedService', 'Teradata': 'TeradataLinkedService', 'Vertica': 'VerticaLinkedService', 'Web': 'WebLinkedService', 'Xero': 'XeroLinkedService', 'Zoho': 'ZohoLinkedService'} } def __init__( @@ -489,7 +489,7 @@ class Dataset(msrest.serialization.Model): """The Azure Data Factory nested object which identifies data within different data stores, such as tables, files, folders, and documents. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmazonMwsObjectDataset, AmazonRedshiftTableDataset, AmazonS3Dataset, AvroDataset, AzureBlobDataset, AzureBlobFsDataset, AzureDataExplorerTableDataset, AzureDataLakeStoreDataset, AzureDatabricksDeltaLakeDataset, AzureMariaDbTableDataset, AzureMySqlTableDataset, AzurePostgreSqlTableDataset, AzureSearchIndexDataset, AzureSqlDwTableDataset, AzureSqlMiTableDataset, AzureSqlTableDataset, AzureTableDataset, BinaryDataset, CassandraTableDataset, CommonDataServiceForAppsEntityDataset, ConcurObjectDataset, CosmosDbMongoDbApiCollectionDataset, CosmosDbSqlApiCollectionDataset, CouchbaseTableDataset, CustomDataset, Db2TableDataset, DelimitedTextDataset, DocumentDbCollectionDataset, DrillTableDataset, DynamicsAxResourceDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, EloquaObjectDataset, ExcelDataset, FileShareDataset, GoogleAdWordsObjectDataset, GoogleBigQueryObjectDataset, GreenplumTableDataset, HBaseObjectDataset, HiveObjectDataset, HttpDataset, HubspotObjectDataset, ImpalaObjectDataset, InformixTableDataset, JiraObjectDataset, JsonDataset, MagentoObjectDataset, MariaDbTableDataset, MarketoObjectDataset, MicrosoftAccessTableDataset, MongoDbAtlasCollectionDataset, MongoDbCollectionDataset, MongoDbV2CollectionDataset, MySqlTableDataset, NetezzaTableDataset, ODataResourceDataset, OdbcTableDataset, Office365Dataset, OracleServiceCloudObjectDataset, OracleTableDataset, OrcDataset, ParquetDataset, PaypalObjectDataset, PhoenixObjectDataset, PostgreSqlTableDataset, PrestoObjectDataset, QuickBooksObjectDataset, RelationalTableDataset, ResponsysObjectDataset, RestResourceDataset, SalesforceMarketingCloudObjectDataset, SalesforceObjectDataset, SalesforceServiceCloudObjectDataset, SapBwCubeDataset, SapCloudForCustomerResourceDataset, SapEccResourceDataset, 
SapHanaTableDataset, SapOpenHubTableDataset, SapTableResourceDataset, ServiceNowObjectDataset, SharePointOnlineListResourceDataset, ShopifyObjectDataset, SnowflakeDataset, SparkObjectDataset, SqlServerTableDataset, SquareObjectDataset, SybaseTableDataset, TeradataTableDataset, VerticaTableDataset, WebTableDataset, XeroObjectDataset, XmlDataset, ZohoObjectDataset. + sub-classes are: AmazonMwsObjectDataset, AmazonRdsForOracleTableDataset, AmazonRedshiftTableDataset, AmazonS3Dataset, AvroDataset, AzureBlobDataset, AzureBlobFsDataset, AzureDataExplorerTableDataset, AzureDataLakeStoreDataset, AzureDatabricksDeltaLakeDataset, AzureMariaDbTableDataset, AzureMySqlTableDataset, AzurePostgreSqlTableDataset, AzureSearchIndexDataset, AzureSqlDwTableDataset, AzureSqlMiTableDataset, AzureSqlTableDataset, AzureTableDataset, BinaryDataset, CassandraTableDataset, CommonDataServiceForAppsEntityDataset, ConcurObjectDataset, CosmosDbMongoDbApiCollectionDataset, CosmosDbSqlApiCollectionDataset, CouchbaseTableDataset, CustomDataset, Db2TableDataset, DelimitedTextDataset, DocumentDbCollectionDataset, DrillTableDataset, DynamicsAxResourceDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, EloquaObjectDataset, ExcelDataset, FileShareDataset, GoogleAdWordsObjectDataset, GoogleBigQueryObjectDataset, GreenplumTableDataset, HBaseObjectDataset, HiveObjectDataset, HttpDataset, HubspotObjectDataset, ImpalaObjectDataset, InformixTableDataset, JiraObjectDataset, JsonDataset, MagentoObjectDataset, MariaDbTableDataset, MarketoObjectDataset, MicrosoftAccessTableDataset, MongoDbAtlasCollectionDataset, MongoDbCollectionDataset, MongoDbV2CollectionDataset, MySqlTableDataset, NetezzaTableDataset, ODataResourceDataset, OdbcTableDataset, Office365Dataset, OracleServiceCloudObjectDataset, OracleTableDataset, OrcDataset, ParquetDataset, PaypalObjectDataset, PhoenixObjectDataset, PostgreSqlTableDataset, PrestoObjectDataset, QuickBooksObjectDataset, RelationalTableDataset, ResponsysObjectDataset, RestResourceDataset, SalesforceMarketingCloudObjectDataset, SalesforceObjectDataset, SalesforceServiceCloudObjectDataset, SapBwCubeDataset, SapCloudForCustomerResourceDataset, SapEccResourceDataset, SapHanaTableDataset, SapOpenHubTableDataset, SapTableResourceDataset, ServiceNowObjectDataset, SharePointOnlineListResourceDataset, ShopifyObjectDataset, SnowflakeDataset, SparkObjectDataset, SqlServerTableDataset, SquareObjectDataset, SybaseTableDataset, TeradataTableDataset, VerticaTableDataset, WebTableDataset, XeroObjectDataset, XmlDataset, ZohoObjectDataset. All required parameters must be populated in order to send to Azure. 
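The dataset side mirrors this: the next hunk maps the 'AmazonRdsForOracleTable' discriminator to AmazonRdsForOracleTableDataset. Since these vendored models use kwargs-style initializers, constructing the new dataset and inspecting its wire shape looks roughly like the sketch below; the reference name, schema and table are invented, and LinkedServiceReference's required 'type' value is assumed to be the usual "LinkedServiceReference" constant:

    from azext_datafactory.vendored_sdks.datafactory import models

    # A reference to an existing linked service (hypothetical name).
    ls_ref = models.LinkedServiceReference(
        type="LinkedServiceReference",
        reference_name="exampleAmazonRdsForOracleLinkedService",
    )

    dataset = models.AmazonRdsForOracleTableDataset(
        linked_service_name=ls_ref,          # required per _validation
        schema_type_properties_schema="HR",  # serialized as typeProperties.schema
        table="EMPLOYEES",                   # serialized as typeProperties.table
    )

    # __init__ sets the constant discriminator, so the serialized form carries
    # "type": "AmazonRdsForOracleTable" plus the flattened typeProperties keys.
    print(dataset.serialize())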
@@ -535,7 +535,7 @@ class Dataset(msrest.serialization.Model): } _subtype_map = { - 'type': {'AmazonMWSObject': 'AmazonMwsObjectDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AmazonS3Object': 'AmazonS3Dataset', 'Avro': 'AvroDataset', 'AzureBlob': 'AzureBlobDataset', 'AzureBlobFSFile': 'AzureBlobFsDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'AzureDatabricksDeltaLakeDataset': 'AzureDatabricksDeltaLakeDataset', 'AzureMariaDBTable': 'AzureMariaDbTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'AzureSqlDWTable': 'AzureSqlDwTableDataset', 'AzureSqlMITable': 'AzureSqlMiTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'Binary': 'BinaryDataset', 'CassandraTable': 'CassandraTableDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'ConcurObject': 'ConcurObjectDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDbSqlApiCollectionDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'CustomDataset': 'CustomDataset', 'Db2Table': 'Db2TableDataset', 'DelimitedText': 'DelimitedTextDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'DrillTable': 'DrillTableDataset', 'DynamicsAXResource': 'DynamicsAxResourceDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'EloquaObject': 'EloquaObjectDataset', 'Excel': 'ExcelDataset', 'FileShare': 'FileShareDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'HBaseObject': 'HBaseObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HttpFile': 'HttpDataset', 'HubspotObject': 'HubspotObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'InformixTable': 'InformixTableDataset', 'JiraObject': 'JiraObjectDataset', 'Json': 'JsonDataset', 'MagentoObject': 'MagentoObjectDataset', 'MariaDBTable': 'MariaDbTableDataset', 'MarketoObject': 'MarketoObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'MongoDbAtlasCollection': 'MongoDbAtlasCollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MySqlTable': 'MySqlTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ODataResource': 'ODataResourceDataset', 'OdbcTable': 'OdbcTableDataset', 'Office365Table': 'Office365Dataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'OracleTable': 'OracleTableDataset', 'Orc': 'OrcDataset', 'Parquet': 'ParquetDataset', 'PaypalObject': 'PaypalObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'PrestoObject': 'PrestoObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'RestResource': 'RestResourceDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SapBwCube': 'SapBwCubeDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapTableResource': 
'SapTableResourceDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'SharePointOnlineListResource': 'SharePointOnlineListResourceDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'SnowflakeTable': 'SnowflakeDataset', 'SparkObject': 'SparkObjectDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SquareObject': 'SquareObjectDataset', 'SybaseTable': 'SybaseTableDataset', 'TeradataTable': 'TeradataTableDataset', 'VerticaTable': 'VerticaTableDataset', 'WebTable': 'WebTableDataset', 'XeroObject': 'XeroObjectDataset', 'Xml': 'XmlDataset', 'ZohoObject': 'ZohoObjectDataset'} + 'type': {'AmazonMWSObject': 'AmazonMwsObjectDataset', 'AmazonRdsForOracleTable': 'AmazonRdsForOracleTableDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AmazonS3Object': 'AmazonS3Dataset', 'Avro': 'AvroDataset', 'AzureBlob': 'AzureBlobDataset', 'AzureBlobFSFile': 'AzureBlobFsDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'AzureDatabricksDeltaLakeDataset': 'AzureDatabricksDeltaLakeDataset', 'AzureMariaDBTable': 'AzureMariaDbTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'AzureSqlDWTable': 'AzureSqlDwTableDataset', 'AzureSqlMITable': 'AzureSqlMiTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'Binary': 'BinaryDataset', 'CassandraTable': 'CassandraTableDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'ConcurObject': 'ConcurObjectDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDbSqlApiCollectionDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'CustomDataset': 'CustomDataset', 'Db2Table': 'Db2TableDataset', 'DelimitedText': 'DelimitedTextDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'DrillTable': 'DrillTableDataset', 'DynamicsAXResource': 'DynamicsAxResourceDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'EloquaObject': 'EloquaObjectDataset', 'Excel': 'ExcelDataset', 'FileShare': 'FileShareDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'HBaseObject': 'HBaseObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HttpFile': 'HttpDataset', 'HubspotObject': 'HubspotObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'InformixTable': 'InformixTableDataset', 'JiraObject': 'JiraObjectDataset', 'Json': 'JsonDataset', 'MagentoObject': 'MagentoObjectDataset', 'MariaDBTable': 'MariaDbTableDataset', 'MarketoObject': 'MarketoObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'MongoDbAtlasCollection': 'MongoDbAtlasCollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MySqlTable': 'MySqlTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ODataResource': 'ODataResourceDataset', 'OdbcTable': 'OdbcTableDataset', 'Office365Table': 'Office365Dataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'OracleTable': 'OracleTableDataset', 'Orc': 'OrcDataset', 'Parquet': 'ParquetDataset', 'PaypalObject': 'PaypalObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'PrestoObject': 'PrestoObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 
'RelationalTable': 'RelationalTableDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'RestResource': 'RestResourceDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SapBwCube': 'SapBwCubeDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'SharePointOnlineListResource': 'SharePointOnlineListResourceDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'SnowflakeTable': 'SnowflakeDataset', 'SparkObject': 'SparkObjectDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SquareObject': 'SquareObjectDataset', 'SybaseTable': 'SybaseTableDataset', 'TeradataTable': 'TeradataTableDataset', 'VerticaTable': 'VerticaTableDataset', 'WebTable': 'WebTableDataset', 'XeroObject': 'XeroObjectDataset', 'Xml': 'XmlDataset', 'ZohoObject': 'ZohoObjectDataset'} } def __init__( @@ -616,7 +616,7 @@ class CopySource(msrest.serialization.Model): """A copy activity source. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroSource, AzureBlobFsSource, AzureDataExplorerSource, AzureDataLakeStoreSource, AzureDatabricksDeltaLakeSource, BinarySource, BlobSource, CommonDataServiceForAppsSource, CosmosDbMongoDbApiSource, CosmosDbSqlApiSource, DelimitedTextSource, DocumentDbCollectionSource, DynamicsCrmSource, DynamicsSource, ExcelSource, FileSystemSource, HdfsSource, HttpSource, JsonSource, MicrosoftAccessSource, MongoDbAtlasSource, MongoDbSource, MongoDbV2Source, ODataSource, Office365Source, OracleSource, OrcSource, ParquetSource, RelationalSource, RestSource, SalesforceServiceCloudSource, SharePointOnlineListSource, SnowflakeSource, TabularSource, WebSource, XmlSource. + sub-classes are: AmazonRdsForOracleSource, AvroSource, AzureBlobFsSource, AzureDataExplorerSource, AzureDataLakeStoreSource, AzureDatabricksDeltaLakeSource, BinarySource, BlobSource, CommonDataServiceForAppsSource, CosmosDbMongoDbApiSource, CosmosDbSqlApiSource, DelimitedTextSource, DocumentDbCollectionSource, DynamicsCrmSource, DynamicsSource, ExcelSource, FileSystemSource, HdfsSource, HttpSource, JsonSource, MicrosoftAccessSource, MongoDbAtlasSource, MongoDbSource, MongoDbV2Source, ODataSource, Office365Source, OracleSource, OrcSource, ParquetSource, RelationalSource, RestSource, SalesforceServiceCloudSource, SharePointOnlineListSource, SnowflakeSource, TabularSource, WebSource, XmlSource. All required parameters must be populated in order to send to Azure. @@ -634,6 +634,9 @@ class CopySource(msrest.serialization.Model): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :type disable_metrics_collection: object """ _validation = { @@ -646,10 +649,11 @@ class CopySource(msrest.serialization.Model): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, } _subtype_map = { - 'type': {'AvroSource': 'AvroSource', 'AzureBlobFSSource': 'AzureBlobFsSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'AzureDatabricksDeltaLakeSource': 'AzureDatabricksDeltaLakeSource', 'BinarySource': 'BinarySource', 'BlobSource': 'BlobSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'CosmosDbSqlApiSource': 'CosmosDbSqlApiSource', 'DelimitedTextSource': 'DelimitedTextSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'ExcelSource': 'ExcelSource', 'FileSystemSource': 'FileSystemSource', 'HdfsSource': 'HdfsSource', 'HttpSource': 'HttpSource', 'JsonSource': 'JsonSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'MongoDbAtlasSource': 'MongoDbAtlasSource', 'MongoDbSource': 'MongoDbSource', 'MongoDbV2Source': 'MongoDbV2Source', 'ODataSource': 'ODataSource', 'Office365Source': 'Office365Source', 'OracleSource': 'OracleSource', 'OrcSource': 'OrcSource', 'ParquetSource': 'ParquetSource', 'RelationalSource': 'RelationalSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SharePointOnlineListSource': 'SharePointOnlineListSource', 'SnowflakeSource': 'SnowflakeSource', 'TabularSource': 'TabularSource', 'WebSource': 'WebSource', 'XmlSource': 'XmlSource'} + 'type': {'AmazonRdsForOracleSource': 'AmazonRdsForOracleSource', 'AvroSource': 'AvroSource', 'AzureBlobFSSource': 'AzureBlobFsSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'AzureDatabricksDeltaLakeSource': 'AzureDatabricksDeltaLakeSource', 'BinarySource': 'BinarySource', 'BlobSource': 'BlobSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'CosmosDbSqlApiSource': 'CosmosDbSqlApiSource', 'DelimitedTextSource': 'DelimitedTextSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'ExcelSource': 'ExcelSource', 'FileSystemSource': 'FileSystemSource', 'HdfsSource': 'HdfsSource', 'HttpSource': 'HttpSource', 'JsonSource': 'JsonSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'MongoDbAtlasSource': 'MongoDbAtlasSource', 'MongoDbSource': 'MongoDbSource', 'MongoDbV2Source': 'MongoDbV2Source', 'ODataSource': 'ODataSource', 'Office365Source': 'Office365Source', 'OracleSource': 'OracleSource', 'OrcSource': 'OrcSource', 'ParquetSource': 'ParquetSource', 'RelationalSource': 'RelationalSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SharePointOnlineListSource': 'SharePointOnlineListSource', 'SnowflakeSource': 'SnowflakeSource', 'TabularSource': 'TabularSource', 'WebSource': 'WebSource', 'XmlSource': 'XmlSource'} } def __init__( @@ -662,6 +666,7 @@ def __init__( self.source_retry_count = 
kwargs.get('source_retry_count', None) self.source_retry_wait = kwargs.get('source_retry_wait', None) self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) + self.disable_metrics_collection = kwargs.get('disable_metrics_collection', None) class TabularSource(CopySource): @@ -686,12 +691,15 @@ class TabularSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -704,8 +712,9 @@ class TabularSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } _subtype_map = { @@ -741,12 +750,15 @@ class AmazonMwsSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -762,8 +774,9 @@ class AmazonMwsSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -776,6 +789,234 @@ def __init__( self.query = kwargs.get('query', None) +class AmazonRdsForOracleLinkedService(LinkedService): + """AmazonRdsForOracle database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: Required. The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in connection string. + :type password: ~data_factory_management_client.models.SecretBase + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(AmazonRdsForOracleLinkedService, self).__init__(**kwargs) + self.type = 'AmazonRdsForOracle' # type: str + self.connection_string = kwargs['connection_string'] + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class AmazonRdsForOraclePartitionSettings(msrest.serialization.Model): + """The settings that will be leveraged for AmazonRdsForOracle source partitioning. + + :param partition_names: Names of the physical partitions of AmazonRdsForOracle table. 
+ :type partition_names: object + :param partition_column_name: The name of the column in integer type that will be used for + proceeding range partitioning. Type: string (or Expression with resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in partitionColumnName that + will be used for proceeding range partitioning. Type: string (or Expression with resultType + string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in partitionColumnName that + will be used for proceeding range partitioning. Type: string (or Expression with resultType + string). + :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_names': {'key': 'partitionNames', 'type': 'object'}, + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(AmazonRdsForOraclePartitionSettings, self).__init__(**kwargs) + self.partition_names = kwargs.get('partition_names', None) + self.partition_column_name = kwargs.get('partition_column_name', None) + self.partition_upper_bound = kwargs.get('partition_upper_bound', None) + self.partition_lower_bound = kwargs.get('partition_lower_bound', None) + + +class AmazonRdsForOracleSource(CopySource): + """A copy activity AmazonRdsForOracle source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object + :param oracle_reader_query: AmazonRdsForOracle reader query. Type: string (or Expression with + resultType string). + :type oracle_reader_query: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param partition_option: The partition mechanism that will be used for AmazonRdsForOracle read + in parallel. Type: string (or Expression with resultType string). + :type partition_option: object + :param partition_settings: The settings that will be leveraged for AmazonRdsForOracle source + partitioning. + :type partition_settings: + ~data_factory_management_client.models.AmazonRdsForOraclePartitionSettings + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :type additional_columns: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, + 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'AmazonRdsForOraclePartitionSettings'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(AmazonRdsForOracleSource, self).__init__(**kwargs) + self.type = 'AmazonRdsForOracleSource' # type: str + self.oracle_reader_query = kwargs.get('oracle_reader_query', None) + self.query_timeout = kwargs.get('query_timeout', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) + self.additional_columns = kwargs.get('additional_columns', None) + + +class AmazonRdsForOracleTableDataset(Dataset): + """The AmazonRdsForOracle database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~data_factory_management_client.models.DatasetFolder + :param schema_type_properties_schema: The schema name of the AmazonRdsForOracle database. Type: + string (or Expression with resultType string). + :type schema_type_properties_schema: object + :param table: The table name of the AmazonRdsForOracle database. Type: string (or Expression + with resultType string). 
+ :type table: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(AmazonRdsForOracleTableDataset, self).__init__(**kwargs) + self.type = 'AmazonRdsForOracleTable' # type: str + self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) + self.table = kwargs.get('table', None) + + class AmazonRedshiftLinkedService(LinkedService): """Linked service for Amazon Redshift. @@ -868,12 +1109,15 @@ class AmazonRedshiftSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object :param redshift_unload_settings: The Amazon S3 settings needed for the interim Amazon S3 when @@ -892,8 +1136,9 @@ class AmazonRedshiftSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'}, } @@ -1156,6 +1401,9 @@ class StoreReadSettings(msrest.serialization.Model): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. 
Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object """ _validation = { @@ -1166,6 +1414,7 @@ class StoreReadSettings(msrest.serialization.Model): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, } _subtype_map = { @@ -1180,6 +1429,7 @@ def __init__( self.additional_properties = kwargs.get('additional_properties', None) self.type = 'StoreReadSettings' # type: str self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) + self.disable_metrics_collection = kwargs.get('disable_metrics_collection', None) class AmazonS3CompatibleReadSettings(StoreReadSettings): @@ -1195,6 +1445,9 @@ class AmazonS3CompatibleReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -1235,6 +1488,7 @@ class AmazonS3CompatibleReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -1490,6 +1744,9 @@ class AmazonS3ReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -1530,6 +1787,7 @@ class AmazonS3ReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -1664,10 +1922,9 @@ class AvroDataset(Dataset): :type folder: ~data_factory_management_client.models.DatasetFolder :param location: The location of the avro storage. 
:type location: ~data_factory_management_client.models.DatasetLocation - :param avro_compression_codec: Possible values include: "none", "deflate", "snappy", "xz", - "bzip2". - :type avro_compression_codec: str or - ~data_factory_management_client.models.AvroCompressionCodec + :param avro_compression_codec: The data avroCompressionCodec. Type: string (or Expression with + resultType string). + :type avro_compression_codec: object :param avro_compression_level: :type avro_compression_level: int """ @@ -1689,7 +1946,7 @@ class AvroDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'str'}, + 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'object'}, 'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'}, } @@ -1788,7 +2045,7 @@ class CopySink(msrest.serialization.Model): """A copy activity sink. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroSink, AzureBlobFsSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureDatabricksDeltaLakeSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink, AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink, DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SqlDwSink, SqlMiSink, SqlServerSink, SqlSink. + sub-classes are: AvroSink, AzureBlobFsSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureDatabricksDeltaLakeSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink, AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink, DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, MongoDbAtlasSink, MongoDbV2Sink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SqlDwSink, SqlMiSink, SqlServerSink, SqlSink. All required parameters must be populated in order to send to Azure. @@ -1812,6 +2069,9 @@ class CopySink(msrest.serialization.Model): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :type disable_metrics_collection: object """ _validation = { @@ -1826,10 +2086,11 @@ class CopySink(msrest.serialization.Model): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, } _subtype_map = { - 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFsSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSqlSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'RestSink': 'RestSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDwSink', 'SqlMISink': 'SqlMiSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink'} + 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFsSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSqlSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'MongoDbAtlasSink': 'MongoDbAtlasSink', 'MongoDbV2Sink': 'MongoDbV2Sink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'RestSink': 'RestSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDwSink', 'SqlMISink': 'SqlMiSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink'} } def __init__( @@ -1844,6 +2105,7 @@ def __init__( self.sink_retry_count = kwargs.get('sink_retry_count', None) self.sink_retry_wait = kwargs.get('sink_retry_wait', None) self.max_concurrent_connections = 
kwargs.get('max_concurrent_connections', None) + self.disable_metrics_collection = kwargs.get('disable_metrics_collection', None) class AvroSink(CopySink): @@ -1871,6 +2133,9 @@ class AvroSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Avro store settings. :type store_settings: ~data_factory_management_client.models.StoreWriteSettings :param format_settings: Avro format settings. @@ -1889,6 +2154,7 @@ class AvroSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'AvroWriteSettings'}, } @@ -1922,11 +2188,14 @@ class AvroSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Avro store settings. :type store_settings: ~data_factory_management_client.models.StoreReadSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -1939,8 +2208,9 @@ class AvroSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -2135,6 +2405,8 @@ class AzureBatchLinkedService(LinkedService): encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param credential: The credential reference containing authentication information. 
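Because additional_columns is now serialized as a plain object rather than [AdditionalColumns], a raw list of name/value dicts (or a whole expression) round-trips unchanged, and the new disable_metrics_collection flag rides alongside it. A hedged sketch under the same import assumption; the column name is invented:

    from data_factory_management_client.models import AvroSource

    source = AvroSource(
        # New in this change set; the service treats an omitted value as false.
        disable_metrics_collection=True,
        # Typed as 'object', so plain dicts serialize without a typed wrapper.
        additional_columns=[{"name": "ingest_time", "value": "@utcnow()"}],
    )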
+ :type credential: ~data_factory_management_client.models.CredentialReference """ _validation = { @@ -2158,6 +2430,7 @@ class AzureBatchLinkedService(LinkedService): 'pool_name': {'key': 'typeProperties.poolName', 'type': 'object'}, 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -2172,6 +2445,7 @@ def __init__( self.pool_name = kwargs['pool_name'] self.linked_service_name = kwargs['linked_service_name'] self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.credential = kwargs.get('credential', None) class AzureBlobDataset(Dataset): @@ -2374,6 +2648,8 @@ class AzureBlobFsLinkedService(LinkedService): encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param credential: The credential reference containing authentication information. + :type credential: ~data_factory_management_client.models.CredentialReference """ _validation = { @@ -2395,6 +2671,7 @@ class AzureBlobFsLinkedService(LinkedService): 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -2410,6 +2687,7 @@ def __init__( self.tenant = kwargs.get('tenant', None) self.azure_cloud_type = kwargs.get('azure_cloud_type', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.credential = kwargs.get('credential', None) class AzureBlobFsLocation(DatasetLocation): @@ -2467,6 +2745,9 @@ class AzureBlobFsReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -2504,6 +2785,7 @@ class AzureBlobFsReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -2557,8 +2839,14 @@ class AzureBlobFsSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object + :param metadata: Specify the custom metadata to be added to sink data. Type: array of objects + (or Expression with resultType array of objects). + :type metadata: list[~data_factory_management_client.models.MetadataItem] """ _validation = { @@ -2573,7 +2861,9 @@ class AzureBlobFsSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'metadata': {'key': 'metadata', 'type': '[MetadataItem]'}, } def __init__( @@ -2583,6 +2873,7 @@ def __init__( super(AzureBlobFsSink, self).__init__(**kwargs) self.type = 'AzureBlobFSSink' # type: str self.copy_behavior = kwargs.get('copy_behavior', None) + self.metadata = kwargs.get('metadata', None) class AzureBlobFsSource(CopySource): @@ -2604,6 +2895,9 @@ class AzureBlobFsSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType boolean). :type treat_empty_as_null: object @@ -2625,6 +2919,7 @@ class AzureBlobFsSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, @@ -2657,6 +2952,9 @@ class StoreWriteSettings(msrest.serialization.Model): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. 
:type copy_behavior: object """ @@ -2669,6 +2967,7 @@ class StoreWriteSettings(msrest.serialization.Model): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } @@ -2684,6 +2983,7 @@ def __init__( self.additional_properties = kwargs.get('additional_properties', None) self.type = 'StoreWriteSettings' # type: str self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) + self.disable_metrics_collection = kwargs.get('disable_metrics_collection', None) self.copy_behavior = kwargs.get('copy_behavior', None) @@ -2700,6 +3000,9 @@ class AzureBlobFsWriteSettings(StoreWriteSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object :param block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer @@ -2715,6 +3018,7 @@ class AzureBlobFsWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, } @@ -2781,6 +3085,8 @@ class AzureBlobStorageLinkedService(LinkedService): encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: str + :param credential: The credential reference containing authentication information. + :type credential: ~data_factory_management_client.models.CredentialReference """ _validation = { @@ -2805,6 +3111,7 @@ class AzureBlobStorageLinkedService(LinkedService): 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'account_kind': {'key': 'typeProperties.accountKind', 'type': 'str'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -2824,6 +3131,7 @@ def __init__( self.azure_cloud_type = kwargs.get('azure_cloud_type', None) self.account_kind = kwargs.get('account_kind', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.credential = kwargs.get('credential', None) class AzureBlobStorageLocation(DatasetLocation): @@ -2881,6 +3189,9 @@ class AzureBlobStorageReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -2921,6 +3232,7 @@ class AzureBlobStorageReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -2964,6 +3276,9 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object :param block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer @@ -2979,6 +3294,7 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, } @@ -3296,6 +3612,9 @@ class AzureDatabricksDeltaLakeSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). :type pre_copy_script: object @@ -3316,6 +3635,7 @@ class AzureDatabricksDeltaLakeSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'import_settings': {'key': 'importSettings', 'type': 'AzureDatabricksDeltaLakeImportCommand'}, } @@ -3349,6 +3669,9 @@ class AzureDatabricksDeltaLakeSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Azure Databricks Delta Lake Sql query. 
Type: string (or Expression with resultType string). :type query: object @@ -3367,6 +3690,7 @@ class AzureDatabricksDeltaLakeSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'export_settings': {'key': 'exportSettings', 'type': 'AzureDatabricksDeltaLakeExportCommand'}, } @@ -3464,6 +3788,8 @@ class AzureDatabricksLinkedService(LinkedService): :param policy_id: The policy id for limiting the ability to configure clusters based on a user defined set of rules. Type: string (or Expression with resultType string). :type policy_id: object + :param credential: The credential reference containing authentication information. + :type credential: ~data_factory_management_client.models.CredentialReference """ _validation = { @@ -3496,6 +3822,7 @@ class AzureDatabricksLinkedService(LinkedService): 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, 'policy_id': {'key': 'typeProperties.policyId', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -3522,6 +3849,7 @@ def __init__( self.new_cluster_enable_elastic_disk = kwargs.get('new_cluster_enable_elastic_disk', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) self.policy_id = kwargs.get('policy_id', None) + self.credential = kwargs.get('credential', None) class ExecutionActivity(Activity): @@ -3674,6 +4002,8 @@ class AzureDataExplorerLinkedService(LinkedService): :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object + :param credential: The credential reference containing authentication information. + :type credential: ~data_factory_management_client.models.CredentialReference """ _validation = { @@ -3694,6 +4024,7 @@ class AzureDataExplorerLinkedService(LinkedService): 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'database': {'key': 'typeProperties.database', 'type': 'object'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -3707,6 +4038,7 @@ def __init__( self.service_principal_key = kwargs.get('service_principal_key', None) self.database = kwargs['database'] self.tenant = kwargs.get('tenant', None) + self.credential = kwargs.get('credential', None) class AzureDataExplorerSink(CopySink): @@ -3734,6 +4066,9 @@ class AzureDataExplorerSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param ingestion_mapping_name: A name of a pre-created csv mapping that was defined on the target Kusto table. Type: string. 
:type ingestion_mapping_name: object @@ -3757,6 +4092,7 @@ class AzureDataExplorerSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'}, 'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'}, 'flush_immediately': {'key': 'flushImmediately', 'type': 'object'}, @@ -3792,6 +4128,9 @@ class AzureDataExplorerSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Required. Database query. Should be a Kusto Query Language (KQL) query. Type: string (or Expression with resultType string). :type query: object @@ -3802,8 +4141,8 @@ class AzureDataExplorerSource(CopySource): pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).. :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -3817,10 +4156,11 @@ class AzureDataExplorerSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'no_truncation': {'key': 'noTruncation', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -4098,6 +4438,8 @@ class AzureDataLakeStoreLinkedService(LinkedService): encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param credential: The credential reference containing authentication information. 
+ :type credential: ~data_factory_management_client.models.CredentialReference """ _validation = { @@ -4121,6 +4463,7 @@ class AzureDataLakeStoreLinkedService(LinkedService): 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -4138,6 +4481,7 @@ def __init__( self.subscription_id = kwargs.get('subscription_id', None) self.resource_group_name = kwargs.get('resource_group_name', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.credential = kwargs.get('credential', None) class AzureDataLakeStoreLocation(DatasetLocation): @@ -4190,6 +4534,9 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -4235,6 +4582,7 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -4292,6 +4640,9 @@ class AzureDataLakeStoreSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object :param enable_adls_single_file_parallel: Single File Parallel. @@ -4310,6 +4661,7 @@ class AzureDataLakeStoreSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'}, } @@ -4343,6 +4695,9 @@ class AzureDataLakeStoreSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. 
Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -4358,6 +4713,7 @@ class AzureDataLakeStoreSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, } @@ -4383,6 +4739,9 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object :param expiry_date_time: Specifies the expiry time of the written files. The time is applied to @@ -4399,6 +4758,7 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'expiry_date_time': {'key': 'expiryDateTime', 'type': 'object'}, } @@ -4550,6 +4910,9 @@ class AzureFileStorageReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -4590,6 +4953,7 @@ class AzureFileStorageReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -4633,6 +4997,9 @@ class AzureFileStorageWriteSettings(StoreWriteSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object """ @@ -4645,6 +5012,7 @@ class AzureFileStorageWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } @@ -4754,6 +5122,13 @@ class AzureFunctionLinkedService(LinkedService): encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param credential: The credential reference containing authentication information. + :type credential: ~data_factory_management_client.models.CredentialReference + :param resource_id: Allowed token audiences for azure function. + :type resource_id: object + :param authentication: Type of authentication (Required to specify MSI) used to connect to + AzureFunction. Type: string (or Expression with resultType string). + :type authentication: object """ _validation = { @@ -4771,6 +5146,9 @@ class AzureFunctionLinkedService(LinkedService): 'function_app_url': {'key': 'typeProperties.functionAppUrl', 'type': 'object'}, 'function_key': {'key': 'typeProperties.functionKey', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, + 'resource_id': {'key': 'typeProperties.resourceId', 'type': 'object'}, + 'authentication': {'key': 'typeProperties.authentication', 'type': 'object'}, } def __init__( @@ -4782,6 +5160,9 @@ def __init__( self.function_app_url = kwargs['function_app_url'] self.function_key = kwargs.get('function_key', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.credential = kwargs.get('credential', None) + self.resource_id = kwargs.get('resource_id', None) + self.authentication = kwargs.get('authentication', None) class AzureKeyVaultLinkedService(LinkedService): @@ -4805,6 +5186,8 @@ class AzureKeyVaultLinkedService(LinkedService): :param base_url: Required. The base URL of the Azure Key Vault. e.g. https://myakv.vault.azure.net Type: string (or Expression with resultType string). :type base_url: object + :param credential: The credential reference containing authentication information. + :type credential: ~data_factory_management_client.models.CredentialReference """ _validation = { @@ -4820,6 +5203,7 @@ class AzureKeyVaultLinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'base_url': {'key': 'typeProperties.baseUrl', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -4829,6 +5213,7 @@ def __init__( super(AzureKeyVaultLinkedService, self).__init__(**kwargs) self.type = 'AzureKeyVault' # type: str self.base_url = kwargs['base_url'] + self.credential = kwargs.get('credential', None) class SecretBase(msrest.serialization.Model): @@ -4979,12 +5364,15 @@ class AzureMariaDbSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
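The AzureFunction linked service additions above allow managed-identity authentication in place of a function key, optionally routed through the new CredentialReference model. A sketch under the same import assumption; the URL, token audience, and credential name are placeholders:

    from data_factory_management_client.models import (
        AzureFunctionLinkedService,
        CredentialReference,
    )

    linked_service = AzureFunctionLinkedService(
        function_app_url="https://examplefunctionapp.azurewebsites.net",
        authentication="MSI",  # switch from function-key auth to managed identity
        resource_id="exampleTokenAudience",  # serialized to typeProperties.resourceId
        credential=CredentialReference(reference_name="exampleCredential"),
    )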
:type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -5000,8 +5388,9 @@ class AzureMariaDbSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -5278,6 +5667,9 @@ class AzureMlLinkedService(LinkedService): encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param authentication: Type of authentication (Required to specify MSI) used to connect to + AzureML. Type: string (or Expression with resultType string). + :type authentication: object """ _validation = { @@ -5300,6 +5692,7 @@ class AzureMlLinkedService(LinkedService): 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'authentication': {'key': 'typeProperties.authentication', 'type': 'object'}, } def __init__( @@ -5315,6 +5708,7 @@ def __init__( self.service_principal_key = kwargs.get('service_principal_key', None) self.tenant = kwargs.get('tenant', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.authentication = kwargs.get('authentication', None) class AzureMlServiceLinkedService(LinkedService): @@ -5580,6 +5974,9 @@ class AzureMySqlSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). 
:type pre_copy_script: object @@ -5597,6 +5994,7 @@ class AzureMySqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -5628,12 +6026,15 @@ class AzureMySqlSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object """ @@ -5648,8 +6049,9 @@ class AzureMySqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -5807,6 +6209,9 @@ class AzurePostgreSqlSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). :type pre_copy_script: object @@ -5824,6 +6229,7 @@ class AzurePostgreSqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -5855,12 +6261,15 @@ class AzurePostgreSqlSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
:type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -5876,8 +6285,9 @@ class AzurePostgreSqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -5984,6 +6394,9 @@ class AzureQueueSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object """ _validation = { @@ -5998,6 +6411,7 @@ class AzureQueueSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, } def __init__( @@ -6093,6 +6507,9 @@ class AzureSearchIndexSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: Specify the write behavior when upserting documents into Azure Search Index. Possible values include: "Merge", "Upload". 
:type write_behavior: str or @@ -6111,6 +6528,7 @@ class AzureSearchIndexSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, } @@ -6223,6 +6641,8 @@ class AzureSqlDatabaseLinkedService(LinkedService): :param always_encrypted_settings: Sql always encrypted properties. :type always_encrypted_settings: ~data_factory_management_client.models.SqlAlwaysEncryptedProperties + :param credential: The credential reference containing authentication information. + :type credential: ~data_factory_management_client.models.CredentialReference """ _validation = { @@ -6245,6 +6665,7 @@ class AzureSqlDatabaseLinkedService(LinkedService): 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, 'always_encrypted_settings': {'key': 'typeProperties.alwaysEncryptedSettings', 'type': 'SqlAlwaysEncryptedProperties'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -6261,6 +6682,7 @@ def __init__( self.azure_cloud_type = kwargs.get('azure_cloud_type', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) self.always_encrypted_settings = kwargs.get('always_encrypted_settings', None) + self.credential = kwargs.get('credential', None) class AzureSqlDwLinkedService(LinkedService): @@ -6303,6 +6725,8 @@ class AzureSqlDwLinkedService(LinkedService): encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param credential: The credential reference containing authentication information. + :type credential: ~data_factory_management_client.models.CredentialReference """ _validation = { @@ -6324,6 +6748,7 @@ class AzureSqlDwLinkedService(LinkedService): 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -6339,6 +6764,7 @@ def __init__( self.tenant = kwargs.get('tenant', None) self.azure_cloud_type = kwargs.get('azure_cloud_type', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.credential = kwargs.get('credential', None) class AzureSqlDwTableDataset(Dataset): @@ -6453,6 +6879,8 @@ class AzureSqlMiLinkedService(LinkedService): :param always_encrypted_settings: Sql always encrypted properties. :type always_encrypted_settings: ~data_factory_management_client.models.SqlAlwaysEncryptedProperties + :param credential: The credential reference containing authentication information. 
+ :type credential: ~data_factory_management_client.models.CredentialReference """ _validation = { @@ -6475,6 +6903,7 @@ class AzureSqlMiLinkedService(LinkedService): 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, 'always_encrypted_settings': {'key': 'typeProperties.alwaysEncryptedSettings', 'type': 'SqlAlwaysEncryptedProperties'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -6491,6 +6920,7 @@ def __init__( self.azure_cloud_type = kwargs.get('azure_cloud_type', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) self.always_encrypted_settings = kwargs.get('always_encrypted_settings', None) + self.credential = kwargs.get('credential', None) class AzureSqlMiTableDataset(Dataset): @@ -6587,6 +7017,9 @@ class AzureSqlSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). :type sql_writer_stored_procedure_name: object @@ -6605,6 +7038,14 @@ class AzureSqlSink(CopySink): :param table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). :type table_option: object + :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or + Expression with resultType boolean). + :type sql_writer_use_table_lock: object + :param write_behavior: Write behavior when copying data into Azure SQL. Type: + SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum). + :type write_behavior: object + :param upsert_settings: SQL upsert settings. 
+ :type upsert_settings: ~data_factory_management_client.models.SqlUpsertSettings """ _validation = { @@ -6619,12 +7060,16 @@ class AzureSqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, 'table_option': {'key': 'tableOption', 'type': 'object'}, + 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlUpsertSettings'}, } def __init__( @@ -6639,6 +7084,9 @@ def __init__( self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) self.table_option = kwargs.get('table_option', None) + self.sql_writer_use_table_lock = kwargs.get('sql_writer_use_table_lock', None) + self.write_behavior = kwargs.get('write_behavior', None) + self.upsert_settings = kwargs.get('upsert_settings', None) class AzureSqlSource(TabularSource): @@ -6660,12 +7108,15 @@ class AzureSqlSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). 
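The three new AzureSqlSink knobs are designed to work together for upsert-style copies. A minimal sketch; the key column is invented, and the SqlUpsertSettings field names follow the model added elsewhere in this diff:

    from data_factory_management_client.models import AzureSqlSink, SqlUpsertSettings

    sink = AzureSqlSink(
        write_behavior="Upsert",  # typed as object, so an Expression also works
        upsert_settings=SqlUpsertSettings(
            use_temp_db=True,  # stage incoming rows in a temp table before merging
            keys=["Id"],       # columns used to match existing rows
        ),
        sql_writer_use_table_lock=False,  # skip TABLOCK during the bulk copy
    )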
:type sql_reader_query: object :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database @@ -6695,8 +7146,9 @@ class AzureSqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, @@ -6938,6 +7390,9 @@ class AzureTableSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param azure_table_default_partition_key_value: Azure Table default partition key value. Type: string (or Expression with resultType string). :type azure_table_default_partition_key_value: object @@ -6964,6 +7419,7 @@ class AzureTableSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'}, 'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'}, 'azure_table_row_key_name': {'key': 'azureTableRowKeyName', 'type': 'object'}, @@ -7001,12 +7457,15 @@ class AzureTableSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param azure_table_source_query: Azure Table source query. Type: string (or Expression with resultType string). 
:type azure_table_source_query: object @@ -7025,8 +7484,9 @@ class AzureTableSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'}, 'azure_table_source_ignore_table_not_found': {'key': 'azureTableSourceIgnoreTableNotFound', 'type': 'object'}, } @@ -7263,6 +7723,9 @@ class BinarySink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Binary store settings. :type store_settings: ~data_factory_management_client.models.StoreWriteSettings """ @@ -7279,6 +7742,7 @@ class BinarySink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, } @@ -7310,6 +7774,9 @@ class BinarySource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Binary store settings. :type store_settings: ~data_factory_management_client.models.StoreReadSettings :param format_settings: Binary format settings. @@ -7326,6 +7793,7 @@ class BinarySource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'BinaryReadSettings'}, } @@ -7543,6 +8011,9 @@ class BlobSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param blob_writer_overwrite_files: Blob writer overwrite files. Type: boolean (or Expression with resultType boolean). 
:type blob_writer_overwrite_files: object @@ -7554,6 +8025,9 @@ class BlobSink(CopySink): :type blob_writer_add_header: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object + :param metadata: Specify the custom metadata to be added to sink data. Type: array of objects + (or Expression with resultType array of objects). + :type metadata: list[~data_factory_management_client.models.MetadataItem] """ _validation = { @@ -7568,10 +8042,12 @@ class BlobSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'}, 'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'}, 'blob_writer_add_header': {'key': 'blobWriterAddHeader', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'metadata': {'key': 'metadata', 'type': '[MetadataItem]'}, } def __init__( @@ -7584,6 +8060,7 @@ def __init__( self.blob_writer_date_time_format = kwargs.get('blob_writer_date_time_format', None) self.blob_writer_add_header = kwargs.get('blob_writer_add_header', None) self.copy_behavior = kwargs.get('copy_behavior', None) + self.metadata = kwargs.get('metadata', None) class BlobSource(CopySource): @@ -7605,6 +8082,9 @@ class BlobSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType boolean). :type treat_empty_as_null: object @@ -7626,6 +8106,7 @@ class BlobSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, @@ -7794,12 +8275,15 @@ class CassandraSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). 
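Beyond the shared flag, BlobSink also gains a typed metadata list. A sketch, assuming MetadataItem carries name/value pairs (its definition is not in this hunk):

from data_factory_management_client import models

blob_sink = models.BlobSink(
    copy_behavior="PreserveHierarchy",
    metadata=[
        # Assumed MetadataItem fields; serialized under the new 'metadata' key.
        models.MetadataItem(name="sourceSystem", value="crm"),
    ],
)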
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: Database query. Should be a SQL-92 query expression or Cassandra Query Language (CQL) command. Type: string (or Expression with resultType string). :type query: object @@ -7823,8 +8307,9 @@ class CassandraSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'consistency_level': {'key': 'consistencyLevel', 'type': 'str'}, } @@ -8140,8 +8625,8 @@ class CommonDataServiceForAppsLinkedService(LinkedService): :param deployment_type: Required. The deployment type of the Common Data Service for Apps instance. 'Online' for Common Data Service for Apps Online and 'OnPremisesWithIfd' for Common Data Service for Apps on-premises with Ifd. Type: string (or Expression with resultType - string). Possible values include: "Online", "OnPremisesWithIfd". - :type deployment_type: str or ~data_factory_management_client.models.DynamicsDeploymentType + string). + :type deployment_type: object :param host_name: The host name of the on-premises Common Data Service for Apps server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). @@ -8162,10 +8647,8 @@ class CommonDataServiceForAppsLinkedService(LinkedService): :param authentication_type: Required. The authentication type to connect to Common Data Service for Apps server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario. 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or - Expression with resultType string). Possible values include: "Office365", "Ifd", - "AADServicePrincipal". - :type authentication_type: str or - ~data_factory_management_client.models.DynamicsAuthenticationType + Expression with resultType string). + :type authentication_type: object :param username: User name to access the Common Data Service for Apps instance. Type: string (or Expression with resultType string). :type username: object @@ -8176,10 +8659,8 @@ class CommonDataServiceForAppsLinkedService(LinkedService): :type service_principal_id: object :param service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). Possible values include: - "ServicePrincipalKey", "ServicePrincipalCert". - :type service_principal_credential_type: str or - ~data_factory_management_client.models.DynamicsServicePrincipalCredentialType + for certificate. Type: string (or Expression with resultType string). + :type service_principal_credential_type: object :param service_principal_credential: The credential of the service principal object in Azure Active Directory. 
If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If @@ -8205,16 +8686,16 @@ class CommonDataServiceForAppsLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'}, 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, 'port': {'key': 'typeProperties.port', 'type': 'object'}, 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'object'}, 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -8264,6 +8745,9 @@ class CommonDataServiceForAppsSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: Required. The write behavior for the operation. Possible values include: "Upsert". :type write_behavior: str or ~data_factory_management_client.models.DynamicsSinkWriteBehavior @@ -8289,6 +8773,7 @@ class CommonDataServiceForAppsSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, @@ -8324,12 +8809,15 @@ class CommonDataServiceForAppsSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: FetchXML is a proprietary query language that is used in Microsoft Common Data Service for Apps (online & on-premises). 
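Relaxing deploymentType, authenticationType and servicePrincipalCredentialType from enum-backed strings to 'object' lets callers parameterize these linked-service settings with Data Factory expressions instead of only the documented literals. A sketch, with the expression shape being the standard ADF one:

from data_factory_management_client import models

ls = models.CommonDataServiceForAppsLinkedService(
    # Now legal: an expression object instead of the former "Online"/"OnPremisesWithIfd" literals.
    deployment_type={"type": "Expression", "value": "@linkedService().deploymentType"},
    authentication_type="Office365",  # plain literals still serialize exactly as before
    service_uri="https://contoso.crm.dynamics.com",  # hypothetical endpoint
)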
Type: string (or Expression with resultType string). :type query: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -8342,8 +8830,9 @@ class CommonDataServiceForAppsSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -8587,12 +9076,15 @@ class ConcurSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -8608,8 +9100,9 @@ class ConcurSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -9069,6 +9562,9 @@ class CosmosDbMongoDbApiLinkedService(LinkedService): :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] + :param is_server_version_above32: Whether the CosmosDB (MongoDB API) server version is higher + than 3.2. The default value is false. Type: boolean (or Expression with resultType boolean). + :type is_server_version_above32: object :param connection_string: Required. The CosmosDB (MongoDB API) connection string. Type: string, SecureString or AzureKeyVaultSecretReference. 
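The new isServerVersionAbove32 switch sits alongside the existing required connection settings; a sketch:

from data_factory_management_client import models

mongo_ls = models.CosmosDbMongoDbApiLinkedService(
    # connection_string is typed 'object'; a SecureString wrapper is one valid shape.
    connection_string={"type": "SecureString", "value": "mongodb://..."},  # placeholder value
    database="ordersdb",                # hypothetical database name
    is_server_version_above32=True,     # new optional flag; service default is false
)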
Type: string, SecureString or AzureKeyVaultSecretReference. @@ -9091,6 +9587,7 @@ class CosmosDbMongoDbApiLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'is_server_version_above32': {'key': 'typeProperties.isServerVersionAbove32', 'type': 'object'}, 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, 'database': {'key': 'typeProperties.database', 'type': 'object'}, } @@ -9101,6 +9598,7 @@ def __init__( ): super(CosmosDbMongoDbApiLinkedService, self).__init__(**kwargs) self.type = 'CosmosDbMongoDbApi' # type: str + self.is_server_version_above32 = kwargs.get('is_server_version_above32', None) self.connection_string = kwargs['connection_string'] self.database = kwargs['database'] @@ -9130,6 +9628,9 @@ class CosmosDbMongoDbApiSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: Specifies whether the document with same key to be overwritten (upsert) rather than throw exception (insert). The default value is "insert". Type: string (or Expression with resultType string). Type: string (or Expression with resultType string). @@ -9148,6 +9649,7 @@ class CosmosDbMongoDbApiSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } @@ -9179,6 +9681,9 @@ class CosmosDbMongoDbApiSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param filter: Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). @@ -9194,8 +9699,8 @@ class CosmosDbMongoDbApiSource(CopySource): pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :type additional_columns: object """ _validation = { @@ -9208,11 +9713,12 @@ class CosmosDbMongoDbApiSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'filter': {'key': 'filter', 'type': 'object'}, 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -9313,6 +9819,9 @@ class CosmosDbSqlApiSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: Describes how to write data to Azure Cosmos DB. Type: string (or Expression with resultType string). Allowed values: insert and upsert. :type write_behavior: object @@ -9330,6 +9839,7 @@ class CosmosDbSqlApiSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } @@ -9361,6 +9871,9 @@ class CosmosDbSqlApiSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: SQL API query. Type: string (or Expression with resultType string). :type query: object :param page_size: Page size of the result. Type: integer (or Expression with resultType @@ -9373,8 +9886,8 @@ class CosmosDbSqlApiSource(CopySource): Expression with resultType boolean). :type detect_datetime: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
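additionalColumns is retyped from list[AdditionalColumns] to a bare 'object' throughout this file, so the wire value is passed through untouched: a list of name/value dicts (the AdditionalColumns shape) or a single expression both work. A sketch:

from data_factory_management_client import models

src = models.CosmosDbSqlApiSource(
    query="select * from c",
    # Passed through as-is now that the field is typed 'object'.
    additional_columns=[{"name": "ingestSource", "value": "cosmos"}],
)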
+ :type additional_columns: object """ _validation = { @@ -9387,11 +9900,12 @@ class CosmosDbSqlApiSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'page_size': {'key': 'pageSize', 'type': 'object'}, 'preferred_regions': {'key': 'preferredRegions', 'type': 'object'}, 'detect_datetime': {'key': 'detectDatetime', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -9482,12 +9996,15 @@ class CouchbaseSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -9503,8 +10020,9 @@ class CouchbaseSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -9691,6 +10209,172 @@ def __init__( self.run_id = kwargs['run_id'] +class Credential(msrest.serialization.Model): + """The Azure Data Factory nested object which contains the information and credential which can be used to connect with related store or compute resource. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ManagedIdentityCredential, ServicePrincipalCredential. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of credential.Constant filled by server. + :type type: str + :param description: Credential description. + :type description: str + :param annotations: List of tags that can be used for describing the Credential. 
+ :type annotations: list[object] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + } + + _subtype_map = { + 'type': {'ManagedIdentity': 'ManagedIdentityCredential', 'ServicePrincipal': 'ServicePrincipalCredential'} + } + + def __init__( + self, + **kwargs + ): + super(Credential, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = 'Credential' # type: str + self.description = kwargs.get('description', None) + self.annotations = kwargs.get('annotations', None) + + +class CredentialReference(msrest.serialization.Model): + """Credential reference type. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :ivar type: Required. Credential reference type. Default value: "CredentialReference". + :vartype type: str + :param reference_name: Required. Reference credential name. + :type reference_name: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + type = "CredentialReference" + + def __init__( + self, + **kwargs + ): + super(CredentialReference, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.reference_name = kwargs['reference_name'] + + +class SubResource(msrest.serialization.Model): + """Azure Data Factory nested resource, which belongs to a factory. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SubResource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + self.etag = None + + +class CredentialResource(SubResource): + """Credential resource type. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Properties of credentials. 
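The new Credential hierarchy is polymorphic on 'type' via the _subtype_map above, and CredentialResource (below) wraps one instance as a factory sub-resource. A sketch, assuming ManagedIdentityCredential (declared in the subtype map but not defined in this hunk) exposes a resource_id for user-assigned identities:

from data_factory_management_client import models

cred = models.CredentialResource(
    properties=models.ManagedIdentityCredential(  # deserialized from type='ManagedIdentity'
        description="UAMI used by copy activities",
        # Assumed field; the truncated id is illustrative only.
        resource_id="/subscriptions/.../userAssignedIdentities/my-uami",
    ),
)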
+ :type properties: ~data_factory_management_client.models.Credential + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'Credential'}, + } + + def __init__( + self, + **kwargs + ): + super(CredentialResource, self).__init__(**kwargs) + self.properties = kwargs['properties'] + + class CustomActivity(ExecutionActivity): """Custom activity type. @@ -10546,46 +11230,6 @@ def __init__( self.dataset_parameters = kwargs.get('dataset_parameters', None) -class SubResource(msrest.serialization.Model): - """Azure Data Factory nested resource, which belongs to a factory. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(SubResource, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None - self.etag = None - - class DataFlowResource(SubResource): """Data flow resource type. @@ -10883,8 +11527,9 @@ class DatasetCompression(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. - :type type: str + :param type: Required. Type of dataset compression. Type: string (or Expression with resultType + string).Constant filled by server. + :type type: object """ _validation = { @@ -10893,7 +11538,7 @@ class DatasetCompression(msrest.serialization.Model): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'object'}, } _subtype_map = { @@ -10917,8 +11562,9 @@ class DatasetBZip2Compression(DatasetCompression): :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. - :type type: str + :param type: Required. Type of dataset compression. Type: string (or Expression with resultType + string).Constant filled by server. + :type type: object """ _validation = { @@ -10927,7 +11573,7 @@ class DatasetBZip2Compression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'object'}, } def __init__( @@ -10997,10 +11643,11 @@ class DatasetDeflateCompression(DatasetCompression): :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
:type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. - :type type: str - :param level: The Deflate compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel + :param type: Required. Type of dataset compression. Type: string (or Expression with resultType + string).Constant filled by server. + :type type: object + :param level: The Deflate compression level. + :type level: object """ _validation = { @@ -11009,8 +11656,8 @@ class DatasetDeflateCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'object'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__( @@ -11049,10 +11696,11 @@ class DatasetGZipCompression(DatasetCompression): :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. - :type type: str - :param level: The GZip compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel + :param type: Required. Type of dataset compression. Type: string (or Expression with resultType + string).Constant filled by server. + :type type: object + :param level: The GZip compression level. + :type level: object """ _validation = { @@ -11061,8 +11709,8 @@ class DatasetGZipCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'object'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__( @@ -11219,8 +11867,9 @@ class DatasetTarCompression(DatasetCompression): :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. - :type type: str + :param type: Required. Type of dataset compression. Type: string (or Expression with resultType + string).Constant filled by server. + :type type: object """ _validation = { @@ -11229,7 +11878,7 @@ class DatasetTarCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'object'}, } def __init__( @@ -11248,10 +11897,11 @@ class DatasetTarGZipCompression(DatasetCompression): :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. - :type type: str - :param level: The TarGZip compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel + :param type: Required. Type of dataset compression. Type: string (or Expression with resultType + string).Constant filled by server. + :type type: object + :param level: The TarGZip compression level. 
+ :type level: object """ _validation = { @@ -11260,8 +11910,8 @@ class DatasetTarGZipCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'object'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__( @@ -11281,10 +11931,11 @@ class DatasetZipDeflateCompression(DatasetCompression): :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. - :type type: str - :param level: The ZipDeflate compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel + :param type: Required. Type of dataset compression. Type: string (or Expression with resultType + string).Constant filled by server. + :type type: object + :param level: The ZipDeflate compression level. + :type level: object """ _validation = { @@ -11293,8 +11944,8 @@ class DatasetZipDeflateCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'object'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__( @@ -11413,12 +12064,15 @@ class Db2Source(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object """ @@ -11433,8 +12087,9 @@ class Db2Source(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -11652,12 +12307,11 @@ class DelimitedTextDataset(Dataset): https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). 
:type encoding_name: object - :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo", "bzip2", - "deflate", "zipDeflate", "lz4", "tar", "tarGZip". - :type compression_codec: str or ~data_factory_management_client.models.CompressionCodec - :param compression_level: The data compression method used for DelimitedText. Possible values - include: "Optimal", "Fastest". - :type compression_level: str or ~data_factory_management_client.models.DatasetCompressionLevel + :param compression_codec: The data compressionCodec. Type: string (or Expression with + resultType string). + :type compression_codec: object + :param compression_level: The data compression method used for DelimitedText. + :type compression_level: object :param quote_char: The quote character. Type: string (or Expression with resultType string). :type quote_char: object :param escape_char: The escape character. Type: string (or Expression with resultType string). @@ -11689,8 +12343,8 @@ class DelimitedTextDataset(Dataset): 'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'}, 'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'}, 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, - 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'}, - 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'str'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, + 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'object'}, 'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'}, 'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'}, 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, @@ -11778,6 +12432,9 @@ class DelimitedTextSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: DelimitedText store settings. :type store_settings: ~data_factory_management_client.models.StoreWriteSettings :param format_settings: DelimitedText format settings. @@ -11796,6 +12453,7 @@ class DelimitedTextSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSettings'}, } @@ -11829,13 +12487,16 @@ class DelimitedTextSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: DelimitedText store settings. 
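compressionCodec and compressionLevel on DelimitedTextDataset follow the same enum-to-object relaxation. A sketch, assuming the usual required Dataset base arguments and a hypothetical linked-service name:

from data_factory_management_client import models

ds = models.DelimitedTextDataset(
    linked_service_name=models.LinkedServiceReference(reference_name="ls_blob_store"),
    compression_codec="gzip",      # typed 'object' now; expressions are also accepted
    compression_level="Fastest",
    first_row_as_header=True,
)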
:type store_settings: ~data_factory_management_client.models.StoreReadSettings :param format_settings: DelimitedText format settings. :type format_settings: ~data_factory_management_client.models.DelimitedTextReadSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -11848,9 +12509,10 @@ class DelimitedTextSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -12070,6 +12732,9 @@ class DocumentDbCollectionSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param nesting_separator: Nested properties separator. Default is . (dot). Type: string (or Expression with resultType string). :type nesting_separator: object @@ -12090,6 +12755,7 @@ class DocumentDbCollectionSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } @@ -12123,6 +12789,9 @@ class DocumentDbCollectionSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Documents query. Type: string (or Expression with resultType string). :type query: object :param nesting_separator: Nested properties separator. Type: string (or Expression with @@ -12132,8 +12801,8 @@ class DocumentDbCollectionSource(CopySource): pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). 
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -12146,10 +12815,11 @@ class DocumentDbCollectionSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -12239,12 +12909,15 @@ class DrillSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -12260,8 +12933,9 @@ class DrillSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -12552,12 +13226,15 @@ class DynamicsAxSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). 
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -12578,8 +13255,9 @@ class DynamicsAxSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } @@ -12673,9 +13351,8 @@ class DynamicsCrmLinkedService(LinkedService): :type annotations: list[object] :param deployment_type: Required. The deployment type of the Dynamics CRM instance. 'Online' for Dynamics CRM Online and 'OnPremisesWithIfd' for Dynamics CRM on-premises with Ifd. Type: - string (or Expression with resultType string). Possible values include: "Online", - "OnPremisesWithIfd". - :type deployment_type: str or ~data_factory_management_client.models.DynamicsDeploymentType + string (or Expression with resultType string). + :type deployment_type: object :param host_name: The host name of the on-premises Dynamics CRM server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). @@ -12694,10 +13371,8 @@ class DynamicsCrmLinkedService(LinkedService): :param authentication_type: Required. The authentication type to connect to Dynamics CRM server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or - Expression with resultType string). Possible values include: "Office365", "Ifd", - "AADServicePrincipal". - :type authentication_type: str or - ~data_factory_management_client.models.DynamicsAuthenticationType + Expression with resultType string). + :type authentication_type: object :param username: User name to access the Dynamics CRM instance. Type: string (or Expression with resultType string). :type username: object @@ -12708,10 +13383,8 @@ class DynamicsCrmLinkedService(LinkedService): :type service_principal_id: object :param service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). Possible values include: - "ServicePrincipalKey", "ServicePrincipalCert". - :type service_principal_credential_type: str or - ~data_factory_management_client.models.DynamicsServicePrincipalCredentialType + for certificate. Type: string (or Expression with resultType string). + :type service_principal_credential_type: object :param service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. 
If @@ -12737,16 +13410,16 @@ class DynamicsCrmLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'}, 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, 'port': {'key': 'typeProperties.port', 'type': 'object'}, 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'object'}, 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -12796,6 +13469,9 @@ class DynamicsCrmSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: Required. The write behavior for the operation. Possible values include: "Upsert". :type write_behavior: str or ~data_factory_management_client.models.DynamicsSinkWriteBehavior @@ -12821,6 +13497,7 @@ class DynamicsCrmSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, @@ -12856,12 +13533,15 @@ class DynamicsCrmSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics CRM (online & on-premises). Type: string (or Expression with resultType string). :type query: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). 
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -12874,8 +13554,9 @@ class DynamicsCrmSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -12967,8 +13648,8 @@ class DynamicsLinkedService(LinkedService): :type annotations: list[object] :param deployment_type: Required. The deployment type of the Dynamics instance. 'Online' for Dynamics Online and 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or - Expression with resultType string). Possible values include: "Online", "OnPremisesWithIfd". - :type deployment_type: str or ~data_factory_management_client.models.DynamicsDeploymentType + Expression with resultType string). + :type deployment_type: object :param host_name: The host name of the on-premises Dynamics server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). :type host_name: object @@ -12986,9 +13667,8 @@ class DynamicsLinkedService(LinkedService): :param authentication_type: Required. The authentication type to connect to Dynamics server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or Expression with - resultType string). Possible values include: "Office365", "Ifd", "AADServicePrincipal". - :type authentication_type: str or - ~data_factory_management_client.models.DynamicsAuthenticationType + resultType string). + :type authentication_type: object :param username: User name to access the Dynamics instance. Type: string (or Expression with resultType string). :type username: object @@ -12999,10 +13679,8 @@ class DynamicsLinkedService(LinkedService): :type service_principal_id: object :param service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). Possible values include: - "ServicePrincipalKey", "ServicePrincipalCert". - :type service_principal_credential_type: str or - ~data_factory_management_client.models.DynamicsServicePrincipalCredentialType + for certificate. Type: string (or Expression with resultType string). + :type service_principal_credential_type: str :param service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. 
If @@ -13028,12 +13706,12 @@ class DynamicsLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'}, 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, 'port': {'key': 'typeProperties.port', 'type': 'object'}, 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, @@ -13087,6 +13765,9 @@ class DynamicsSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: Required. The write behavior for the operation. Possible values include: "Upsert". :type write_behavior: str or ~data_factory_management_client.models.DynamicsSinkWriteBehavior @@ -13112,6 +13793,7 @@ class DynamicsSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, @@ -13147,12 +13829,15 @@ class DynamicsSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics (online & on-premises). Type: string (or Expression with resultType string). :type query: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
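The same loosening applies to the Dynamics enums: `deploymentType`, `authenticationType`, and `servicePrincipalCredentialType` drop their "Possible values include" lists and serialize as `object`, so expression values become legal alongside the literal strings. A hedged sketch, under the same import assumption; `SecureString` is the standard `SecretBase` subtype in these models:

    from data_factory_management_client.models import DynamicsLinkedService, SecureString

    linked_service = DynamicsLinkedService(
        # Formerly constrained to "Online"/"OnPremisesWithIfd"; an expression
        # object is now accepted as well.
        deployment_type={"value": "@linkedService().deploymentType", "type": "Expression"},
        authentication_type="Office365",
        username="user@contoso.com",                  # hypothetical account
        password=SecureString(value="<placeholder>"),
    )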
+ :type additional_columns: object """ _validation = { @@ -13165,8 +13850,9 @@ class DynamicsSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -13334,12 +14020,15 @@ class EloquaSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -13355,8 +14044,9 @@ class EloquaSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -13498,9 +14188,12 @@ class ExcelDataset(Dataset): :type folder: ~data_factory_management_client.models.DatasetFolder :param location: The location of the excel storage. :type location: ~data_factory_management_client.models.DatasetLocation - :param sheet_name: The sheet of excel file. Type: string (or Expression with resultType + :param sheet_name: The sheet name of excel file. Type: string (or Expression with resultType string). :type sheet_name: object + :param sheet_index: The sheet index of excel file and default value is 0. Type: integer (or + Expression with resultType integer). + :type sheet_index: object :param range: The partial data of one sheet. Type: string (or Expression with resultType string). 
:type range: object @@ -13531,6 +14224,7 @@ class ExcelDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, 'sheet_name': {'key': 'typeProperties.sheetName', 'type': 'object'}, + 'sheet_index': {'key': 'typeProperties.sheetIndex', 'type': 'object'}, 'range': {'key': 'typeProperties.range', 'type': 'object'}, 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, @@ -13545,6 +14239,7 @@ def __init__( self.type = 'Excel' # type: str self.location = kwargs.get('location', None) self.sheet_name = kwargs.get('sheet_name', None) + self.sheet_index = kwargs.get('sheet_index', None) self.range = kwargs.get('range', None) self.first_row_as_header = kwargs.get('first_row_as_header', None) self.compression = kwargs.get('compression', None) @@ -13570,11 +14265,14 @@ class ExcelSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Excel store settings. :type store_settings: ~data_factory_management_client.models.StoreReadSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -13587,8 +14285,9 @@ class ExcelSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -14216,6 +14915,10 @@ class FactoryGitHubConfiguration(FactoryRepoConfiguration): :type last_commit_id: str :param host_name: GitHub Enterprise host name. For example: https://github.mydomain.com. :type host_name: str + :param client_id: GitHub bring your own app client id. + :type client_id: str + :param client_secret: GitHub bring your own app client secret information. 
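`ExcelDataset` gains `sheetIndex` as a positional alternative to `sheetName`. A small sketch, assuming `AzureBlobStorageLocation` and `LinkedServiceReference` come from the same models package and that the reference type discriminator is the literal "LinkedServiceReference":

    from data_factory_management_client.models import (
        AzureBlobStorageLocation,
        ExcelDataset,
        LinkedServiceReference,
    )

    dataset = ExcelDataset(
        linked_service_name=LinkedServiceReference(
            type="LinkedServiceReference", reference_name="ExampleBlobStorage",
        ),
        location=AzureBlobStorageLocation(container="data", file_name="report.xlsx"),
        sheet_index=0,  # new: select the sheet by position; sheetName still works
        first_row_as_header=True,
    )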
+ :type client_secret: ~data_factory_management_client.models.GitHubClientSecret """ _validation = { @@ -14234,6 +14937,8 @@ class FactoryGitHubConfiguration(FactoryRepoConfiguration): 'root_folder': {'key': 'rootFolder', 'type': 'str'}, 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, 'host_name': {'key': 'hostName', 'type': 'str'}, + 'client_id': {'key': 'clientId', 'type': 'str'}, + 'client_secret': {'key': 'clientSecret', 'type': 'GitHubClientSecret'}, } def __init__( @@ -14243,6 +14948,8 @@ def __init__( super(FactoryGitHubConfiguration, self).__init__(**kwargs) self.type = 'FactoryGitHubConfiguration' # type: str self.host_name = kwargs.get('host_name', None) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) class FactoryIdentity(msrest.serialization.Model): @@ -14527,6 +15234,9 @@ class FileServerReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -14567,6 +15277,7 @@ class FileServerReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -14610,6 +15321,9 @@ class FileServerWriteSettings(StoreWriteSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object """ @@ -14622,6 +15336,7 @@ class FileServerWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } @@ -14745,6 +15460,9 @@ class FileSystemSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. 
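`disableMetricsCollection` also lands on the store read/write settings, not just copy sources and sinks. One representative use:

    from data_factory_management_client.models import FileServerReadSettings

    read_settings = FileServerReadSettings(
        recursive=True,
        wildcard_file_name="*.csv",
        # Opt out of data store metrics; defaults to false and, like most
        # Data Factory properties, also accepts an expression object.
        disable_metrics_collection=True,
    )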
:type copy_behavior: object """ @@ -14761,6 +15479,7 @@ class FileSystemSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } @@ -14792,12 +15511,15 @@ class FileSystemSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -14810,8 +15532,9 @@ class FileSystemSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -14951,6 +15674,9 @@ class FtpReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -14984,6 +15710,7 @@ class FtpReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -15247,6 +15974,8 @@ class GitHubAccessTokenRequest(msrest.serialization.Model): :type git_hub_access_code: str :param git_hub_client_id: GitHub application client ID. :type git_hub_client_id: str + :param git_hub_client_secret: GitHub bring your own app client secret information. 
+ :type git_hub_client_secret: ~data_factory_management_client.models.GitHubClientSecret :param git_hub_access_token_base_url: Required. GitHub access token base URL. :type git_hub_access_token_base_url: str """ @@ -15259,6 +15988,7 @@ class GitHubAccessTokenRequest(msrest.serialization.Model): _attribute_map = { 'git_hub_access_code': {'key': 'gitHubAccessCode', 'type': 'str'}, 'git_hub_client_id': {'key': 'gitHubClientId', 'type': 'str'}, + 'git_hub_client_secret': {'key': 'gitHubClientSecret', 'type': 'GitHubClientSecret'}, 'git_hub_access_token_base_url': {'key': 'gitHubAccessTokenBaseUrl', 'type': 'str'}, } @@ -15269,6 +15999,7 @@ def __init__( super(GitHubAccessTokenRequest, self).__init__(**kwargs) self.git_hub_access_code = kwargs['git_hub_access_code'] self.git_hub_client_id = kwargs.get('git_hub_client_id', None) + self.git_hub_client_secret = kwargs.get('git_hub_client_secret', None) self.git_hub_access_token_base_url = kwargs['git_hub_access_token_base_url'] @@ -15291,6 +16022,29 @@ def __init__( self.git_hub_access_token = kwargs.get('git_hub_access_token', None) +class GitHubClientSecret(msrest.serialization.Model): + """Client secret information for factory's bring your own app repository configuration. + + :param byoa_secret_akv_url: Bring your own app client secret AKV URL. + :type byoa_secret_akv_url: str + :param byoa_secret_name: Bring your own app client secret name in AKV. + :type byoa_secret_name: str + """ + + _attribute_map = { + 'byoa_secret_akv_url': {'key': 'byoaSecretAkvUrl', 'type': 'str'}, + 'byoa_secret_name': {'key': 'byoaSecretName', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(GitHubClientSecret, self).__init__(**kwargs) + self.byoa_secret_akv_url = kwargs.get('byoa_secret_akv_url', None) + self.byoa_secret_name = kwargs.get('byoa_secret_name', None) + + class GlobalParameterSpecification(msrest.serialization.Model): """Definition of a single parameter for an entity. @@ -15502,12 +16256,15 @@ class GoogleAdWordsSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
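The new `GitHubClientSecret` model plugs into both `FactoryGitHubConfiguration` and `GitHubAccessTokenRequest`, matching the `--git-hub-client-secret` CLI parameter. A sketch with hypothetical vault and repo names:

    from data_factory_management_client.models import (
        FactoryGitHubConfiguration,
        GitHubClientSecret,
    )

    secret = GitHubClientSecret(
        byoa_secret_akv_url="https://examplevault.vault.azure.net",
        byoa_secret_name="github-app-secret",
    )
    repo_config = FactoryGitHubConfiguration(
        account_name="exampleAccount",
        repository_name="exampleRepo",
        collaboration_branch="main",
        root_folder="/",
        client_id="<byoa-app-client-id>",  # bring-your-own-app client id
        client_secret=secret,
    )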
:type query: object @@ -15523,8 +16280,9 @@ class GoogleAdWordsSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -15731,12 +16489,15 @@ class GoogleBigQuerySource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -15752,8 +16513,9 @@ class GoogleBigQuerySource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -15890,6 +16652,9 @@ class GoogleCloudStorageReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). 
:type recursive: object @@ -15930,6 +16695,7 @@ class GoogleCloudStorageReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -16035,12 +16801,15 @@ class GreenplumSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -16056,8 +16825,9 @@ class GreenplumSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -16313,12 +17083,15 @@ class HBaseSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -16334,8 +17107,9 @@ class HBaseSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -16465,6 +17239,9 @@ class HdfsReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -16504,6 +17281,7 @@ class HdfsReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -16553,6 +17331,9 @@ class HdfsSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -16570,6 +17351,7 @@ class HdfsSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, } @@ -16952,6 +17734,8 @@ class HdInsightOnDemandLinkedService(LinkedService): :param subnet_name: The ARM resource ID for the subnet in the vNet. If virtualNetworkId was specified, then this property is required. Type: string (or Expression with resultType string). :type subnet_name: object + :param credential: The credential reference containing authentication information. 
+ :type credential: ~data_factory_management_client.models.CredentialReference """ _validation = { @@ -17005,6 +17789,7 @@ class HdInsightOnDemandLinkedService(LinkedService): 'script_actions': {'key': 'typeProperties.scriptActions', 'type': '[ScriptAction]'}, 'virtual_network_id': {'key': 'typeProperties.virtualNetworkId', 'type': 'object'}, 'subnet_name': {'key': 'typeProperties.subnetName', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -17046,6 +17831,7 @@ def __init__( self.script_actions = kwargs.get('script_actions', None) self.virtual_network_id = kwargs.get('virtual_network_id', None) self.subnet_name = kwargs.get('subnet_name', None) + self.credential = kwargs.get('credential', None) class HdInsightPigActivity(ExecutionActivity): @@ -17530,12 +18316,15 @@ class HiveSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -17551,8 +18340,9 @@ class HiveSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -17755,6 +18545,9 @@ class HttpReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). 
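`HdInsightOnDemandLinkedService` picks up the new `credential` reference, letting the on-demand cluster authenticate with a factory-level user-assigned identity credential. A sketch; `CredentialReference` is not shown in this hunk, and its `type` discriminator is assumed to be the literal "CredentialReference":

    from data_factory_management_client.models import CredentialReference

    cred_ref = CredentialReference(
        type="CredentialReference",
        reference_name="exampleUamiCredential",  # hypothetical credential name
    )
    # Then pass it through typeProperties.credential:
    #   HdInsightOnDemandLinkedService(credential=cred_ref, ...)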
:type request_method: object @@ -17782,6 +18575,7 @@ class HttpReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'request_method': {'key': 'requestMethod', 'type': 'object'}, 'request_body': {'key': 'requestBody', 'type': 'object'}, 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, @@ -17865,6 +18659,9 @@ class HttpSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param http_request_timeout: Specifies the timeout for a HTTP client to get HTTP response from HTTP server. The default value is equivalent to System.Net.HttpWebRequest.Timeout. Type: string (or Expression with resultType string), pattern: @@ -17882,6 +18679,7 @@ class HttpSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } @@ -18053,12 +18851,15 @@ class HubspotSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -18074,8 +18875,9 @@ class HubspotSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -18335,12 +19137,15 @@ class ImpalaSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -18356,8 +19161,9 @@ class ImpalaSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -18469,6 +19275,9 @@ class InformixSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). 
:type pre_copy_script: object @@ -18486,6 +19295,7 @@ class InformixSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -18517,12 +19327,15 @@ class InformixSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object """ @@ -18537,8 +19350,9 @@ class InformixSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -18825,6 +19639,9 @@ class IntegrationRuntimeDataFlowProperties(msrest.serialization.Model): :param time_to_live: Time to live (in minutes) setting of the cluster which will execute data flow job. :type time_to_live: int + :param cleanup: Cluster will not be recycled and it will be used in next data flow activity run + until TTL (time to live) is reached if this is set as false. Default is true. + :type cleanup: bool """ _validation = { @@ -18836,6 +19653,7 @@ class IntegrationRuntimeDataFlowProperties(msrest.serialization.Model): 'compute_type': {'key': 'computeType', 'type': 'str'}, 'core_count': {'key': 'coreCount', 'type': 'int'}, 'time_to_live': {'key': 'timeToLive', 'type': 'int'}, + 'cleanup': {'key': 'cleanup', 'type': 'bool'}, } def __init__( @@ -18847,6 +19665,7 @@ def __init__( self.compute_type = kwargs.get('compute_type', None) self.core_count = kwargs.get('core_count', None) self.time_to_live = kwargs.get('time_to_live', None) + self.cleanup = kwargs.get('cleanup', None) class IntegrationRuntimeDataProxyProperties(msrest.serialization.Model): @@ -19047,6 +19866,93 @@ def __init__( self.received_bytes = None +class IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint(msrest.serialization.Model): + """Azure-SSIS integration runtime outbound network dependency endpoints for one category. 
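`IntegrationRuntimeDataFlowProperties.cleanup` controls cluster recycling: with `cleanup=False` the data flow cluster is reused by subsequent runs until `timeToLive` expires. For example:

    from data_factory_management_client.models import IntegrationRuntimeDataFlowProperties

    data_flow_props = IntegrationRuntimeDataFlowProperties(
        compute_type="General",
        core_count=8,
        time_to_live=10,  # minutes
        cleanup=False,    # keep the cluster warm for the next data flow run
    )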
+ + :param category: The category of outbound network dependency. + :type category: str + :param endpoints: The endpoints for outbound network dependency. + :type endpoints: + list[~data_factory_management_client.models.IntegrationRuntimeOutboundNetworkDependenciesEndpoint] + """ + + _attribute_map = { + 'category': {'key': 'category', 'type': 'str'}, + 'endpoints': {'key': 'endpoints', 'type': '[IntegrationRuntimeOutboundNetworkDependenciesEndpoint]'}, + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint, self).__init__(**kwargs) + self.category = kwargs.get('category', None) + self.endpoints = kwargs.get('endpoints', None) + + +class IntegrationRuntimeOutboundNetworkDependenciesEndpoint(msrest.serialization.Model): + """The endpoint for Azure-SSIS integration runtime outbound network dependency. + + :param domain_name: The domain name of endpoint. + :type domain_name: str + :param endpoint_details: The details of endpoint. + :type endpoint_details: + list[~data_factory_management_client.models.IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails] + """ + + _attribute_map = { + 'domain_name': {'key': 'domainName', 'type': 'str'}, + 'endpoint_details': {'key': 'endpointDetails', 'type': '[IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails]'}, + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeOutboundNetworkDependenciesEndpoint, self).__init__(**kwargs) + self.domain_name = kwargs.get('domain_name', None) + self.endpoint_details = kwargs.get('endpoint_details', None) + + +class IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails(msrest.serialization.Model): + """The details of Azure-SSIS integration runtime outbound network dependency endpoint. + + :param port: The port of endpoint. + :type port: int + """ + + _attribute_map = { + 'port': {'key': 'port', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails, self).__init__(**kwargs) + self.port = kwargs.get('port', None) + + +class IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse(msrest.serialization.Model): + """Azure-SSIS integration runtime outbound network dependency endpoints. + + :param value: The list of outbound network dependency endpoints. + :type value: + list[~data_factory_management_client.models.IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint]'}, + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + + class IntegrationRuntimeReference(msrest.serialization.Model): """Integration runtime reference type. @@ -19225,6 +20131,8 @@ class IntegrationRuntimeSsisProperties(msrest.serialization.Model): list[~data_factory_management_client.models.CustomSetupBase] :param package_stores: Package stores for the SSIS Integration Runtime. :type package_stores: list[~data_factory_management_client.models.PackageStore] + :param credential: The credential reference containing authentication information. 
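The four new outbound-network-dependency models nest as response, then category, then endpoint, then port details. A sketch of flattening such a payload, assuming `response` is an `IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse`:

    def list_outbound_dependencies(response):
        # Yields (category, domain_name, port) triples from the nested payload.
        for category in response.value or []:
            for endpoint in category.endpoints or []:
                for detail in endpoint.endpoint_details or []:
                    yield category.category, endpoint.domain_name, detail.port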
+ :type credential: ~data_factory_management_client.models.CredentialReference """ _attribute_map = { @@ -19236,6 +20144,7 @@ class IntegrationRuntimeSsisProperties(msrest.serialization.Model): 'edition': {'key': 'edition', 'type': 'str'}, 'express_custom_setup_properties': {'key': 'expressCustomSetupProperties', 'type': '[CustomSetupBase]'}, 'package_stores': {'key': 'packageStores', 'type': '[PackageStore]'}, + 'credential': {'key': 'credential', 'type': 'CredentialReference'}, } def __init__( @@ -19251,6 +20160,7 @@ def __init__( self.edition = kwargs.get('edition', None) self.express_custom_setup_properties = kwargs.get('express_custom_setup_properties', None) self.package_stores = kwargs.get('package_stores', None) + self.credential = kwargs.get('credential', None) class IntegrationRuntimeStatus(msrest.serialization.Model): @@ -19379,6 +20289,9 @@ class IntegrationRuntimeVNetProperties(msrest.serialization.Model): :param public_i_ps: Resource IDs of the public IP addresses that this integration runtime will use. :type public_i_ps: list[str] + :param subnet_id: The ID of subnet, to which this Azure-SSIS integration runtime will be + joined. + :type subnet_id: str """ _attribute_map = { @@ -19386,6 +20299,7 @@ class IntegrationRuntimeVNetProperties(msrest.serialization.Model): 'v_net_id': {'key': 'vNetId', 'type': 'str'}, 'subnet': {'key': 'subnet', 'type': 'str'}, 'public_i_ps': {'key': 'publicIPs', 'type': '[str]'}, + 'subnet_id': {'key': 'subnetId', 'type': 'str'}, } def __init__( @@ -19397,6 +20311,7 @@ def __init__( self.v_net_id = kwargs.get('v_net_id', None) self.subnet = kwargs.get('subnet', None) self.public_i_ps = kwargs.get('public_i_ps', None) + self.subnet_id = kwargs.get('subnet_id', None) class JiraLinkedService(LinkedService): @@ -19560,12 +20475,15 @@ class JiraSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
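Azure-SSIS runtimes get the same `credential` hook in `IntegrationRuntimeSsisProperties`, and `IntegrationRuntimeVNetProperties.subnetId` offers an ARM-resource-ID alternative to the `vNetId` plus `subnet` pair. A hedged sketch with placeholder IDs:

    from data_factory_management_client.models import (
        CredentialReference,
        IntegrationRuntimeSsisProperties,
        IntegrationRuntimeVNetProperties,
    )

    ssis_props = IntegrationRuntimeSsisProperties(
        edition="Standard",
        credential=CredentialReference(
            type="CredentialReference", reference_name="exampleUamiCredential",
        ),
    )
    vnet_props = IntegrationRuntimeVNetProperties(
        # Full subnet resource ID instead of vNetId plus subnet name.
        subnet_id="/subscriptions/.../virtualNetworks/exampleVnet/subnets/exampleSubnet",
    )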
:type query: object @@ -19581,8 +20499,9 @@ class JiraSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -19680,9 +20599,8 @@ class JsonFormat(DatasetStorageFormat): :param deserializer: Deserializer. Type: string (or Expression with resultType string). :type deserializer: object :param file_pattern: File pattern of JSON. To be more specific, the way of separating a - collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive. Possible - values include: "setOfObjects", "arrayOfObjects". - :type file_pattern: str or ~data_factory_management_client.models.JsonFormatFilePattern + collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive. + :type file_pattern: object :param nesting_separator: The character used to separate nesting levels. Default value is '.' (dot). Type: string (or Expression with resultType string). :type nesting_separator: object @@ -19712,7 +20630,7 @@ class JsonFormat(DatasetStorageFormat): 'type': {'key': 'type', 'type': 'str'}, 'serializer': {'key': 'serializer', 'type': 'object'}, 'deserializer': {'key': 'deserializer', 'type': 'object'}, - 'file_pattern': {'key': 'filePattern', 'type': 'str'}, + 'file_pattern': {'key': 'filePattern', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, 'encoding_name': {'key': 'encodingName', 'type': 'object'}, 'json_node_reference': {'key': 'jsonNodeReference', 'type': 'object'}, @@ -19790,6 +20708,9 @@ class JsonSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Json store settings. :type store_settings: ~data_factory_management_client.models.StoreWriteSettings :param format_settings: Json format settings. @@ -19808,6 +20729,7 @@ class JsonSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'JsonWriteSettings'}, } @@ -19841,13 +20763,16 @@ class JsonSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :type disable_metrics_collection: object :param store_settings: Json store settings. :type store_settings: ~data_factory_management_client.models.StoreReadSettings :param format_settings: Json format settings. :type format_settings: ~data_factory_management_client.models.JsonReadSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -19860,9 +20785,10 @@ class JsonSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'JsonReadSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -19887,9 +20813,8 @@ class JsonWriteSettings(FormatWriteSettings): :param type: Required. The write setting type.Constant filled by server. :type type: str :param file_pattern: File pattern of JSON. This setting controls the way a collection of JSON - objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. Possible - values include: "setOfObjects", "arrayOfObjects". - :type file_pattern: str or ~data_factory_management_client.models.JsonWriteFilePattern + objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. + :type file_pattern: object """ _validation = { @@ -19899,7 +20824,7 @@ class JsonWriteSettings(FormatWriteSettings): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'file_pattern': {'key': 'filePattern', 'type': 'str'}, + 'file_pattern': {'key': 'filePattern', 'type': 'object'}, } def __init__( @@ -20538,12 +21463,15 @@ class MagentoSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
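`filePattern` on both `JsonFormat` and `JsonWriteSettings` loses its two-value enum and becomes an `object`, so it can be parameterized. Under the same import assumption:

    from data_factory_management_client.models import JsonWriteSettings

    literal = JsonWriteSettings(file_pattern="arrayOfObjects")
    dynamic = JsonWriteSettings(
        file_pattern={"value": "@pipeline().parameters.jsonPattern", "type": "Expression"},
    )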
:type query: object @@ -20559,8 +21487,9 @@ class MagentoSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -20573,6 +21502,45 @@ def __init__( self.query = kwargs.get('query', None) +class ManagedIdentityCredential(Credential): + """Managed identity credential. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of credential.Constant filled by server. + :type type: str + :param description: Credential description. + :type description: str + :param annotations: List of tags that can be used for describing the Credential. + :type annotations: list[object] + :param resource_id: The resource id of user assigned managed identity. + :type resource_id: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'resource_id': {'key': 'typeProperties.resourceId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ManagedIdentityCredential, self).__init__(**kwargs) + self.type = 'ManagedIdentity' # type: str + self.resource_id = kwargs.get('resource_id', None) + + class ManagedIntegrationRuntime(IntegrationRuntime): """Managed integration runtime, including managed elastic and managed dedicated integration runtimes. @@ -21219,12 +22187,15 @@ class MariaDbSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
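A brief sketch of the new ManagedIdentityCredential model above; the resource id is a placeholder, and the import path is assumed from the docstring references.

from data_factory_management_client import models  # assumed vendored-client path

cred = models.ManagedIdentityCredential(
    description='User-assigned identity for linked services',
    resource_id='/subscriptions/<sub-id>/resourceGroups/<rg>/providers/'
                'Microsoft.ManagedIdentity/userAssignedIdentities/<identity-name>',
)
assert cred.type == 'ManagedIdentity'  # discriminator set by the constructor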
:type query: object @@ -21240,8 +22211,9 @@ class MariaDbSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -21466,12 +22438,15 @@ class MarketoSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -21487,8 +22462,9 @@ class MarketoSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -21501,6 +22477,29 @@ def __init__( self.query = kwargs.get('query', None) +class MetadataItem(msrest.serialization.Model): + """Specify the name and value of custom metadata item. + + :param name: Metadata item key name. Type: string (or Expression with resultType string). + :type name: object + :param value: Metadata item value. Type: string (or Expression with resultType string). + :type value: object + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'object'}, + 'value': {'key': 'value', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MetadataItem, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.value = kwargs.get('value', None) + + class MicrosoftAccessLinkedService(LinkedService): """Microsoft Access linked service. @@ -21600,6 +22599,9 @@ class MicrosoftAccessSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). 
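A short sketch of the new MetadataItem model above: both fields are typed as object, so they accept either literal strings or ADF expression objects (import path assumed from the docstrings).

from data_factory_management_client import models  # assumed vendored-client path

static_item = models.MetadataItem(name='owner', value='data-platform-team')
dynamic_item = models.MetadataItem(
    name='runId',
    value={'value': '@pipeline().RunId', 'type': 'Expression'},  # resolved at run time
)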
:type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). :type pre_copy_script: object @@ -21617,6 +22619,7 @@ class MicrosoftAccessSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -21648,11 +22651,14 @@ class MicrosoftAccessSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -21665,8 +22671,9 @@ class MicrosoftAccessSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -21852,6 +22859,65 @@ def __init__( self.database = kwargs['database'] +class MongoDbAtlasSink(CopySink): + """A copy activity MongoDB Atlas sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type sink_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count for the sink data
+ store. Type: integer (or Expression with resultType integer).
+ :type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
+ :param write_behavior: Specifies whether documents with the same key should be overwritten
+ (upsert) rather than raise an exception (insert). The default value is "insert". Type: string
+ (or Expression with resultType string).
+ :type write_behavior: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+ 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+ 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+ 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(MongoDbAtlasSink, self).__init__(**kwargs)
+ self.type = 'MongoDbAtlasSink' # type: str
+ self.write_behavior = kwargs.get('write_behavior', None)
+
+
 class MongoDbAtlasSource(CopySource):
 """A copy activity source for a MongoDB Atlas database.
@@ -21871,6 +22937,9 @@ class MongoDbAtlasSource(CopySource):
 :param max_concurrent_connections: The maximum concurrent connection count for the source data
 store. Type: integer (or Expression with resultType integer).
 :type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
 :param filter: Specifies selection filter using query operators. To return all documents in a
 collection, omit this parameter or pass an empty document ({}). Type: string (or Expression
 with resultType string).
@@ -21886,8 +22955,8 @@ class MongoDbAtlasSource(CopySource):
 pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
 :type query_timeout: object
 :param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects(AdditionalColumns) (or Expression with resultType array of objects).
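A sketch of the MongoDbAtlasSink added above, opting into upsert behavior instead of the default insert (import path assumed from the docstrings).

from data_factory_management_client import models  # assumed vendored-client path

sink = models.MongoDbAtlasSink(
    write_behavior='upsert',  # overwrite documents that share a key instead of raising
    write_batch_size=1000,    # plain integer; an Expression object also works
)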
+ :type additional_columns: object """ _validation = { @@ -21900,11 +22969,12 @@ class MongoDbAtlasSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'filter': {'key': 'filter', 'type': 'object'}, 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -22136,12 +23206,15 @@ class MongoDbSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Database query. Should be a SQL-92 query expression. Type: string (or Expression with resultType string). :type query: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -22154,8 +23227,9 @@ class MongoDbSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -22281,6 +23355,65 @@ def __init__( self.database = kwargs['database'] +class MongoDbV2Sink(CopySink): + """A copy activity MongoDB sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
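A sketch pairing the MongoDB Atlas source above with the loosened additional_columns shape, passing plain name/value dictionaries (import path assumed; filter and column values are placeholders).

from data_factory_management_client import models  # assumed vendored-client path

source = models.MongoDbAtlasSource(
    filter='{"status": "active"}',  # MongoDB query-operator filter as a string
    batch_size=100,
    additional_columns=[{'name': 'sourceRegion', 'value': 'eastus'}],
)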
+ :type sink_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count for the sink data
+ store. Type: integer (or Expression with resultType integer).
+ :type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
+ :param write_behavior: Specifies whether documents with the same key should be overwritten
+ (upsert) rather than raise an exception (insert). The default value is "insert". Type: string
+ (or Expression with resultType string).
+ :type write_behavior: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+ 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+ 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+ 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(MongoDbV2Sink, self).__init__(**kwargs)
+ self.type = 'MongoDbV2Sink' # type: str
+ self.write_behavior = kwargs.get('write_behavior', None)
+
+
 class MongoDbV2Source(CopySource):
 """A copy activity source for a MongoDB database.
@@ -22300,6 +23433,9 @@ class MongoDbV2Source(CopySource):
 :param max_concurrent_connections: The maximum concurrent connection count for the source data
 store. Type: integer (or Expression with resultType integer).
 :type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
 :param filter: Specifies selection filter using query operators. To return all documents in a
 collection, omit this parameter or pass an empty document ({}). Type: string (or Expression
 with resultType string).
@@ -22315,8 +23451,8 @@
 pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
 :type query_timeout: object
 :param additional_columns: Specifies the additional columns to be added to source data. Type:
- array of objects (or Expression with resultType array of objects).
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
+ array of objects(AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object """ _validation = { @@ -22329,11 +23465,12 @@ class MongoDbV2Source(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'filter': {'key': 'filter', 'type': 'object'}, 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -22424,12 +23561,15 @@ class MySqlSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object """ @@ -22444,8 +23584,9 @@ class MySqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -22623,12 +23764,15 @@ class NetezzaSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). 
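A sketch of a MySqlSource showing the query_timeout pattern documented above, where '02:00:00' satisfies ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])) as a two-hour timeout (import path assumed; the query is a placeholder).

from data_factory_management_client import models  # assumed vendored-client path

source = models.MySqlSource(
    query='SELECT id, name FROM customers',
    query_timeout='02:00:00',  # hh:mm:ss, matching the documented pattern
    disable_metrics_collection=True,
)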
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -22649,8 +23793,9 @@ class NetezzaSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'object'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'NetezzaPartitionSettings'}, @@ -22932,6 +24077,9 @@ class ODataSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: OData query. For example, "$top=1". Type: string (or Expression with resultType string). :type query: object @@ -22941,8 +24089,8 @@ class ODataSource(CopySource): ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type http_request_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -22955,9 +24103,10 @@ class ODataSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -23069,6 +24218,9 @@ class OdbcSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). 
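A sketch of an ODataSource using the '$top=1'-style query documented above together with a TimeSpan-format HTTP timeout (import path assumed; values are placeholders).

from data_factory_management_client import models  # assumed vendored-client path

source = models.ODataSource(
    query='$top=10',                  # OData system query option
    http_request_timeout='00:05:00',  # TimeSpan format, like the other timeouts here
)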
:type pre_copy_script: object @@ -23086,6 +24238,7 @@ class OdbcSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -23117,12 +24270,15 @@ class OdbcSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object """ @@ -23137,8 +24293,9 @@ class OdbcSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -23363,6 +24520,9 @@ class Office365Source(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param allowed_groups: The groups containing all the users. Type: array of strings (or Expression with resultType array of strings). 
:type allowed_groups: object @@ -23394,6 +24554,7 @@ class Office365Source(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'allowed_groups': {'key': 'allowedGroups', 'type': 'object'}, 'user_scope_filter_uri': {'key': 'userScopeFilterUri', 'type': 'object'}, 'date_filter_column': {'key': 'dateFilterColumn', 'type': 'object'}, @@ -23783,6 +24944,9 @@ class OracleCloudStorageReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -23823,6 +24987,7 @@ class OracleCloudStorageReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -24103,12 +25268,15 @@ class OracleServiceCloudSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
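A sketch of an Office365Source scoped by the allowed_groups and date_filter_column parameters shown above (import path assumed; group and column names are placeholders).

from data_factory_management_client import models  # assumed vendored-client path

source = models.Office365Source(
    allowed_groups=['finance-analysts', 'finance-managers'],  # or an Expression object
    date_filter_column='CreatedDateTime',
)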
:type query: object @@ -24124,8 +25292,9 @@ class OracleServiceCloudSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -24163,6 +25332,9 @@ class OracleSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). :type pre_copy_script: object @@ -24180,6 +25352,7 @@ class OracleSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -24211,6 +25384,9 @@ class OracleSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param oracle_reader_query: Oracle reader query. Type: string (or Expression with resultType string). :type oracle_reader_query: object @@ -24223,8 +25399,8 @@ class OracleSource(CopySource): :param partition_settings: The settings that will be leveraged for Oracle source partitioning. :type partition_settings: ~data_factory_management_client.models.OraclePartitionSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :type additional_columns: object """ _validation = { @@ -24237,11 +25413,12 @@ class OracleSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'object'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -24355,8 +25532,9 @@ class OrcDataset(Dataset): :type folder: ~data_factory_management_client.models.DatasetFolder :param location: The location of the ORC data storage. :type location: ~data_factory_management_client.models.DatasetLocation - :param orc_compression_codec: Possible values include: "none", "zlib", "snappy", "lzo". - :type orc_compression_codec: str or ~data_factory_management_client.models.OrcCompressionCodec + :param orc_compression_codec: The data orcCompressionCodec. Type: string (or Expression with + resultType string). + :type orc_compression_codec: object """ _validation = { @@ -24375,7 +25553,7 @@ class OrcDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'orc_compression_codec': {'key': 'typeProperties.orcCompressionCodec', 'type': 'str'}, + 'orc_compression_codec': {'key': 'typeProperties.orcCompressionCodec', 'type': 'object'}, } def __init__( @@ -24448,6 +25626,9 @@ class OrcSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: ORC store settings. :type store_settings: ~data_factory_management_client.models.StoreWriteSettings :param format_settings: ORC format settings. @@ -24466,6 +25647,7 @@ class OrcSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'OrcWriteSettings'}, } @@ -24499,11 +25681,14 @@ class OrcSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: ORC store settings. 
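A sketch of the loosened OrcDataset codec above: a plain string now passes where only the OrcCompressionCodec enum did. The LinkedServiceReference shape (type discriminator plus reference name) is assumed from this models module, and the reference name is a placeholder.

from data_factory_management_client import models  # assumed vendored-client path

dataset = models.OrcDataset(
    linked_service_name=models.LinkedServiceReference(
        type='LinkedServiceReference', reference_name='AzureBlobStorage1'),
    orc_compression_codec='snappy',  # or an Expression object resolving to a codec name
)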
:type store_settings: ~data_factory_management_client.models.StoreReadSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -24516,8 +25701,9 @@ class OrcSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -24659,9 +25845,9 @@ class ParquetDataset(Dataset): :type folder: ~data_factory_management_client.models.DatasetFolder :param location: The location of the parquet storage. :type location: ~data_factory_management_client.models.DatasetLocation - :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo", "bzip2", - "deflate", "zipDeflate", "lz4", "tar", "tarGZip". - :type compression_codec: str or ~data_factory_management_client.models.CompressionCodec + :param compression_codec: The data compressionCodec. Type: string (or Expression with + resultType string). + :type compression_codec: object """ _validation = { @@ -24680,7 +25866,7 @@ class ParquetDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, } def __init__( @@ -24753,6 +25939,9 @@ class ParquetSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Parquet store settings. :type store_settings: ~data_factory_management_client.models.StoreWriteSettings :param format_settings: Parquet format settings. @@ -24771,6 +25960,7 @@ class ParquetSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'ParquetWriteSettings'}, } @@ -24804,11 +25994,14 @@ class ParquetSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
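A sketch of the matching ParquetDataset change, resolving compression_codec from a dataset parameter at run time, which the object-typed field above now permits (import path, reference shape, and parameter name are assumptions).

from data_factory_management_client import models  # assumed vendored-client path

dataset = models.ParquetDataset(
    linked_service_name=models.LinkedServiceReference(
        type='LinkedServiceReference', reference_name='AzureBlobStorage1'),
    compression_codec={'value': '@dataset().codec', 'type': 'Expression'},
)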
:type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Parquet store settings. :type store_settings: ~data_factory_management_client.models.StoreReadSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -24821,8 +26014,9 @@ class ParquetSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -25029,12 +26223,15 @@ class PaypalSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -25050,8 +26247,9 @@ class PaypalSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -25258,12 +26456,15 @@ class PhoenixSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. 
Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -25279,8 +26480,9 @@ class PhoenixSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -25598,18 +26800,26 @@ class PipelineRunInvokedBy(msrest.serialization.Model): :vartype id: str :ivar invoked_by_type: The type of the entity that started the run. :vartype invoked_by_type: str + :ivar pipeline_name: The name of the pipeline that triggered the run, if any. + :vartype pipeline_name: str + :ivar pipeline_run_id: The run id of the pipeline that triggered the run, if any. + :vartype pipeline_run_id: str """ _validation = { 'name': {'readonly': True}, 'id': {'readonly': True}, 'invoked_by_type': {'readonly': True}, + 'pipeline_name': {'readonly': True}, + 'pipeline_run_id': {'readonly': True}, } _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'id': {'key': 'id', 'type': 'str'}, 'invoked_by_type': {'key': 'invokedByType', 'type': 'str'}, + 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, + 'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'}, } def __init__( @@ -25620,6 +26830,8 @@ def __init__( self.name = None self.id = None self.invoked_by_type = None + self.pipeline_name = None + self.pipeline_run_id = None class PipelineRunsQueryResponse(msrest.serialization.Model): @@ -25768,12 +26980,15 @@ class PostgreSqlSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
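A sketch reading the two new read-only PipelineRunInvokedBy fields above from a fetched run; the client handle and operation shape follow the management-client pattern used by this extension and are assumptions here.

# 'client' is an already-constructed DataFactoryManagementClient (assumed).
run = client.pipeline_runs.get(
    resource_group_name='exampleResourceGroup',
    factory_name='exampleFactoryName',
    run_id='2f7fdb90-5df1-4b8e-ac2f-064cfa58202b',
)
if run.invoked_by.pipeline_name:
    # Populated only when another pipeline (e.g. an ExecutePipeline activity)
    # started this run; both fields are read-only and filled by the service.
    print(run.invoked_by.pipeline_name, run.invoked_by.pipeline_run_id)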
+ :type additional_columns: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object """ @@ -25788,8 +27003,9 @@ class PostgreSqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -26072,12 +27288,15 @@ class PrestoSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -26093,8 +27312,9 @@ class PrestoSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -26544,12 +27764,15 @@ class QuickBooksSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -26565,8 +27788,9 @@ class QuickBooksSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -26739,11 +27963,14 @@ class RelationalSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -26756,8 +27983,9 @@ class RelationalSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -27085,12 +28313,15 @@ class ResponsysSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -27106,8 +28337,9 @@ class ResponsysSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -27254,6 +28486,8 @@ class RestServiceLinkedService(LinkedService): encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param credential: The credential reference containing authentication information. + :type credential: ~data_factory_management_client.models.CredentialReference """ _validation = { @@ -27281,6 +28515,7 @@ class RestServiceLinkedService(LinkedService): 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -27301,6 +28536,7 @@ def __init__( self.azure_cloud_type = kwargs.get('azure_cloud_type', None) self.aad_resource_id = kwargs.get('aad_resource_id', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.credential = kwargs.get('credential', None) class RestSink(CopySink): @@ -27328,6 +28564,9 @@ class RestSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param request_method: The HTTP method used to call the RESTful API. The default is POST. Type: string (or Expression with resultType string). :type request_method: object @@ -27358,6 +28597,7 @@ class RestSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'request_method': {'key': 'requestMethod', 'type': 'object'}, 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, @@ -27397,6 +28637,9 @@ class RestSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). 
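A sketch wiring the new credential reference above into a REST linked service; the CredentialReference shape follows the models referenced in the docstring, and the URL, resource id, and credential name are placeholders.

from data_factory_management_client import models  # assumed vendored-client path

linked_service = models.RestServiceLinkedService(
    url='https://api.contoso.example/odata',
    authentication_type='ManagedServiceIdentity',
    aad_resource_id='https://management.azure.com/',
    credential=models.CredentialReference(
        type='CredentialReference', reference_name='myUserAssignedIdentityCredential'),
)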
:type request_method: object @@ -27417,8 +28660,8 @@ class RestSource(CopySource): :param request_interval: The time to await before sending next page request. :type request_interval: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -27431,13 +28674,14 @@ class RestSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'request_method': {'key': 'requestMethod', 'type': 'object'}, 'request_body': {'key': 'requestBody', 'type': 'object'}, 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, 'pagination_rules': {'key': 'paginationRules', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, 'request_interval': {'key': 'requestInterval', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -27832,12 +29076,15 @@ class SalesforceMarketingCloudSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -27853,8 +29100,9 @@ class SalesforceMarketingCloudSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -28087,6 +29335,9 @@ class SalesforceServiceCloudSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). 
:type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: The write behavior for the operation. Default is Insert. Possible values include: "Insert", "Upsert". :type write_behavior: str or ~data_factory_management_client.models.SalesforceSinkWriteBehavior @@ -28114,6 +29365,7 @@ class SalesforceServiceCloudSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, @@ -28149,14 +29401,17 @@ class SalesforceServiceCloudSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object :param read_behavior: The read behavior for the operation. Default is Query. Possible values include: "Query", "QueryAll". :type read_behavior: str or ~data_factory_management_client.models.SalesforceSourceReadBehavior :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -28169,9 +29424,10 @@ class SalesforceServiceCloudSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -28210,6 +29466,9 @@ class SalesforceSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: The write behavior for the operation. Default is Insert. Possible values include: "Insert", "Upsert". 
:type write_behavior: str or ~data_factory_management_client.models.SalesforceSinkWriteBehavior @@ -28237,6 +29496,7 @@ class SalesforceSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, @@ -28272,12 +29532,15 @@ class SalesforceSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object :param read_behavior: The read behavior for the operation. Default is Query. Possible values @@ -28295,8 +29558,9 @@ class SalesforceSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, } @@ -28458,12 +29722,15 @@ class SapBwSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :type additional_columns: object :param query: MDX query. Type: string (or Expression with resultType string). :type query: object """ @@ -28478,8 +29745,9 @@ class SapBwSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -28640,6 +29908,9 @@ class SapCloudForCustomerSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: The write behavior for the operation. Default is 'Insert'. Possible values include: "Insert", "Update". :type write_behavior: str or @@ -28663,6 +29934,7 @@ class SapCloudForCustomerSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } @@ -28696,12 +29968,15 @@ class SapCloudForCustomerSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or Expression with resultType string). 
:type query: object @@ -28722,8 +29997,9 @@ class SapCloudForCustomerSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } @@ -28880,12 +30156,15 @@ class SapEccSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with resultType string). :type query: object @@ -28906,8 +30185,9 @@ class SapEccSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } @@ -29033,12 +30313,15 @@ class SapHanaSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). 
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: SAP HANA Sql query. Type: string (or Expression with resultType string). :type query: object :param packet_size: The packet size of data read from SAP HANA. Type: integer(or Expression @@ -29062,8 +30345,9 @@ class SapHanaSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'packet_size': {'key': 'packetSize', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'object'}, @@ -29263,12 +30547,15 @@ class SapOpenHubSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param exclude_last_request: Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with resultType boolean). :type exclude_last_request: object @@ -29295,8 +30582,9 @@ class SapOpenHubSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'}, 'base_request_id': {'key': 'baseRequestId', 'type': 'object'}, 'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'}, @@ -29625,12 +30913,15 @@ class SapTableSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
:type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param row_count: The number of rows to be retrieved. Type: integer(or Expression with resultType integer). :type row_count: object @@ -29672,8 +30963,9 @@ class SapTableSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'row_count': {'key': 'rowCount', 'type': 'object'}, 'row_skips': {'key': 'rowSkips', 'type': 'object'}, 'rfc_table_fields': {'key': 'rfcTableFields', 'type': 'object'}, @@ -29806,9 +31098,8 @@ class ScriptAction(msrest.serialization.Model): :type name: str :param uri: Required. The URI for the script action. :type uri: str - :param roles: Required. The node types on which the script action should be executed. Possible - values include: "Headnode", "Workernode", "Zookeeper". - :type roles: str or ~data_factory_management_client.models.HdiNodeTypes + :param roles: Required. The node types on which the script action should be executed. + :type roles: str :param parameters: The parameters for the script action. :type parameters: str """ @@ -30364,12 +31655,15 @@ class ServiceNowSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
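# [editor's note -- illustrative sketch, not part of the patch]
# The ScriptAction hunk above relaxes 'roles' from the HdiNodeTypes enum to a
# plain string, so role values are passed through without client-side validation:
from data_factory_management_client.models import ScriptAction

action = ScriptAction(
    name="installDeps",
    uri="https://example.blob.core.windows.net/scripts/install.sh",
    roles="Workernode",  # previously limited to "Headnode"/"Workernode"/"Zookeeper"
    parameters="--verbose",
)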
:type query: object @@ -30385,8 +31679,9 @@ class ServiceNowSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -30399,6 +31694,54 @@ def __init__( self.query = kwargs.get('query', None) +class ServicePrincipalCredential(Credential): + """Service principal credential. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of credential.Constant filled by server. + :type type: str + :param description: Credential description. + :type description: str + :param annotations: List of tags that can be used for describing the Credential. + :type annotations: list[object] + :param service_principal_id: The app ID of the service principal used to authenticate. + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to authenticate. + :type service_principal_key: + ~data_factory_management_client.models.AzureKeyVaultSecretReference + :param tenant: The ID of the tenant to which the service principal belongs. + :type tenant: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'AzureKeyVaultSecretReference'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(ServicePrincipalCredential, self).__init__(**kwargs) + self.type = 'ServicePrincipal' # type: str + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + + class SetVariableActivity(Activity): """Set value for a Variable. @@ -30499,6 +31842,9 @@ class SftpReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). 
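# [editor's note -- illustrative sketch, not part of the patch]
# The new ServicePrincipalCredential model above keeps the client secret in
# Key Vault via AzureKeyVaultSecretReference. Names and GUIDs below are
# hypothetical (and, depending on the generated models, LinkedServiceReference
# may also require its constant 'type' argument):
from data_factory_management_client.models import (
    AzureKeyVaultSecretReference,
    LinkedServiceReference,
    ServicePrincipalCredential,
)

credential = ServicePrincipalCredential(
    description="SP used by pipelines to reach the data lake",
    service_principal_id="00000000-0000-0000-0000-000000000000",
    service_principal_key=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name="myKeyVaultLinkedService"),
        secret_name="sp-client-secret",
    ),
    tenant="11111111-1111-1111-1111-111111111111",
)
# credential.type is pinned to 'ServicePrincipal' by __init__, per the hunk above.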
:type recursive: object @@ -30536,6 +31882,7 @@ class SftpReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -30678,6 +32025,9 @@ class SftpWriteSettings(StoreWriteSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object :param operation_timeout: Specifies the timeout for writing each chunk to SFTP server. Default @@ -30697,6 +32047,7 @@ class SftpWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'operation_timeout': {'key': 'operationTimeout', 'type': 'object'}, 'use_temp_file_rename': {'key': 'useTempFileRename', 'type': 'object'}, @@ -30864,6 +32215,9 @@ class SharePointOnlineListSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: The OData query to filter the data in SharePoint Online list. For example, "$top=1". Type: string (or Expression with resultType string). :type query: object @@ -30883,6 +32237,7 @@ class SharePointOnlineListSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } @@ -31047,12 +32402,15 @@ class ShopifySource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -31068,8 +32426,9 @@ class ShopifySource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -31339,6 +32698,9 @@ class SnowflakeSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). :type pre_copy_script: object @@ -31358,6 +32720,7 @@ class SnowflakeSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'import_settings': {'key': 'importSettings', 'type': 'SnowflakeImportCopyCommand'}, } @@ -31391,6 +32754,9 @@ class SnowflakeSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Snowflake Sql query. Type: string (or Expression with resultType string). :type query: object :param export_settings: Snowflake export settings. @@ -31407,6 +32773,7 @@ class SnowflakeSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'export_settings': {'key': 'exportSettings', 'type': 'SnowflakeExportCopyCommand'}, } @@ -31624,12 +32991,15 @@ class SparkSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. 
Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -31645,8 +33015,9 @@ class SparkSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -31722,6 +33093,9 @@ class SqlDwSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). :type pre_copy_script: object @@ -31739,6 +33113,14 @@ class SqlDwSink(CopySink): :param table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). :type table_option: object + :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or + Expression with resultType boolean). + :type sql_writer_use_table_lock: object + :param write_behavior: Write behavior when copying data into azure SQL DW. Type: + SqlDWWriteBehaviorEnum (or Expression with resultType SqlDWWriteBehaviorEnum). + :type write_behavior: object + :param upsert_settings: SQL DW upsert settings. 
+ :type upsert_settings: ~data_factory_management_client.models.SqlDwUpsertSettings """ _validation = { @@ -31753,12 +33135,16 @@ class SqlDwSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'}, 'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'}, 'allow_copy_command': {'key': 'allowCopyCommand', 'type': 'object'}, 'copy_command_settings': {'key': 'copyCommandSettings', 'type': 'DwCopyCommandSettings'}, 'table_option': {'key': 'tableOption', 'type': 'object'}, + 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlDwUpsertSettings'}, } def __init__( @@ -31773,6 +33159,9 @@ def __init__( self.allow_copy_command = kwargs.get('allow_copy_command', None) self.copy_command_settings = kwargs.get('copy_command_settings', None) self.table_option = kwargs.get('table_option', None) + self.sql_writer_use_table_lock = kwargs.get('sql_writer_use_table_lock', None) + self.write_behavior = kwargs.get('write_behavior', None) + self.upsert_settings = kwargs.get('upsert_settings', None) class SqlDwSource(TabularSource): @@ -31794,12 +33183,15 @@ class SqlDwSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param sql_reader_query: SQL Data Warehouse reader query. Type: string (or Expression with resultType string). 
 :type sql_reader_query: object
@@ -31828,8 +33220,9 @@ class SqlDwSource(TabularSource):
         'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
         'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
         'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
-        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
+        'additional_columns': {'key': 'additionalColumns', 'type': 'object'},
         'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'},
         'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'},
         'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'},
@@ -31850,6 +33243,31 @@ def __init__(
         self.partition_settings = kwargs.get('partition_settings', None)
 
 
+class SqlDwUpsertSettings(msrest.serialization.Model):
+    """Sql DW upsert option settings.
+
+    :param interim_schema_name: Schema name for interim table. Type: string (or Expression with
+     resultType string).
+    :type interim_schema_name: object
+    :param keys: Key column names for unique row identification. Type: array of strings (or
+     Expression with resultType array of strings).
+    :type keys: object
+    """
+
+    _attribute_map = {
+        'interim_schema_name': {'key': 'interimSchemaName', 'type': 'object'},
+        'keys': {'key': 'keys', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(SqlDwUpsertSettings, self).__init__(**kwargs)
+        self.interim_schema_name = kwargs.get('interim_schema_name', None)
+        self.keys = kwargs.get('keys', None)
+
+
 class SqlMiSink(CopySink):
     """A copy activity Azure SQL Managed Instance sink.
 
@@ -31875,6 +33293,9 @@ class SqlMiSink(CopySink):
     :param max_concurrent_connections: The maximum concurrent connection count for the sink data
      store. Type: integer (or Expression with resultType integer).
     :type max_concurrent_connections: object
+    :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+     false. Type: boolean (or Expression with resultType boolean).
+    :type disable_metrics_collection: object
     :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or
      Expression with resultType string).
     :type sql_writer_stored_procedure_name: object
@@ -31893,6 +33314,14 @@ class SqlMiSink(CopySink):
     :param table_option: The option to handle sink table, such as autoCreate. For now only
      'autoCreate' value is supported. Type: string (or Expression with resultType string).
     :type table_option: object
+    :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or
+     Expression with resultType boolean).
+    :type sql_writer_use_table_lock: object
+    :param write_behavior: Write behavior when copying data into azure SQL MI. Type:
+     SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum).
+    :type write_behavior: object
+    :param upsert_settings: SQL upsert settings.
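# [editor's note -- illustrative sketch, not part of the patch]
# The SqlDwSink hunks above add 'sql_writer_use_table_lock', 'write_behavior'
# and 'upsert_settings' (the new SqlDwUpsertSettings model). A sketch of an
# upsert-mode sink, values hypothetical:
from data_factory_management_client.models import SqlDwSink, SqlDwUpsertSettings

sink = SqlDwSink(
    write_behavior="Upsert",         # SqlDWWriteBehaviorEnum value, typed as object
    sql_writer_use_table_lock=True,  # hold a table lock during the bulk copy
    upsert_settings=SqlDwUpsertSettings(
        interim_schema_name="staging",  # schema hosting the interim table
        keys=["CustomerId"],            # key columns identifying a unique row
    ),
)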
+ :type upsert_settings: ~data_factory_management_client.models.SqlUpsertSettings """ _validation = { @@ -31907,12 +33336,16 @@ class SqlMiSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, 'table_option': {'key': 'tableOption', 'type': 'object'}, + 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlUpsertSettings'}, } def __init__( @@ -31927,6 +33360,9 @@ def __init__( self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) self.table_option = kwargs.get('table_option', None) + self.sql_writer_use_table_lock = kwargs.get('sql_writer_use_table_lock', None) + self.write_behavior = kwargs.get('write_behavior', None) + self.upsert_settings = kwargs.get('upsert_settings', None) class SqlMiSource(TabularSource): @@ -31948,12 +33384,15 @@ class SqlMiSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). 
:type sql_reader_query: object :param sql_reader_stored_procedure_name: Name of the stored procedure for a Azure SQL Managed @@ -31983,8 +33422,9 @@ class SqlMiSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, @@ -32134,6 +33574,9 @@ class SqlServerSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). :type sql_writer_stored_procedure_name: object @@ -32152,6 +33595,14 @@ class SqlServerSink(CopySink): :param table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). :type table_option: object + :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or + Expression with resultType boolean). + :type sql_writer_use_table_lock: object + :param write_behavior: Write behavior when copying data into sql server. Type: + SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum). + :type write_behavior: object + :param upsert_settings: SQL upsert settings. 
+ :type upsert_settings: ~data_factory_management_client.models.SqlUpsertSettings """ _validation = { @@ -32166,12 +33617,16 @@ class SqlServerSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, 'table_option': {'key': 'tableOption', 'type': 'object'}, + 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlUpsertSettings'}, } def __init__( @@ -32186,6 +33641,9 @@ def __init__( self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) self.table_option = kwargs.get('table_option', None) + self.sql_writer_use_table_lock = kwargs.get('sql_writer_use_table_lock', None) + self.write_behavior = kwargs.get('write_behavior', None) + self.upsert_settings = kwargs.get('upsert_settings', None) class SqlServerSource(TabularSource): @@ -32207,12 +33665,15 @@ class SqlServerSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). 
:type sql_reader_query: object :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database @@ -32242,8 +33703,9 @@ class SqlServerSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, @@ -32420,6 +33882,9 @@ class SqlSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). :type sql_writer_stored_procedure_name: object @@ -32438,6 +33903,14 @@ class SqlSink(CopySink): :param table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). :type table_option: object + :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or + Expression with resultType boolean). + :type sql_writer_use_table_lock: object + :param write_behavior: Write behavior when copying data into sql. Type: SqlWriteBehaviorEnum + (or Expression with resultType SqlWriteBehaviorEnum). + :type write_behavior: object + :param upsert_settings: SQL upsert settings. 
+ :type upsert_settings: ~data_factory_management_client.models.SqlUpsertSettings """ _validation = { @@ -32452,12 +33925,16 @@ class SqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, 'table_option': {'key': 'tableOption', 'type': 'object'}, + 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlUpsertSettings'}, } def __init__( @@ -32472,6 +33949,9 @@ def __init__( self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) self.table_option = kwargs.get('table_option', None) + self.sql_writer_use_table_lock = kwargs.get('sql_writer_use_table_lock', None) + self.write_behavior = kwargs.get('write_behavior', None) + self.upsert_settings = kwargs.get('upsert_settings', None) class SqlSource(TabularSource): @@ -32493,12 +33973,15 @@ class SqlSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). 
:type sql_reader_query: object :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database @@ -32530,8 +34013,9 @@ class SqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, @@ -32554,6 +34038,36 @@ def __init__( self.partition_settings = kwargs.get('partition_settings', None) +class SqlUpsertSettings(msrest.serialization.Model): + """Sql upsert option settings. + + :param use_temp_db: Specifies whether to use temp db for upsert interim table. Type: boolean + (or Expression with resultType boolean). + :type use_temp_db: object + :param interim_schema_name: Schema name for interim table. Type: string (or Expression with + resultType string). + :type interim_schema_name: object + :param keys: Key column names for unique row identification. Type: array of strings (or + Expression with resultType array of strings). + :type keys: object + """ + + _attribute_map = { + 'use_temp_db': {'key': 'useTempDB', 'type': 'object'}, + 'interim_schema_name': {'key': 'interimSchemaName', 'type': 'object'}, + 'keys': {'key': 'keys', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(SqlUpsertSettings, self).__init__(**kwargs) + self.use_temp_db = kwargs.get('use_temp_db', None) + self.interim_schema_name = kwargs.get('interim_schema_name', None) + self.keys = kwargs.get('keys', None) + + class SquareLinkedService(LinkedService): """Square Service linked service. @@ -32716,12 +34230,15 @@ class SquareSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
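# [editor's note -- illustrative sketch, not part of the patch]
# SqlSink, SqlMiSink and SqlServerSink all gain the same trio of options; the
# shared SqlUpsertSettings model above additionally offers 'use_temp_db'.
# Sketch, values hypothetical:
from data_factory_management_client.models import SqlSink, SqlUpsertSettings

sink = SqlSink(
    write_behavior="Upsert",          # SqlWriteBehaviorEnum value, typed as object
    sql_writer_use_table_lock=False,
    upsert_settings=SqlUpsertSettings(
        use_temp_db=True,   # stage the interim table in tempdb
        keys=["OrderId"],   # unique-row key columns
    ),
)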
:type query: object @@ -32737,8 +34254,9 @@ class SquareSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -33679,12 +35197,15 @@ class SybaseSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object """ @@ -33699,8 +35220,9 @@ class SybaseSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -34030,12 +35552,15 @@ class TeradataSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: Teradata query. Type: string (or Expression with resultType string). 
:type query: object :param partition_option: The partition mechanism that will be used for teradata read in @@ -34056,8 +35581,9 @@ class TeradataSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'object'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'}, @@ -35107,12 +36633,15 @@ class VerticaSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -35128,8 +36657,9 @@ class VerticaSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -35348,10 +36878,7 @@ def __init__( class WebActivityAuthentication(msrest.serialization.Model): """Web activity authentication properties. - All required parameters must be populated in order to send to Azure. - - :param type: Required. Web activity authentication - (Basic/ClientCertificate/MSI/ServicePrincipal). + :param type: Web activity authentication (Basic/ClientCertificate/MSI/ServicePrincipal). :type type: str :param pfx: Base64-encoded contents of a PFX file or Certificate when used for ServicePrincipal. @@ -35368,12 +36895,10 @@ class WebActivityAuthentication(msrest.serialization.Model): :param user_tenant: TenantId for which Azure Auth token will be requested when using ServicePrincipal Authentication. Type: string (or Expression with resultType string). 
:type user_tenant: object + :param credential: The credential reference containing authentication information. + :type credential: ~data_factory_management_client.models.CredentialReference """ - _validation = { - 'type': {'required': True}, - } - _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'pfx': {'key': 'pfx', 'type': 'SecretBase'}, @@ -35381,6 +36906,7 @@ class WebActivityAuthentication(msrest.serialization.Model): 'password': {'key': 'password', 'type': 'SecretBase'}, 'resource': {'key': 'resource', 'type': 'object'}, 'user_tenant': {'key': 'userTenant', 'type': 'object'}, + 'credential': {'key': 'credential', 'type': 'CredentialReference'}, } def __init__( @@ -35388,12 +36914,13 @@ def __init__( **kwargs ): super(WebActivityAuthentication, self).__init__(**kwargs) - self.type = kwargs['type'] + self.type = kwargs.get('type', None) self.pfx = kwargs.get('pfx', None) self.username = kwargs.get('username', None) self.password = kwargs.get('password', None) self.resource = kwargs.get('resource', None) self.user_tenant = kwargs.get('user_tenant', None) + self.credential = kwargs.get('credential', None) class WebLinkedServiceTypeProperties(msrest.serialization.Model): @@ -35699,9 +37226,12 @@ class WebSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -35714,7 +37244,8 @@ class WebSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -35950,12 +37481,15 @@ class XeroSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). 
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -35971,8 +37505,9 @@ class XeroSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -36133,13 +37668,16 @@ class XmlSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Xml store settings. :type store_settings: ~data_factory_management_client.models.StoreReadSettings :param format_settings: Xml format settings. :type format_settings: ~data_factory_management_client.models.XmlReadSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -36152,9 +37690,10 @@ class XmlSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'XmlReadSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -36355,12 +37894,15 @@ class ZohoSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). 
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -36376,8 +37918,9 @@ class ZohoSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py index f6ebc8328ae..7d50b0a1515 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py @@ -378,7 +378,7 @@ class LinkedService(msrest.serialization.Model): """The Azure Data Factory nested object which contains the information and credential which can be used to connect with related store or compute resource. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmazonMwsLinkedService, AmazonRedshiftLinkedService, AmazonS3LinkedService, AmazonS3CompatibleLinkedService, AzureBatchLinkedService, AzureBlobFsLinkedService, AzureBlobStorageLinkedService, AzureDataExplorerLinkedService, AzureDataLakeAnalyticsLinkedService, AzureDataLakeStoreLinkedService, AzureDatabricksLinkedService, AzureDatabricksDeltaLakeLinkedService, AzureFileStorageLinkedService, AzureFunctionLinkedService, AzureKeyVaultLinkedService, AzureMlLinkedService, AzureMlServiceLinkedService, AzureMariaDbLinkedService, AzureMySqlLinkedService, AzurePostgreSqlLinkedService, AzureSearchLinkedService, AzureSqlDwLinkedService, AzureSqlDatabaseLinkedService, AzureSqlMiLinkedService, AzureStorageLinkedService, AzureTableStorageLinkedService, CassandraLinkedService, CommonDataServiceForAppsLinkedService, ConcurLinkedService, CosmosDbLinkedService, CosmosDbMongoDbApiLinkedService, CouchbaseLinkedService, CustomDataSourceLinkedService, Db2LinkedService, DrillLinkedService, DynamicsLinkedService, DynamicsAxLinkedService, DynamicsCrmLinkedService, EloquaLinkedService, FileServerLinkedService, FtpServerLinkedService, GoogleAdWordsLinkedService, GoogleBigQueryLinkedService, GoogleCloudStorageLinkedService, GreenplumLinkedService, HBaseLinkedService, HdInsightLinkedService, HdInsightOnDemandLinkedService, HdfsLinkedService, HiveLinkedService, HttpLinkedService, HubspotLinkedService, ImpalaLinkedService, InformixLinkedService, JiraLinkedService, MagentoLinkedService, MariaDbLinkedService, MarketoLinkedService, MicrosoftAccessLinkedService, MongoDbLinkedService, MongoDbAtlasLinkedService, MongoDbV2LinkedService, MySqlLinkedService, NetezzaLinkedService, ODataLinkedService, OdbcLinkedService, Office365LinkedService, OracleLinkedService, OracleCloudStorageLinkedService, OracleServiceCloudLinkedService, PaypalLinkedService, PhoenixLinkedService, 
PostgreSqlLinkedService, PrestoLinkedService, QuickBooksLinkedService, ResponsysLinkedService, RestServiceLinkedService, SalesforceLinkedService, SalesforceMarketingCloudLinkedService, SalesforceServiceCloudLinkedService, SapBwLinkedService, SapCloudForCustomerLinkedService, SapEccLinkedService, SapHanaLinkedService, SapOpenHubLinkedService, SapTableLinkedService, ServiceNowLinkedService, SftpServerLinkedService, SharePointOnlineListLinkedService, ShopifyLinkedService, SnowflakeLinkedService, SparkLinkedService, SqlServerLinkedService, SquareLinkedService, SybaseLinkedService, TeradataLinkedService, VerticaLinkedService, WebLinkedService, XeroLinkedService, ZohoLinkedService. + sub-classes are: AmazonMwsLinkedService, AmazonRdsForOracleLinkedService, AmazonRedshiftLinkedService, AmazonS3LinkedService, AmazonS3CompatibleLinkedService, AzureBatchLinkedService, AzureBlobFsLinkedService, AzureBlobStorageLinkedService, AzureDataExplorerLinkedService, AzureDataLakeAnalyticsLinkedService, AzureDataLakeStoreLinkedService, AzureDatabricksLinkedService, AzureDatabricksDeltaLakeLinkedService, AzureFileStorageLinkedService, AzureFunctionLinkedService, AzureKeyVaultLinkedService, AzureMlLinkedService, AzureMlServiceLinkedService, AzureMariaDbLinkedService, AzureMySqlLinkedService, AzurePostgreSqlLinkedService, AzureSearchLinkedService, AzureSqlDwLinkedService, AzureSqlDatabaseLinkedService, AzureSqlMiLinkedService, AzureStorageLinkedService, AzureTableStorageLinkedService, CassandraLinkedService, CommonDataServiceForAppsLinkedService, ConcurLinkedService, CosmosDbLinkedService, CosmosDbMongoDbApiLinkedService, CouchbaseLinkedService, CustomDataSourceLinkedService, Db2LinkedService, DrillLinkedService, DynamicsLinkedService, DynamicsAxLinkedService, DynamicsCrmLinkedService, EloquaLinkedService, FileServerLinkedService, FtpServerLinkedService, GoogleAdWordsLinkedService, GoogleBigQueryLinkedService, GoogleCloudStorageLinkedService, GreenplumLinkedService, HBaseLinkedService, HdInsightLinkedService, HdInsightOnDemandLinkedService, HdfsLinkedService, HiveLinkedService, HttpLinkedService, HubspotLinkedService, ImpalaLinkedService, InformixLinkedService, JiraLinkedService, MagentoLinkedService, MariaDbLinkedService, MarketoLinkedService, MicrosoftAccessLinkedService, MongoDbLinkedService, MongoDbAtlasLinkedService, MongoDbV2LinkedService, MySqlLinkedService, NetezzaLinkedService, ODataLinkedService, OdbcLinkedService, Office365LinkedService, OracleLinkedService, OracleCloudStorageLinkedService, OracleServiceCloudLinkedService, PaypalLinkedService, PhoenixLinkedService, PostgreSqlLinkedService, PrestoLinkedService, QuickBooksLinkedService, ResponsysLinkedService, RestServiceLinkedService, SalesforceLinkedService, SalesforceMarketingCloudLinkedService, SalesforceServiceCloudLinkedService, SapBwLinkedService, SapCloudForCustomerLinkedService, SapEccLinkedService, SapHanaLinkedService, SapOpenHubLinkedService, SapTableLinkedService, ServiceNowLinkedService, SftpServerLinkedService, SharePointOnlineListLinkedService, ShopifyLinkedService, SnowflakeLinkedService, SparkLinkedService, SqlServerLinkedService, SquareLinkedService, SybaseLinkedService, TeradataLinkedService, VerticaLinkedService, WebLinkedService, XeroLinkedService, ZohoLinkedService. All required parameters must be populated in order to send to Azure. 
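
Editorial note on the SqlSink hunks above: together with the new `disableMetricsCollection` flag, they add an upsert surface to the SQL sink model (`writeBehavior`, `sqlWriterUseTableLock`, `upsertSettings`). A minimal sketch of how the new fields compose, assuming the vendored models are importable from `azext_datafactory.vendored_sdks.datafactory` and that the service accepts "upsert" as a write-behavior value (both are assumptions, not confirmed by this diff):

from azext_datafactory.vendored_sdks.datafactory import models  # assumed import path

# Build a sink that upserts on the Id column; all three keyword arguments
# below are the new additions from the SqlSink hunks above.
sink = models.SqlSink(
    write_behavior="upsert",              # assumed service-side value
    sql_writer_use_table_lock=True,       # take a table lock during the write
    upsert_settings=models.SqlUpsertSettings(
        use_temp_db=True,                 # stage the interim table in temp db
        keys=["Id"],                      # key columns identifying a unique row
    ),
)

# msrest serializes through the attribute maps shown above, producing the REST
# shape: {'type': 'SqlSink', 'writeBehavior': 'upsert',
#         'sqlWriterUseTableLock': True,
#         'upsertSettings': {'useTempDB': True, 'keys': ['Id']}}
body = sink.serialize()
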
@@ -411,7 +411,7 @@ class LinkedService(msrest.serialization.Model): } _subtype_map = { - 'type': {'AmazonMWS': 'AmazonMwsLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'AmazonS3Compatible': 'AmazonS3CompatibleLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureBlobFS': 'AzureBlobFsLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDatabricksDeltaLake': 'AzureDatabricksDeltaLakeLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'AzureFunction': 'AzureFunctionLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureML': 'AzureMlLinkedService', 'AzureMLService': 'AzureMlServiceLinkedService', 'AzureMariaDB': 'AzureMariaDbLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'AzureSqlDW': 'AzureSqlDwLinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'AzureSqlMI': 'AzureSqlMiLinkedService', 'AzureStorage': 'AzureStorageLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'Cassandra': 'CassandraLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'Concur': 'ConcurLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'Db2': 'Db2LinkedService', 'Drill': 'DrillLinkedService', 'Dynamics': 'DynamicsLinkedService', 'DynamicsAX': 'DynamicsAxLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Eloqua': 'EloquaLinkedService', 'FileServer': 'FileServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'Greenplum': 'GreenplumLinkedService', 'HBase': 'HBaseLinkedService', 'HDInsight': 'HdInsightLinkedService', 'HDInsightOnDemand': 'HdInsightOnDemandLinkedService', 'Hdfs': 'HdfsLinkedService', 'Hive': 'HiveLinkedService', 'HttpServer': 'HttpLinkedService', 'Hubspot': 'HubspotLinkedService', 'Impala': 'ImpalaLinkedService', 'Informix': 'InformixLinkedService', 'Jira': 'JiraLinkedService', 'Magento': 'MagentoLinkedService', 'MariaDB': 'MariaDbLinkedService', 'Marketo': 'MarketoLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'MongoDb': 'MongoDbLinkedService', 'MongoDbAtlas': 'MongoDbAtlasLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MySql': 'MySqlLinkedService', 'Netezza': 'NetezzaLinkedService', 'OData': 'ODataLinkedService', 'Odbc': 'OdbcLinkedService', 'Office365': 'Office365LinkedService', 'Oracle': 'OracleLinkedService', 'OracleCloudStorage': 'OracleCloudStorageLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'Paypal': 'PaypalLinkedService', 'Phoenix': 'PhoenixLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'Presto': 'PrestoLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Responsys': 'ResponsysLinkedService', 'RestService': 'RestServiceLinkedService', 'Salesforce': 'SalesforceLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 
'SapBW': 'SapBwLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapTable': 'SapTableLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'Sftp': 'SftpServerLinkedService', 'SharePointOnlineList': 'SharePointOnlineListLinkedService', 'Shopify': 'ShopifyLinkedService', 'Snowflake': 'SnowflakeLinkedService', 'Spark': 'SparkLinkedService', 'SqlServer': 'SqlServerLinkedService', 'Square': 'SquareLinkedService', 'Sybase': 'SybaseLinkedService', 'Teradata': 'TeradataLinkedService', 'Vertica': 'VerticaLinkedService', 'Web': 'WebLinkedService', 'Xero': 'XeroLinkedService', 'Zoho': 'ZohoLinkedService'} + 'type': {'AmazonMWS': 'AmazonMwsLinkedService', 'AmazonRdsForOracle': 'AmazonRdsForOracleLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'AmazonS3Compatible': 'AmazonS3CompatibleLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureBlobFS': 'AzureBlobFsLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDatabricksDeltaLake': 'AzureDatabricksDeltaLakeLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'AzureFunction': 'AzureFunctionLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureML': 'AzureMlLinkedService', 'AzureMLService': 'AzureMlServiceLinkedService', 'AzureMariaDB': 'AzureMariaDbLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'AzureSqlDW': 'AzureSqlDwLinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'AzureSqlMI': 'AzureSqlMiLinkedService', 'AzureStorage': 'AzureStorageLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'Cassandra': 'CassandraLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'Concur': 'ConcurLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'Db2': 'Db2LinkedService', 'Drill': 'DrillLinkedService', 'Dynamics': 'DynamicsLinkedService', 'DynamicsAX': 'DynamicsAxLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Eloqua': 'EloquaLinkedService', 'FileServer': 'FileServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'Greenplum': 'GreenplumLinkedService', 'HBase': 'HBaseLinkedService', 'HDInsight': 'HdInsightLinkedService', 'HDInsightOnDemand': 'HdInsightOnDemandLinkedService', 'Hdfs': 'HdfsLinkedService', 'Hive': 'HiveLinkedService', 'HttpServer': 'HttpLinkedService', 'Hubspot': 'HubspotLinkedService', 'Impala': 'ImpalaLinkedService', 'Informix': 'InformixLinkedService', 'Jira': 'JiraLinkedService', 'Magento': 'MagentoLinkedService', 'MariaDB': 'MariaDbLinkedService', 'Marketo': 'MarketoLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'MongoDb': 'MongoDbLinkedService', 'MongoDbAtlas': 'MongoDbAtlasLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MySql': 'MySqlLinkedService', 'Netezza': 
'NetezzaLinkedService', 'OData': 'ODataLinkedService', 'Odbc': 'OdbcLinkedService', 'Office365': 'Office365LinkedService', 'Oracle': 'OracleLinkedService', 'OracleCloudStorage': 'OracleCloudStorageLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'Paypal': 'PaypalLinkedService', 'Phoenix': 'PhoenixLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'Presto': 'PrestoLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Responsys': 'ResponsysLinkedService', 'RestService': 'RestServiceLinkedService', 'Salesforce': 'SalesforceLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'SapBW': 'SapBwLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapTable': 'SapTableLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'Sftp': 'SftpServerLinkedService', 'SharePointOnlineList': 'SharePointOnlineListLinkedService', 'Shopify': 'ShopifyLinkedService', 'Snowflake': 'SnowflakeLinkedService', 'Spark': 'SparkLinkedService', 'SqlServer': 'SqlServerLinkedService', 'Square': 'SquareLinkedService', 'Sybase': 'SybaseLinkedService', 'Teradata': 'TeradataLinkedService', 'Vertica': 'VerticaLinkedService', 'Web': 'WebLinkedService', 'Xero': 'XeroLinkedService', 'Zoho': 'ZohoLinkedService'} } def __init__( @@ -547,7 +547,7 @@ class Dataset(msrest.serialization.Model): """The Azure Data Factory nested object which identifies data within different data stores, such as tables, files, folders, and documents. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmazonMwsObjectDataset, AmazonRedshiftTableDataset, AmazonS3Dataset, AvroDataset, AzureBlobDataset, AzureBlobFsDataset, AzureDataExplorerTableDataset, AzureDataLakeStoreDataset, AzureDatabricksDeltaLakeDataset, AzureMariaDbTableDataset, AzureMySqlTableDataset, AzurePostgreSqlTableDataset, AzureSearchIndexDataset, AzureSqlDwTableDataset, AzureSqlMiTableDataset, AzureSqlTableDataset, AzureTableDataset, BinaryDataset, CassandraTableDataset, CommonDataServiceForAppsEntityDataset, ConcurObjectDataset, CosmosDbMongoDbApiCollectionDataset, CosmosDbSqlApiCollectionDataset, CouchbaseTableDataset, CustomDataset, Db2TableDataset, DelimitedTextDataset, DocumentDbCollectionDataset, DrillTableDataset, DynamicsAxResourceDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, EloquaObjectDataset, ExcelDataset, FileShareDataset, GoogleAdWordsObjectDataset, GoogleBigQueryObjectDataset, GreenplumTableDataset, HBaseObjectDataset, HiveObjectDataset, HttpDataset, HubspotObjectDataset, ImpalaObjectDataset, InformixTableDataset, JiraObjectDataset, JsonDataset, MagentoObjectDataset, MariaDbTableDataset, MarketoObjectDataset, MicrosoftAccessTableDataset, MongoDbAtlasCollectionDataset, MongoDbCollectionDataset, MongoDbV2CollectionDataset, MySqlTableDataset, NetezzaTableDataset, ODataResourceDataset, OdbcTableDataset, Office365Dataset, OracleServiceCloudObjectDataset, OracleTableDataset, OrcDataset, ParquetDataset, PaypalObjectDataset, PhoenixObjectDataset, PostgreSqlTableDataset, PrestoObjectDataset, QuickBooksObjectDataset, RelationalTableDataset, ResponsysObjectDataset, RestResourceDataset, SalesforceMarketingCloudObjectDataset, SalesforceObjectDataset, SalesforceServiceCloudObjectDataset, SapBwCubeDataset, SapCloudForCustomerResourceDataset, SapEccResourceDataset, 
SapHanaTableDataset, SapOpenHubTableDataset, SapTableResourceDataset, ServiceNowObjectDataset, SharePointOnlineListResourceDataset, ShopifyObjectDataset, SnowflakeDataset, SparkObjectDataset, SqlServerTableDataset, SquareObjectDataset, SybaseTableDataset, TeradataTableDataset, VerticaTableDataset, WebTableDataset, XeroObjectDataset, XmlDataset, ZohoObjectDataset. + sub-classes are: AmazonMwsObjectDataset, AmazonRdsForOracleTableDataset, AmazonRedshiftTableDataset, AmazonS3Dataset, AvroDataset, AzureBlobDataset, AzureBlobFsDataset, AzureDataExplorerTableDataset, AzureDataLakeStoreDataset, AzureDatabricksDeltaLakeDataset, AzureMariaDbTableDataset, AzureMySqlTableDataset, AzurePostgreSqlTableDataset, AzureSearchIndexDataset, AzureSqlDwTableDataset, AzureSqlMiTableDataset, AzureSqlTableDataset, AzureTableDataset, BinaryDataset, CassandraTableDataset, CommonDataServiceForAppsEntityDataset, ConcurObjectDataset, CosmosDbMongoDbApiCollectionDataset, CosmosDbSqlApiCollectionDataset, CouchbaseTableDataset, CustomDataset, Db2TableDataset, DelimitedTextDataset, DocumentDbCollectionDataset, DrillTableDataset, DynamicsAxResourceDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, EloquaObjectDataset, ExcelDataset, FileShareDataset, GoogleAdWordsObjectDataset, GoogleBigQueryObjectDataset, GreenplumTableDataset, HBaseObjectDataset, HiveObjectDataset, HttpDataset, HubspotObjectDataset, ImpalaObjectDataset, InformixTableDataset, JiraObjectDataset, JsonDataset, MagentoObjectDataset, MariaDbTableDataset, MarketoObjectDataset, MicrosoftAccessTableDataset, MongoDbAtlasCollectionDataset, MongoDbCollectionDataset, MongoDbV2CollectionDataset, MySqlTableDataset, NetezzaTableDataset, ODataResourceDataset, OdbcTableDataset, Office365Dataset, OracleServiceCloudObjectDataset, OracleTableDataset, OrcDataset, ParquetDataset, PaypalObjectDataset, PhoenixObjectDataset, PostgreSqlTableDataset, PrestoObjectDataset, QuickBooksObjectDataset, RelationalTableDataset, ResponsysObjectDataset, RestResourceDataset, SalesforceMarketingCloudObjectDataset, SalesforceObjectDataset, SalesforceServiceCloudObjectDataset, SapBwCubeDataset, SapCloudForCustomerResourceDataset, SapEccResourceDataset, SapHanaTableDataset, SapOpenHubTableDataset, SapTableResourceDataset, ServiceNowObjectDataset, SharePointOnlineListResourceDataset, ShopifyObjectDataset, SnowflakeDataset, SparkObjectDataset, SqlServerTableDataset, SquareObjectDataset, SybaseTableDataset, TeradataTableDataset, VerticaTableDataset, WebTableDataset, XeroObjectDataset, XmlDataset, ZohoObjectDataset. All required parameters must be populated in order to send to Azure. 
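
Editorial note on the two discriminator hunks above: adding the `AmazonRdsForOracle` and `AmazonRdsForOracleTable` entries to the `_subtype_map` dictionaries is what lets msrest's polymorphic deserialization resolve raw payloads to the new classes. A hedged sketch under the same import-path assumption as the previous example (the connection string and reference name below are placeholders, not values taken from this diff):

from azext_datafactory.vendored_sdks.datafactory import models  # assumed import path

# Deserializing through the abstract base consults _subtype_map on the
# 'type' discriminator and instantiates the matching subclass.
ls = models.LinkedService.deserialize({
    "type": "AmazonRdsForOracle",
    "typeProperties": {"connectionString": "Host=example;Port=1521"},  # placeholder
})
ds = models.Dataset.deserialize({
    "type": "AmazonRdsForOracleTable",
    "linkedServiceName": {"referenceName": "exampleLS",  # placeholder reference
                          "type": "LinkedServiceReference"},
})

assert isinstance(ls, models.AmazonRdsForOracleLinkedService)
assert isinstance(ds, models.AmazonRdsForOracleTableDataset)
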
@@ -593,7 +593,7 @@ class Dataset(msrest.serialization.Model): } _subtype_map = { - 'type': {'AmazonMWSObject': 'AmazonMwsObjectDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AmazonS3Object': 'AmazonS3Dataset', 'Avro': 'AvroDataset', 'AzureBlob': 'AzureBlobDataset', 'AzureBlobFSFile': 'AzureBlobFsDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'AzureDatabricksDeltaLakeDataset': 'AzureDatabricksDeltaLakeDataset', 'AzureMariaDBTable': 'AzureMariaDbTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'AzureSqlDWTable': 'AzureSqlDwTableDataset', 'AzureSqlMITable': 'AzureSqlMiTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'Binary': 'BinaryDataset', 'CassandraTable': 'CassandraTableDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'ConcurObject': 'ConcurObjectDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDbSqlApiCollectionDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'CustomDataset': 'CustomDataset', 'Db2Table': 'Db2TableDataset', 'DelimitedText': 'DelimitedTextDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'DrillTable': 'DrillTableDataset', 'DynamicsAXResource': 'DynamicsAxResourceDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'EloquaObject': 'EloquaObjectDataset', 'Excel': 'ExcelDataset', 'FileShare': 'FileShareDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'HBaseObject': 'HBaseObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HttpFile': 'HttpDataset', 'HubspotObject': 'HubspotObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'InformixTable': 'InformixTableDataset', 'JiraObject': 'JiraObjectDataset', 'Json': 'JsonDataset', 'MagentoObject': 'MagentoObjectDataset', 'MariaDBTable': 'MariaDbTableDataset', 'MarketoObject': 'MarketoObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'MongoDbAtlasCollection': 'MongoDbAtlasCollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MySqlTable': 'MySqlTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ODataResource': 'ODataResourceDataset', 'OdbcTable': 'OdbcTableDataset', 'Office365Table': 'Office365Dataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'OracleTable': 'OracleTableDataset', 'Orc': 'OrcDataset', 'Parquet': 'ParquetDataset', 'PaypalObject': 'PaypalObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'PrestoObject': 'PrestoObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'RestResource': 'RestResourceDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SapBwCube': 'SapBwCubeDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapTableResource': 
'SapTableResourceDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'SharePointOnlineListResource': 'SharePointOnlineListResourceDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'SnowflakeTable': 'SnowflakeDataset', 'SparkObject': 'SparkObjectDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SquareObject': 'SquareObjectDataset', 'SybaseTable': 'SybaseTableDataset', 'TeradataTable': 'TeradataTableDataset', 'VerticaTable': 'VerticaTableDataset', 'WebTable': 'WebTableDataset', 'XeroObject': 'XeroObjectDataset', 'Xml': 'XmlDataset', 'ZohoObject': 'ZohoObjectDataset'} + 'type': {'AmazonMWSObject': 'AmazonMwsObjectDataset', 'AmazonRdsForOracleTable': 'AmazonRdsForOracleTableDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AmazonS3Object': 'AmazonS3Dataset', 'Avro': 'AvroDataset', 'AzureBlob': 'AzureBlobDataset', 'AzureBlobFSFile': 'AzureBlobFsDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'AzureDatabricksDeltaLakeDataset': 'AzureDatabricksDeltaLakeDataset', 'AzureMariaDBTable': 'AzureMariaDbTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'AzureSqlDWTable': 'AzureSqlDwTableDataset', 'AzureSqlMITable': 'AzureSqlMiTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'Binary': 'BinaryDataset', 'CassandraTable': 'CassandraTableDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'ConcurObject': 'ConcurObjectDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDbSqlApiCollectionDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'CustomDataset': 'CustomDataset', 'Db2Table': 'Db2TableDataset', 'DelimitedText': 'DelimitedTextDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'DrillTable': 'DrillTableDataset', 'DynamicsAXResource': 'DynamicsAxResourceDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'EloquaObject': 'EloquaObjectDataset', 'Excel': 'ExcelDataset', 'FileShare': 'FileShareDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'HBaseObject': 'HBaseObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HttpFile': 'HttpDataset', 'HubspotObject': 'HubspotObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'InformixTable': 'InformixTableDataset', 'JiraObject': 'JiraObjectDataset', 'Json': 'JsonDataset', 'MagentoObject': 'MagentoObjectDataset', 'MariaDBTable': 'MariaDbTableDataset', 'MarketoObject': 'MarketoObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'MongoDbAtlasCollection': 'MongoDbAtlasCollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MySqlTable': 'MySqlTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ODataResource': 'ODataResourceDataset', 'OdbcTable': 'OdbcTableDataset', 'Office365Table': 'Office365Dataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'OracleTable': 'OracleTableDataset', 'Orc': 'OrcDataset', 'Parquet': 'ParquetDataset', 'PaypalObject': 'PaypalObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'PrestoObject': 'PrestoObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 
'RelationalTable': 'RelationalTableDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'RestResource': 'RestResourceDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SapBwCube': 'SapBwCubeDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'SharePointOnlineListResource': 'SharePointOnlineListResourceDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'SnowflakeTable': 'SnowflakeDataset', 'SparkObject': 'SparkObjectDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SquareObject': 'SquareObjectDataset', 'SybaseTable': 'SybaseTableDataset', 'TeradataTable': 'TeradataTableDataset', 'VerticaTable': 'VerticaTableDataset', 'WebTable': 'WebTableDataset', 'XeroObject': 'XeroObjectDataset', 'Xml': 'XmlDataset', 'ZohoObject': 'ZohoObjectDataset'} } def __init__( @@ -693,7 +693,7 @@ class CopySource(msrest.serialization.Model): """A copy activity source. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroSource, AzureBlobFsSource, AzureDataExplorerSource, AzureDataLakeStoreSource, AzureDatabricksDeltaLakeSource, BinarySource, BlobSource, CommonDataServiceForAppsSource, CosmosDbMongoDbApiSource, CosmosDbSqlApiSource, DelimitedTextSource, DocumentDbCollectionSource, DynamicsCrmSource, DynamicsSource, ExcelSource, FileSystemSource, HdfsSource, HttpSource, JsonSource, MicrosoftAccessSource, MongoDbAtlasSource, MongoDbSource, MongoDbV2Source, ODataSource, Office365Source, OracleSource, OrcSource, ParquetSource, RelationalSource, RestSource, SalesforceServiceCloudSource, SharePointOnlineListSource, SnowflakeSource, TabularSource, WebSource, XmlSource. + sub-classes are: AmazonRdsForOracleSource, AvroSource, AzureBlobFsSource, AzureDataExplorerSource, AzureDataLakeStoreSource, AzureDatabricksDeltaLakeSource, BinarySource, BlobSource, CommonDataServiceForAppsSource, CosmosDbMongoDbApiSource, CosmosDbSqlApiSource, DelimitedTextSource, DocumentDbCollectionSource, DynamicsCrmSource, DynamicsSource, ExcelSource, FileSystemSource, HdfsSource, HttpSource, JsonSource, MicrosoftAccessSource, MongoDbAtlasSource, MongoDbSource, MongoDbV2Source, ODataSource, Office365Source, OracleSource, OrcSource, ParquetSource, RelationalSource, RestSource, SalesforceServiceCloudSource, SharePointOnlineListSource, SnowflakeSource, TabularSource, WebSource, XmlSource. All required parameters must be populated in order to send to Azure. @@ -711,6 +711,9 @@ class CopySource(msrest.serialization.Model): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :type disable_metrics_collection: object """ _validation = { @@ -723,10 +726,11 @@ class CopySource(msrest.serialization.Model): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, } _subtype_map = { - 'type': {'AvroSource': 'AvroSource', 'AzureBlobFSSource': 'AzureBlobFsSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'AzureDatabricksDeltaLakeSource': 'AzureDatabricksDeltaLakeSource', 'BinarySource': 'BinarySource', 'BlobSource': 'BlobSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'CosmosDbSqlApiSource': 'CosmosDbSqlApiSource', 'DelimitedTextSource': 'DelimitedTextSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'ExcelSource': 'ExcelSource', 'FileSystemSource': 'FileSystemSource', 'HdfsSource': 'HdfsSource', 'HttpSource': 'HttpSource', 'JsonSource': 'JsonSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'MongoDbAtlasSource': 'MongoDbAtlasSource', 'MongoDbSource': 'MongoDbSource', 'MongoDbV2Source': 'MongoDbV2Source', 'ODataSource': 'ODataSource', 'Office365Source': 'Office365Source', 'OracleSource': 'OracleSource', 'OrcSource': 'OrcSource', 'ParquetSource': 'ParquetSource', 'RelationalSource': 'RelationalSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SharePointOnlineListSource': 'SharePointOnlineListSource', 'SnowflakeSource': 'SnowflakeSource', 'TabularSource': 'TabularSource', 'WebSource': 'WebSource', 'XmlSource': 'XmlSource'} + 'type': {'AmazonRdsForOracleSource': 'AmazonRdsForOracleSource', 'AvroSource': 'AvroSource', 'AzureBlobFSSource': 'AzureBlobFsSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'AzureDatabricksDeltaLakeSource': 'AzureDatabricksDeltaLakeSource', 'BinarySource': 'BinarySource', 'BlobSource': 'BlobSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'CosmosDbSqlApiSource': 'CosmosDbSqlApiSource', 'DelimitedTextSource': 'DelimitedTextSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'ExcelSource': 'ExcelSource', 'FileSystemSource': 'FileSystemSource', 'HdfsSource': 'HdfsSource', 'HttpSource': 'HttpSource', 'JsonSource': 'JsonSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'MongoDbAtlasSource': 'MongoDbAtlasSource', 'MongoDbSource': 'MongoDbSource', 'MongoDbV2Source': 'MongoDbV2Source', 'ODataSource': 'ODataSource', 'Office365Source': 'Office365Source', 'OracleSource': 'OracleSource', 'OrcSource': 'OrcSource', 'ParquetSource': 'ParquetSource', 'RelationalSource': 'RelationalSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SharePointOnlineListSource': 'SharePointOnlineListSource', 'SnowflakeSource': 'SnowflakeSource', 'TabularSource': 'TabularSource', 'WebSource': 'WebSource', 'XmlSource': 'XmlSource'} } def __init__( @@ -736,6 +740,7 @@ def __init__( source_retry_count: Optional[object] = None, 
source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, **kwargs ): super(CopySource, self).__init__(**kwargs) @@ -744,6 +749,7 @@ def __init__( self.source_retry_count = source_retry_count self.source_retry_wait = source_retry_wait self.max_concurrent_connections = max_concurrent_connections + self.disable_metrics_collection = disable_metrics_collection class TabularSource(CopySource): @@ -768,12 +774,15 @@ class TabularSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -786,8 +795,9 @@ class TabularSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } _subtype_map = { @@ -801,11 +811,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(TabularSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(TabularSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'TabularSource' # type: str self.query_timeout = query_timeout self.additional_columns = additional_columns @@ -830,12 +841,15 @@ class AmazonMwsSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -851,8 +865,9 @@ class AmazonMwsSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -863,16 +878,281 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(AmazonMwsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(AmazonMwsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AmazonMWSSource' # type: str self.query = query +class AmazonRdsForOracleLinkedService(LinkedService): + """AmazonRdsForOracle database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: Required. The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. 
+ :type connection_string: object + :param password: The Azure key vault secret reference of password in connection string. + :type password: ~data_factory_management_client.models.SecretBase + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + connection_string: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + password: Optional["SecretBase"] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(AmazonRdsForOracleLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AmazonRdsForOracle' # type: str + self.connection_string = connection_string + self.password = password + self.encrypted_credential = encrypted_credential + + +class AmazonRdsForOraclePartitionSettings(msrest.serialization.Model): + """The settings that will be leveraged for AmazonRdsForOracle source partitioning. + + :param partition_names: Names of the physical partitions of AmazonRdsForOracle table. + :type partition_names: object + :param partition_column_name: The name of the column in integer type that will be used for + proceeding range partitioning. Type: string (or Expression with resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in partitionColumnName that + will be used for proceeding range partitioning. Type: string (or Expression with resultType + string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in partitionColumnName that + will be used for proceeding range partitioning. Type: string (or Expression with resultType + string). 
+ :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_names': {'key': 'partitionNames', 'type': 'object'}, + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__( + self, + *, + partition_names: Optional[object] = None, + partition_column_name: Optional[object] = None, + partition_upper_bound: Optional[object] = None, + partition_lower_bound: Optional[object] = None, + **kwargs + ): + super(AmazonRdsForOraclePartitionSettings, self).__init__(**kwargs) + self.partition_names = partition_names + self.partition_column_name = partition_column_name + self.partition_upper_bound = partition_upper_bound + self.partition_lower_bound = partition_lower_bound + + +class AmazonRdsForOracleSource(CopySource): + """A copy activity AmazonRdsForOracle source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object + :param oracle_reader_query: AmazonRdsForOracle reader query. Type: string (or Expression with + resultType string). + :type oracle_reader_query: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param partition_option: The partition mechanism that will be used for AmazonRdsForOracle read + in parallel. Type: string (or Expression with resultType string). + :type partition_option: object + :param partition_settings: The settings that will be leveraged for AmazonRdsForOracle source + partitioning. + :type partition_settings: + ~data_factory_management_client.models.AmazonRdsForOraclePartitionSettings + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :type additional_columns: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, + 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'AmazonRdsForOraclePartitionSettings'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, + oracle_reader_query: Optional[object] = None, + query_timeout: Optional[object] = None, + partition_option: Optional[object] = None, + partition_settings: Optional["AmazonRdsForOraclePartitionSettings"] = None, + additional_columns: Optional[object] = None, + **kwargs + ): + super(AmazonRdsForOracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) + self.type = 'AmazonRdsForOracleSource' # type: str + self.oracle_reader_query = oracle_reader_query + self.query_timeout = query_timeout + self.partition_option = partition_option + self.partition_settings = partition_settings + self.additional_columns = additional_columns + + +class AmazonRdsForOracleTableDataset(Dataset): + """The AmazonRdsForOracle database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. 
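# Example (sketch): wiring the new partition model into a copy source for
# parallel reads. "DynamicRange" is one of the service-side partitionOption
# values (alongside "None" and "PhysicalPartitionsOfTable"); treat the literal
# as an assumption rather than a constant defined in this module.
from data_factory_management_client.models import (
    AmazonRdsForOraclePartitionSettings,
    AmazonRdsForOracleSource,
)

oracle_source = AmazonRdsForOracleSource(
    partition_option="DynamicRange",
    partition_settings=AmazonRdsForOraclePartitionSettings(
        partition_column_name="ORDER_ID",  # integer column per the docstring
        partition_lower_bound="1",
        partition_upper_bound="1000000",
    ),
)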
+ :type folder: ~data_factory_management_client.models.DatasetFolder + :param schema_type_properties_schema: The schema name of the AmazonRdsForOracle database. Type: + string (or Expression with resultType string). + :type schema_type_properties_schema: object + :param table: The table name of the AmazonRdsForOracle database. Type: string (or Expression + with resultType string). + :type table: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + schema_type_properties_schema: Optional[object] = None, + table: Optional[object] = None, + **kwargs + ): + super(AmazonRdsForOracleTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'AmazonRdsForOracleTable' # type: str + self.schema_type_properties_schema = schema_type_properties_schema + self.table = table + + class AmazonRedshiftLinkedService(LinkedService): """Linked service for Amazon Redshift. @@ -977,12 +1257,15 @@ class AmazonRedshiftSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: Database query. Type: string (or Expression with resultType string). 
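# Example (sketch): the dataset pairs a schema/table with a linked service
# reference. LinkedServiceReference is assumed to take a discriminator-style
# `type` plus `reference_name`, mirroring other reference models in this SDK.
from data_factory_management_client.models import (
    AmazonRdsForOracleTableDataset,
    LinkedServiceReference,
)

oracle_ds = AmazonRdsForOracleTableDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference", reference_name="exampleRdsForOracleLS"
    ),
    schema_type_properties_schema="SALES",  # maps to typeProperties.schema
    table="ORDERS",                         # maps to typeProperties.table
)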
:type query: object :param redshift_unload_settings: The Amazon S3 settings needed for the interim Amazon S3 when @@ -1001,8 +1284,9 @@ class AmazonRedshiftSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'}, } @@ -1014,13 +1298,14 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, redshift_unload_settings: Optional["RedshiftUnloadSettings"] = None, **kwargs ): - super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AmazonRedshiftSource' # type: str self.query = query self.redshift_unload_settings = redshift_unload_settings @@ -1307,6 +1592,9 @@ class StoreReadSettings(msrest.serialization.Model): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
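# Example (sketch): an Amazon Redshift source staging through interim S3. The
# RedshiftUnloadSettings signature (s3_linked_service_name, bucket_name) is an
# assumption based on its usage here; the class is not defined in this hunk.
from data_factory_management_client.models import (
    AmazonRedshiftSource,
    LinkedServiceReference,
    RedshiftUnloadSettings,
)

redshift_source = AmazonRedshiftSource(
    query="select * from public.events",
    redshift_unload_settings=RedshiftUnloadSettings(
        s3_linked_service_name=LinkedServiceReference(
            type="LinkedServiceReference", reference_name="exampleS3LS"
        ),
        bucket_name="interim-unload-bucket",
    ),
)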
+ :type disable_metrics_collection: object """ _validation = { @@ -1317,6 +1605,7 @@ class StoreReadSettings(msrest.serialization.Model): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, } _subtype_map = { @@ -1328,12 +1617,14 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, **kwargs ): super(StoreReadSettings, self).__init__(**kwargs) self.additional_properties = additional_properties self.type = 'StoreReadSettings' # type: str self.max_concurrent_connections = max_concurrent_connections + self.disable_metrics_collection = disable_metrics_collection class AmazonS3CompatibleReadSettings(StoreReadSettings): @@ -1349,6 +1640,9 @@ class AmazonS3CompatibleReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -1389,6 +1683,7 @@ class AmazonS3CompatibleReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -1406,6 +1701,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, @@ -1418,7 +1714,7 @@ def __init__( modified_datetime_end: Optional[object] = None, **kwargs ): - super(AmazonS3CompatibleReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AmazonS3CompatibleReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AmazonS3CompatibleReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -1692,6 +1988,9 @@ class AmazonS3ReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
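# Example (sketch): wildcard listing with a modified-time window, using only
# parameters visible in this hunk.
from data_factory_management_client.models import AmazonS3CompatibleReadSettings

s3_compatible_read = AmazonS3CompatibleReadSettings(
    recursive=True,
    wildcard_folder_path="raw/2021/*",
    wildcard_file_name="*.csv",
    modified_datetime_start="2021-09-01T00:00:00Z",
    modified_datetime_end="2021-09-30T00:00:00Z",
)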
+ :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -1732,6 +2031,7 @@ class AmazonS3ReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -1749,6 +2049,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, @@ -1761,7 +2062,7 @@ def __init__( modified_datetime_end: Optional[object] = None, **kwargs ): - super(AmazonS3ReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AmazonS3ReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AmazonS3ReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -1887,10 +2188,9 @@ class AvroDataset(Dataset): :type folder: ~data_factory_management_client.models.DatasetFolder :param location: The location of the avro storage. :type location: ~data_factory_management_client.models.DatasetLocation - :param avro_compression_codec: Possible values include: "none", "deflate", "snappy", "xz", - "bzip2". - :type avro_compression_codec: str or - ~data_factory_management_client.models.AvroCompressionCodec + :param avro_compression_codec: The data avroCompressionCodec. Type: string (or Expression with + resultType string). + :type avro_compression_codec: object :param avro_compression_level: :type avro_compression_level: int """ @@ -1912,7 +2212,7 @@ class AvroDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'str'}, + 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'object'}, 'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'}, } @@ -1928,7 +2228,7 @@ def __init__( annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, location: Optional["DatasetLocation"] = None, - avro_compression_codec: Optional[Union[str, "AvroCompressionCodec"]] = None, + avro_compression_codec: Optional[object] = None, avro_compression_level: Optional[int] = None, **kwargs ): @@ -2031,7 +2331,7 @@ class CopySink(msrest.serialization.Model): """A copy activity sink. You probably want to use the sub-classes and not this class directly. 
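# Example (sketch): avroCompressionCodec is no longer a closed enum string, so
# an ADF expression object can be supplied and resolved at runtime; the
# {"value": ..., "type": "Expression"} shape is the standard ADF expression
# envelope. LinkedServiceReference shape assumed as in the earlier sketches.
from data_factory_management_client.models import AvroDataset, LinkedServiceReference

avro_ds = AvroDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference", reference_name="exampleBlobLS"
    ),
    avro_compression_codec={"value": "@dataset().codec", "type": "Expression"},
    avro_compression_level=5,  # int, unchanged by this patch
)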
Known - sub-classes are: AvroSink, AzureBlobFsSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureDatabricksDeltaLakeSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink, AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink, DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SqlDwSink, SqlMiSink, SqlServerSink, SqlSink. + sub-classes are: AvroSink, AzureBlobFsSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureDatabricksDeltaLakeSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink, AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink, DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, MongoDbAtlasSink, MongoDbV2Sink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SqlDwSink, SqlMiSink, SqlServerSink, SqlSink. All required parameters must be populated in order to send to Azure. @@ -2055,6 +2355,9 @@ class CopySink(msrest.serialization.Model): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :type disable_metrics_collection: object """ _validation = { @@ -2069,10 +2372,11 @@ class CopySink(msrest.serialization.Model): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, } _subtype_map = { - 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFsSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSqlSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'RestSink': 'RestSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDwSink', 'SqlMISink': 'SqlMiSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink'} + 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFsSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSqlSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'MongoDbAtlasSink': 'MongoDbAtlasSink', 'MongoDbV2Sink': 'MongoDbV2Sink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'RestSink': 'RestSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDwSink', 'SqlMISink': 'SqlMiSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink'} } def __init__( @@ -2084,6 +2388,7 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + 
disable_metrics_collection: Optional[object] = None, **kwargs ): super(CopySink, self).__init__(**kwargs) @@ -2094,6 +2399,7 @@ def __init__( self.sink_retry_count = sink_retry_count self.sink_retry_wait = sink_retry_wait self.max_concurrent_connections = max_concurrent_connections + self.disable_metrics_collection = disable_metrics_collection class AvroSink(CopySink): @@ -2121,6 +2427,9 @@ class AvroSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Avro store settings. :type store_settings: ~data_factory_management_client.models.StoreWriteSettings :param format_settings: Avro format settings. @@ -2139,6 +2448,7 @@ class AvroSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'AvroWriteSettings'}, } @@ -2152,11 +2462,12 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, store_settings: Optional["StoreWriteSettings"] = None, format_settings: Optional["AvroWriteSettings"] = None, **kwargs ): - super(AvroSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AvroSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AvroSink' # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -2181,11 +2492,14 @@ class AvroSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Avro store settings. :type store_settings: ~data_factory_management_client.models.StoreReadSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
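# Example (sketch): a sink picks up the same new flag through CopySink. The
# AzureBlobStorageWriteSettings class (with block_size_in_mb) appears later in
# this same patch.
from data_factory_management_client.models import (
    AvroSink,
    AzureBlobStorageWriteSettings,
)

avro_sink = AvroSink(
    store_settings=AzureBlobStorageWriteSettings(block_size_in_mb=32),
    disable_metrics_collection=True,
)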
+ :type additional_columns: object """ _validation = { @@ -2198,8 +2512,9 @@ class AvroSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -2209,11 +2524,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, store_settings: Optional["StoreReadSettings"] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(AvroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AvroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AvroSource' # type: str self.store_settings = store_settings self.additional_columns = additional_columns @@ -2411,6 +2727,8 @@ class AzureBatchLinkedService(LinkedService): encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param credential: The credential reference containing authentication information. + :type credential: ~data_factory_management_client.models.CredentialReference """ _validation = { @@ -2434,6 +2752,7 @@ class AzureBatchLinkedService(LinkedService): 'pool_name': {'key': 'typeProperties.poolName', 'type': 'object'}, 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -2450,6 +2769,7 @@ def __init__( annotations: Optional[List[object]] = None, access_key: Optional["SecretBase"] = None, encrypted_credential: Optional[object] = None, + credential: Optional["CredentialReference"] = None, **kwargs ): super(AzureBatchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -2460,6 +2780,7 @@ def __init__( self.pool_name = pool_name self.linked_service_name = linked_service_name self.encrypted_credential = encrypted_credential + self.credential = credential class AzureBlobDataset(Dataset): @@ -2691,6 +3012,8 @@ class AzureBlobFsLinkedService(LinkedService): encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param credential: The credential reference containing authentication information. 
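# Example (sketch): reading avro files and stamping each row with its source
# path. "$$FILEPATH" is the documented ADF reserved token for additionalColumns.
from data_factory_management_client.models import AmazonS3ReadSettings, AvroSource

avro_source = AvroSource(
    store_settings=AmazonS3ReadSettings(recursive=True, wildcard_file_name="*.avro"),
    additional_columns=[{"name": "sourceFile", "value": "$$FILEPATH"}],
)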
+ :type credential: ~data_factory_management_client.models.CredentialReference """ _validation = { @@ -2712,6 +3035,7 @@ class AzureBlobFsLinkedService(LinkedService): 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -2729,6 +3053,7 @@ def __init__( tenant: Optional[object] = None, azure_cloud_type: Optional[object] = None, encrypted_credential: Optional[object] = None, + credential: Optional["CredentialReference"] = None, **kwargs ): super(AzureBlobFsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -2740,6 +3065,7 @@ def __init__( self.tenant = tenant self.azure_cloud_type = azure_cloud_type self.encrypted_credential = encrypted_credential + self.credential = credential class AzureBlobFsLocation(DatasetLocation): @@ -2802,6 +3128,9 @@ class AzureBlobFsReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -2839,6 +3168,7 @@ class AzureBlobFsReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -2855,6 +3185,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, @@ -2866,7 +3197,7 @@ def __init__( modified_datetime_end: Optional[object] = None, **kwargs ): - super(AzureBlobFsReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureBlobFsReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureBlobFSReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -2904,8 +3235,14 @@ class AzureBlobFsSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). 
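# Example (sketch): the new `credential` property points a linked service at a
# factory-level credential (e.g. a user-assigned managed identity).
# CredentialReference is assumed to follow the same (type, reference_name)
# shape as LinkedServiceReference; it is referenced but not defined in this hunk.
from data_factory_management_client.models import (
    AzureBlobFsLinkedService,
    CredentialReference,
)

blobfs_ls = AzureBlobFsLinkedService(
    url="https://exampleaccount.dfs.core.windows.net",
    credential=CredentialReference(
        type="CredentialReference", reference_name="exampleUserAssignedIdentity"
    ),
)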
:type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object + :param metadata: Specify the custom metadata to be added to sink data. Type: array of objects + (or Expression with resultType array of objects). + :type metadata: list[~data_factory_management_client.models.MetadataItem] """ _validation = { @@ -2920,7 +3257,9 @@ class AzureBlobFsSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'metadata': {'key': 'metadata', 'type': '[MetadataItem]'}, } def __init__( @@ -2932,12 +3271,15 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, copy_behavior: Optional[object] = None, + metadata: Optional[List["MetadataItem"]] = None, **kwargs ): - super(AzureBlobFsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureBlobFsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureBlobFSSink' # type: str self.copy_behavior = copy_behavior + self.metadata = metadata class AzureBlobFsSource(CopySource): @@ -2959,6 +3301,9 @@ class AzureBlobFsSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType boolean). 
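# Example (sketch): attaching custom metadata to files written by the sink.
# MetadataItem is assumed to take `name`/`value` keywords, matching its
# serialized form; the class itself sits outside this hunk.
from data_factory_management_client.models import AzureBlobFsSink, MetadataItem

blobfs_sink = AzureBlobFsSink(
    copy_behavior="PreserveHierarchy",
    metadata=[MetadataItem(name="writtenBy", value="adf-copy-activity")],
)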
:type treat_empty_as_null: object @@ -2980,6 +3325,7 @@ class AzureBlobFsSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, @@ -2992,12 +3338,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, treat_empty_as_null: Optional[object] = None, skip_header_line_count: Optional[object] = None, recursive: Optional[object] = None, **kwargs ): - super(AzureBlobFsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureBlobFsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureBlobFSSource' # type: str self.treat_empty_as_null = treat_empty_as_null self.skip_header_line_count = skip_header_line_count @@ -3020,6 +3367,9 @@ class StoreWriteSettings(msrest.serialization.Model): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object """ @@ -3032,6 +3382,7 @@ class StoreWriteSettings(msrest.serialization.Model): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } @@ -3044,6 +3395,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, copy_behavior: Optional[object] = None, **kwargs ): @@ -3051,6 +3403,7 @@ def __init__( self.additional_properties = additional_properties self.type = 'StoreWriteSettings' # type: str self.max_concurrent_connections = max_concurrent_connections + self.disable_metrics_collection = disable_metrics_collection self.copy_behavior = copy_behavior @@ -3067,6 +3420,9 @@ class AzureBlobFsWriteSettings(StoreWriteSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. 
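# Example (sketch): a header-skipping read from ADLS Gen2, using only the
# parameters visible in this hunk.
from data_factory_management_client.models import AzureBlobFsSource

blobfs_source = AzureBlobFsSource(
    treat_empty_as_null=True,
    skip_header_line_count=1,
    recursive=True,
)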
Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object :param block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer @@ -3082,6 +3438,7 @@ class AzureBlobFsWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, } @@ -3091,11 +3448,12 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, copy_behavior: Optional[object] = None, block_size_in_mb: Optional[object] = None, **kwargs ): - super(AzureBlobFsWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + super(AzureBlobFsWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs) self.type = 'AzureBlobFSWriteSettings' # type: str self.block_size_in_mb = block_size_in_mb @@ -3153,6 +3511,8 @@ class AzureBlobStorageLinkedService(LinkedService): encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: str + :param credential: The credential reference containing authentication information. + :type credential: ~data_factory_management_client.models.CredentialReference """ _validation = { @@ -3177,6 +3537,7 @@ class AzureBlobStorageLinkedService(LinkedService): 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'account_kind': {'key': 'typeProperties.accountKind', 'type': 'str'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -3198,6 +3559,7 @@ def __init__( azure_cloud_type: Optional[object] = None, account_kind: Optional[str] = None, encrypted_credential: Optional[str] = None, + credential: Optional["CredentialReference"] = None, **kwargs ): super(AzureBlobStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -3213,6 +3575,7 @@ def __init__( self.azure_cloud_type = azure_cloud_type self.account_kind = account_kind self.encrypted_credential = encrypted_credential + self.credential = credential class AzureBlobStorageLocation(DatasetLocation): @@ -3275,6 +3638,9 @@ class AzureBlobStorageReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -3315,6 +3681,7 @@ class AzureBlobStorageReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -3332,6 +3699,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, @@ -3344,7 +3712,7 @@ def __init__( modified_datetime_end: Optional[object] = None, **kwargs ): - super(AzureBlobStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureBlobStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureBlobStorageReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -3371,6 +3739,9 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object :param block_size_in_mb: Indicates the block size(MB) when writing data to blob. 
Type: integer @@ -3386,6 +3757,7 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, } @@ -3395,11 +3767,12 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, copy_behavior: Optional[object] = None, block_size_in_mb: Optional[object] = None, **kwargs ): - super(AzureBlobStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + super(AzureBlobStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs) self.type = 'AzureBlobStorageWriteSettings' # type: str self.block_size_in_mb = block_size_in_mb @@ -3741,6 +4114,9 @@ class AzureDatabricksDeltaLakeSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). 
:type pre_copy_script: object @@ -3761,6 +4137,7 @@ class AzureDatabricksDeltaLakeSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'import_settings': {'key': 'importSettings', 'type': 'AzureDatabricksDeltaLakeImportCommand'}, } @@ -3774,11 +4151,12 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, pre_copy_script: Optional[object] = None, import_settings: Optional["AzureDatabricksDeltaLakeImportCommand"] = None, **kwargs ): - super(AzureDatabricksDeltaLakeSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureDatabricksDeltaLakeSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureDatabricksDeltaLakeSink' # type: str self.pre_copy_script = pre_copy_script self.import_settings = import_settings @@ -3803,6 +4181,9 @@ class AzureDatabricksDeltaLakeSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Azure Databricks Delta Lake Sql query. Type: string (or Expression with resultType string). 
:type query: object @@ -3821,6 +4202,7 @@ class AzureDatabricksDeltaLakeSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'export_settings': {'key': 'exportSettings', 'type': 'AzureDatabricksDeltaLakeExportCommand'}, } @@ -3832,11 +4214,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query: Optional[object] = None, export_settings: Optional["AzureDatabricksDeltaLakeExportCommand"] = None, **kwargs ): - super(AzureDatabricksDeltaLakeSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureDatabricksDeltaLakeSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureDatabricksDeltaLakeSource' # type: str self.query = query self.export_settings = export_settings @@ -3925,6 +4308,8 @@ class AzureDatabricksLinkedService(LinkedService): :param policy_id: The policy id for limiting the ability to configure clusters based on a user defined set of rules. Type: string (or Expression with resultType string). :type policy_id: object + :param credential: The credential reference containing authentication information. + :type credential: ~data_factory_management_client.models.CredentialReference """ _validation = { @@ -3957,6 +4342,7 @@ class AzureDatabricksLinkedService(LinkedService): 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, 'policy_id': {'key': 'typeProperties.policyId', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -3985,6 +4371,7 @@ def __init__( new_cluster_enable_elastic_disk: Optional[object] = None, encrypted_credential: Optional[object] = None, policy_id: Optional[object] = None, + credential: Optional["CredentialReference"] = None, **kwargs ): super(AzureDatabricksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -4007,6 +4394,7 @@ def __init__( self.new_cluster_enable_elastic_disk = new_cluster_enable_elastic_disk self.encrypted_credential = encrypted_credential self.policy_id = policy_id + self.credential = credential class ExecutionActivity(Activity): @@ -4177,6 +4565,8 @@ class AzureDataExplorerLinkedService(LinkedService): :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object + :param credential: The credential reference containing authentication information. 
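# Example (sketch): a Delta Lake round trip; both classes inherit the new
# metrics flag through CopySource/CopySink.
from data_factory_management_client.models import (
    AzureDatabricksDeltaLakeSink,
    AzureDatabricksDeltaLakeSource,
)

delta_source = AzureDatabricksDeltaLakeSource(
    query="SELECT * FROM events WHERE load_date = current_date()",
)
delta_sink = AzureDatabricksDeltaLakeSink(
    pre_copy_script="DELETE FROM events_copy WHERE load_date = current_date()",
    disable_metrics_collection=True,
)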
+ :type credential: ~data_factory_management_client.models.CredentialReference """ _validation = { @@ -4197,6 +4587,7 @@ class AzureDataExplorerLinkedService(LinkedService): 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'database': {'key': 'typeProperties.database', 'type': 'object'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -4212,6 +4603,7 @@ def __init__( service_principal_id: Optional[object] = None, service_principal_key: Optional["SecretBase"] = None, tenant: Optional[object] = None, + credential: Optional["CredentialReference"] = None, **kwargs ): super(AzureDataExplorerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -4221,6 +4613,7 @@ def __init__( self.service_principal_key = service_principal_key self.database = database self.tenant = tenant + self.credential = credential class AzureDataExplorerSink(CopySink): @@ -4248,6 +4641,9 @@ class AzureDataExplorerSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param ingestion_mapping_name: A name of a pre-created csv mapping that was defined on the target Kusto table. Type: string. :type ingestion_mapping_name: object @@ -4271,6 +4667,7 @@ class AzureDataExplorerSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'}, 'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'}, 'flush_immediately': {'key': 'flushImmediately', 'type': 'object'}, @@ -4285,12 +4682,13 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, ingestion_mapping_name: Optional[object] = None, ingestion_mapping_as_json: Optional[object] = None, flush_immediately: Optional[object] = None, **kwargs ): - super(AzureDataExplorerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureDataExplorerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureDataExplorerSink' # type: str self.ingestion_mapping_name = ingestion_mapping_name self.ingestion_mapping_as_json = ingestion_mapping_as_json 
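# Example (sketch): Azure Data Explorer authenticating through a credential
# reference instead of a service principal. `endpoint` and `database` are
# assumed required, per the service API; their declarations sit outside the
# visible context lines of this hunk.
from data_factory_management_client.models import (
    AzureDataExplorerLinkedService,
    CredentialReference,
)

adx_ls = AzureDataExplorerLinkedService(
    endpoint="https://mycluster.westus2.kusto.windows.net",
    database="exampledb",
    credential=CredentialReference(
        type="CredentialReference", reference_name="exampleUserAssignedIdentity"
    ),
)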
@@ -4316,6 +4714,9 @@ class AzureDataExplorerSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Required. Database query. Should be a Kusto Query Language (KQL) query. Type: string (or Expression with resultType string). :type query: object @@ -4326,8 +4727,8 @@ class AzureDataExplorerSource(CopySource): pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).. :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -4341,10 +4742,11 @@ class AzureDataExplorerSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'no_truncation': {'key': 'noTruncation', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -4355,12 +4757,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, no_truncation: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(AzureDataExplorerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureDataExplorerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureDataExplorerSource' # type: str self.query = query self.no_truncation = no_truncation @@ -4668,6 +5071,8 @@ class AzureDataLakeStoreLinkedService(LinkedService): encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param credential: The credential reference containing authentication information. 
+ :type credential: ~data_factory_management_client.models.CredentialReference """ _validation = { @@ -4691,6 +5096,7 @@ class AzureDataLakeStoreLinkedService(LinkedService): 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -4710,6 +5116,7 @@ def __init__( subscription_id: Optional[object] = None, resource_group_name: Optional[object] = None, encrypted_credential: Optional[object] = None, + credential: Optional["CredentialReference"] = None, **kwargs ): super(AzureDataLakeStoreLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -4723,6 +5130,7 @@ def __init__( self.subscription_id = subscription_id self.resource_group_name = resource_group_name self.encrypted_credential = encrypted_credential + self.credential = credential class AzureDataLakeStoreLocation(DatasetLocation): @@ -4779,6 +5187,9 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -4824,6 +5235,7 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -4842,6 +5254,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, @@ -4855,7 +5268,7 @@ def __init__( modified_datetime_end: Optional[object] = None, **kwargs ): - super(AzureDataLakeStoreReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureDataLakeStoreReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureDataLakeStoreReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -4895,6 +5308,9 @@ class AzureDataLakeStoreSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. 
Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object :param enable_adls_single_file_parallel: Single File Parallel. @@ -4913,6 +5329,7 @@ class AzureDataLakeStoreSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'}, } @@ -4926,11 +5343,12 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, copy_behavior: Optional[object] = None, enable_adls_single_file_parallel: Optional[object] = None, **kwargs ): - super(AzureDataLakeStoreSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureDataLakeStoreSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureDataLakeStoreSink' # type: str self.copy_behavior = copy_behavior self.enable_adls_single_file_parallel = enable_adls_single_file_parallel @@ -4955,6 +5373,9 @@ class AzureDataLakeStoreSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). 
:type recursive: object @@ -4970,6 +5391,7 @@ class AzureDataLakeStoreSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, } @@ -4980,10 +5402,11 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, **kwargs ): - super(AzureDataLakeStoreSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureDataLakeStoreSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureDataLakeStoreSource' # type: str self.recursive = recursive @@ -5001,6 +5424,9 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object :param expiry_date_time: Specifies the expiry time of the written files. The time is applied to @@ -5017,6 +5443,7 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'expiry_date_time': {'key': 'expiryDateTime', 'type': 'object'}, } @@ -5026,11 +5453,12 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, copy_behavior: Optional[object] = None, expiry_date_time: Optional[object] = None, **kwargs ): - super(AzureDataLakeStoreWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + super(AzureDataLakeStoreWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs) self.type = 'AzureDataLakeStoreWriteSettings' # type: str self.expiry_date_time = expiry_date_time @@ -5193,6 +5621,9 @@ class AzureFileStorageReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. 
Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -5233,6 +5664,7 @@ class AzureFileStorageReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -5250,6 +5682,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, @@ -5262,7 +5695,7 @@ def __init__( modified_datetime_end: Optional[object] = None, **kwargs ): - super(AzureFileStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureFileStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureFileStorageReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -5289,6 +5722,9 @@ class AzureFileStorageWriteSettings(StoreWriteSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. 
:type copy_behavior: object """ @@ -5301,6 +5737,7 @@ class AzureFileStorageWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } @@ -5309,10 +5746,11 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, copy_behavior: Optional[object] = None, **kwargs ): - super(AzureFileStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + super(AzureFileStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs) self.type = 'AzureFileStorageWriteSettings' # type: str @@ -5426,6 +5864,13 @@ class AzureFunctionLinkedService(LinkedService): encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param credential: The credential reference containing authentication information. + :type credential: ~data_factory_management_client.models.CredentialReference + :param resource_id: Allowed token audiences for azure function. + :type resource_id: object + :param authentication: Type of authentication (Required to specify MSI) used to connect to + AzureFunction. Type: string (or Expression with resultType string). + :type authentication: object """ _validation = { @@ -5443,6 +5888,9 @@ class AzureFunctionLinkedService(LinkedService): 'function_app_url': {'key': 'typeProperties.functionAppUrl', 'type': 'object'}, 'function_key': {'key': 'typeProperties.functionKey', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, + 'resource_id': {'key': 'typeProperties.resourceId', 'type': 'object'}, + 'authentication': {'key': 'typeProperties.authentication', 'type': 'object'}, } def __init__( @@ -5456,6 +5904,9 @@ def __init__( annotations: Optional[List[object]] = None, function_key: Optional["SecretBase"] = None, encrypted_credential: Optional[object] = None, + credential: Optional["CredentialReference"] = None, + resource_id: Optional[object] = None, + authentication: Optional[object] = None, **kwargs ): super(AzureFunctionLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -5463,6 +5914,9 @@ def __init__( self.function_app_url = function_app_url self.function_key = function_key self.encrypted_credential = encrypted_credential + self.credential = credential + self.resource_id = resource_id + self.authentication = authentication class AzureKeyVaultLinkedService(LinkedService): @@ -5486,6 +5940,8 @@ class AzureKeyVaultLinkedService(LinkedService): :param base_url: Required. The base URL of the Azure Key Vault. e.g. https://myakv.vault.azure.net Type: string (or Expression with resultType string). 
:type base_url: object + :param credential: The credential reference containing authentication information. + :type credential: ~data_factory_management_client.models.CredentialReference """ _validation = { @@ -5501,6 +5957,7 @@ class AzureKeyVaultLinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'base_url': {'key': 'typeProperties.baseUrl', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -5512,11 +5969,13 @@ def __init__( description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, + credential: Optional["CredentialReference"] = None, **kwargs ): super(AzureKeyVaultLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AzureKeyVault' # type: str self.base_url = base_url + self.credential = credential class SecretBase(msrest.serialization.Model): @@ -5680,12 +6139,15 @@ class AzureMariaDbSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -5701,8 +6163,9 @@ class AzureMariaDbSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -5713,12 +6176,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(AzureMariaDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(AzureMariaDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AzureMariaDBSource' # type: str self.query = query @@ -6024,6 +6488,9 @@ class AzureMlLinkedService(LinkedService): encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param authentication: Type of authentication (Required to specify MSI) used to connect to + AzureML. Type: string (or Expression with resultType string). + :type authentication: object """ _validation = { @@ -6046,6 +6513,7 @@ class AzureMlLinkedService(LinkedService): 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'authentication': {'key': 'typeProperties.authentication', 'type': 'object'}, } def __init__( @@ -6063,6 +6531,7 @@ def __init__( service_principal_key: Optional["SecretBase"] = None, tenant: Optional[object] = None, encrypted_credential: Optional[object] = None, + authentication: Optional[object] = None, **kwargs ): super(AzureMlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -6074,6 +6543,7 @@ def __init__( self.service_principal_key = service_principal_key self.tenant = tenant self.encrypted_credential = encrypted_credential + self.authentication = authentication class AzureMlServiceLinkedService(LinkedService): @@ -6375,6 +6845,9 @@ class AzureMySqlSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). 
:type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). :type pre_copy_script: object @@ -6392,6 +6865,7 @@ class AzureMySqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -6404,10 +6878,11 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, pre_copy_script: Optional[object] = None, **kwargs ): - super(AzureMySqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureMySqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureMySqlSink' # type: str self.pre_copy_script = pre_copy_script @@ -6431,12 +6906,15 @@ class AzureMySqlSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: Database query. Type: string (or Expression with resultType string). 
:type query: object """ @@ -6451,8 +6929,9 @@ class AzureMySqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -6463,12 +6942,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AzureMySqlSource' # type: str self.query = query @@ -6638,6 +7118,9 @@ class AzurePostgreSqlSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). 
:type pre_copy_script: object @@ -6655,6 +7138,7 @@ class AzurePostgreSqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -6667,10 +7151,11 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, pre_copy_script: Optional[object] = None, **kwargs ): - super(AzurePostgreSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzurePostgreSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzurePostgreSqlSink' # type: str self.pre_copy_script = pre_copy_script @@ -6694,12 +7179,15 @@ class AzurePostgreSqlSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -6715,8 +7203,9 @@ class AzurePostgreSqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -6727,12 +7216,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AzurePostgreSqlSource' # type: str self.query = query @@ -6843,6 +7333,9 @@ class AzureQueueSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :type disable_metrics_collection: object """ _validation = { @@ -6857,6 +7350,7 @@ class AzureQueueSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, } def __init__( @@ -6868,9 +7362,10 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, **kwargs ): - super(AzureQueueSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureQueueSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureQueueSink' # type: str @@ -6969,6 +7464,9 @@ class AzureSearchIndexSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: Specify the write behavior when upserting documents into Azure Search Index. Possible values include: "Merge", "Upload". 
:type write_behavior: str or @@ -6987,6 +7485,7 @@ class AzureSearchIndexSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, } @@ -6999,10 +7498,11 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, write_behavior: Optional[Union[str, "AzureSearchIndexWriteBehaviorType"]] = None, **kwargs ): - super(AzureSearchIndexSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureSearchIndexSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureSearchIndexSink' # type: str self.write_behavior = write_behavior @@ -7116,6 +7616,8 @@ class AzureSqlDatabaseLinkedService(LinkedService): :param always_encrypted_settings: Sql always encrypted properties. :type always_encrypted_settings: ~data_factory_management_client.models.SqlAlwaysEncryptedProperties + :param credential: The credential reference containing authentication information. + :type credential: ~data_factory_management_client.models.CredentialReference """ _validation = { @@ -7138,6 +7640,7 @@ class AzureSqlDatabaseLinkedService(LinkedService): 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, 'always_encrypted_settings': {'key': 'typeProperties.alwaysEncryptedSettings', 'type': 'SqlAlwaysEncryptedProperties'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -7156,6 +7659,7 @@ def __init__( azure_cloud_type: Optional[object] = None, encrypted_credential: Optional[object] = None, always_encrypted_settings: Optional["SqlAlwaysEncryptedProperties"] = None, + credential: Optional["CredentialReference"] = None, **kwargs ): super(AzureSqlDatabaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -7168,6 +7672,7 @@ def __init__( self.azure_cloud_type = azure_cloud_type self.encrypted_credential = encrypted_credential self.always_encrypted_settings = always_encrypted_settings + self.credential = credential class AzureSqlDwLinkedService(LinkedService): @@ -7210,6 +7715,8 @@ class AzureSqlDwLinkedService(LinkedService): encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param credential: The credential reference containing authentication information. 
+ :type credential: ~data_factory_management_client.models.CredentialReference """ _validation = { @@ -7231,6 +7738,7 @@ class AzureSqlDwLinkedService(LinkedService): 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -7248,6 +7756,7 @@ def __init__( tenant: Optional[object] = None, azure_cloud_type: Optional[object] = None, encrypted_credential: Optional[object] = None, + credential: Optional["CredentialReference"] = None, **kwargs ): super(AzureSqlDwLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -7259,6 +7768,7 @@ def __init__( self.tenant = tenant self.azure_cloud_type = azure_cloud_type self.encrypted_credential = encrypted_credential + self.credential = credential class AzureSqlDwTableDataset(Dataset): @@ -7385,6 +7895,8 @@ class AzureSqlMiLinkedService(LinkedService): :param always_encrypted_settings: Sql always encrypted properties. :type always_encrypted_settings: ~data_factory_management_client.models.SqlAlwaysEncryptedProperties + :param credential: The credential reference containing authentication information. + :type credential: ~data_factory_management_client.models.CredentialReference """ _validation = { @@ -7407,6 +7919,7 @@ class AzureSqlMiLinkedService(LinkedService): 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, 'always_encrypted_settings': {'key': 'typeProperties.alwaysEncryptedSettings', 'type': 'SqlAlwaysEncryptedProperties'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -7425,6 +7938,7 @@ def __init__( azure_cloud_type: Optional[object] = None, encrypted_credential: Optional[object] = None, always_encrypted_settings: Optional["SqlAlwaysEncryptedProperties"] = None, + credential: Optional["CredentialReference"] = None, **kwargs ): super(AzureSqlMiLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -7437,6 +7951,7 @@ def __init__( self.azure_cloud_type = azure_cloud_type self.encrypted_credential = encrypted_credential self.always_encrypted_settings = always_encrypted_settings + self.credential = credential class AzureSqlMiTableDataset(Dataset): @@ -7545,6 +8060,9 @@ class AzureSqlSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). :type sql_writer_stored_procedure_name: object @@ -7563,6 +8081,14 @@ class AzureSqlSink(CopySink): :param table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. 
Type: string (or Expression with resultType string). :type table_option: object + :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or + Expression with resultType boolean). + :type sql_writer_use_table_lock: object + :param write_behavior: Write behavior when copying data into Azure SQL. Type: + SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum). + :type write_behavior: object + :param upsert_settings: SQL upsert settings. + :type upsert_settings: ~data_factory_management_client.models.SqlUpsertSettings """ _validation = { @@ -7577,12 +8103,16 @@ class AzureSqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, 'table_option': {'key': 'tableOption', 'type': 'object'}, + 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlUpsertSettings'}, } def __init__( @@ -7594,15 +8124,19 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, sql_writer_stored_procedure_name: Optional[object] = None, sql_writer_table_type: Optional[object] = None, pre_copy_script: Optional[object] = None, stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, stored_procedure_table_type_parameter_name: Optional[object] = None, table_option: Optional[object] = None, + sql_writer_use_table_lock: Optional[object] = None, + write_behavior: Optional[object] = None, + upsert_settings: Optional["SqlUpsertSettings"] = None, **kwargs ): - super(AzureSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureSqlSink' # type: str self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type @@ -7610,6 +8144,9 @@ def __init__( self.stored_procedure_parameters = stored_procedure_parameters self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name self.table_option = table_option + self.sql_writer_use_table_lock = sql_writer_use_table_lock + self.write_behavior = 
write_behavior + self.upsert_settings = upsert_settings class AzureSqlSource(TabularSource): @@ -7631,12 +8168,15 @@ class AzureSqlSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). :type sql_reader_query: object :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database @@ -7666,8 +8206,9 @@ class AzureSqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, @@ -7683,8 +8224,9 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, sql_reader_query: Optional[object] = None, sql_reader_stored_procedure_name: Optional[object] = None, stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, @@ -7693,7 +8235,7 @@ def __init__( partition_settings: Optional["SqlPartitionSettings"] = None, **kwargs ): - super(AzureSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(AzureSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AzureSqlSource' # type: str 
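        # Editor's note (illustrative, not part of the generated diff): the new
        # AzureSqlSink options above enable bulk upsert. A minimal sketch, assuming
        # SqlUpsertSettings exposes `use_temp_db` and `keys` and that the service
        # accepts "upsert" for writeBehavior; all values are hypothetical:
        #
        #     sink = AzureSqlSink(
        #         write_behavior="upsert",
        #         sql_writer_use_table_lock=False,
        #         upsert_settings=SqlUpsertSettings(use_temp_db=True, keys=["CustomerId"]),
        #         disable_metrics_collection=False,
        #     )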
self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name @@ -7955,6 +8497,9 @@ class AzureTableSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param azure_table_default_partition_key_value: Azure Table default partition key value. Type: string (or Expression with resultType string). :type azure_table_default_partition_key_value: object @@ -7981,6 +8526,7 @@ class AzureTableSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'}, 'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'}, 'azure_table_row_key_name': {'key': 'azureTableRowKeyName', 'type': 'object'}, @@ -7996,13 +8542,14 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, azure_table_default_partition_key_value: Optional[object] = None, azure_table_partition_key_name: Optional[object] = None, azure_table_row_key_name: Optional[object] = None, azure_table_insert_type: Optional[object] = None, **kwargs ): - super(AzureTableSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureTableSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureTableSink' # type: str self.azure_table_default_partition_key_value = azure_table_default_partition_key_value self.azure_table_partition_key_name = azure_table_partition_key_name @@ -8029,12 +8576,15 @@ class AzureTableSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). 
- :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param azure_table_source_query: Azure Table source query. Type: string (or Expression with resultType string). :type azure_table_source_query: object @@ -8053,8 +8603,9 @@ class AzureTableSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'}, 'azure_table_source_ignore_table_not_found': {'key': 'azureTableSourceIgnoreTableNotFound', 'type': 'object'}, } @@ -8066,13 +8617,14 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, azure_table_source_query: Optional[object] = None, azure_table_source_ignore_table_not_found: Optional[object] = None, **kwargs ): - super(AzureTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(AzureTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AzureTableSource' # type: str self.azure_table_source_query = azure_table_source_query self.azure_table_source_ignore_table_not_found = azure_table_source_ignore_table_not_found @@ -8327,6 +8879,9 @@ class BinarySink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Binary store settings. 
:type store_settings: ~data_factory_management_client.models.StoreWriteSettings """ @@ -8343,6 +8898,7 @@ class BinarySink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, } @@ -8355,10 +8911,11 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, store_settings: Optional["StoreWriteSettings"] = None, **kwargs ): - super(BinarySink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(BinarySink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'BinarySink' # type: str self.store_settings = store_settings @@ -8382,6 +8939,9 @@ class BinarySource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Binary store settings. :type store_settings: ~data_factory_management_client.models.StoreReadSettings :param format_settings: Binary format settings. 
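The hunks above give each copy source and sink an optional `disableMetricsCollection` property and relax `additionalColumns` from a typed list to a plain `object`. A minimal sketch of what a caller can pass once this lands, assuming the models are importable from `data_factory_management_client.models` as the docstring cross-references suggest (query and column values are illustrative):

```python
# Sketch only -- exercises the new disable_metrics_collection flag and the relaxed
# additional_columns typing shown in the AzureTableSource hunks above.
from data_factory_management_client.models import AzureTableSource

source = AzureTableSource(
    azure_table_source_query="PartitionKey eq 'p1'",  # illustrative query
    disable_metrics_collection=True,                  # new flag; service default is false
    # Previously Optional[List["AdditionalColumns"]]; with the 'object' serialization
    # type, a plain list of name/value dicts (or an ADF expression) passes through as-is.
    additional_columns=[{"name": "ingestedBy", "value": "copyActivity"}],
)
```

Because the `_attribute_map` entry is now `'object'`, a payload like this round-trips unchanged instead of being deserialized into `AdditionalColumns` instances.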
@@ -8398,6 +8958,7 @@ class BinarySource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'BinaryReadSettings'}, } @@ -8409,11 +8970,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, store_settings: Optional["StoreReadSettings"] = None, format_settings: Optional["BinaryReadSettings"] = None, **kwargs ): - super(BinarySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(BinarySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'BinarySource' # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -8641,6 +9203,9 @@ class BlobSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param blob_writer_overwrite_files: Blob writer overwrite files. Type: boolean (or Expression with resultType boolean). :type blob_writer_overwrite_files: object @@ -8652,6 +9217,9 @@ class BlobSink(CopySink): :type blob_writer_add_header: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object + :param metadata: Specify the custom metadata to be added to sink data. Type: array of objects + (or Expression with resultType array of objects). 
+ :type metadata: list[~data_factory_management_client.models.MetadataItem] """ _validation = { @@ -8666,10 +9234,12 @@ class BlobSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'}, 'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'}, 'blob_writer_add_header': {'key': 'blobWriterAddHeader', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'metadata': {'key': 'metadata', 'type': '[MetadataItem]'}, } def __init__( @@ -8681,18 +9251,21 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, blob_writer_overwrite_files: Optional[object] = None, blob_writer_date_time_format: Optional[object] = None, blob_writer_add_header: Optional[object] = None, copy_behavior: Optional[object] = None, + metadata: Optional[List["MetadataItem"]] = None, **kwargs ): - super(BlobSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(BlobSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'BlobSink' # type: str self.blob_writer_overwrite_files = blob_writer_overwrite_files self.blob_writer_date_time_format = blob_writer_date_time_format self.blob_writer_add_header = blob_writer_add_header self.copy_behavior = copy_behavior + self.metadata = metadata class BlobSource(CopySource): @@ -8714,6 +9287,9 @@ class BlobSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType boolean). 
:type treat_empty_as_null: object @@ -8735,6 +9311,7 @@ class BlobSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, @@ -8747,12 +9324,13 @@ def __init__( self, *, source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, treat_empty_as_null: Optional[object] = None, skip_header_line_count: Optional[object] = None, recursive: Optional[object] = None, **kwargs ): - super(BlobSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(BlobSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'BlobSource' # type: str self.treat_empty_as_null = treat_empty_as_null self.skip_header_line_count = skip_header_line_count @@ -8931,12 +9509,15 @@ class CassandraSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects (AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: Database query. Should be a SQL-92 query expression or Cassandra Query Language (CQL) command. Type: string (or Expression with resultType string).
:type query: object @@ -8960,8 +9541,9 @@ class CassandraSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'consistency_level': {'key': 'consistencyLevel', 'type': 'str'}, } @@ -8973,13 +9555,14 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, consistency_level: Optional[Union[str, "CassandraSourceReadConsistencyLevels"]] = None, **kwargs ): - super(CassandraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(CassandraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'CassandraSource' # type: str self.query = query self.consistency_level = consistency_level @@ -9325,8 +9908,8 @@ class CommonDataServiceForAppsLinkedService(LinkedService): :param deployment_type: Required. The deployment type of the Common Data Service for Apps instance. 'Online' for Common Data Service for Apps Online and 'OnPremisesWithIfd' for Common Data Service for Apps on-premises with Ifd. Type: string (or Expression with resultType - string). Possible values include: "Online", "OnPremisesWithIfd". - :type deployment_type: str or ~data_factory_management_client.models.DynamicsDeploymentType + string). + :type deployment_type: object :param host_name: The host name of the on-premises Common Data Service for Apps server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). @@ -9347,10 +9930,8 @@ class CommonDataServiceForAppsLinkedService(LinkedService): :param authentication_type: Required. The authentication type to connect to Common Data Service for Apps server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario. 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or - Expression with resultType string). Possible values include: "Office365", "Ifd", - "AADServicePrincipal". - :type authentication_type: str or - ~data_factory_management_client.models.DynamicsAuthenticationType + Expression with resultType string). + :type authentication_type: object :param username: User name to access the Common Data Service for Apps instance. Type: string (or Expression with resultType string). 
:type username: object @@ -9361,10 +9942,8 @@ class CommonDataServiceForAppsLinkedService(LinkedService): :type service_principal_id: object :param service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). Possible values include: - "ServicePrincipalKey", "ServicePrincipalCert". - :type service_principal_credential_type: str or - ~data_factory_management_client.models.DynamicsServicePrincipalCredentialType + for certificate. Type: string (or Expression with resultType string). + :type service_principal_credential_type: object :param service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If @@ -9390,16 +9969,16 @@ class CommonDataServiceForAppsLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'}, 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, 'port': {'key': 'typeProperties.port', 'type': 'object'}, 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'object'}, 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -9407,8 +9986,8 @@ class CommonDataServiceForAppsLinkedService(LinkedService): def __init__( self, *, - deployment_type: Union[str, "DynamicsDeploymentType"], - authentication_type: Union[str, "DynamicsAuthenticationType"], + deployment_type: object, + authentication_type: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, @@ -9421,7 +10000,7 @@ def __init__( username: Optional[object] = None, password: Optional["SecretBase"] = None, service_principal_id: Optional[object] = None, - service_principal_credential_type: Optional[Union[str, "DynamicsServicePrincipalCredentialType"]] = None, + service_principal_credential_type: Optional[object] = None, service_principal_credential: Optional["SecretBase"] = None, encrypted_credential: Optional[object] = None, **kwargs @@ -9467,6 +10046,9 @@ class CommonDataServiceForAppsSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink 
data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: Required. The write behavior for the operation. Possible values include: "Upsert". :type write_behavior: str or ~data_factory_management_client.models.DynamicsSinkWriteBehavior @@ -9492,6 +10074,7 @@ 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, @@ -9507,11 +10090,12 @@ def __init__( self, *, write_batch_size: Optional[object] = None, write_batch_timeout: Optional[object] = None, sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, ignore_null_values: Optional[object] = None, alternate_key_name: Optional[object] = None, **kwargs ): - super(CommonDataServiceForAppsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(CommonDataServiceForAppsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'CommonDataServiceForAppsSink' # type: str self.write_behavior = write_behavior self.ignore_null_values = ignore_null_values @@ -9537,12 +10121,15 @@ class CommonDataServiceForAppsSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: FetchXML is a proprietary query language that is used in Microsoft Common Data Service for Apps (online & on-premises). Type: string (or Expression with resultType string). :type query: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(CommonDataServiceForAppsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(CommonDataServiceForAppsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'CommonDataServiceForAppsSource' # type: str self.query = query self.additional_columns = additional_columns @@ -9836,12 +10425,15 @@ class ConcurSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects (AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string).
:type query: object @@ -9857,8 +10449,9 @@ class ConcurSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -9869,12 +10462,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(ConcurSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(ConcurSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'ConcurSource' # type: str self.query = query @@ -10389,6 +10983,9 @@ class CosmosDbMongoDbApiLinkedService(LinkedService): :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] + :param is_server_version_above32: Whether the CosmosDB (MongoDB API) server version is higher + than 3.2. The default value is false. Type: boolean (or Expression with resultType boolean). + :type is_server_version_above32: object :param connection_string: Required. The CosmosDB (MongoDB API) connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. 
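A similar relaxation runs through the Dynamics-family linked services above: `deployment_type`, `authentication_type`, and `service_principal_credential_type` drop their enum typing in favor of plain objects, so an ADF expression becomes as valid as a literal string. A hedged sketch, again assuming the `data_factory_management_client.models` import path implied by the docstrings (host, user, and secret values are placeholders; `SecureString` is the `SecretBase` implementation the docstrings above mention):

```python
# Sketch only -- literal and expression-valued fields side by side, after the
# enum-to-object relaxation in CommonDataServiceForAppsLinkedService.
from data_factory_management_client.models import (
    CommonDataServiceForAppsLinkedService,
    SecureString,
)

linked_service = CommonDataServiceForAppsLinkedService(
    deployment_type="Online",                        # literal string still accepted
    authentication_type={                            # expression object, newly permitted
        "type": "Expression",
        "value": "@linkedService().authenticationType",
    },
    service_uri="https://contoso.crm.dynamics.com",  # placeholder service URI
    username="user@contoso.com",                     # placeholder user
    password=SecureString(value="<secret>"),         # placeholder secret
)
```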
@@ -10411,6 +11008,7 @@ class CosmosDbMongoDbApiLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'is_server_version_above32': {'key': 'typeProperties.isServerVersionAbove32', 'type': 'object'}, 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, 'database': {'key': 'typeProperties.database', 'type': 'object'}, } @@ -10425,10 +11023,12 @@ def __init__( description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, + is_server_version_above32: Optional[object] = None, **kwargs ): super(CosmosDbMongoDbApiLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'CosmosDbMongoDbApi' # type: str + self.is_server_version_above32 = is_server_version_above32 self.connection_string = connection_string self.database = database @@ -10458,6 +11058,9 @@ class CosmosDbMongoDbApiSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: Specifies whether the document with same key to be overwritten (upsert) rather than throw exception (insert). The default value is "insert". Type: string (or Expression with resultType string). Type: string (or Expression with resultType string). @@ -10476,6 +11079,7 @@ class CosmosDbMongoDbApiSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } @@ -10488,10 +11092,11 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, write_behavior: Optional[object] = None, **kwargs ): - super(CosmosDbMongoDbApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(CosmosDbMongoDbApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'CosmosDbMongoDbApiSink' # type: str self.write_behavior = write_behavior @@ -10515,6 +11120,9 @@ class CosmosDbMongoDbApiSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. 
Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param filter: Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). @@ -10530,8 +11138,8 @@ pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects (AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'filter': {'key': 'filter', 'type': 'object'}, 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, filter: Optional[object] = None, cursor_methods: Optional["MongoDbCursorMethodsProperties"] = None, batch_size: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(CosmosDbMongoDbApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(CosmosDbMongoDbApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'CosmosDbMongoDbApiSource' # type: str self.filter = filter self.cursor_methods = cursor_methods @@ -10669,6 +11279,9 @@ class CosmosDbSqlApiSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object :param write_behavior: Describes how to write data to Azure Cosmos DB. Type: string (or Expression with resultType string). Allowed values: insert and upsert. :type write_behavior: object @@ -10686,6 +11299,7 @@ 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } @@ -10698,10 +11312,11 @@ def __init__( self, *, write_batch_size: Optional[object] = None, write_batch_timeout: Optional[object] = None, sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, write_behavior: Optional[object] = None, **kwargs ): - super(CosmosDbSqlApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(CosmosDbSqlApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'CosmosDbSqlApiSink' # type: str self.write_behavior = write_behavior @@ -10725,6 +11340,9 @@ class CosmosDbSqlApiSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: SQL API query. Type: string (or Expression with resultType string). :type query: object :param page_size: Page size of the result. Type: integer (or Expression with resultType integer). :type page_size: object :param preferred_regions: Preferred regions. Type: array of strings (or Expression with resultType array of strings). :type preferred_regions: object :param detect_datetime: Whether detect primitive values as datetime values. Type: boolean (or Expression with resultType boolean). :type detect_datetime: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects (AdditionalColumns) (or Expression with resultType array of objects).
+ :type additional_columns: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'page_size': {'key': 'pageSize', 'type': 'object'}, 'preferred_regions': {'key': 'preferredRegions', 'type': 'object'}, 'detect_datetime': {'key': 'detectDatetime', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query: Optional[object] = None, page_size: Optional[object] = None, preferred_regions: Optional[object] = None, detect_datetime: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(CosmosDbSqlApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(CosmosDbSqlApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'CosmosDbSqlApiSource' # type: str self.query = query self.page_size = page_size @@ -10865,12 +11485,15 @@ class CouchbaseSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects (AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string).
:type query: object @@ -10886,8 +11509,9 @@ class CouchbaseSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -10898,12 +11522,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(CouchbaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(CouchbaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'CouchbaseSource' # type: str self.query = query @@ -11107,6 +11732,181 @@ def __init__( self.run_id = run_id +class Credential(msrest.serialization.Model): + """The Azure Data Factory nested object which contains the information and credential which can be used to connect with related store or compute resource. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ManagedIdentityCredential, ServicePrincipalCredential. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of credential.Constant filled by server. + :type type: str + :param description: Credential description. + :type description: str + :param annotations: List of tags that can be used for describing the Credential. 
+ :type annotations: list[object] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + } + + _subtype_map = { + 'type': {'ManagedIdentity': 'ManagedIdentityCredential', 'ServicePrincipal': 'ServicePrincipalCredential'} + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + annotations: Optional[List[object]] = None, + **kwargs + ): + super(Credential, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = 'Credential' # type: str + self.description = description + self.annotations = annotations + + +class CredentialReference(msrest.serialization.Model): + """Credential reference type. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :ivar type: Required. Credential reference type. Default value: "CredentialReference". + :vartype type: str + :param reference_name: Required. Reference credential name. + :type reference_name: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + type = "CredentialReference" + + def __init__( + self, + *, + reference_name: str, + additional_properties: Optional[Dict[str, object]] = None, + **kwargs + ): + super(CredentialReference, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.reference_name = reference_name + + +class SubResource(msrest.serialization.Model): + """Azure Data Factory nested resource, which belongs to a factory. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SubResource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + self.etag = None + + +class CredentialResource(SubResource): + """Credential resource type. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. 
+ :vartype etag: str + :param properties: Required. Properties of credentials. + :type properties: ~data_factory_management_client.models.Credential + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'Credential'}, + } + + def __init__( + self, + *, + properties: "Credential", + **kwargs + ): + super(CredentialResource, self).__init__(**kwargs) + self.properties = properties + + class CustomActivity(ExecutionActivity): """Custom activity type. @@ -12092,46 +12892,6 @@ def __init__( self.dataset_parameters = dataset_parameters -class SubResource(msrest.serialization.Model): - """Azure Data Factory nested resource, which belongs to a factory. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(SubResource, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None - self.etag = None - - class DataFlowResource(SubResource): """Data flow resource type. @@ -12468,8 +13228,9 @@ class DatasetCompression(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. - :type type: str + :param type: Required. Type of dataset compression. Type: string (or Expression with resultType + string).Constant filled by server. + :type type: object """ _validation = { @@ -12478,7 +13239,7 @@ class DatasetCompression(msrest.serialization.Model): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'object'}, } _subtype_map = { @@ -12504,8 +13265,9 @@ class DatasetBZip2Compression(DatasetCompression): :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. - :type type: str + :param type: Required. Type of dataset compression. Type: string (or Expression with resultType + string).Constant filled by server. 
+ :type type: object """ _validation = { @@ -12514,7 +13276,7 @@ class DatasetBZip2Compression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'object'}, } def __init__( @@ -12592,10 +13354,11 @@ class DatasetDeflateCompression(DatasetCompression): :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. - :type type: str - :param level: The Deflate compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel + :param type: Required. Type of dataset compression. Type: string (or Expression with resultType + string).Constant filled by server. + :type type: object + :param level: The Deflate compression level. + :type level: object """ _validation = { @@ -12604,15 +13367,15 @@ class DatasetDeflateCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'object'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, - level: Optional[Union[str, "DatasetCompressionLevel"]] = None, + level: Optional[object] = None, **kwargs ): super(DatasetDeflateCompression, self).__init__(additional_properties=additional_properties, **kwargs) @@ -12649,10 +13412,11 @@ class DatasetGZipCompression(DatasetCompression): :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. - :type type: str - :param level: The GZip compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel + :param type: Required. Type of dataset compression. Type: string (or Expression with resultType + string).Constant filled by server. + :type type: object + :param level: The GZip compression level. + :type level: object """ _validation = { @@ -12661,15 +13425,15 @@ class DatasetGZipCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'object'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, - level: Optional[Union[str, "DatasetCompressionLevel"]] = None, + level: Optional[object] = None, **kwargs ): super(DatasetGZipCompression, self).__init__(additional_properties=additional_properties, **kwargs) @@ -12834,8 +13598,9 @@ class DatasetTarCompression(DatasetCompression): :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. - :type type: str + :param type: Required. Type of dataset compression. Type: string (or Expression with resultType + string).Constant filled by server. 
+ :type type: object """ _validation = { @@ -12844,7 +13609,7 @@ class DatasetTarCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'object'}, } def __init__( @@ -12865,10 +13630,11 @@ class DatasetTarGZipCompression(DatasetCompression): :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. - :type type: str - :param level: The TarGZip compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel + :param type: Required. Type of dataset compression. Type: string (or Expression with resultType + string).Constant filled by server. + :type type: object + :param level: The TarGZip compression level. + :type level: object """ _validation = { @@ -12877,15 +13643,15 @@ class DatasetTarGZipCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'object'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, - level: Optional[Union[str, "DatasetCompressionLevel"]] = None, + level: Optional[object] = None, **kwargs ): super(DatasetTarGZipCompression, self).__init__(additional_properties=additional_properties, **kwargs) @@ -12901,10 +13667,11 @@ class DatasetZipDeflateCompression(DatasetCompression): :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. - :type type: str - :param level: The ZipDeflate compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel + :param type: Required. Type of dataset compression. Type: string (or Expression with resultType + string).Constant filled by server. + :type type: object + :param level: The ZipDeflate compression level. + :type level: object """ _validation = { @@ -12913,15 +13680,15 @@ class DatasetZipDeflateCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'object'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, - level: Optional[Union[str, "DatasetCompressionLevel"]] = None, + level: Optional[object] = None, **kwargs ): super(DatasetZipDeflateCompression, self).__init__(additional_properties=additional_properties, **kwargs) @@ -13051,12 +13818,15 @@ class Db2Source(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects (AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object """ @@ -13071,8 +13841,9 @@ class Db2Source(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -13083,12 +13854,13 @@ def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(Db2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(Db2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'Db2Source' # type: str self.query = query @@ -13326,12 +14098,11 @@ class DelimitedTextDataset(Dataset): https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). :type encoding_name: object - :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo", "bzip2", - "deflate", "zipDeflate", "lz4", "tar", "tarGZip". - :type compression_codec: str or ~data_factory_management_client.models.CompressionCodec - :param compression_level: The data compression method used for DelimitedText. Possible values - include: "Optimal", "Fastest". - :type compression_level: str or ~data_factory_management_client.models.DatasetCompressionLevel + :param compression_codec: The data compression codec. Type: string (or Expression with + resultType string). + :type compression_codec: object + :param compression_level: The data compression method used for DelimitedText. + :type compression_level: object :param quote_char: The quote character. Type: string (or Expression with resultType string). :type quote_char: object :param escape_char: The escape character.
Type: string (or Expression with resultType string). @@ -13363,8 +14134,8 @@ class DelimitedTextDataset(Dataset): 'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'}, 'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'}, 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, - 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'}, - 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'str'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, + 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'object'}, 'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'}, 'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'}, 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, @@ -13386,8 +14157,8 @@ def __init__( column_delimiter: Optional[object] = None, row_delimiter: Optional[object] = None, encoding_name: Optional[object] = None, - compression_codec: Optional[Union[str, "CompressionCodec"]] = None, - compression_level: Optional[Union[str, "DatasetCompressionLevel"]] = None, + compression_codec: Optional[object] = None, + compression_level: Optional[object] = None, quote_char: Optional[object] = None, escape_char: Optional[object] = None, first_row_as_header: Optional[object] = None, @@ -13475,6 +14246,9 @@ class DelimitedTextSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: DelimitedText store settings. :type store_settings: ~data_factory_management_client.models.StoreWriteSettings :param format_settings: DelimitedText format settings. 
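For DelimitedTextDataset the widening applies to `compression_codec` and `compression_level`, so the codec can be parameterised per run. A hypothetical sketch: "MyBlobStorage" is an invented linked-service name, and `LinkedServiceReference(reference_name=...)` is assumed to be available elsewhere in this generated client:

from data_factory_management_client.models import (
    DelimitedTextDataset,
    LinkedServiceReference,
)

dataset = DelimitedTextDataset(
    linked_service_name=LinkedServiceReference(reference_name="MyBlobStorage"),
    column_delimiter=",",
    compression_codec={"type": "Expression", "value": "@dataset().codec"},
    compression_level="Fastest",  # plain literals remain valid
)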
@@ -13493,6 +14267,7 @@ class DelimitedTextSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSettings'}, } @@ -13506,11 +14281,12 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, store_settings: Optional["StoreWriteSettings"] = None, format_settings: Optional["DelimitedTextWriteSettings"] = None, **kwargs ): - super(DelimitedTextSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(DelimitedTextSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'DelimitedTextSink' # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -13535,13 +14311,16 @@ class DelimitedTextSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: DelimitedText store settings. :type store_settings: ~data_factory_management_client.models.StoreReadSettings :param format_settings: DelimitedText format settings. :type format_settings: ~data_factory_management_client.models.DelimitedTextReadSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :type additional_columns: object """ _validation = { @@ -13554,9 +14333,10 @@ class DelimitedTextSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -13566,12 +14346,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, store_settings: Optional["StoreReadSettings"] = None, format_settings: Optional["DelimitedTextReadSettings"] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(DelimitedTextSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(DelimitedTextSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'DelimitedTextSource' # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -13804,6 +14585,9 @@ class DocumentDbCollectionSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param nesting_separator: Nested properties separator. Default is . (dot). Type: string (or Expression with resultType string). 
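DelimitedTextSource, completed above, shows the consumer-facing effect of the `additional_columns` retyping: both shapes below serialize cleanly under the new {'type': 'object'} mapping. Sketch only; $$FILEPATH is ADF's reserved value for the source file path:

from data_factory_management_client.models import DelimitedTextSource

static_columns = DelimitedTextSource(
    additional_columns=[{"name": "sourceFile", "value": "$$FILEPATH"}],
)
dynamic_columns = DelimitedTextSource(
    additional_columns={
        "type": "Expression",
        "value": "@pipeline().parameters.extraColumns",
    },
)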
:type nesting_separator: object @@ -13824,6 +14608,7 @@ class DocumentDbCollectionSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } @@ -13837,11 +14622,12 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, nesting_separator: Optional[object] = None, write_behavior: Optional[object] = None, **kwargs ): - super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'DocumentDbCollectionSink' # type: str self.nesting_separator = nesting_separator self.write_behavior = write_behavior @@ -13866,6 +14652,9 @@ class DocumentDbCollectionSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Documents query. Type: string (or Expression with resultType string). :type query: object :param nesting_separator: Nested properties separator. Type: string (or Expression with @@ -13875,8 +14664,8 @@ class DocumentDbCollectionSource(CopySource): pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
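Note that DocumentDbCollectionSink's `write_behavior` was already a pass-through `object` (see its attribute map above), unlike the Dynamics sinks later in this patch where it remains an enum string. A minimal sketch:

from data_factory_management_client.models import DocumentDbCollectionSink

sink = DocumentDbCollectionSink(
    write_behavior="upsert",          # free-form object, not an enum here
    nesting_separator=".",
    disable_metrics_collection=False,
)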
+ :type additional_columns: object """ _validation = { @@ -13889,10 +14678,11 @@ class DocumentDbCollectionSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -13902,13 +14692,14 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query: Optional[object] = None, nesting_separator: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(DocumentDbCollectionSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(DocumentDbCollectionSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'DocumentDbCollectionSource' # type: str self.query = query self.nesting_separator = nesting_separator @@ -14000,12 +14791,15 @@ class DrillSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
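The matching source pairs a Cosmos DB documents query with the new flag; `query_timeout` keeps its timespan pattern. Sketch, with an invented query:

from data_factory_management_client.models import DocumentDbCollectionSource

source = DocumentDbCollectionSource(
    query="select * from c where c.updated > @{pipeline().parameters.since}",
    nesting_separator=".",
    query_timeout="00:10:00",
    disable_metrics_collection=True,
)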
:type query: object @@ -14021,8 +14815,9 @@ class DrillSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -14033,12 +14828,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(DrillSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(DrillSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'DrillSource' # type: str self.query = query @@ -14361,12 +15157,15 @@ class DynamicsAxSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -14387,8 +15186,9 @@ class DynamicsAxSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } @@ -14400,13 +15200,14 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, http_request_timeout: Optional[object] = None, **kwargs ): - super(DynamicsAxSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(DynamicsAxSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'DynamicsAXSource' # type: str self.query = query self.http_request_timeout = http_request_timeout @@ -14501,9 +15302,8 @@ class DynamicsCrmLinkedService(LinkedService): :type annotations: list[object] :param deployment_type: Required. The deployment type of the Dynamics CRM instance. 'Online' for Dynamics CRM Online and 'OnPremisesWithIfd' for Dynamics CRM on-premises with Ifd. Type: - string (or Expression with resultType string). Possible values include: "Online", - "OnPremisesWithIfd". - :type deployment_type: str or ~data_factory_management_client.models.DynamicsDeploymentType + string (or Expression with resultType string). + :type deployment_type: object :param host_name: The host name of the on-premises Dynamics CRM server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). @@ -14522,10 +15322,8 @@ class DynamicsCrmLinkedService(LinkedService): :param authentication_type: Required. The authentication type to connect to Dynamics CRM server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or - Expression with resultType string). Possible values include: "Office365", "Ifd", - "AADServicePrincipal". - :type authentication_type: str or - ~data_factory_management_client.models.DynamicsAuthenticationType + Expression with resultType string). + :type authentication_type: object :param username: User name to access the Dynamics CRM instance. Type: string (or Expression with resultType string). 
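DynamicsAxSource adds the flag alongside its existing `http_request_timeout`; both it and `query_timeout` take timespan strings or expressions. Sketch, with an invented OData $select query:

from data_factory_management_client.models import DynamicsAxSource

source = DynamicsAxSource(
    query="$select=SalesOrderNumber,OrderTotal",
    query_timeout="00:30:00",
    http_request_timeout="00:05:00",
    disable_metrics_collection=True,
)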
:type username: object @@ -14536,10 +15334,8 @@ class DynamicsCrmLinkedService(LinkedService): :type service_principal_id: object :param service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). Possible values include: - "ServicePrincipalKey", "ServicePrincipalCert". - :type service_principal_credential_type: str or - ~data_factory_management_client.models.DynamicsServicePrincipalCredentialType + for certificate. Type: string (or Expression with resultType string). + :type service_principal_credential_type: object :param service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If @@ -14565,16 +15361,16 @@ class DynamicsCrmLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'}, 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, 'port': {'key': 'typeProperties.port', 'type': 'object'}, 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'object'}, 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -14582,8 +15378,8 @@ class DynamicsCrmLinkedService(LinkedService): def __init__( self, *, - deployment_type: Union[str, "DynamicsDeploymentType"], - authentication_type: Union[str, "DynamicsAuthenticationType"], + deployment_type: object, + authentication_type: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, @@ -14596,7 +15392,7 @@ def __init__( username: Optional[object] = None, password: Optional["SecretBase"] = None, service_principal_id: Optional[object] = None, - service_principal_credential_type: Optional[Union[str, "DynamicsServicePrincipalCredentialType"]] = None, + service_principal_credential_type: Optional[object] = None, service_principal_credential: Optional["SecretBase"] = None, encrypted_credential: Optional[object] = None, **kwargs @@ -14642,6 +15438,9 @@ class DynamicsCrmSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. 
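With the DynamicsDeploymentType and DynamicsAuthenticationType enums dropped, the two required fields of DynamicsCrmLinkedService accept any object, including expressions. Sketch; SecureString is assumed to be the usual SecretBase implementation in this namespace, and the credentials are placeholders:

from data_factory_management_client.models import (
    DynamicsCrmLinkedService,
    SecureString,
)

linked_service = DynamicsCrmLinkedService(
    deployment_type="Online",          # or an expression object
    authentication_type="Office365",
    username="integration@contoso.example",
    password=SecureString(value="<placeholder>"),  # placeholder secret
)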
Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: Required. The write behavior for the operation. Possible values include: "Upsert". :type write_behavior: str or ~data_factory_management_client.models.DynamicsSinkWriteBehavior @@ -14667,6 +15466,7 @@ class DynamicsCrmSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, @@ -14682,11 +15482,12 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, ignore_null_values: Optional[object] = None, alternate_key_name: Optional[object] = None, **kwargs ): - super(DynamicsCrmSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(DynamicsCrmSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'DynamicsCrmSink' # type: str self.write_behavior = write_behavior self.ignore_null_values = ignore_null_values @@ -14712,12 +15513,15 @@ class DynamicsCrmSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics CRM (online & on-premises). Type: string (or Expression with resultType string). :type query: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :type additional_columns: object """ _validation = { @@ -14730,8 +15534,9 @@ class DynamicsCrmSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -14741,11 +15546,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(DynamicsCrmSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(DynamicsCrmSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'DynamicsCrmSource' # type: str self.query = query self.additional_columns = additional_columns @@ -14840,8 +15646,8 @@ class DynamicsLinkedService(LinkedService): :type annotations: list[object] :param deployment_type: Required. The deployment type of the Dynamics instance. 'Online' for Dynamics Online and 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or - Expression with resultType string). Possible values include: "Online", "OnPremisesWithIfd". - :type deployment_type: str or ~data_factory_management_client.models.DynamicsDeploymentType + Expression with resultType string). + :type deployment_type: object :param host_name: The host name of the on-premises Dynamics server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). :type host_name: object @@ -14859,9 +15665,8 @@ class DynamicsLinkedService(LinkedService): :param authentication_type: Required. The authentication type to connect to Dynamics server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or Expression with - resultType string). Possible values include: "Office365", "Ifd", "AADServicePrincipal". - :type authentication_type: str or - ~data_factory_management_client.models.DynamicsAuthenticationType + resultType string). + :type authentication_type: object :param username: User name to access the Dynamics instance. Type: string (or Expression with resultType string). :type username: object @@ -14872,10 +15677,8 @@ class DynamicsLinkedService(LinkedService): :type service_principal_id: object :param service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). 
Possible values include: - "ServicePrincipalKey", "ServicePrincipalCert". - :type service_principal_credential_type: str or - ~data_factory_management_client.models.DynamicsServicePrincipalCredentialType + for certificate. Type: string (or Expression with resultType string). + :type service_principal_credential_type: str :param service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If @@ -14901,12 +15704,12 @@ class DynamicsLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'}, 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, 'port': {'key': 'typeProperties.port', 'type': 'object'}, 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, @@ -14918,8 +15721,8 @@ class DynamicsLinkedService(LinkedService): def __init__( self, *, - deployment_type: Union[str, "DynamicsDeploymentType"], - authentication_type: Union[str, "DynamicsAuthenticationType"], + deployment_type: object, + authentication_type: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, @@ -14932,7 +15735,7 @@ def __init__( username: Optional[object] = None, password: Optional["SecretBase"] = None, service_principal_id: Optional[object] = None, - service_principal_credential_type: Optional[Union[str, "DynamicsServicePrincipalCredentialType"]] = None, + service_principal_credential_type: Optional[str] = None, service_principal_credential: Optional["SecretBase"] = None, encrypted_credential: Optional[object] = None, **kwargs @@ -14978,6 +15781,9 @@ class DynamicsSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: Required. The write behavior for the operation. Possible values include: "Upsert". 
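One asymmetry worth flagging: DynamicsLinkedService retypes `service_principal_credential_type` as a plain `str`, while the otherwise-identical DynamicsCrmLinkedService above uses `object`, so only the CRM variant can take an expression there. Sketch of the stricter signature, with a placeholder principal id:

from data_factory_management_client.models import DynamicsLinkedService

linked_service = DynamicsLinkedService(
    deployment_type="Online",
    authentication_type="AADServicePrincipal",
    service_principal_id="00000000-0000-0000-0000-000000000000",  # placeholder
    service_principal_credential_type="ServicePrincipalKey",      # str only here
)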
:type write_behavior: str or ~data_factory_management_client.models.DynamicsSinkWriteBehavior @@ -15003,6 +15809,7 @@ class DynamicsSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, @@ -15018,11 +15825,12 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, ignore_null_values: Optional[object] = None, alternate_key_name: Optional[object] = None, **kwargs ): - super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'DynamicsSink' # type: str self.write_behavior = write_behavior self.ignore_null_values = ignore_null_values @@ -15048,12 +15856,15 @@ class DynamicsSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics (online & on-premises). Type: string (or Expression with resultType string). :type query: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
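In contrast to most fields in this patch, DynamicsSink's required `write_behavior` stays a DynamicsSinkWriteBehavior enum string; only the new metrics flag is expression-capable. Sketch:

from data_factory_management_client.models import DynamicsSink

sink = DynamicsSink(
    write_behavior="Upsert",           # still enum-constrained
    ignore_null_values=True,
    alternate_key_name="accountnumber",
    disable_metrics_collection=False,
)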
+ :type additional_columns: object """ _validation = { @@ -15066,8 +15877,9 @@ class DynamicsSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -15077,11 +15889,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(DynamicsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(DynamicsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'DynamicsSource' # type: str self.query = query self.additional_columns = additional_columns @@ -15265,12 +16078,15 @@ class EloquaSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -15286,8 +16102,9 @@ class EloquaSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -15298,12 +16115,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(EloquaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(EloquaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'EloquaSource' # type: str self.query = query @@ -15448,9 +16266,12 @@ class ExcelDataset(Dataset): :type folder: ~data_factory_management_client.models.DatasetFolder :param location: The location of the excel storage. :type location: ~data_factory_management_client.models.DatasetLocation - :param sheet_name: The sheet of excel file. Type: string (or Expression with resultType + :param sheet_name: The sheet name of excel file. Type: string (or Expression with resultType string). :type sheet_name: object + :param sheet_index: The sheet index of excel file and default value is 0. Type: integer (or + Expression with resultType integer). + :type sheet_index: object :param range: The partial data of one sheet. Type: string (or Expression with resultType string). 
:type range: object @@ -15481,6 +16302,7 @@ class ExcelDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, 'sheet_name': {'key': 'typeProperties.sheetName', 'type': 'object'}, + 'sheet_index': {'key': 'typeProperties.sheetIndex', 'type': 'object'}, 'range': {'key': 'typeProperties.range', 'type': 'object'}, 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, @@ -15500,6 +16322,7 @@ def __init__( folder: Optional["DatasetFolder"] = None, location: Optional["DatasetLocation"] = None, sheet_name: Optional[object] = None, + sheet_index: Optional[object] = None, range: Optional[object] = None, first_row_as_header: Optional[object] = None, compression: Optional["DatasetCompression"] = None, @@ -15510,6 +16333,7 @@ def __init__( self.type = 'Excel' # type: str self.location = location self.sheet_name = sheet_name + self.sheet_index = sheet_index self.range = range self.first_row_as_header = first_row_as_header self.compression = compression @@ -15535,11 +16359,14 @@ class ExcelSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Excel store settings. :type store_settings: ~data_factory_management_client.models.StoreReadSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
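ExcelDataset gains `sheet_index` (zero-based, defaulting to 0) as an alternative to `sheet_name`. Hypothetical sketch; the linked-service reference is an invented stand-in as before:

from data_factory_management_client.models import (
    ExcelDataset,
    LinkedServiceReference,
)

dataset = ExcelDataset(
    linked_service_name=LinkedServiceReference(reference_name="MyBlobStorage"),
    sheet_index=1,        # selects the second sheet instead of naming it
    range="A1:D100",
    first_row_as_header=True,
)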
+ :type additional_columns: object """ _validation = { @@ -15552,8 +16379,9 @@ class ExcelSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -15563,11 +16391,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, store_settings: Optional["StoreReadSettings"] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(ExcelSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(ExcelSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'ExcelSource' # type: str self.store_settings = store_settings self.additional_columns = additional_columns @@ -16262,6 +17091,10 @@ class FactoryGitHubConfiguration(FactoryRepoConfiguration): :type last_commit_id: str :param host_name: GitHub Enterprise host name. For example: https://github.mydomain.com. :type host_name: str + :param client_id: GitHub bring your own app client id. + :type client_id: str + :param client_secret: GitHub bring your own app client secret information. + :type client_secret: ~data_factory_management_client.models.GitHubClientSecret """ _validation = { @@ -16280,6 +17113,8 @@ class FactoryGitHubConfiguration(FactoryRepoConfiguration): 'root_folder': {'key': 'rootFolder', 'type': 'str'}, 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, 'host_name': {'key': 'hostName', 'type': 'str'}, + 'client_id': {'key': 'clientId', 'type': 'str'}, + 'client_secret': {'key': 'clientSecret', 'type': 'GitHubClientSecret'}, } def __init__( @@ -16291,11 +17126,15 @@ def __init__( root_folder: str, last_commit_id: Optional[str] = None, host_name: Optional[str] = None, + client_id: Optional[str] = None, + client_secret: Optional["GitHubClientSecret"] = None, **kwargs ): super(FactoryGitHubConfiguration, self).__init__(account_name=account_name, repository_name=repository_name, collaboration_branch=collaboration_branch, root_folder=root_folder, last_commit_id=last_commit_id, **kwargs) self.type = 'FactoryGitHubConfiguration' # type: str self.host_name = host_name + self.client_id = client_id + self.client_secret = client_secret class FactoryIdentity(msrest.serialization.Model): @@ -16614,6 +17453,9 @@ class FileServerReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
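FactoryGitHubConfiguration's new `client_id`/`client_secret` pair enables a bring-your-own GitHub OAuth app, with the secret resolved from Key Vault via the GitHubClientSecret model added near the end of this file. Sketch with placeholder values:

from data_factory_management_client.models import (
    FactoryGitHubConfiguration,
    GitHubClientSecret,
)

repo = FactoryGitHubConfiguration(
    account_name="ADF",
    repository_name="repo",
    collaboration_branch="main",
    root_folder="/",
    host_name="https://github.mydomain.com",   # GitHub Enterprise only
    client_id="<byoa-app-client-id>",
    client_secret=GitHubClientSecret(
        byoa_secret_akv_url="https://mykv.vault.azure.net",
        byoa_secret_name="github-byoa-secret",
    ),
)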
:type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -16654,6 +17496,7 @@ class FileServerReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -16671,6 +17514,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, @@ -16683,7 +17527,7 @@ def __init__( file_filter: Optional[object] = None, **kwargs ): - super(FileServerReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(FileServerReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'FileServerReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -16710,6 +17554,9 @@ class FileServerWriteSettings(StoreWriteSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. 
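The StoreReadSettings family gets the same flag; FileServerReadSettings combines it with the wildcard and filter knobs shown above, any of which may be expressions. Sketch:

from data_factory_management_client.models import FileServerReadSettings

read_settings = FileServerReadSettings(
    recursive=True,
    wildcard_folder_path="landing/2021/*",
    wildcard_file_name="*.csv",
    file_filter={"type": "Expression", "value": "@dataset().fileFilter"},
    disable_metrics_collection=True,
)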
:type copy_behavior: object """ @@ -16722,6 +17569,7 @@ class FileServerWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } @@ -16730,10 +17578,11 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, copy_behavior: Optional[object] = None, **kwargs ): - super(FileServerWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + super(FileServerWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs) self.type = 'FileServerWriteSettings' # type: str @@ -16865,6 +17714,9 @@ class FileSystemSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object """ @@ -16881,6 +17733,7 @@ class FileSystemSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } @@ -16893,10 +17746,11 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, copy_behavior: Optional[object] = None, **kwargs ): - super(FileSystemSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(FileSystemSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'FileSystemSink' # type: str self.copy_behavior = copy_behavior @@ -16920,12 +17774,15 @@ class FileSystemSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. 
Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -16938,8 +17795,9 @@ class FileSystemSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -16949,11 +17807,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(FileSystemSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(FileSystemSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'FileSystemSource' # type: str self.recursive = recursive self.additional_columns = additional_columns @@ -17104,6 +17963,9 @@ class FtpReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). 
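FileSystemSource follows the copy-source recipe once more: `recursive` was already an `object`, and `additional_columns` now joins it. Sketch:

from data_factory_management_client.models import FileSystemSource

source = FileSystemSource(
    recursive={"type": "Expression", "value": "@pipeline().parameters.deep"},
    additional_columns=[{"name": "sourceFile", "value": "$$FILEPATH"}],
    disable_metrics_collection=False,
)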
:type recursive: object @@ -17137,6 +17999,7 @@ class FtpReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -17152,6 +18015,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, @@ -17162,7 +18026,7 @@ def __init__( use_binary_transfer: Optional[bool] = None, **kwargs ): - super(FtpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(FtpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'FtpReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -17446,6 +18310,8 @@ class GitHubAccessTokenRequest(msrest.serialization.Model): :type git_hub_access_code: str :param git_hub_client_id: GitHub application client ID. :type git_hub_client_id: str + :param git_hub_client_secret: GitHub bring your own app client secret information. + :type git_hub_client_secret: ~data_factory_management_client.models.GitHubClientSecret :param git_hub_access_token_base_url: Required. GitHub access token base URL. :type git_hub_access_token_base_url: str """ @@ -17458,6 +18324,7 @@ class GitHubAccessTokenRequest(msrest.serialization.Model): _attribute_map = { 'git_hub_access_code': {'key': 'gitHubAccessCode', 'type': 'str'}, 'git_hub_client_id': {'key': 'gitHubClientId', 'type': 'str'}, + 'git_hub_client_secret': {'key': 'gitHubClientSecret', 'type': 'GitHubClientSecret'}, 'git_hub_access_token_base_url': {'key': 'gitHubAccessTokenBaseUrl', 'type': 'str'}, } @@ -17467,11 +18334,13 @@ def __init__( git_hub_access_code: str, git_hub_access_token_base_url: str, git_hub_client_id: Optional[str] = None, + git_hub_client_secret: Optional["GitHubClientSecret"] = None, **kwargs ): super(GitHubAccessTokenRequest, self).__init__(**kwargs) self.git_hub_access_code = git_hub_access_code self.git_hub_client_id = git_hub_client_id + self.git_hub_client_secret = git_hub_client_secret self.git_hub_access_token_base_url = git_hub_access_token_base_url @@ -17496,6 +18365,32 @@ def __init__( self.git_hub_access_token = git_hub_access_token +class GitHubClientSecret(msrest.serialization.Model): + """Client secret information for factory's bring your own app repository configuration. + + :param byoa_secret_akv_url: Bring your own app client secret AKV URL. + :type byoa_secret_akv_url: str + :param byoa_secret_name: Bring your own app client secret name in AKV. 
+ :type byoa_secret_name: str + """ + + _attribute_map = { + 'byoa_secret_akv_url': {'key': 'byoaSecretAkvUrl', 'type': 'str'}, + 'byoa_secret_name': {'key': 'byoaSecretName', 'type': 'str'}, + } + + def __init__( + self, + *, + byoa_secret_akv_url: Optional[str] = None, + byoa_secret_name: Optional[str] = None, + **kwargs + ): + super(GitHubClientSecret, self).__init__(**kwargs) + self.byoa_secret_akv_url = byoa_secret_akv_url + self.byoa_secret_name = byoa_secret_name + + class GlobalParameterSpecification(msrest.serialization.Model): """Definition of a single parameter for an entity. @@ -17737,12 +18632,15 @@ class GoogleAdWordsSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -17758,8 +18656,9 @@ class GoogleAdWordsSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -17770,12 +18669,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(GoogleAdWordsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(GoogleAdWordsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'GoogleAdWordsSource' # type: str self.query = query @@ -18004,12 
+18904,15 @@ class GoogleBigQuerySource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -18025,8 +18928,9 @@ class GoogleBigQuerySource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -18037,12 +18941,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(GoogleBigQuerySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(GoogleBigQuerySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'GoogleBigQuerySource' # type: str self.query = query @@ -18187,6 +19092,9 @@ class GoogleCloudStorageReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. 
Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -18227,6 +19135,7 @@ class GoogleCloudStorageReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -18244,6 +19153,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, @@ -18256,7 +19166,7 @@ def __init__( modified_datetime_end: Optional[object] = None, **kwargs ): - super(GoogleCloudStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(GoogleCloudStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'GoogleCloudStorageReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -18354,12 +19264,15 @@ class GreenplumSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
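`GoogleCloudStorageReadSettings`, like the other `StoreReadSettings` subclasses in this change, threads the new flag through alongside its existing wildcard options. A sketch with hypothetical paths, under the same import assumption:

```python
from data_factory_management_client import models

read_settings = models.GoogleCloudStorageReadSettings(
    recursive=True,
    wildcard_folder_path="landing/2021/*",  # hypothetical folder pattern
    wildcard_file_name="*.csv",             # hypothetical file pattern
    disable_metrics_collection=True,        # new in this change
)
```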
:type query: object @@ -18375,8 +19288,9 @@ class GreenplumSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -18387,12 +19301,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(GreenplumSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(GreenplumSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'GreenplumSource' # type: str self.query = query @@ -18679,12 +19594,15 @@ class HBaseSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -18700,8 +19618,9 @@ class HBaseSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -18712,12 +19631,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(HBaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(HBaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'HBaseSource' # type: str self.query = query @@ -18854,6 +19774,9 @@ class HdfsReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). 
:type recursive: object @@ -18893,6 +19816,7 @@ class HdfsReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -18910,6 +19834,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, @@ -18922,7 +19847,7 @@ def __init__( delete_files_after_completion: Optional[object] = None, **kwargs ): - super(HdfsReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(HdfsReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'HdfsReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -18955,6 +19880,9 @@ class HdfsSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). 
:type recursive: object @@ -18972,6 +19900,7 @@ class HdfsSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, } @@ -18983,11 +19912,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, distcp_settings: Optional["DistcpSettings"] = None, **kwargs ): - super(HdfsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(HdfsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'HdfsSource' # type: str self.recursive = recursive self.distcp_settings = distcp_settings @@ -19407,6 +20337,8 @@ class HdInsightOnDemandLinkedService(LinkedService): :param subnet_name: The ARM resource ID for the subnet in the vNet. If virtualNetworkId was specified, then this property is required. Type: string (or Expression with resultType string). :type subnet_name: object + :param credential: The credential reference containing authentication information. + :type credential: ~data_factory_management_client.models.CredentialReference """ _validation = { @@ -19460,6 +20392,7 @@ class HdInsightOnDemandLinkedService(LinkedService): 'script_actions': {'key': 'typeProperties.scriptActions', 'type': '[ScriptAction]'}, 'virtual_network_id': {'key': 'typeProperties.virtualNetworkId', 'type': 'object'}, 'subnet_name': {'key': 'typeProperties.subnetName', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -19503,6 +20436,7 @@ def __init__( script_actions: Optional[List["ScriptAction"]] = None, virtual_network_id: Optional[object] = None, subnet_name: Optional[object] = None, + credential: Optional["CredentialReference"] = None, **kwargs ): super(HdInsightOnDemandLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -19540,6 +20474,7 @@ def __init__( self.script_actions = script_actions self.virtual_network_id = virtual_network_id self.subnet_name = subnet_name + self.credential = credential class HdInsightPigActivity(ExecutionActivity): @@ -20109,12 +21044,15 @@ class HiveSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. 
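Every one of these hunks follows the same mechanical pattern: the subclass accepts `disable_metrics_collection`, forwards it to its base via `super().__init__`, and the base class owns the attribute. A condensed, self-contained illustration of that pattern (not the real classes, which carry many more fields):

```python
# Condensed illustration of the generator's pattern; not the real classes.
class CopySource:
    def __init__(self, *, disable_metrics_collection=None, **kwargs):
        # The base class owns the new attribute.
        self.disable_metrics_collection = disable_metrics_collection

class HdfsSource(CopySource):
    def __init__(self, *, recursive=None, disable_metrics_collection=None, **kwargs):
        # The subclass only forwards the flag; no local storage needed.
        super().__init__(disable_metrics_collection=disable_metrics_collection, **kwargs)
        self.recursive = recursive

assert HdfsSource(disable_metrics_collection=True).disable_metrics_collection is True
```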
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -20130,8 +21068,9 @@ class HiveSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -20142,12 +21081,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(HiveSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(HiveSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'HiveSource' # type: str self.query = query @@ -20372,6 +21312,9 @@ class HttpReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). 
:type request_method: object @@ -20399,6 +21342,7 @@ class HttpReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'request_method': {'key': 'requestMethod', 'type': 'object'}, 'request_body': {'key': 'requestBody', 'type': 'object'}, 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, @@ -20412,6 +21356,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, request_method: Optional[object] = None, request_body: Optional[object] = None, additional_headers: Optional[object] = None, @@ -20420,7 +21365,7 @@ def __init__( partition_root_path: Optional[object] = None, **kwargs ): - super(HttpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(HttpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'HttpReadSettings' # type: str self.request_method = request_method self.request_body = request_body @@ -20496,6 +21441,9 @@ class HttpSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param http_request_timeout: Specifies the timeout for a HTTP client to get HTTP response from HTTP server. The default value is equivalent to System.Net.HttpWebRequest.Timeout. 
Type: string (or Expression with resultType string), pattern: @@ -20513,6 +21461,7 @@ class HttpSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } @@ -20523,10 +21472,11 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, http_request_timeout: Optional[object] = None, **kwargs ): - super(HttpSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(HttpSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'HttpSource' # type: str self.http_request_timeout = http_request_timeout @@ -20714,12 +21664,15 @@ class HubspotSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -20735,8 +21688,9 @@ class HubspotSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -20747,12 +21701,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(HubspotSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(HubspotSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'HubspotSource' # type: str self.query = query @@ -21042,12 +21997,15 @@ class ImpalaSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -21063,8 +22021,9 @@ class ImpalaSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -21075,12 +22034,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'ImpalaSource' # type: str self.query = query @@ -21196,6 +22156,9 @@ class InformixSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). 
:type pre_copy_script: object @@ -21213,6 +22176,7 @@ class InformixSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -21225,10 +22189,11 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, pre_copy_script: Optional[object] = None, **kwargs ): - super(InformixSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(InformixSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'InformixSink' # type: str self.pre_copy_script = pre_copy_script @@ -21252,12 +22217,15 @@ class InformixSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: Database query. Type: string (or Expression with resultType string). 
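The sink side mirrors the source side: `InformixSink` (a `CopySink`) gains the same flag next to its batching knobs. A sketch with a hypothetical pre-copy script:

```python
from data_factory_management_client import models

sink = models.InformixSink(
    pre_copy_script="TRUNCATE TABLE staging_orders",  # hypothetical script
    write_batch_size=1000,
    disable_metrics_collection=False,  # the new flag is present on sinks too
)
```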
:type query: object """ @@ -21272,8 +22240,9 @@ class InformixSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -21284,12 +22253,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(InformixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(InformixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'InformixSource' # type: str self.query = query @@ -21597,6 +22567,9 @@ class IntegrationRuntimeDataFlowProperties(msrest.serialization.Model): :param time_to_live: Time to live (in minutes) setting of the cluster which will execute data flow job. :type time_to_live: int + :param cleanup: Cluster will not be recycled and it will be used in next data flow activity run + until TTL (time to live) is reached if this is set as false. Default is true. + :type cleanup: bool """ _validation = { @@ -21608,6 +22581,7 @@ class IntegrationRuntimeDataFlowProperties(msrest.serialization.Model): 'compute_type': {'key': 'computeType', 'type': 'str'}, 'core_count': {'key': 'coreCount', 'type': 'int'}, 'time_to_live': {'key': 'timeToLive', 'type': 'int'}, + 'cleanup': {'key': 'cleanup', 'type': 'bool'}, } def __init__( @@ -21617,6 +22591,7 @@ def __init__( compute_type: Optional[Union[str, "DataFlowComputeType"]] = None, core_count: Optional[int] = None, time_to_live: Optional[int] = None, + cleanup: Optional[bool] = None, **kwargs ): super(IntegrationRuntimeDataFlowProperties, self).__init__(**kwargs) @@ -21624,6 +22599,7 @@ def __init__( self.compute_type = compute_type self.core_count = core_count self.time_to_live = time_to_live + self.cleanup = cleanup class IntegrationRuntimeDataProxyProperties(msrest.serialization.Model): @@ -21839,6 +22815,103 @@ def __init__( self.received_bytes = None +class IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint(msrest.serialization.Model): + """Azure-SSIS integration runtime outbound network dependency endpoints for one category. + + :param category: The category of outbound network dependency. + :type category: str + :param endpoints: The endpoints for outbound network dependency. 
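The new `cleanup` switch on `IntegrationRuntimeDataFlowProperties` lets a data flow cluster stay warm for its full TTL instead of being recycled after each activity run. A sketch:

```python
from data_factory_management_client import models

dataflow_props = models.IntegrationRuntimeDataFlowProperties(
    compute_type="General",
    core_count=8,
    time_to_live=10,  # minutes
    # New: keep (and reuse) the cluster until the TTL expires.
    cleanup=False,
)
```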
+ :type endpoints: + list[~data_factory_management_client.models.IntegrationRuntimeOutboundNetworkDependenciesEndpoint] + """ + + _attribute_map = { + 'category': {'key': 'category', 'type': 'str'}, + 'endpoints': {'key': 'endpoints', 'type': '[IntegrationRuntimeOutboundNetworkDependenciesEndpoint]'}, + } + + def __init__( + self, + *, + category: Optional[str] = None, + endpoints: Optional[List["IntegrationRuntimeOutboundNetworkDependenciesEndpoint"]] = None, + **kwargs + ): + super(IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint, self).__init__(**kwargs) + self.category = category + self.endpoints = endpoints + + +class IntegrationRuntimeOutboundNetworkDependenciesEndpoint(msrest.serialization.Model): + """The endpoint for Azure-SSIS integration runtime outbound network dependency. + + :param domain_name: The domain name of endpoint. + :type domain_name: str + :param endpoint_details: The details of endpoint. + :type endpoint_details: + list[~data_factory_management_client.models.IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails] + """ + + _attribute_map = { + 'domain_name': {'key': 'domainName', 'type': 'str'}, + 'endpoint_details': {'key': 'endpointDetails', 'type': '[IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails]'}, + } + + def __init__( + self, + *, + domain_name: Optional[str] = None, + endpoint_details: Optional[List["IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails"]] = None, + **kwargs + ): + super(IntegrationRuntimeOutboundNetworkDependenciesEndpoint, self).__init__(**kwargs) + self.domain_name = domain_name + self.endpoint_details = endpoint_details + + +class IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails(msrest.serialization.Model): + """The details of Azure-SSIS integration runtime outbound network dependency endpoint. + + :param port: The port of endpoint. + :type port: int + """ + + _attribute_map = { + 'port': {'key': 'port', 'type': 'int'}, + } + + def __init__( + self, + *, + port: Optional[int] = None, + **kwargs + ): + super(IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails, self).__init__(**kwargs) + self.port = port + + +class IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse(msrest.serialization.Model): + """Azure-SSIS integration runtime outbound network dependency endpoints. + + :param value: The list of outbound network dependency endpoints. + :type value: + list[~data_factory_management_client.models.IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint]'}, + } + + def __init__( + self, + *, + value: Optional[List["IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint"]] = None, + **kwargs + ): + super(IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse, self).__init__(**kwargs) + self.value = value + + class IntegrationRuntimeReference(msrest.serialization.Model): """Integration runtime reference type. @@ -22031,6 +23104,8 @@ class IntegrationRuntimeSsisProperties(msrest.serialization.Model): list[~data_factory_management_client.models.CustomSetupBase] :param package_stores: Package stores for the SSIS Integration Runtime. :type package_stores: list[~data_factory_management_client.models.PackageStore] + :param credential: The credential reference containing authentication information. 
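The four outbound-network-dependency models added above nest response, category, endpoint, and port details. A sketch that builds and walks one such response, with hypothetical category and domain values:

```python
from data_factory_management_client import models

details = [models.IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails(port=443)]
endpoint = models.IntegrationRuntimeOutboundNetworkDependenciesEndpoint(
    domain_name="*.blob.core.windows.net",  # hypothetical domain
    endpoint_details=details,
)
category = models.IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint(
    category="Azure Storage",  # hypothetical category label
    endpoints=[endpoint],
)
response = models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse(value=[category])

for cat in response.value or []:
    for ep in cat.endpoints or []:
        ports = [d.port for d in ep.endpoint_details or []]
        print(cat.category, ep.domain_name, ports)
```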
+ :type credential: ~data_factory_management_client.models.CredentialReference """ _attribute_map = { @@ -22042,6 +23117,7 @@ class IntegrationRuntimeSsisProperties(msrest.serialization.Model): 'edition': {'key': 'edition', 'type': 'str'}, 'express_custom_setup_properties': {'key': 'expressCustomSetupProperties', 'type': '[CustomSetupBase]'}, 'package_stores': {'key': 'packageStores', 'type': '[PackageStore]'}, + 'credential': {'key': 'credential', 'type': 'CredentialReference'}, } def __init__( @@ -22055,6 +23131,7 @@ def __init__( edition: Optional[Union[str, "IntegrationRuntimeEdition"]] = None, express_custom_setup_properties: Optional[List["CustomSetupBase"]] = None, package_stores: Optional[List["PackageStore"]] = None, + credential: Optional["CredentialReference"] = None, **kwargs ): super(IntegrationRuntimeSsisProperties, self).__init__(**kwargs) @@ -22066,6 +23143,7 @@ def __init__( self.edition = edition self.express_custom_setup_properties = express_custom_setup_properties self.package_stores = package_stores + self.credential = credential class IntegrationRuntimeStatus(msrest.serialization.Model): @@ -22201,6 +23279,9 @@ class IntegrationRuntimeVNetProperties(msrest.serialization.Model): :param public_i_ps: Resource IDs of the public IP addresses that this integration runtime will use. :type public_i_ps: list[str] + :param subnet_id: The ID of subnet, to which this Azure-SSIS integration runtime will be + joined. + :type subnet_id: str """ _attribute_map = { @@ -22208,6 +23289,7 @@ class IntegrationRuntimeVNetProperties(msrest.serialization.Model): 'v_net_id': {'key': 'vNetId', 'type': 'str'}, 'subnet': {'key': 'subnet', 'type': 'str'}, 'public_i_ps': {'key': 'publicIPs', 'type': '[str]'}, + 'subnet_id': {'key': 'subnetId', 'type': 'str'}, } def __init__( @@ -22217,6 +23299,7 @@ def __init__( v_net_id: Optional[str] = None, subnet: Optional[str] = None, public_i_ps: Optional[List[str]] = None, + subnet_id: Optional[str] = None, **kwargs ): super(IntegrationRuntimeVNetProperties, self).__init__(**kwargs) @@ -22224,6 +23307,7 @@ def __init__( self.v_net_id = v_net_id self.subnet = subnet self.public_i_ps = public_i_ps + self.subnet_id = subnet_id class JiraLinkedService(LinkedService): @@ -22411,12 +23495,15 @@ class JiraSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
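`IntegrationRuntimeVNetProperties` now accepts a full subnet ARM resource ID (`subnet_id`) as an alternative to the bare subnet name. A sketch with a hypothetical resource ID:

```python
from data_factory_management_client import models

vnet_props = models.IntegrationRuntimeVNetProperties(
    v_net_id="<vnet-id>",
    # New alternative to `subnet` (a name): the subnet's full ARM resource ID.
    subnet_id="/subscriptions/<sub>/resourceGroups/<rg>/providers/"
              "Microsoft.Network/virtualNetworks/<vnet>/subnets/<subnet>",  # hypothetical
)
```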
:type query: object @@ -22432,8 +23519,9 @@ class JiraSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -22444,12 +23532,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(JiraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(JiraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'JiraSource' # type: str self.query = query @@ -22551,9 +23640,8 @@ class JsonFormat(DatasetStorageFormat): :param deserializer: Deserializer. Type: string (or Expression with resultType string). :type deserializer: object :param file_pattern: File pattern of JSON. To be more specific, the way of separating a - collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive. Possible - values include: "setOfObjects", "arrayOfObjects". - :type file_pattern: str or ~data_factory_management_client.models.JsonFormatFilePattern + collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive. + :type file_pattern: object :param nesting_separator: The character used to separate nesting levels. Default value is '.' (dot). Type: string (or Expression with resultType string). 
:type nesting_separator: object @@ -22583,7 +23671,7 @@ class JsonFormat(DatasetStorageFormat): 'type': {'key': 'type', 'type': 'str'}, 'serializer': {'key': 'serializer', 'type': 'object'}, 'deserializer': {'key': 'deserializer', 'type': 'object'}, - 'file_pattern': {'key': 'filePattern', 'type': 'str'}, + 'file_pattern': {'key': 'filePattern', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, 'encoding_name': {'key': 'encodingName', 'type': 'object'}, 'json_node_reference': {'key': 'jsonNodeReference', 'type': 'object'}, @@ -22596,7 +23684,7 @@ def __init__( additional_properties: Optional[Dict[str, object]] = None, serializer: Optional[object] = None, deserializer: Optional[object] = None, - file_pattern: Optional[Union[str, "JsonFormatFilePattern"]] = None, + file_pattern: Optional[object] = None, nesting_separator: Optional[object] = None, encoding_name: Optional[object] = None, json_node_reference: Optional[object] = None, @@ -22673,6 +23761,9 @@ class JsonSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Json store settings. :type store_settings: ~data_factory_management_client.models.StoreWriteSettings :param format_settings: Json format settings. @@ -22691,6 +23782,7 @@ class JsonSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'JsonWriteSettings'}, } @@ -22704,11 +23796,12 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, store_settings: Optional["StoreWriteSettings"] = None, format_settings: Optional["JsonWriteSettings"] = None, **kwargs ): - super(JsonSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(JsonSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'JsonSink' # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -22733,13 +23826,16 @@ class JsonSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. 
Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Json store settings. :type store_settings: ~data_factory_management_client.models.StoreReadSettings :param format_settings: Json format settings. :type format_settings: ~data_factory_management_client.models.JsonReadSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -22752,9 +23848,10 @@ class JsonSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'JsonReadSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -22764,12 +23861,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, store_settings: Optional["StoreReadSettings"] = None, format_settings: Optional["JsonReadSettings"] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(JsonSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(JsonSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'JsonSource' # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -22787,9 +23885,8 @@ class JsonWriteSettings(FormatWriteSettings): :param type: Required. The write setting type.Constant filled by server. :type type: str :param file_pattern: File pattern of JSON. This setting controls the way a collection of JSON - objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. Possible - values include: "setOfObjects", "arrayOfObjects". - :type file_pattern: str or ~data_factory_management_client.models.JsonWriteFilePattern + objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. 
+ :type file_pattern: object """ _validation = { @@ -22799,14 +23896,14 @@ class JsonWriteSettings(FormatWriteSettings): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'file_pattern': {'key': 'filePattern', 'type': 'str'}, + 'file_pattern': {'key': 'filePattern', 'type': 'object'}, } def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, - file_pattern: Optional[Union[str, "JsonWriteFilePattern"]] = None, + file_pattern: Optional[object] = None, **kwargs ): super(JsonWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) @@ -23504,12 +24601,15 @@ class MagentoSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
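With `file_pattern` loosened from a string enum to a plain object on both `JsonFormat` and `JsonWriteSettings`, the value can now be a literal or an ADF expression. A sketch of both forms (the pipeline parameter name is hypothetical):

```python
from data_factory_management_client import models

literal = models.JsonWriteSettings(file_pattern="arrayOfObjects")
dynamic = models.JsonWriteSettings(
    # Standard ADF expression object; jsonPattern is a hypothetical parameter.
    file_pattern={"type": "Expression", "value": "@pipeline().parameters.jsonPattern"}
)
```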
:type query: object @@ -23525,8 +24625,9 @@ class MagentoSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -23537,16 +24638,61 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(MagentoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(MagentoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'MagentoSource' # type: str self.query = query +class ManagedIdentityCredential(Credential): + """Managed identity credential. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of credential.Constant filled by server. + :type type: str + :param description: Credential description. + :type description: str + :param annotations: List of tags that can be used for describing the Credential. + :type annotations: list[object] + :param resource_id: The resource id of user assigned managed identity. + :type resource_id: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'resource_id': {'key': 'typeProperties.resourceId', 'type': 'str'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + annotations: Optional[List[object]] = None, + resource_id: Optional[str] = None, + **kwargs + ): + super(ManagedIdentityCredential, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) + self.type = 'ManagedIdentity' # type: str + self.resource_id = resource_id + + class ManagedIntegrationRuntime(IntegrationRuntime): """Managed integration runtime, including managed elastic and managed dedicated integration runtimes. 
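# --- usage sketch (editorial, not generated code) ---
# A minimal example of constructing the new ManagedIdentityCredential model
# introduced above. Assumptions: the import path mirrors the
# ~data_factory_management_client.models references used throughout these
# docstrings (the vendored path inside the CLI extension may differ), and the
# resource id is a placeholder, not a real identity.
from data_factory_management_client.models import ManagedIdentityCredential

credential = ManagedIdentityCredential(
    description="User-assigned identity used to authenticate pipeline runs",
    annotations=["example"],  # free-form tags describing the credential
    resource_id=(
        "/subscriptions/<subscription-id>/resourceGroups/<resource-group>/providers/"
        "Microsoft.ManagedIdentity/userAssignedIdentities/<identity-name>"
    ),
)
# The 'type' discriminator is filled by the model itself: credential.type == 'ManagedIdentity'.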
@@ -24245,12 +25391,15 @@ class MariaDbSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -24266,8 +25415,9 @@ class MariaDbSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -24278,12 +25428,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(MariaDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(MariaDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'MariaDBSource' # type: str self.query = query @@ -24533,12 +25684,15 @@ class MarketoSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -24554,8 +25708,9 @@ class MarketoSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -24566,16 +25721,43 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(MarketoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(MarketoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'MarketoSource' # type: str self.query = query +class MetadataItem(msrest.serialization.Model): + """Specify the name and value of custom metadata item. + + :param name: Metadata item key name. Type: string (or Expression with resultType string). + :type name: object + :param value: Metadata item value. Type: string (or Expression with resultType string). + :type value: object + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'object'}, + 'value': {'key': 'value', 'type': 'object'}, + } + + def __init__( + self, + *, + name: Optional[object] = None, + value: Optional[object] = None, + **kwargs + ): + super(MetadataItem, self).__init__(**kwargs) + self.name = name + self.value = value + + class MicrosoftAccessLinkedService(LinkedService): """Microsoft Access linked service. @@ -24687,6 +25869,9 @@ class MicrosoftAccessSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :type disable_metrics_collection: object :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). :type pre_copy_script: object @@ -24704,6 +25889,7 @@ class MicrosoftAccessSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -24716,10 +25902,11 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, pre_copy_script: Optional[object] = None, **kwargs ): - super(MicrosoftAccessSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(MicrosoftAccessSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'MicrosoftAccessSink' # type: str self.pre_copy_script = pre_copy_script @@ -24743,11 +25930,14 @@ class MicrosoftAccessSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :type additional_columns: object """ _validation = { @@ -24760,8 +25950,9 @@ class MicrosoftAccessSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -24771,11 +25962,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(MicrosoftAccessSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(MicrosoftAccessSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'MicrosoftAccessSource' # type: str self.query = query self.additional_columns = additional_columns @@ -24982,6 +26174,74 @@ def __init__( self.database = database +class MongoDbAtlasSink(CopySink): + """A copy activity MongoDB Atlas sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object + :param write_behavior: Specifies whether the document with the same key should be overwritten + (upsert) rather than throwing an exception (insert). The default value is "insert". Type: + string (or Expression with resultType string).
+ :type write_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, + write_behavior: Optional[object] = None, + **kwargs + ): + super(MongoDbAtlasSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) + self.type = 'MongoDbAtlasSink' # type: str + self.write_behavior = write_behavior + + class MongoDbAtlasSource(CopySource): """A copy activity source for a MongoDB Atlas database. @@ -25001,6 +26261,9 @@ class MongoDbAtlasSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param filter: Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). @@ -25016,8 +26279,8 @@ class MongoDbAtlasSource(CopySource): pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :type additional_columns: object """ _validation = { @@ -25030,11 +26293,12 @@ class MongoDbAtlasSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'filter': {'key': 'filter', 'type': 'object'}, 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -25044,14 +26308,15 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, filter: Optional[object] = None, cursor_methods: Optional["MongoDbCursorMethodsProperties"] = None, batch_size: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(MongoDbAtlasSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(MongoDbAtlasSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'MongoDbAtlasSource' # type: str self.filter = filter self.cursor_methods = cursor_methods @@ -25308,12 +26573,15 @@ class MongoDbSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Database query. Should be a SQL-92 query expression. Type: string (or Expression with resultType string). :type query: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :type additional_columns: object """ _validation = { @@ -25326,8 +26594,9 @@ class MongoDbSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -25337,11 +26606,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(MongoDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(MongoDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'MongoDbSource' # type: str self.query = query self.additional_columns = additional_columns @@ -25478,6 +26748,74 @@ def __init__( self.database = database +class MongoDbV2Sink(CopySink): + """A copy activity MongoDB sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object + :param write_behavior: Specifies whether the document with the same key should be overwritten + (upsert) rather than throwing an exception (insert). The default value is "insert". Type: + string (or Expression with resultType string).
+ :type write_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, + write_behavior: Optional[object] = None, + **kwargs + ): + super(MongoDbV2Sink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) + self.type = 'MongoDbV2Sink' # type: str + self.write_behavior = write_behavior + + class MongoDbV2Source(CopySource): """A copy activity source for a MongoDB database. @@ -25497,6 +26835,9 @@ class MongoDbV2Source(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param filter: Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). @@ -25512,8 +26853,8 @@ class MongoDbV2Source(CopySource): pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :type additional_columns: object """ _validation = { @@ -25526,11 +26867,12 @@ class MongoDbV2Source(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'filter': {'key': 'filter', 'type': 'object'}, 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -25540,14 +26882,15 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, filter: Optional[object] = None, cursor_methods: Optional["MongoDbCursorMethodsProperties"] = None, batch_size: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(MongoDbV2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(MongoDbV2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'MongoDbV2Source' # type: str self.filter = filter self.cursor_methods = cursor_methods @@ -25640,12 +26983,15 @@ class MySqlSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: Database query. Type: string (or Expression with resultType string). 
:type query: object """ @@ -25660,8 +27006,9 @@ class MySqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -25672,12 +27019,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(MySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(MySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'MySqlSource' # type: str self.query = query @@ -25870,12 +27218,15 @@ class NetezzaSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -25896,8 +27247,9 @@ class NetezzaSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'object'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'NetezzaPartitionSettings'}, @@ -25910,14 +27262,15 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, partition_option: Optional[object] = None, partition_settings: Optional["NetezzaPartitionSettings"] = None, **kwargs ): - super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'NetezzaSource' # type: str self.query = query self.partition_option = partition_option @@ -26231,6 +27584,9 @@ class ODataSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: OData query. For example, "$top=1". Type: string (or Expression with resultType string). :type query: object @@ -26240,8 +27596,8 @@ class ODataSource(CopySource): ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type http_request_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :type additional_columns: object """ _validation = { @@ -26254,9 +27610,10 @@ class ODataSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -26266,12 +27623,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query: Optional[object] = None, http_request_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(ODataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(ODataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'ODataSource' # type: str self.query = query self.http_request_timeout = http_request_timeout @@ -26388,6 +27746,9 @@ class OdbcSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). 
:type pre_copy_script: object @@ -26405,6 +27766,7 @@ class OdbcSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -26417,10 +27779,11 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, pre_copy_script: Optional[object] = None, **kwargs ): - super(OdbcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(OdbcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'OdbcSink' # type: str self.pre_copy_script = pre_copy_script @@ -26444,12 +27807,15 @@ class OdbcSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: Database query. Type: string (or Expression with resultType string). 
:type query: object """ @@ -26464,8 +27830,9 @@ class OdbcSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -26476,12 +27843,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(OdbcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(OdbcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'OdbcSource' # type: str self.query = query @@ -26730,6 +28098,9 @@ class Office365Source(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param allowed_groups: The groups containing all the users. Type: array of strings (or Expression with resultType array of strings). 
:type allowed_groups: object @@ -26761,6 +28132,7 @@ class Office365Source(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'allowed_groups': {'key': 'allowedGroups', 'type': 'object'}, 'user_scope_filter_uri': {'key': 'userScopeFilterUri', 'type': 'object'}, 'date_filter_column': {'key': 'dateFilterColumn', 'type': 'object'}, @@ -26776,6 +28148,7 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, allowed_groups: Optional[object] = None, user_scope_filter_uri: Optional[object] = None, date_filter_column: Optional[object] = None, @@ -26784,7 +28157,7 @@ def __init__( output_columns: Optional[object] = None, **kwargs ): - super(Office365Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(Office365Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'Office365Source' # type: str self.allowed_groups = allowed_groups self.user_scope_filter_uri = user_scope_filter_uri @@ -27215,6 +28588,9 @@ class OracleCloudStorageReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). 
:type recursive: object @@ -27255,6 +28631,7 @@ class OracleCloudStorageReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -27272,6 +28649,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, @@ -27284,7 +28662,7 @@ def __init__( modified_datetime_end: Optional[object] = None, **kwargs ): - super(OracleCloudStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(OracleCloudStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'OracleCloudStorageReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -27585,12 +28963,15 @@ class OracleServiceCloudSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -27606,8 +28987,9 @@ class OracleServiceCloudSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -27618,12 +29000,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(OracleServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(OracleServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'OracleServiceCloudSource' # type: str self.query = query @@ -27653,6 +29036,9 @@ class OracleSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). 
:type pre_copy_script: object @@ -27670,6 +29056,7 @@ class OracleSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -27682,10 +29069,11 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, pre_copy_script: Optional[object] = None, **kwargs ): - super(OracleSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(OracleSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'OracleSink' # type: str self.pre_copy_script = pre_copy_script @@ -27709,6 +29097,9 @@ class OracleSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param oracle_reader_query: Oracle reader query. Type: string (or Expression with resultType string). :type oracle_reader_query: object @@ -27721,8 +29112,8 @@ class OracleSource(CopySource): :param partition_settings: The settings that will be leveraged for Oracle source partitioning. :type partition_settings: ~data_factory_management_client.models.OraclePartitionSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :type additional_columns: object """ _validation = { @@ -27735,11 +29126,12 @@ class OracleSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'object'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -27749,14 +29141,15 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, oracle_reader_query: Optional[object] = None, query_timeout: Optional[object] = None, partition_option: Optional[object] = None, partition_settings: Optional["OraclePartitionSettings"] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(OracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(OracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'OracleSource' # type: str self.oracle_reader_query = oracle_reader_query self.query_timeout = query_timeout @@ -27875,8 +29268,9 @@ class OrcDataset(Dataset): :type folder: ~data_factory_management_client.models.DatasetFolder :param location: The location of the ORC data storage. :type location: ~data_factory_management_client.models.DatasetLocation - :param orc_compression_codec: Possible values include: "none", "zlib", "snappy", "lzo". - :type orc_compression_codec: str or ~data_factory_management_client.models.OrcCompressionCodec + :param orc_compression_codec: The ORC compression codec. Type: string (or Expression with + resultType string).
+ :type orc_compression_codec: object """ _validation = { @@ -27895,7 +29289,7 @@ class OrcDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'orc_compression_codec': {'key': 'typeProperties.orcCompressionCodec', 'type': 'str'}, + 'orc_compression_codec': {'key': 'typeProperties.orcCompressionCodec', 'type': 'object'}, } def __init__( @@ -27910,7 +29304,7 @@ def __init__( annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, location: Optional["DatasetLocation"] = None, - orc_compression_codec: Optional[Union[str, "OrcCompressionCodec"]] = None, + orc_compression_codec: Optional[object] = None, **kwargs ): super(OrcDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -27983,6 +29377,9 @@ class OrcSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: ORC store settings. :type store_settings: ~data_factory_management_client.models.StoreWriteSettings :param format_settings: ORC format settings. @@ -28001,6 +29398,7 @@ class OrcSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'OrcWriteSettings'}, } @@ -28014,11 +29412,12 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, store_settings: Optional["StoreWriteSettings"] = None, format_settings: Optional["OrcWriteSettings"] = None, **kwargs ): - super(OrcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(OrcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'OrcSink' # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -28043,11 +29442,14 @@ class OrcSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
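# --- Illustrative sketch (not part of the generated diff) ---------------------------
# `orc_compression_codec` is now an `object` rather than the OrcCompressionCodec enum,
# so the codec can be parameterized with a dataset expression. Assumes the vendored
# models package; the linked service name is hypothetical (`type=` is passed for
# compatibility with generated reference models that require it).
from data_factory_management_client.models import LinkedServiceReference, OrcDataset

orc_dataset = OrcDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference", reference_name="AzureBlobStorage1"
    ),
    orc_compression_codec={"type": "Expression", "value": "@dataset().codec"},
)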
:type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: ORC store settings. :type store_settings: ~data_factory_management_client.models.StoreReadSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -28060,8 +29462,9 @@ class OrcSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -28071,11 +29474,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, store_settings: Optional["StoreReadSettings"] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(OrcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(OrcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'OrcSource' # type: str self.store_settings = store_settings self.additional_columns = additional_columns @@ -28220,9 +29624,9 @@ class ParquetDataset(Dataset): :type folder: ~data_factory_management_client.models.DatasetFolder :param location: The location of the parquet storage. :type location: ~data_factory_management_client.models.DatasetLocation - :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo", "bzip2", - "deflate", "zipDeflate", "lz4", "tar", "tarGZip". - :type compression_codec: str or ~data_factory_management_client.models.CompressionCodec + :param compression_codec: The data compressionCodec. Type: string (or Expression with + resultType string). 
+ :type compression_codec: object """ _validation = { @@ -28241,7 +29645,7 @@ class ParquetDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, } def __init__( @@ -28256,7 +29660,7 @@ def __init__( annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, location: Optional["DatasetLocation"] = None, - compression_codec: Optional[Union[str, "CompressionCodec"]] = None, + compression_codec: Optional[object] = None, **kwargs ): super(ParquetDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -28329,6 +29733,9 @@ class ParquetSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Parquet store settings. :type store_settings: ~data_factory_management_client.models.StoreWriteSettings :param format_settings: Parquet format settings. @@ -28347,6 +29754,7 @@ class ParquetSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'ParquetWriteSettings'}, } @@ -28360,11 +29768,12 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, store_settings: Optional["StoreWriteSettings"] = None, format_settings: Optional["ParquetWriteSettings"] = None, **kwargs ): - super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'ParquetSink' # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -28389,11 +29798,14 @@ class ParquetSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
:type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Parquet store settings. :type store_settings: ~data_factory_management_client.models.StoreReadSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object """ _validation = { @@ -28406,8 +29818,9 @@ class ParquetSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -28417,11 +29830,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, store_settings: Optional["StoreReadSettings"] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(ParquetSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(ParquetSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'ParquetSource' # type: str self.store_settings = store_settings self.additional_columns = additional_columns @@ -28648,12 +30062,15 @@ class PaypalSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -28669,8 +30086,9 @@ class PaypalSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -28681,12 +30099,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(PaypalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(PaypalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'PaypalSource' # type: str self.query = query @@ -28915,12 +30334,15 @@ class PhoenixSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -28936,8 +30358,9 @@ class PhoenixSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -28948,12 +30371,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(PhoenixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(PhoenixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'PhoenixSource' # type: str self.query = query @@ -29288,18 +30712,26 @@ class PipelineRunInvokedBy(msrest.serialization.Model): :vartype id: str :ivar invoked_by_type: The type of the entity that started the run. :vartype invoked_by_type: str + :ivar pipeline_name: The name of the pipeline that triggered the run, if any. + :vartype pipeline_name: str + :ivar pipeline_run_id: The run id of the pipeline that triggered the run, if any. + :vartype pipeline_run_id: str """ _validation = { 'name': {'readonly': True}, 'id': {'readonly': True}, 'invoked_by_type': {'readonly': True}, + 'pipeline_name': {'readonly': True}, + 'pipeline_run_id': {'readonly': True}, } _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'id': {'key': 'id', 'type': 'str'}, 'invoked_by_type': {'key': 'invokedByType', 'type': 'str'}, + 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, + 'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'}, } def __init__( @@ -29310,6 +30742,8 @@ def __init__( self.name = None self.id = None self.invoked_by_type = None + self.pipeline_name = None + self.pipeline_run_id = None class PipelineRunsQueryResponse(msrest.serialization.Model): @@ -29476,12 +30910,15 @@ class PostgreSqlSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
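# --- Illustrative sketch (not part of the generated diff) ---------------------------
# The two new read-only fields identify the parent pipeline when a run was started by
# another pipeline (e.g. an ExecutePipeline activity). Read-only attributes are only
# populated on deserialization, so msrest's Model.from_dict is used here to mimic a
# service payload. All values are hypothetical.
from data_factory_management_client.models import PipelineRunInvokedBy

invoked_by = PipelineRunInvokedBy.from_dict({
    "name": "ExecuteChildPipeline",
    "invokedByType": "PipelineActivity",
    "pipelineName": "ParentPipeline",
    "pipelineRunId": "4d3f-...-run-id",
})
print(invoked_by.pipeline_name, invoked_by.pipeline_run_id)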
:type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object """ @@ -29496,8 +30933,9 @@ class PostgreSqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -29508,12 +30946,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(PostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(PostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'PostgreSqlSource' # type: str self.query = query @@ -29832,12 +31271,15 @@ class PrestoSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -29853,8 +31295,9 @@ class PrestoSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -29865,12 +31308,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(PrestoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(PrestoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'PrestoSource' # type: str self.query = query @@ -30357,12 +31801,15 @@ class QuickBooksSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -30378,8 +31825,9 @@ class QuickBooksSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -30390,12 +31838,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(QuickBooksSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(QuickBooksSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'QuickBooksSource' # type: str self.query = query @@ -30578,11 +32027,14 @@ class RelationalSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :type additional_columns: object """ _validation = { @@ -30595,8 +32047,9 @@ class RelationalSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -30606,11 +32059,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(RelationalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(RelationalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'RelationalSource' # type: str self.query = query self.additional_columns = additional_columns @@ -30975,12 +32429,15 @@ class ResponsysSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -30996,8 +32453,9 @@ class ResponsysSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -31008,12 +32466,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(ResponsysSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(ResponsysSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'ResponsysSource' # type: str self.query = query @@ -31166,6 +32625,8 @@ class RestServiceLinkedService(LinkedService): encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param credential: The credential reference containing authentication information. + :type credential: ~data_factory_management_client.models.CredentialReference """ _validation = { @@ -31193,6 +32654,7 @@ class RestServiceLinkedService(LinkedService): 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, } def __init__( @@ -31215,6 +32677,7 @@ def __init__( azure_cloud_type: Optional[object] = None, aad_resource_id: Optional[object] = None, encrypted_credential: Optional[object] = None, + credential: Optional["CredentialReference"] = None, **kwargs ): super(RestServiceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -31231,6 +32694,7 @@ def __init__( self.azure_cloud_type = azure_cloud_type self.aad_resource_id = aad_resource_id self.encrypted_credential = encrypted_credential + self.credential = credential class RestSink(CopySink): @@ -31258,6 +32722,9 @@ class RestSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). 
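# --- Illustrative sketch (not part of the generated diff) ---------------------------
# The new `credential` property lets a REST linked service point at a credential
# resource (e.g. a user-assigned managed identity) via CredentialReference. Assumes
# the vendored models package; the URL and reference name are hypothetical (`type=` is
# passed for compatibility with generated reference models that require it).
from data_factory_management_client.models import CredentialReference, RestServiceLinkedService

rest_ls = RestServiceLinkedService(
    url="https://example.com/api",
    authentication_type="ManagedServiceIdentity",
    aad_resource_id="https://example.com",
    credential=CredentialReference(
        type="CredentialReference", reference_name="myUserAssignedIdentityCredential"
    ),
)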
:type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param request_method: The HTTP method used to call the RESTful API. The default is POST. Type: string (or Expression with resultType string). :type request_method: object @@ -31288,6 +32755,7 @@ class RestSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'request_method': {'key': 'requestMethod', 'type': 'object'}, 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, @@ -31304,6 +32772,7 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, request_method: Optional[object] = None, additional_headers: Optional[object] = None, http_request_timeout: Optional[object] = None, @@ -31311,7 +32780,7 @@ def __init__( http_compression_type: Optional[object] = None, **kwargs ): - super(RestSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(RestSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'RestSink' # type: str self.request_method = request_method self.additional_headers = additional_headers @@ -31339,6 +32808,9 @@ class RestSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). :type request_method: object @@ -31359,8 +32831,8 @@ class RestSource(CopySource): :param request_interval: The time to await before sending next page request. :type request_interval: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :type additional_columns: object """ _validation = { @@ -31373,13 +32845,14 @@ class RestSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'request_method': {'key': 'requestMethod', 'type': 'object'}, 'request_body': {'key': 'requestBody', 'type': 'object'}, 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, 'pagination_rules': {'key': 'paginationRules', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, 'request_interval': {'key': 'requestInterval', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -31389,16 +32862,17 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, request_method: Optional[object] = None, request_body: Optional[object] = None, additional_headers: Optional[object] = None, pagination_rules: Optional[object] = None, http_request_timeout: Optional[object] = None, request_interval: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(RestSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(RestSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'RestSource' # type: str self.request_method = request_method self.request_body = request_body @@ -31837,12 +33311,15 @@ class SalesforceMarketingCloudSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
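# --- Illustrative sketch (not part of the generated diff) ---------------------------
# A RestSource combining the new `disable_metrics_collection` flag with pagination:
# per ADF's REST connector docs, a pagination rule such as {"AbsoluteUrl": "$.nextLink"}
# follows a next-page link in the response payload. All values are hypothetical.
from data_factory_management_client.models import RestSource

rest_source = RestSource(
    request_method="GET",
    pagination_rules={"AbsoluteUrl": "$.nextLink"},
    http_request_timeout="00:01:40",  # timeout to get a response, not to read the body
    request_interval="00:00:01",      # pause before requesting the next page
    additional_columns=[{"name": "fetched_at", "value": {"type": "Expression", "value": "@utcnow()"}}],
    disable_metrics_collection=True,
)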
:type query: object @@ -31858,8 +33335,9 @@ class SalesforceMarketingCloudSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -31870,12 +33348,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(SalesforceMarketingCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SalesforceMarketingCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SalesforceMarketingCloudSource' # type: str self.query = query @@ -32133,6 +33612,9 @@ class SalesforceServiceCloudSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: The write behavior for the operation. Default is Insert. Possible values include: "Insert", "Upsert". 
:type write_behavior: str or ~data_factory_management_client.models.SalesforceSinkWriteBehavior @@ -32160,6 +33642,7 @@ class SalesforceServiceCloudSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, @@ -32174,12 +33657,13 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, write_behavior: Optional[Union[str, "SalesforceSinkWriteBehavior"]] = None, external_id_field_name: Optional[object] = None, ignore_null_values: Optional[object] = None, **kwargs ): - super(SalesforceServiceCloudSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SalesforceServiceCloudSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SalesforceServiceCloudSink' # type: str self.write_behavior = write_behavior self.external_id_field_name = external_id_field_name @@ -32205,14 +33689,17 @@ class SalesforceServiceCloudSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object :param read_behavior: The read behavior for the operation. Default is Query. Possible values include: "Query", "QueryAll". :type read_behavior: str or ~data_factory_management_client.models.SalesforceSourceReadBehavior :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :type additional_columns: object """ _validation = { @@ -32225,9 +33712,10 @@ class SalesforceServiceCloudSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -32237,12 +33725,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query: Optional[object] = None, read_behavior: Optional[Union[str, "SalesforceSourceReadBehavior"]] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(SalesforceServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SalesforceServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SalesforceServiceCloudSource' # type: str self.query = query self.read_behavior = read_behavior @@ -32274,6 +33763,9 @@ class SalesforceSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: The write behavior for the operation. Default is Insert. Possible values include: "Insert", "Upsert". 
:type write_behavior: str or ~data_factory_management_client.models.SalesforceSinkWriteBehavior @@ -32301,6 +33793,7 @@ class SalesforceSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, @@ -32315,12 +33808,13 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, write_behavior: Optional[Union[str, "SalesforceSinkWriteBehavior"]] = None, external_id_field_name: Optional[object] = None, ignore_null_values: Optional[object] = None, **kwargs ): - super(SalesforceSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SalesforceSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SalesforceSink' # type: str self.write_behavior = write_behavior self.external_id_field_name = external_id_field_name @@ -32346,12 +33840,15 @@ class SalesforceSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object :param read_behavior: The read behavior for the operation. Default is Query. 
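# --- Illustrative sketch (not part of the generated diff) ---------------------------
# An upsert-style SalesforceSink: `external_id_field_name` names the Salesforce
# external-ID field used for matching, and `ignore_null_values=True` keeps the
# destination value when the corresponding source field is NULL during the upsert.
# The field name is hypothetical.
from data_factory_management_client.models import SalesforceSink

sf_sink = SalesforceSink(
    write_behavior="Upsert",
    external_id_field_name="External_Id__c",
    ignore_null_values=True,
    disable_metrics_collection=False,
)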
Possible values @@ -32369,8 +33866,9 @@ class SalesforceSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, } @@ -32382,13 +33880,14 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, read_behavior: Optional[Union[str, "SalesforceSourceReadBehavior"]] = None, **kwargs ): - super(SalesforceSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SalesforceSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SalesforceSource' # type: str self.query = query self.read_behavior = read_behavior @@ -32562,12 +34061,15 @@ class SapBwSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: MDX query. Type: string (or Expression with resultType string). 
:type query: object """ @@ -32582,8 +34084,9 @@ class SapBwSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -32594,12 +34097,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(SapBwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SapBwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SapBwSource' # type: str self.query = query @@ -32772,6 +34276,9 @@ class SapCloudForCustomerSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: The write behavior for the operation. Default is 'Insert'. Possible values include: "Insert", "Update". 
:type write_behavior: str or @@ -32795,6 +34302,7 @@ class SapCloudForCustomerSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } @@ -32808,11 +34316,12 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, write_behavior: Optional[Union[str, "SapCloudForCustomerSinkWriteBehavior"]] = None, http_request_timeout: Optional[object] = None, **kwargs ): - super(SapCloudForCustomerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SapCloudForCustomerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SapCloudForCustomerSink' # type: str self.write_behavior = write_behavior self.http_request_timeout = http_request_timeout @@ -32837,12 +34346,15 @@ class SapCloudForCustomerSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or Expression with resultType string). 
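The sink completed above picks up the same flag alongside the Insert/Update write behavior documented earlier. A constructor sketch, assuming the generated package is importable under the name the docstrings use (data_factory_management_client); all values are illustrative:

    # Import path is an assumption taken from the docstring references;
    # adjust to the vendored module location if it differs.
    from data_factory_management_client.models import SapCloudForCustomerSink

    sink = SapCloudForCustomerSink(
        write_behavior="Update",           # default is 'Insert'
        http_request_timeout="00:05:00",   # illustrative timespan
        disable_metrics_collection=True,
        max_concurrent_connections=4,      # illustrative
    )
    print(sink.write_behavior)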
:type query: object @@ -32863,8 +34375,9 @@ class SapCloudForCustomerSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } @@ -32876,13 +34389,14 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, http_request_timeout: Optional[object] = None, **kwargs ): - super(SapCloudForCustomerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SapCloudForCustomerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SapCloudForCustomerSource' # type: str self.query = query self.http_request_timeout = http_request_timeout @@ -33050,12 +34564,15 @@ class SapEccSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with resultType string). 
:type query: object @@ -33076,8 +34593,9 @@ class SapEccSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } @@ -33089,13 +34607,14 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, http_request_timeout: Optional[object] = None, **kwargs ): - super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SapEccSource' # type: str self.query = query self.http_request_timeout = http_request_timeout @@ -33226,12 +34745,15 @@ class SapHanaSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: SAP HANA Sql query. Type: string (or Expression with resultType string). :type query: object :param packet_size: The packet size of data read from SAP HANA. 
Type: integer(or Expression @@ -33255,8 +34777,9 @@ class SapHanaSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'packet_size': {'key': 'packetSize', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'object'}, @@ -33270,15 +34793,16 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, packet_size: Optional[object] = None, partition_option: Optional[object] = None, partition_settings: Optional["SapHanaPartitionSettings"] = None, **kwargs ): - super(SapHanaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SapHanaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SapHanaSource' # type: str self.query = query self.packet_size = packet_size @@ -33495,12 +35019,15 @@ class SapOpenHubSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param exclude_last_request: Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with resultType boolean). 
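For the SAP HANA source above, a hedged payload sketch of the partitioned-read knobs; the partition option value and the settings key are assumptions for illustration, since the model types both as opaque objects:

    import json

    sap_hana_source = {
        "type": "SapHanaSource",
        "query": "SELECT * FROM MY_SCHEMA.MY_TABLE",  # illustrative SQL
        "packetSize": 65536,                          # bytes, illustrative
        "partitionOption": "SapHanaDynamicRange",     # assumed option name
        "partitionSettings": {"partitionColumnName": "ID"},  # assumed key
        "disableMetricsCollection": False,
    }
    print(json.dumps(sap_hana_source, indent=2))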
:type exclude_last_request: object @@ -33527,8 +35054,9 @@ class SapOpenHubSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'}, 'base_request_id': {'key': 'baseRequestId', 'type': 'object'}, 'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'}, @@ -33542,15 +35070,16 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, exclude_last_request: Optional[object] = None, base_request_id: Optional[object] = None, custom_rfc_read_table_function_module: Optional[object] = None, sap_data_column_delimiter: Optional[object] = None, **kwargs ): - super(SapOpenHubSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SapOpenHubSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SapOpenHubSource' # type: str self.exclude_last_request = exclude_last_request self.base_request_id = base_request_id @@ -33917,12 +35446,15 @@ class SapTableSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param row_count: The number of rows to be retrieved. Type: integer(or Expression with resultType integer). 
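The Open Hub source above exposes its delta-load controls through the keys in its _attribute_map; a minimal sketch with invented values:

    import json

    sap_open_hub_source = {
        "type": "SapOpenHubSource",
        "excludeLastRequest": False,   # docstring default is true
        "baseRequestId": 123,          # illustrative delta-load watermark
        "sapDataColumnDelimiter": "|",
        "disableMetricsCollection": False,
    }
    print(json.dumps(sap_open_hub_source, indent=2))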
:type row_count: object @@ -33964,8 +35496,9 @@ class SapTableSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'row_count': {'key': 'rowCount', 'type': 'object'}, 'row_skips': {'key': 'rowSkips', 'type': 'object'}, 'rfc_table_fields': {'key': 'rfcTableFields', 'type': 'object'}, @@ -33984,8 +35517,9 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, row_count: Optional[object] = None, row_skips: Optional[object] = None, rfc_table_fields: Optional[object] = None, @@ -33997,7 +35531,7 @@ def __init__( partition_settings: Optional["SapTablePartitionSettings"] = None, **kwargs ): - super(SapTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SapTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SapTableSource' # type: str self.row_count = row_count self.row_skips = row_skips @@ -34128,9 +35662,8 @@ class ScriptAction(msrest.serialization.Model): :type name: str :param uri: Required. The URI for the script action. :type uri: str - :param roles: Required. The node types on which the script action should be executed. Possible - values include: "Headnode", "Workernode", "Zookeeper". - :type roles: str or ~data_factory_management_client.models.HdiNodeTypes + :param roles: Required. The node types on which the script action should be executed. + :type roles: str :param parameters: The parameters for the script action. :type parameters: str """ @@ -34153,7 +35686,7 @@ def __init__( *, name: str, uri: str, - roles: Union[str, "HdiNodeTypes"], + roles: str, parameters: Optional[str] = None, **kwargs ): @@ -34732,12 +36265,15 @@ class ServiceNowSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
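Note the ScriptAction change above: roles drops the HdiNodeTypes enum and becomes a bare str, so the node-type text is passed through unvalidated. A sketch under the same import-path assumption as earlier; name, uri, and parameters are invented:

    from data_factory_management_client.models import ScriptAction

    # The previously documented values ("Headnode", "Workernode", "Zookeeper")
    # still apply; they are simply no longer enforced client-side.
    action = ScriptAction(
        name="installPackages",
        uri="https://example.com/scripts/install.sh",
        roles="Workernode",
        parameters="-v 1.2.3",
    )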
:type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -34753,8 +36289,9 @@ class ServiceNowSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -34765,16 +36302,72 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(ServiceNowSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(ServiceNowSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'ServiceNowSource' # type: str self.query = query +class ServicePrincipalCredential(Credential): + """Service principal credential. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of credential.Constant filled by server. + :type type: str + :param description: Credential description. + :type description: str + :param annotations: List of tags that can be used for describing the Credential. + :type annotations: list[object] + :param service_principal_id: The app ID of the service principal used to authenticate. + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to authenticate. + :type service_principal_key: + ~data_factory_management_client.models.AzureKeyVaultSecretReference + :param tenant: The ID of the tenant to which the service principal belongs. 
+ :type tenant: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'AzureKeyVaultSecretReference'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + annotations: Optional[List[object]] = None, + service_principal_id: Optional[object] = None, + service_principal_key: Optional["AzureKeyVaultSecretReference"] = None, + tenant: Optional[object] = None, + **kwargs + ): + super(ServicePrincipalCredential, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) + self.type = 'ServicePrincipal' # type: str + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + + class SetVariableActivity(Activity): """Set value for a Variable. @@ -34887,6 +36480,9 @@ class SftpReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). 
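The new ServicePrincipalCredential flattens its fields under typeProperties.* on the wire, and the client secret stays in Key Vault via AzureKeyVaultSecretReference. A hand-written sketch of the resulting JSON; the secret-reference shape (discriminator and store reference) is an assumption based on how SecretBase types serialize elsewhere, and all IDs and names are invented:

    import json

    service_principal_credential = {
        "type": "ServicePrincipal",
        "description": "Credential for copy runs",  # illustrative
        "typeProperties": {
            "servicePrincipalId": "00000000-0000-0000-0000-000000000000",
            "tenant": "11111111-1111-1111-1111-111111111111",
            # Assumed AzureKeyVaultSecretReference shape; the key itself
            # never appears inline.
            "servicePrincipalKey": {
                "type": "AzureKeyVaultSecret",
                "store": {
                    "referenceName": "myKeyVaultLinkedService",
                    "type": "LinkedServiceReference",
                },
                "secretName": "sp-client-secret",
            },
        },
    }
    print(json.dumps(service_principal_credential, indent=2))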
:type recursive: object @@ -34924,6 +36520,7 @@ class SftpReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -34940,6 +36537,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, @@ -34951,7 +36549,7 @@ def __init__( modified_datetime_end: Optional[object] = None, **kwargs ): - super(SftpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SftpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SftpReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -35095,6 +36693,9 @@ class SftpWriteSettings(StoreWriteSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object :param operation_timeout: Specifies the timeout for writing each chunk to SFTP server. 
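The SFTP read settings above combine wildcard matching with a modified-time bound; a sketch using the _attribute_map keys shown, values invented:

    import json

    sftp_read_settings = {
        "type": "SftpReadSettings",
        "recursive": True,
        "wildcardFolderPath": "exports/2021/*",
        "wildcardFileName": "*.csv",
        "modifiedDatetimeEnd": "2021-02-01T00:00:00Z",
        "disableMetricsCollection": False,
    }
    print(json.dumps(sftp_read_settings, indent=2))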
Default @@ -35114,6 +36715,7 @@ class SftpWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'operation_timeout': {'key': 'operationTimeout', 'type': 'object'}, 'use_temp_file_rename': {'key': 'useTempFileRename', 'type': 'object'}, @@ -35124,12 +36726,13 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, copy_behavior: Optional[object] = None, operation_timeout: Optional[object] = None, use_temp_file_rename: Optional[object] = None, **kwargs ): - super(SftpWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + super(SftpWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs) self.type = 'SftpWriteSettings' # type: str self.operation_timeout = operation_timeout self.use_temp_file_rename = use_temp_file_rename @@ -35308,6 +36911,9 @@ class SharePointOnlineListSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: The OData query to filter the data in SharePoint Online list. For example, "$top=1". Type: string (or Expression with resultType string). 
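The write settings above add a per-chunk timeout and a temp-file-rename toggle; a sketch with invented values:

    import json

    sftp_write_settings = {
        "type": "SftpWriteSettings",
        "operationTimeout": "00:01:00",  # illustrative per-chunk timeout
        # Upload under a temporary name, then rename, so readers never observe
        # partially written files; turn off for servers that cannot rename.
        "useTempFileRename": True,
        "disableMetricsCollection": False,
    }
    print(json.dumps(sftp_write_settings, indent=2))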
:type query: object @@ -35327,6 +36933,7 @@ class SharePointOnlineListSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } @@ -35338,11 +36945,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query: Optional[object] = None, http_request_timeout: Optional[object] = None, **kwargs ): - super(SharePointOnlineListSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SharePointOnlineListSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SharePointOnlineListSource' # type: str self.query = query self.http_request_timeout = http_request_timeout @@ -35520,12 +37128,15 @@ class ShopifySource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
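For the SharePoint Online list source just defined, the query is plain OData, as in the docstring's "$top=1" example; a slightly fuller sketch (filter values invented):

    import json

    sharepoint_list_source = {
        "type": "SharePointOnlineListSource",
        "query": "$filter=Modified ge datetime'2021-01-01T00:00:00Z'&$top=100",
        "httpRequestTimeout": "00:05:00",  # illustrative
        "disableMetricsCollection": False,
    }
    print(json.dumps(sharepoint_list_source, indent=2))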
:type query: object @@ -35541,8 +37152,9 @@ class ShopifySource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -35553,12 +37165,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(ShopifySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(ShopifySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'ShopifySource' # type: str self.query = query @@ -35851,6 +37464,9 @@ class SnowflakeSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). 
:type pre_copy_script: object @@ -35870,6 +37486,7 @@ class SnowflakeSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'import_settings': {'key': 'importSettings', 'type': 'SnowflakeImportCopyCommand'}, } @@ -35883,11 +37500,12 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, pre_copy_script: Optional[object] = None, import_settings: Optional["SnowflakeImportCopyCommand"] = None, **kwargs ): - super(SnowflakeSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SnowflakeSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SnowflakeSink' # type: str self.pre_copy_script = pre_copy_script self.import_settings = import_settings @@ -35912,6 +37530,9 @@ class SnowflakeSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Snowflake Sql query. Type: string (or Expression with resultType string). :type query: object :param export_settings: Snowflake export settings. 
@@ -35928,6 +37549,7 @@ class SnowflakeSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'export_settings': {'key': 'exportSettings', 'type': 'SnowflakeExportCopyCommand'}, } @@ -35939,11 +37561,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query: Optional[object] = None, export_settings: Optional["SnowflakeExportCopyCommand"] = None, **kwargs ): - super(SnowflakeSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SnowflakeSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SnowflakeSource' # type: str self.query = query self.export_settings = export_settings @@ -36184,12 +37807,15 @@ class SparkSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
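The Snowflake pair above delegates staging behavior to export/import command models; only their discriminators appear in this diff, so the sketch leaves any further options out (table names are invented):

    import json

    snowflake_source = {
        "type": "SnowflakeSource",
        "query": "SELECT * FROM SALES.PUBLIC.ORDERS",
        "exportSettings": {"type": "SnowflakeExportCopyCommand"},
        "disableMetricsCollection": False,
    }

    snowflake_sink = {
        "type": "SnowflakeSink",
        "preCopyScript": "TRUNCATE TABLE STAGE.ORDERS",
        "importSettings": {"type": "SnowflakeImportCopyCommand"},
        "disableMetricsCollection": False,
    }

    print(json.dumps({"source": snowflake_source, "sink": snowflake_sink}, indent=2))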
:type query: object @@ -36205,8 +37831,9 @@ class SparkSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -36217,12 +37844,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(SparkSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SparkSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SparkSource' # type: str self.query = query @@ -36294,6 +37922,9 @@ class SqlDwSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). :type pre_copy_script: object @@ -36311,6 +37942,14 @@ class SqlDwSink(CopySink): :param table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). :type table_option: object + :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or + Expression with resultType boolean). + :type sql_writer_use_table_lock: object + :param write_behavior: Write behavior when copying data into azure SQL DW. Type: + SqlDWWriteBehaviorEnum (or Expression with resultType SqlDWWriteBehaviorEnum). + :type write_behavior: object + :param upsert_settings: SQL DW upsert settings. 
+ :type upsert_settings: ~data_factory_management_client.models.SqlDwUpsertSettings """ _validation = { @@ -36325,12 +37964,16 @@ class SqlDwSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'}, 'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'}, 'allow_copy_command': {'key': 'allowCopyCommand', 'type': 'object'}, 'copy_command_settings': {'key': 'copyCommandSettings', 'type': 'DwCopyCommandSettings'}, 'table_option': {'key': 'tableOption', 'type': 'object'}, + 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlDwUpsertSettings'}, } def __init__( @@ -36342,15 +37985,19 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, pre_copy_script: Optional[object] = None, allow_poly_base: Optional[object] = None, poly_base_settings: Optional["PolybaseSettings"] = None, allow_copy_command: Optional[object] = None, copy_command_settings: Optional["DwCopyCommandSettings"] = None, table_option: Optional[object] = None, + sql_writer_use_table_lock: Optional[object] = None, + write_behavior: Optional[object] = None, + upsert_settings: Optional["SqlDwUpsertSettings"] = None, **kwargs ): - super(SqlDwSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SqlDwSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SqlDWSink' # type: str self.pre_copy_script = pre_copy_script self.allow_poly_base = allow_poly_base @@ -36358,6 +38005,9 @@ def __init__( self.allow_copy_command = allow_copy_command self.copy_command_settings = copy_command_settings self.table_option = table_option + self.sql_writer_use_table_lock = sql_writer_use_table_lock + self.write_behavior = write_behavior + self.upsert_settings = upsert_settings class SqlDwSource(TabularSource): @@ -36379,12 +38029,15 @@ class SqlDwSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param sql_reader_query: SQL Data Warehouse reader query. Type: string (or Expression with resultType string). :type sql_reader_query: object @@ -36413,8 +38066,9 @@ class SqlDwSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'}, @@ -36429,8 +38083,9 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, sql_reader_query: Optional[object] = None, sql_reader_stored_procedure_name: Optional[object] = None, stored_procedure_parameters: Optional[object] = None, @@ -36438,7 +38093,7 @@ def __init__( partition_settings: Optional["SqlPartitionSettings"] = None, **kwargs ): - super(SqlDwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SqlDwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SqlDWSource' # type: str self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name @@ -36447,6 +38102,34 @@ def __init__( self.partition_settings = partition_settings +class SqlDwUpsertSettings(msrest.serialization.Model): + """Sql DW upsert option settings. + + :param interim_schema_name: Schema name for interim table. Type: string (or Expression with + resultType string). + :type interim_schema_name: object + :param keys: Key column names for unique row identification. Type: array of strings (or + Expression with resultType array of strings). 
+ :type keys: object + """ + + _attribute_map = { + 'interim_schema_name': {'key': 'interimSchemaName', 'type': 'object'}, + 'keys': {'key': 'keys', 'type': 'object'}, + } + + def __init__( + self, + *, + interim_schema_name: Optional[object] = None, + keys: Optional[object] = None, + **kwargs + ): + super(SqlDwUpsertSettings, self).__init__(**kwargs) + self.interim_schema_name = interim_schema_name + self.keys = keys + + + class SqlMiSink(CopySink): """A copy activity Azure SQL Managed Instance sink. @@ -36472,6 +38155,9 @@ class SqlMiSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). :type sql_writer_stored_procedure_name: object @@ -36490,6 +38176,14 @@ class SqlMiSink(CopySink): :param table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). :type table_option: object + :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or + Expression with resultType boolean). + :type sql_writer_use_table_lock: object + :param write_behavior: Write behavior when copying data into Azure SQL MI. Type: + SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum). + :type write_behavior: object + :param upsert_settings: SQL upsert settings.
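Tying the new sink fields together: writeBehavior travels as an opaque value, and upsertSettings serializes per the SqlDwUpsertSettings maps above. A hedged sketch for the DW sink; the 'Upsert' value is an assumption (the enum itself is not enumerated in this diff), and the schema and column names are invented:

    import json

    sql_dw_sink = {
        "type": "SqlDWSink",
        "writeBehavior": "Upsert",        # assumed SqlDWWriteBehaviorEnum value
        "sqlWriterUseTableLock": False,   # bulk-copy table lock toggle
        "upsertSettings": {               # serialized SqlDwUpsertSettings
            "interimSchemaName": "staging",
            "keys": ["OrderId"],          # unique-row key columns
        },
        "disableMetricsCollection": False,
    }
    print(json.dumps(sql_dw_sink, indent=2))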
+ :type upsert_settings: ~data_factory_management_client.models.SqlUpsertSettings """ _validation = { @@ -36504,12 +38198,16 @@ class SqlMiSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, 'table_option': {'key': 'tableOption', 'type': 'object'}, + 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlUpsertSettings'}, } def __init__( @@ -36521,15 +38219,19 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, sql_writer_stored_procedure_name: Optional[object] = None, sql_writer_table_type: Optional[object] = None, pre_copy_script: Optional[object] = None, stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, stored_procedure_table_type_parameter_name: Optional[object] = None, table_option: Optional[object] = None, + sql_writer_use_table_lock: Optional[object] = None, + write_behavior: Optional[object] = None, + upsert_settings: Optional["SqlUpsertSettings"] = None, **kwargs ): - super(SqlMiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SqlMiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SqlMISink' # type: str self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type @@ -36537,6 +38239,9 @@ def __init__( self.stored_procedure_parameters = stored_procedure_parameters self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name self.table_option = table_option + self.sql_writer_use_table_lock = sql_writer_use_table_lock + self.write_behavior = write_behavior + self.upsert_settings = upsert_settings class SqlMiSource(TabularSource): @@ -36558,12 +38263,15 @@ class SqlMiSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. 
Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). :type sql_reader_query: object :param sql_reader_stored_procedure_name: Name of the stored procedure for a Azure SQL Managed @@ -36593,8 +38301,9 @@ class SqlMiSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, @@ -36610,8 +38319,9 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, sql_reader_query: Optional[object] = None, sql_reader_stored_procedure_name: Optional[object] = None, stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, @@ -36620,7 +38330,7 @@ def __init__( partition_settings: Optional["SqlPartitionSettings"] = None, **kwargs ): - super(SqlMiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SqlMiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SqlMISource' # type: str self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name @@ -36772,6 +38482,9 @@ class SqlServerSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. 
Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). :type sql_writer_stored_procedure_name: object @@ -36790,6 +38503,14 @@ class SqlServerSink(CopySink): :param table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). :type table_option: object + :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or + Expression with resultType boolean). + :type sql_writer_use_table_lock: object + :param write_behavior: Write behavior when copying data into sql server. Type: + SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum). + :type write_behavior: object + :param upsert_settings: SQL upsert settings. + :type upsert_settings: ~data_factory_management_client.models.SqlUpsertSettings """ _validation = { @@ -36804,12 +38525,16 @@ class SqlServerSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, 'table_option': {'key': 'tableOption', 'type': 'object'}, + 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlUpsertSettings'}, } def __init__( @@ -36821,15 +38546,19 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, sql_writer_stored_procedure_name: Optional[object] = None, sql_writer_table_type: Optional[object] = None, pre_copy_script: Optional[object] = None, stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, stored_procedure_table_type_parameter_name: Optional[object] = None, table_option: Optional[object] = None, + sql_writer_use_table_lock: Optional[object] = None, + write_behavior: Optional[object] = None, + upsert_settings: Optional["SqlUpsertSettings"] = None, **kwargs ): - super(SqlServerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SqlServerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 
'SqlServerSink' # type: str self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type @@ -36837,6 +38566,9 @@ def __init__( self.stored_procedure_parameters = stored_procedure_parameters self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name self.table_option = table_option + self.sql_writer_use_table_lock = sql_writer_use_table_lock + self.write_behavior = write_behavior + self.upsert_settings = upsert_settings class SqlServerSource(TabularSource): @@ -36858,12 +38590,15 @@ class SqlServerSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). :type sql_reader_query: object :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database @@ -36893,8 +38628,9 @@ class SqlServerSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, @@ -36910,8 +38646,9 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, sql_reader_query: Optional[object] = None, sql_reader_stored_procedure_name: Optional[object] = None, stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, @@ -36920,7 +38657,7 @@ def __init__( partition_settings: Optional["SqlPartitionSettings"] = None, **kwargs ): - super(SqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, 
query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SqlServerSource' # type: str self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name @@ -37106,6 +38843,9 @@ class SqlSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). :type sql_writer_stored_procedure_name: object @@ -37124,6 +38864,14 @@ class SqlSink(CopySink): :param table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). :type table_option: object + :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or + Expression with resultType boolean). + :type sql_writer_use_table_lock: object + :param write_behavior: Write behavior when copying data into sql. Type: SqlWriteBehaviorEnum + (or Expression with resultType SqlWriteBehaviorEnum). + :type write_behavior: object + :param upsert_settings: SQL upsert settings. 
+ :type upsert_settings: ~data_factory_management_client.models.SqlUpsertSettings """ _validation = { @@ -37138,12 +38886,16 @@ class SqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, 'table_option': {'key': 'tableOption', 'type': 'object'}, + 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlUpsertSettings'}, } def __init__( @@ -37155,15 +38907,19 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, sql_writer_stored_procedure_name: Optional[object] = None, sql_writer_table_type: Optional[object] = None, pre_copy_script: Optional[object] = None, stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, stored_procedure_table_type_parameter_name: Optional[object] = None, table_option: Optional[object] = None, + sql_writer_use_table_lock: Optional[object] = None, + write_behavior: Optional[object] = None, + upsert_settings: Optional["SqlUpsertSettings"] = None, **kwargs ): - super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SqlSink' # type: str self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type @@ -37171,6 +38927,9 @@ def __init__( self.stored_procedure_parameters = stored_procedure_parameters self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name self.table_option = table_option + self.sql_writer_use_table_lock = sql_writer_use_table_lock + self.write_behavior = write_behavior + self.upsert_settings = upsert_settings class SqlSource(TabularSource): @@ -37192,12 +38951,15 @@ class SqlSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
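The three write options added to SqlServerSink and SqlSink above travel together. A hedged sketch of a SqlSink configured for upsert (same assumed import path; write_behavior is typed object, so enum values such as 'upsert' go in as plain strings or Expression dicts):

from data_factory_management_client.models import SqlSink, SqlUpsertSettings

sink = SqlSink(
    write_behavior="upsert",              # assumed SqlWriteBehaviorEnum value
    sql_writer_use_table_lock=False,      # table lock mainly benefits bulk insert
    upsert_settings=SqlUpsertSettings(
        use_temp_db=True,                 # stage interim rows in temp db
        keys=["CustomerId"],              # key columns for unique row identification
    ),
)

The SqlUpsertSettings model referenced here is defined further down in this file.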
+ :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). :type sql_reader_query: object :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database @@ -37229,8 +38991,9 @@ class SqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, @@ -37246,8 +39009,9 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, sql_reader_query: Optional[object] = None, sql_reader_stored_procedure_name: Optional[object] = None, stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, @@ -37256,7 +39020,7 @@ def __init__( partition_settings: Optional["SqlPartitionSettings"] = None, **kwargs ): - super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SqlSource' # type: str self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name @@ -37266,6 +39030,40 @@ def __init__( self.partition_settings = partition_settings +class SqlUpsertSettings(msrest.serialization.Model): + """Sql upsert option settings. + + :param use_temp_db: Specifies whether to use temp db for upsert interim table. Type: boolean + (or Expression with resultType boolean). + :type use_temp_db: object + :param interim_schema_name: Schema name for interim table. 
Type: string (or Expression with + resultType string). + :type interim_schema_name: object + :param keys: Key column names for unique row identification. Type: array of strings (or + Expression with resultType array of strings). + :type keys: object + """ + + _attribute_map = { + 'use_temp_db': {'key': 'useTempDB', 'type': 'object'}, + 'interim_schema_name': {'key': 'interimSchemaName', 'type': 'object'}, + 'keys': {'key': 'keys', 'type': 'object'}, + } + + def __init__( + self, + *, + use_temp_db: Optional[object] = None, + interim_schema_name: Optional[object] = None, + keys: Optional[object] = None, + **kwargs + ): + super(SqlUpsertSettings, self).__init__(**kwargs) + self.use_temp_db = use_temp_db + self.interim_schema_name = interim_schema_name + self.keys = keys + + class SquareLinkedService(LinkedService): """Square Service linked service. @@ -37453,12 +39251,15 @@ class SquareSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
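Since SqlUpsertSettings is a plain msrest model, its attribute map (note the useTempDB casing) fixes the wire format, and msrest's standard Model.serialize() is a quick way to check the payload the service will receive. A sketch:

from data_factory_management_client.models import SqlUpsertSettings

settings = SqlUpsertSettings(use_temp_db=True, interim_schema_name="stg", keys=["Id"])
print(settings.serialize())
# expected, roughly: {'useTempDB': True, 'interimSchemaName': 'stg', 'keys': ['Id']}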
:type query: object @@ -37474,8 +39275,9 @@ class SquareSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -37486,12 +39288,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(SquareSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SquareSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SquareSource' # type: str self.query = query @@ -38555,12 +40358,15 @@ class SybaseSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: Database query. Type: string (or Expression with resultType string). 
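Retyping additional_columns as object (rather than list[AdditionalColumns]) is what lets the whole value be an ADF Expression instead of only a literal array. For example, a SquareSource whose extra columns are computed at run time (the expression text is illustrative):

from data_factory_management_client.models import SquareSource

source = SquareSource(
    query="SELECT * FROM Payments",
    additional_columns={
        "value": "@activity('LookupColumns').output.value",
        "type": "Expression",
    },
)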
:type query: object """ @@ -38575,8 +40381,9 @@ class SybaseSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -38587,12 +40394,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(SybaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SybaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SybaseSource' # type: str self.query = query @@ -38955,12 +40763,15 @@ class TeradataSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: Teradata query. Type: string (or Expression with resultType string). 
:type query: object :param partition_option: The partition mechanism that will be used for teradata read in @@ -38981,8 +40792,9 @@ class TeradataSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'object'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'}, @@ -38995,14 +40807,15 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, partition_option: Optional[object] = None, partition_settings: Optional["TeradataPartitionSettings"] = None, **kwargs ): - super(TeradataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(TeradataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'TeradataSource' # type: str self.query = query self.partition_option = partition_option @@ -40159,12 +41972,15 @@ class VerticaSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
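TeradataSource now carries the metrics flag alongside its existing partition controls; a sketch combining them (the partition option value 'Hash' and the TeradataPartitionSettings field name are assumptions based on the generated model, not verified here):

from data_factory_management_client.models import (
    TeradataPartitionSettings,
    TeradataSource,
)

source = TeradataSource(
    query="SELECT * FROM Sales",
    disable_metrics_collection=True,
    partition_option="Hash",  # assumed mechanism name
    partition_settings=TeradataPartitionSettings(partition_column_name="OrderId"),
)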
:type query: object @@ -40180,8 +41996,9 @@ class VerticaSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -40192,12 +42009,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(VerticaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(VerticaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'VerticaSource' # type: str self.query = query @@ -40443,10 +42261,7 @@ def __init__( class WebActivityAuthentication(msrest.serialization.Model): """Web activity authentication properties. - All required parameters must be populated in order to send to Azure. - - :param type: Required. Web activity authentication - (Basic/ClientCertificate/MSI/ServicePrincipal). + :param type: Web activity authentication (Basic/ClientCertificate/MSI/ServicePrincipal). :type type: str :param pfx: Base64-encoded contents of a PFX file or Certificate when used for ServicePrincipal. @@ -40463,12 +42278,10 @@ class WebActivityAuthentication(msrest.serialization.Model): :param user_tenant: TenantId for which Azure Auth token will be requested when using ServicePrincipal Authentication. Type: string (or Expression with resultType string). :type user_tenant: object + :param credential: The credential reference containing authentication information. 
+ :type credential: ~data_factory_management_client.models.CredentialReference """ - _validation = { - 'type': {'required': True}, - } - _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'pfx': {'key': 'pfx', 'type': 'SecretBase'}, @@ -40476,17 +42289,19 @@ class WebActivityAuthentication(msrest.serialization.Model): 'password': {'key': 'password', 'type': 'SecretBase'}, 'resource': {'key': 'resource', 'type': 'object'}, 'user_tenant': {'key': 'userTenant', 'type': 'object'}, + 'credential': {'key': 'credential', 'type': 'CredentialReference'}, } def __init__( self, *, - type: str, + type: Optional[str] = None, pfx: Optional["SecretBase"] = None, username: Optional[object] = None, password: Optional["SecretBase"] = None, resource: Optional[object] = None, user_tenant: Optional[object] = None, + credential: Optional["CredentialReference"] = None, **kwargs ): super(WebActivityAuthentication, self).__init__(**kwargs) @@ -40496,6 +42311,7 @@ def __init__( self.password = password self.resource = resource self.user_tenant = user_tenant + self.credential = credential class WebLinkedServiceTypeProperties(msrest.serialization.Model): @@ -40833,9 +42649,12 @@ class WebSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
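With the required-type validation dropped and a credential slot added, WebActivityAuthentication can now lean on a credential reference rather than inline secrets. A sketch (CredentialReference's constructor shape, a reference_name keyword, is assumed from the usual generated pattern):

from data_factory_management_client.models import (
    CredentialReference,
    WebActivityAuthentication,
)

auth = WebActivityAuthentication(
    type="MSI",  # now optional rather than required
    credential=CredentialReference(reference_name="myUserAssignedCredential"),
)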
+ :type additional_columns: object """ _validation = { @@ -40848,7 +42667,8 @@ class WebSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -40858,10 +42678,11 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + disable_metrics_collection: Optional[object] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(WebSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(WebSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'WebSource' # type: str self.additional_columns = additional_columns @@ -41125,12 +42946,15 @@ class XeroSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -41146,8 +42970,9 @@ class XeroSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -41158,12 +42983,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(XeroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(XeroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'XeroSource' # type: str self.query = query @@ -41336,13 +43162,16 @@ class XmlSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Xml store settings. :type store_settings: ~data_factory_management_client.models.StoreReadSettings :param format_settings: Xml format settings. :type format_settings: ~data_factory_management_client.models.XmlReadSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :type additional_columns: object """ _validation = { @@ -41355,9 +43184,10 @@ class XmlSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'XmlReadSettings'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, } def __init__( @@ -41367,12 +43197,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, store_settings: Optional["StoreReadSettings"] = None, format_settings: Optional["XmlReadSettings"] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, **kwargs ): - super(XmlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(XmlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'XmlSource' # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -41592,12 +43423,15 @@ class ZohoSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects (or Expression with resultType array of objects). - :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :type additional_columns: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
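XmlSource composes the new flag with its store and format settings. A sketch, with the caveat that XmlReadSettings' validation_mode field is an assumption from the generated model, and that any StoreReadSettings subclass could fill the optional store_settings slot:

from data_factory_management_client.models import XmlReadSettings, XmlSource

source = XmlSource(
    format_settings=XmlReadSettings(validation_mode="xsd"),  # assumed field
    disable_metrics_collection=False,
)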
:type query: object @@ -41613,8 +43447,9 @@ class ZohoSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'additional_columns': {'key': 'additionalColumns', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -41625,11 +43460,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, - additional_columns: Optional[List["AdditionalColumns"]] = None, + additional_columns: Optional[object] = None, query: Optional[object] = None, **kwargs ): - super(ZohoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(ZohoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'ZohoSource' # type: str self.query = query diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_run_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_run_operations.py deleted file mode 100644 index 192e09232ad..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_run_operations.py +++ /dev/null @@ -1,132 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -import datetime -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class ActivityRunOperations(object): - """ActivityRunOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. 
- - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def query_by_pipeline_run( - self, - resource_group_name, # type: str - factory_name, # type: str - run_id, # type: str - last_updated_after, # type: datetime.datetime - last_updated_before, # type: datetime.datetime - continuation_token_parameter=None, # type: Optional[str] - filters=None, # type: Optional[List["models.RunQueryFilter"]] - order_by=None, # type: Optional[List["models.RunQueryOrderBy"]] - **kwargs # type: Any - ): - # type: (...) -> "models.ActivityRunsQueryResponse" - """Query activity runs based on input filter conditions. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param run_id: The pipeline run identifier. - :type run_id: str - :param last_updated_after: The time at or after which the run event was updated in 'ISO 8601' - format. - :type last_updated_after: ~datetime.datetime - :param last_updated_before: The time at or before which the run event was updated in 'ISO 8601' - format. - :type last_updated_before: ~datetime.datetime - :param continuation_token_parameter: The continuation token for getting the next page of - results. Null for first page. - :type continuation_token_parameter: str - :param filters: List of filters. - :type filters: list[~data_factory_management_client.models.RunQueryFilter] - :param order_by: List of OrderBy option. 
- :type order_by: list[~data_factory_management_client.models.RunQueryOrderBy] - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ActivityRunsQueryResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ActivityRunsQueryResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ActivityRunsQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.query_by_pipeline_run.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'runId': self._serialize.url("run_id", run_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ActivityRunsQueryResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - query_by_pipeline_run.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/queryActivityruns'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_operations.py deleted file mode 100644 index e0bd3be1783..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_operations.py +++ /dev/null @@ -1,317 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
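Before its removal here, this generated wrapper flattened the RunFilterParameters body into keyword arguments and rebuilt the model internally. A sketch of the call shape it supported, assuming the client exposes the operation group as activity_runs (the resource names and run id are placeholders):

import datetime

# client: a previously constructed DataFactoryManagementClient
runs = client.activity_runs.query_by_pipeline_run(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    run_id="<pipeline-run-id>",
    last_updated_after=datetime.datetime(2018, 6, 16),
    last_updated_before=datetime.datetime(2018, 6, 17),
)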
See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class DataFlowOperations(object): - """DataFlowOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def create_or_update( - self, - resource_group_name, # type: str - factory_name, # type: str - data_flow_name, # type: str - properties, # type: "models.DataFlow" - if_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.DataFlowResource" - """Creates or updates a data flow. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param data_flow_name: The data flow name. - :type data_flow_name: str - :param properties: Data flow properties. - :type properties: ~data_factory_management_client.models.DataFlow - :param if_match: ETag of the data flow entity. Should only be specified for update, for which - it should match existing entity or can be * for unconditional update. 
- :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: DataFlowResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.DataFlowResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - data_flow = models.DataFlowResource(properties=properties) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(data_flow, 'DataFlowResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('DataFlowResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'} # type: ignore - - def get( - self, - resource_group_name, # type: str - factory_name, # type: str - data_flow_name, # type: str - if_none_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.DataFlowResource" - """Gets a data flow. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param data_flow_name: The data flow name. - :type data_flow_name: str - :param if_none_match: ETag of the data flow entity. Should only be specified for get. If the - ETag matches the existing entity tag, or if * was provided, then no content will be returned. 
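The if_match/if_none_match pair gives these operations standard ETag concurrency: read the entity, then write back conditionally. A sketch, assuming the group is exposed as data_flows and that DataFlowResource surfaces the server ETag as .etag:

# client: a previously constructed DataFactoryManagementClient
existing = client.data_flows.get("exampleResourceGroup", "exampleFactoryName", "exampleDataFlow")
client.data_flows.create_or_update(
    "exampleResourceGroup",
    "exampleFactoryName",
    "exampleDataFlow",
    properties=existing.properties,
    if_match=existing.etag,  # a stale ETag fails instead of silently overwriting
)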
- :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: DataFlowResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.DataFlowResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('DataFlowResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'} # type: ignore - - def delete( - self, - resource_group_name, # type: str - factory_name, # type: str - data_flow_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Deletes a data flow. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param data_flow_name: The data flow name. 
- :type data_flow_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'} # type: ignore - - def list_by_factory( - self, - resource_group_name, # type: str - factory_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["models.DataFlowListResponse"] - """Lists data flows. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
- :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either DataFlowListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.DataFlowListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize('DataFlowListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_dataset_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_dataset_operations.py deleted file mode 100644 index 2f866416c74..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_dataset_operations.py +++ /dev/null @@ -1,319 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
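prepare_request, extract_data and get_next above are the standard azure-core paging triple: ItemPaged calls get_next once per page (handing it the previous nextLink), and extract_data splits each response into (next_link, items). On the caller's side all of that collapses into a plain iterator; a sketch, reusing the hypothetical client from the earlier sketch:

# Item by item: exhausting one page transparently triggers the next GET.
for df in client.data_flows.list_by_factory("exampleResourceGroup", "exampleFactoryName"):
    print(df.name)

# Page by page, when the page boundaries themselves matter.
pager = client.data_flows.list_by_factory("exampleResourceGroup", "exampleFactoryName")
for page in pager.by_page():
    items = list(page)  # one service round-trip per page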
-# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class DatasetOperations(object): - """DatasetOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_factory( - self, - resource_group_name, # type: str - factory_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["models.DatasetListResponse"] - """Lists datasets. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
- :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either DatasetListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.DatasetListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize('DatasetListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets'} # type: ignore - - def create_or_update( - self, - resource_group_name, # type: str - factory_name, # type: str - dataset_name, # type: str - properties, # type: "models.Dataset" - if_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.DatasetResource" - """Creates or updates a dataset. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param dataset_name: The dataset name. - :type dataset_name: str - :param properties: Dataset properties. - :type properties: ~data_factory_management_client.models.Dataset - :param if_match: ETag of the dataset entity. Should only be specified for update, for which it - should match existing entity or can be * for unconditional update. 
- :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: DatasetResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.DatasetResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - dataset = models.DatasetResource(properties=properties) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(dataset, 'DatasetResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('DatasetResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'} # type: ignore - - def get( - self, - resource_group_name, # type: str - factory_name, # type: str - dataset_name, # type: str - if_none_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Optional["models.DatasetResource"] - """Gets a dataset. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param dataset_name: The dataset name. - :type dataset_name: str - :param if_none_match: ETag of the dataset entity. Should only be specified for get. If the ETag - matches the existing entity tag, or if * was provided, then no content will be returned. 
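Note the asymmetry with create_or_update: this get also whitelists 304, and on 304 it skips deserialization entirely, which is why the return type is Optional. A small caching sketch (same hypothetical client; the .etag attribute on the model is an assumption):

cached = client.datasets.get("exampleResourceGroup", "exampleFactoryName", "exampleDataset")
maybe_newer = client.datasets.get(
    "exampleResourceGroup", "exampleFactoryName", "exampleDataset",
    if_none_match=cached.etag)
if maybe_newer is None:
    dataset = cached       # 304: the server copy is unchanged, keep the cache
else:
    dataset = maybe_newer  # 200: refreshed entity, with a new ETag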
- :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: DatasetResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.DatasetResource or None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DatasetResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 304]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('DatasetResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'} # type: ignore - - def delete( - self, - resource_group_name, # type: str - factory_name, # type: str - dataset_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Deletes a dataset. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param dataset_name: The dataset name. 
- :type dataset_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factory_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factory_operations.py deleted file mode 100644 index 5b8622e97f9..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factory_operations.py +++ /dev/null @@ -1,671 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. 
import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class FactoryOperations(object): - """FactoryOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list( - self, - **kwargs # type: Any - ): - # type: (...) -> Iterable["models.FactoryListResponse"] - """Lists factories under the specified subscription. - - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either FactoryListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.FactoryListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.FactoryListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize('FactoryListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/factories'} # type: ignore - - def configure_factory_repo( - self, - 
location_id, # type: str - factory_resource_id=None, # type: Optional[str] - repo_configuration=None, # type: Optional["models.FactoryRepoConfiguration"] - **kwargs # type: Any - ): - # type: (...) -> "models.Factory" - """Updates a factory's repo information. - - :param location_id: The location identifier. - :type location_id: str - :param factory_resource_id: The factory resource id. - :type factory_resource_id: str - :param repo_configuration: Git repo information of the factory. - :type repo_configuration: ~data_factory_management_client.models.FactoryRepoConfiguration - :keyword callable cls: A custom type or function that will be passed the direct response - :return: Factory, or the result of cls(response) - :rtype: ~data_factory_management_client.models.Factory - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - factory_repo_update = models.FactoryRepoUpdate(factory_resource_id=factory_resource_id, repo_configuration=repo_configuration) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.configure_factory_repo.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'locationId': self._serialize.url("location_id", location_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(factory_repo_update, 'FactoryRepoUpdate') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('Factory', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - configure_factory_repo.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/locations/{locationId}/configureFactoryRepo'} # type: ignore - - def list_by_resource_group( - self, - resource_group_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["models.FactoryListResponse"] - """Lists factories. - - :param resource_group_name: The resource group name. 
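configure_factory_repo wraps its two optional arguments into a FactoryRepoUpdate body and POSTs it to a per-location endpoint rather than to a factory resource path. A sketch, assuming the generated models also expose the FactoryVSTSConfiguration polymorphic subtype of FactoryRepoConfiguration, as the public SDK does:

from azure.mgmt.datafactory import models

repo = models.FactoryVSTSConfiguration(
    account_name="ADF",
    project_name="project",
    repository_name="repo",
    collaboration_branch="master",
    root_folder="/",
)
factory = client.factories.configure_factory_repo(
    "East US",  # location_id: the factory's region, used only in the URL path
    factory_resource_id="/subscriptions/<sub-id>/resourceGroups/exampleResourceGroup"
                        "/providers/Microsoft.DataFactory/factories/exampleFactoryName",
    repo_configuration=repo,
)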
- :type resource_group_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either FactoryListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.FactoryListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.FactoryListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_resource_group.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize('FactoryListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged( - get_next, extract_data - ) - list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories'} # type: ignore - - def create_or_update( - self, - resource_group_name, # type: str - factory_name, # type: str - if_match=None, # type: Optional[str] - location=None, # type: Optional[str] - tags=None, # type: Optional[Dict[str, str]] - identity=None, # type: Optional["models.FactoryIdentity"] - repo_configuration=None, # type: Optional["models.FactoryRepoConfiguration"] - global_parameters=None, # type: Optional[Dict[str, "models.GlobalParameterSpecification"]] - **kwargs # type: Any - ): - # type: (...) -> "models.Factory" - """Creates or updates a factory. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param if_match: ETag of the factory entity. Should only be specified for update, for which it - should match existing entity or can be * for unconditional update. - :type if_match: str - :param location: The resource location. - :type location: str - :param tags: The resource tags. 
- :type tags: dict[str, str] - :param identity: Managed service identity of the factory. - :type identity: ~data_factory_management_client.models.FactoryIdentity - :param repo_configuration: Git repo information of the factory. - :type repo_configuration: ~data_factory_management_client.models.FactoryRepoConfiguration - :param global_parameters: List of parameters for factory. - :type global_parameters: dict[str, ~data_factory_management_client.models.GlobalParameterSpecification] - :keyword callable cls: A custom type or function that will be passed the direct response - :return: Factory, or the result of cls(response) - :rtype: ~data_factory_management_client.models.Factory - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - factory = models.Factory(location=location, tags=tags, identity=identity, repo_configuration=repo_configuration, global_parameters=global_parameters) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(factory, 'Factory') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('Factory', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore - - def update( - self, - resource_group_name, # type: str - factory_name, # type: str - tags=None, # type: Optional[Dict[str, str]] - identity=None, # type: Optional["models.FactoryIdentity"] - **kwargs # type: Any - ): - # type: (...) -> "models.Factory" - """Updates a factory. - - :param resource_group_name: The resource group name. 
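Unlike the raw REST shape, the generated create_or_update flattens the Factory body into keyword arguments and rebuilds the model internally, so a minimal create needs no model imports at all. A sketch with the same hypothetical client:

factory = client.factories.create_or_update(
    "exampleResourceGroup",
    "exampleFactoryName",
    location="East US",
    tags={"exampleTag": "exampleValue"},
)
print(factory.provisioning_state)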
- :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param tags: The resource tags. - :type tags: dict[str, str] - :param identity: Managed service identity of the factory. - :type identity: ~data_factory_management_client.models.FactoryIdentity - :keyword callable cls: A custom type or function that will be passed the direct response - :return: Factory, or the result of cls(response) - :rtype: ~data_factory_management_client.models.Factory - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - factory_update_parameters = models.FactoryUpdateParameters(tags=tags, identity=identity) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(factory_update_parameters, 'FactoryUpdateParameters') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('Factory', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore - - def get( - self, - resource_group_name, # type: str - factory_name, # type: str - if_none_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Optional["models.Factory"] - """Gets a factory. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param if_none_match: ETag of the factory entity. Should only be specified for get. If the ETag - matches the existing entity tag, or if * was provided, then no content will be returned. 
- :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: Factory, or the result of cls(response) - :rtype: ~data_factory_management_client.models.Factory or None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.Factory"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 304]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('Factory', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore - - def delete( - self, - resource_group_name, # type: str - factory_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Deletes a factory. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
- :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore - - def get_git_hub_access_token( - self, - resource_group_name, # type: str - factory_name, # type: str - git_hub_access_code, # type: str - git_hub_access_token_base_url, # type: str - git_hub_client_id=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.GitHubAccessTokenResponse" - """Get GitHub Access Token. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param git_hub_access_code: GitHub access code. - :type git_hub_access_code: str - :param git_hub_access_token_base_url: GitHub access token base URL. - :type git_hub_access_token_base_url: str - :param git_hub_client_id: GitHub application client ID. 
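get_git_hub_access_token exchanges the short-lived OAuth code from GitHub's redirect for an access token the factory can use against the repository. A sketch; the attribute name on the response is an assumption based on the public SDK:

resp = client.factories.get_git_hub_access_token(
    "exampleResourceGroup",
    "exampleFactoryName",
    git_hub_access_code="<code-from-oauth-redirect>",
    git_hub_access_token_base_url="https://github.mydomain.com",
    git_hub_client_id="<github-app-client-id>",
)
print(resp.git_hub_access_token)  # assumed response field name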
- :type git_hub_client_id: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: GitHubAccessTokenResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.GitHubAccessTokenResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.GitHubAccessTokenResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - git_hub_access_token_request = models.GitHubAccessTokenRequest(git_hub_access_code=git_hub_access_code, git_hub_client_id=git_hub_client_id, git_hub_access_token_base_url=git_hub_access_token_base_url) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.get_git_hub_access_token.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(git_hub_access_token_request, 'GitHubAccessTokenRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('GitHubAccessTokenResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get_git_hub_access_token.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getGitHubAccessToken'} # type: ignore - - def get_data_plane_access( - self, - resource_group_name, # type: str - factory_name, # type: str - permissions=None, # type: Optional[str] - access_resource_path=None, # type: Optional[str] - profile_name=None, # type: Optional[str] - start_time=None, # type: Optional[str] - expire_time=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.AccessPolicyResponse" - """Get Data Plane access. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param permissions: The string with permissions for Data Plane access. Currently only 'r' is - supported which grants read only access. 
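The docstring spells out the whole contract: only 'r' permissions, only the empty resource path (the factory itself), only the DefaultProfile profile, and a token capped at eight hours. A sketch requesting a read token (the access_token attribute is assumed from the public SDK):

policy = client.factories.get_data_plane_access(
    "exampleResourceGroup",
    "exampleFactoryName",
    permissions="r",                 # read-only is the only supported value
    access_resource_path="",         # empty string means the whole factory
    profile_name="DefaultProfile",
    start_time="2018-11-10T02:46:20.2659347Z",
    expire_time="2018-11-10T09:46:20.2659347Z",  # within the eight-hour cap
)
print(policy.access_token)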
- :type permissions: str - :param access_resource_path: The resource path to get access relative to factory. Currently - only empty string is supported which corresponds to the factory resource. - :type access_resource_path: str - :param profile_name: The name of the profile. Currently only the default is supported. The - default value is DefaultProfile. - :type profile_name: str - :param start_time: Start time for the token. If not specified the current time will be used. - :type start_time: str - :param expire_time: Expiration time for the token. Maximum duration for the token is eight - hours and by default the token will expire in eight hours. - :type expire_time: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: AccessPolicyResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.AccessPolicyResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.AccessPolicyResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - policy = models.UserAccessPolicy(permissions=permissions, access_resource_path=access_resource_path, profile_name=profile_name, start_time=start_time, expire_time=expire_time) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.get_data_plane_access.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(policy, 'UserAccessPolicy') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('AccessPolicyResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get_data_plane_access.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getDataPlaneAccess'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_node_operations.py 
b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_node_operations.py deleted file mode 100644 index a7903633080..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_node_operations.py +++ /dev/null @@ -1,309 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class IntegrationRuntimeNodeOperations(object): - """IntegrationRuntimeNodeOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def get( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - node_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "models.SelfHostedIntegrationRuntimeNode" - """Gets a self-hosted integration runtime node. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param node_name: The integration runtime node name. 
- :type node_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: SelfHostedIntegrationRuntimeNode, or the result of cls(response) - :rtype: ~data_factory_management_client.models.SelfHostedIntegrationRuntimeNode - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.SelfHostedIntegrationRuntimeNode"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('SelfHostedIntegrationRuntimeNode', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} # type: ignore - - def delete( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - node_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Deletes a self-hosted integration runtime node. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param node_name: The integration runtime node name. 
- :type node_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} # type: ignore - - def update( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - node_name, # type: str - concurrent_jobs_limit=None, # type: Optional[int] - **kwargs # type: Any - ): - # type: (...) -> "models.SelfHostedIntegrationRuntimeNode" - """Updates a self-hosted integration runtime node. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param node_name: The integration runtime node name. - :type node_name: str - :param concurrent_jobs_limit: The number of concurrent jobs permitted to run on the integration - runtime node. Values between 1 and maxConcurrentJobs(inclusive) are allowed. 
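The only mutable field on a self-hosted IR node is this job throttle; everything else on SelfHostedIntegrationRuntimeNode is read-only. A sketch, assuming the operation group is exposed as integration_runtime_nodes as in the public SDK:

node = client.integration_runtime_nodes.update(
    "exampleResourceGroup",
    "exampleFactoryName",
    "exampleIntegrationRuntime",
    "Node_1",
    concurrent_jobs_limit=2,  # must stay within 1..node.max_concurrent_jobs
)
print(node.concurrent_jobs_limit)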
- :type concurrent_jobs_limit: int
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: SelfHostedIntegrationRuntimeNode, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.SelfHostedIntegrationRuntimeNode
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.SelfHostedIntegrationRuntimeNode"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- update_integration_runtime_node_request = models.UpdateIntegrationRuntimeNodeRequest(concurrent_jobs_limit=concurrent_jobs_limit)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(update_integration_runtime_node_request, 'UpdateIntegrationRuntimeNodeRequest')
- body_content_kwargs['content'] = body_content
- request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('SelfHostedIntegrationRuntimeNode', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} # type: ignore
-
- def get_ip_address(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- node_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> "models.IntegrationRuntimeNodeIpAddress"
- """Get the IP address of a self-hosted integration runtime node.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param node_name: The integration runtime node name. - :type node_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeNodeIpAddress, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeNodeIpAddress - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeNodeIpAddress"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get_ip_address.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeNodeIpAddress', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get_ip_address.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}/ipAddress'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_operations.py deleted file mode 100644 index 1fb5fc6b30d..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_operations.py +++ /dev/null @@ -1,1198 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. 
-# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.mgmt.core.exceptions import ARMErrorFormat -from azure.mgmt.core.polling.arm_polling import ARMPolling - -from .. import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class IntegrationRuntimeOperations(object): - """IntegrationRuntimeOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_factory( - self, - resource_group_name, # type: str - factory_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["models.IntegrationRuntimeListResponse"] - """Lists integration runtimes. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
- :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either IntegrationRuntimeListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.IntegrationRuntimeListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize('IntegrationRuntimeListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes'} # type: ignore - - def create_or_update( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - properties, # type: "models.IntegrationRuntime" - if_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.IntegrationRuntimeResource" - """Creates or updates an integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param properties: Integration runtime properties. - :type properties: ~data_factory_management_client.models.IntegrationRuntime - :param if_match: ETag of the integration runtime entity. 
Should only be specified for update, - for which it should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - integration_runtime = models.IntegrationRuntimeResource(properties=properties) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(integration_runtime, 'IntegrationRuntimeResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore - - def get( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - if_none_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Optional["models.IntegrationRuntimeResource"] - """Gets an integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
- :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param if_none_match: ETag of the integration runtime entity. Should only be specified for get. - If the ETag matches the existing entity tag, or if * was provided, then no content will be - returned. - :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource or None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.IntegrationRuntimeResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 304]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore - - def update( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - auto_update=None, # type: Optional[Union[str, "models.IntegrationRuntimeAutoUpdate"]] - update_delay_offset=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.IntegrationRuntimeResource" - """Updates an integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. 
- :type integration_runtime_name: str - :param auto_update: Enables or disables the auto-update feature of the self-hosted integration - runtime. See https://go.microsoft.com/fwlink/?linkid=854189. - :type auto_update: str or ~data_factory_management_client.models.IntegrationRuntimeAutoUpdate - :param update_delay_offset: The time offset (in hours) in the day, e.g., PT03H is 3 hours. The - integration runtime auto update will happen on that time. - :type update_delay_offset: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - update_integration_runtime_request = models.UpdateIntegrationRuntimeRequest(auto_update=auto_update, update_delay_offset=update_delay_offset) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(update_integration_runtime_request, 'UpdateIntegrationRuntimeRequest') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore - - def delete( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - **kwargs # type: Any - ): - # type: 
(...) -> None - """Deletes an integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore - - def get_status( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "models.IntegrationRuntimeStatusResponse" - """Gets detailed status information for an integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. 
- :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeStatusResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeStatusResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get_status.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getStatus'} # type: ignore - - def get_connection_info( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "models.IntegrationRuntimeConnectionInfo" - """Gets the on-premises integration runtime connection information for encrypting the on-premises - data source credentials. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. 
- :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeConnectionInfo, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeConnectionInfo - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeConnectionInfo"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get_connection_info.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeConnectionInfo', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get_connection_info.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getConnectionInfo'} # type: ignore - - def regenerate_auth_key( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - key_name=None, # type: Optional[Union[str, "models.IntegrationRuntimeAuthKeyName"]] - **kwargs # type: Any - ): - # type: (...) -> "models.IntegrationRuntimeAuthKeys" - """Regenerates the authentication key for an integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param key_name: The name of the authentication key to regenerate. 
- :type key_name: str or ~data_factory_management_client.models.IntegrationRuntimeAuthKeyName - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeAuthKeys, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeAuthKeys - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeAuthKeys"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - regenerate_key_parameters = models.IntegrationRuntimeRegenerateKeyParameters(key_name=key_name) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.regenerate_auth_key.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(regenerate_key_parameters, 'IntegrationRuntimeRegenerateKeyParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeAuthKeys', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - regenerate_auth_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/regenerateAuthKey'} # type: ignore - - def list_auth_key( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "models.IntegrationRuntimeAuthKeys" - """Retrieves the authentication keys for an integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. 
- :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeAuthKeys, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeAuthKeys - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeAuthKeys"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.list_auth_key.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeAuthKeys', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - list_auth_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/listAuthKeys'} # type: ignore - - def _start_initial( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> Optional["models.IntegrationRuntimeStatusResponse"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.IntegrationRuntimeStatusResponse"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self._start_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start'} # type: ignore - - def begin_start( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller["models.IntegrationRuntimeStatusResponse"] - """Starts a ManagedReserved type integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
- :return: An instance of LROPoller that returns either IntegrationRuntimeStatusResponse or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[~data_factory_management_client.models.IntegrationRuntimeStatusResponse] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = self._start_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - integration_runtime_name=integration_runtime_name, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start'} # type: ignore - - def _stop_initial( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> None - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self._stop_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop'} # type: ignore - - def begin_stop( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] - """Stops a ManagedReserved type integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
- :return: An instance of LROPoller that returns either None or the result of cls(response)
- :rtype: ~azure.core.polling.LROPoller[None]
- :raises ~azure.core.exceptions.HttpResponseError:
- """
- polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- lro_delay = kwargs.pop(
- 'polling_interval',
- self._config.polling_interval
- )
- cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
- if cont_token is None:
- raw_result = self._stop_initial(
- resource_group_name=resource_group_name,
- factory_name=factory_name,
- integration_runtime_name=integration_runtime_name,
- cls=lambda x,y,z: x,
- **kwargs
- )
-
- kwargs.pop('error_map', None)
- kwargs.pop('content_type', None)
-
- def get_long_running_output(pipeline_response):
- if cls:
- return cls(pipeline_response, None, {})
-
- if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
- elif polling is False: polling_method = NoPolling()
- else: polling_method = polling
- if cont_token:
- return LROPoller.from_continuation_token(
- polling_method=polling_method,
- continuation_token=cont_token,
- client=self._client,
- deserialization_callback=get_long_running_output
- )
- else:
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop'} # type: ignore
-
- def sync_credentials(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> None
- """Force the integration runtime to synchronize credentials across integration runtime nodes; this
- will override the credentials across all worker nodes with those available on the dispatcher
- node. If you already have the latest credential backup file, you should manually import it
- (preferred) on any self-hosted integration runtime node rather than using this API directly.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.sync_credentials.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - sync_credentials.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/syncCredentials'} # type: ignore - - def get_monitoring_data( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "models.IntegrationRuntimeMonitoringData" - """Get the integration runtime monitoring data, which includes the monitor data for all the nodes - under this integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. 
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeMonitoringData, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeMonitoringData
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeMonitoringData"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get_monitoring_data.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('IntegrationRuntimeMonitoringData', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get_monitoring_data.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/monitoringData'} # type: ignore
-
- def upgrade(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> None
- """Upgrade the self-hosted integration runtime to the latest version if one is available.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.upgrade.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- upgrade.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/upgrade'} # type: ignore
-
- def remove_link(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- linked_factory_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> None
- """Remove all linked integration runtimes under a specific data factory in a self-hosted
- integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param linked_factory_name: The data factory name for the linked integration runtime.
- :type linked_factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - linked_integration_runtime_request = models.LinkedIntegrationRuntimeRequest(linked_factory_name=linked_factory_name) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.remove_link.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(linked_integration_runtime_request, 'LinkedIntegrationRuntimeRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - remove_link.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/removeLinks'} # type: ignore - - def create_linked_integration_runtime( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - name=None, # type: Optional[str] - subscription_id=None, # type: Optional[str] - data_factory_name=None, # type: Optional[str] - data_factory_location=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.IntegrationRuntimeStatusResponse" - """Create a linked integration runtime entry in a shared integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param name: The name of the linked integration runtime. 
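`create_linked_integration_runtime`, whose docstring continues below, flattens the four CreateLinkedIntegrationRuntimeRequest fields into plain keyword arguments. A hedged sketch, reusing the assumed `client` from the sketch above:

status = client.integration_runtimes.create_linked_integration_runtime(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    integration_runtime_name="exampleIntegrationRuntime",
    name="exampleLinkedIntegrationRuntime",
    subscription_id="<linked-subscription-id>",
    data_factory_name="exampleLinkedFactory",
    data_factory_location="East US",
)
# Returns an IntegrationRuntimeStatusResponse; the field access below is assumed.
print(status.name)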
- :type name: str - :param subscription_id: The ID of the subscription that the linked integration runtime belongs - to. - :type subscription_id: str - :param data_factory_name: The name of the data factory that the linked integration runtime - belongs to. - :type data_factory_name: str - :param data_factory_location: The location of the data factory that the linked integration - runtime belongs to. - :type data_factory_location: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeStatusResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeStatusResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - create_linked_integration_runtime_request = models.CreateLinkedIntegrationRuntimeRequest(name=name, subscription_id=subscription_id, data_factory_name=data_factory_name, data_factory_location=data_factory_location) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_linked_integration_runtime.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(create_linked_integration_runtime_request, 'CreateLinkedIntegrationRuntimeRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_linked_integration_runtime.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/linkedIntegrationRuntime'} # type: ignore diff --git 
a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtimes_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtimes_operations.py index d0a57313403..651865ae0ea 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtimes_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtimes_operations.py @@ -465,6 +465,69 @@ def get_status( return deserialized get_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getStatus'} # type: ignore + def list_outbound_network_dependencies_endpoints( + self, + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse" + """Gets the list of outbound network dependencies for a given Azure-SSIS integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse, or the result of cls(response) + :rtype: ~data_factory_management_client.models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.list_outbound_network_dependencies_endpoints.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + 
raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list_outbound_network_dependencies_endpoints.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/outboundNetworkDependenciesEndpoints'} # type: ignore + def get_connection_info( self, resource_group_name, # type: str diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_service_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_service_operations.py deleted file mode 100644 index 7124cb588eb..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_service_operations.py +++ /dev/null @@ -1,320 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class LinkedServiceOperations(object): - """LinkedServiceOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_factory( - self, - resource_group_name, # type: str - factory_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["models.LinkedServiceListResponse"] - """Lists linked services. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
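The `list_outbound_network_dependencies_endpoints` operation added above is, despite the `list_` prefix, a plain GET that deserializes into a single response object rather than an ItemPaged pager. A sketch of calling it, under the same client assumptions as earlier:

endpoints = client.integration_runtimes.list_outbound_network_dependencies_endpoints(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    integration_runtime_name="exampleSsisIntegrationRuntime",
)
# One response object whose .value holds per-category endpoint groups.
for category in endpoints.value or []:
    print(category.category)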
- :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either LinkedServiceListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.LinkedServiceListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize('LinkedServiceListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices'} # type: ignore - - def create_or_update( - self, - resource_group_name, # type: str - factory_name, # type: str - linked_service_name, # type: str - properties, # type: "models.LinkedService" - if_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.LinkedServiceResource" - """Creates or updates a linked service. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param linked_service_name: The linked service name. - :type linked_service_name: str - :param properties: Properties of linked service. - :type properties: ~data_factory_management_client.models.LinkedService - :param if_match: ETag of the linkedService entity. 
Should only be specified for update, for - which it should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: LinkedServiceResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.LinkedServiceResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - linked_service = models.LinkedServiceResource(properties=properties) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(linked_service, 'LinkedServiceResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('LinkedServiceResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} # type: ignore - - def get( - self, - resource_group_name, # type: str - factory_name, # type: str - linked_service_name, # type: str - if_none_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Optional["models.LinkedServiceResource"] - """Gets a linked service. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param linked_service_name: The linked service name. 
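The deleted linked-service `create_or_update` wraps the caller's `properties` into a LinkedServiceResource envelope before the PUT, so callers pass a typed LinkedService payload directly. A sketch with an Azure Storage linked service — the `linked_service` attribute and the model usage are assumptions:

from data_factory_management_client import models  # assumed import path

resource = client.linked_service.create_or_update(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    linked_service_name="exampleLinkedService",
    properties=models.AzureStorageLinkedService(
        connection_string={
            "type": "SecureString",
            "value": "DefaultEndpointsProtocol=https;AccountName=<account>;AccountKey=<key>",
        }
    ),
    if_match=None,  # pass the current ETag instead to guard against concurrent updates
)
print(resource.etag)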
- :type linked_service_name: str - :param if_none_match: ETag of the linked service entity. Should only be specified for get. If - the ETag matches the existing entity tag, or if * was provided, then no content will be - returned. - :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: LinkedServiceResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.LinkedServiceResource or None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.LinkedServiceResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 304]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('LinkedServiceResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} # type: ignore - - def delete( - self, - resource_group_name, # type: str - factory_name, # type: str - linked_service_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Deletes a linked service. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param linked_service_name: The linked service name. 
- :type linked_service_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoint_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoint_operations.py deleted file mode 100644 index 29be0bd0e6d..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoint_operations.py +++ /dev/null @@ -1,344 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. 
import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class ManagedPrivateEndpointOperations(object): - """ManagedPrivateEndpointOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_factory( - self, - resource_group_name, # type: str - factory_name, # type: str - managed_virtual_network_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["models.ManagedPrivateEndpointListResponse"] - """Lists managed private endpoints. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. - :type managed_virtual_network_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ManagedPrivateEndpointListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.ManagedPrivateEndpointListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] 
- request = self._client.get(url, query_parameters, header_parameters) - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize('ManagedPrivateEndpointListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints'} # type: ignore - - def create_or_update( - self, - resource_group_name, # type: str - factory_name, # type: str - managed_virtual_network_name, # type: str - managed_private_endpoint_name, # type: str - if_match=None, # type: Optional[str] - connection_state=None, # type: Optional["models.ConnectionStateProperties"] - fqdns=None, # type: Optional[List[str]] - group_id=None, # type: Optional[str] - private_link_resource_id=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.ManagedPrivateEndpointResource" - """Creates or updates a managed private endpoint. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. - :type managed_virtual_network_name: str - :param managed_private_endpoint_name: Managed private endpoint name. - :type managed_private_endpoint_name: str - :param if_match: ETag of the managed private endpoint entity. Should only be specified for - update, for which it should match existing entity or can be * for unconditional update. - :type if_match: str - :param connection_state: The managed private endpoint connection state. - :type connection_state: ~data_factory_management_client.models.ConnectionStateProperties - :param fqdns: Fully qualified domain names. - :type fqdns: list[str] - :param group_id: The groupId to which the managed private endpoint is created. - :type group_id: str - :param private_link_resource_id: The ARM resource ID of the resource to which the managed - private endpoint is created. 
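As with the linked-runtime request earlier, the managed-private-endpoint `create_or_update` being removed here flattens its payload (`connection_state`, `fqdns`, `group_id`, `private_link_resource_id`) into keyword arguments and assembles the ManagedPrivateEndpointResource itself. A sketch under the same client assumptions:

endpoint = client.managed_private_endpoint.create_or_update(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    managed_virtual_network_name="exampleManagedVirtualNetwork",
    managed_private_endpoint_name="exampleManagedPrivateEndpoint",
    group_id="blob",
    private_link_resource_id="/subscriptions/<subscription-id>/resourceGroups/exampleResourceGroup/providers/Microsoft.Storage/storageAccounts/examplestorage",
    fqdns=["examplestorage.blob.core.windows.net"],  # hypothetical FQDN
)
print(endpoint.name)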
- :type private_link_resource_id: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ManagedPrivateEndpointResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ManagedPrivateEndpointResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - managed_private_endpoint = models.ManagedPrivateEndpointResource(connection_state=connection_state, fqdns=fqdns, group_id=group_id, private_link_resource_id=private_link_resource_id) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(managed_private_endpoint, 'ManagedPrivateEndpointResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ManagedPrivateEndpointResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore - - def get( - self, - resource_group_name, # type: str - factory_name, # type: str - 
managed_virtual_network_name, # type: str - managed_private_endpoint_name, # type: str - if_none_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.ManagedPrivateEndpointResource" - """Gets a managed private endpoint. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. - :type managed_virtual_network_name: str - :param managed_private_endpoint_name: Managed private endpoint name. - :type managed_private_endpoint_name: str - :param if_none_match: ETag of the managed private endpoint entity. Should only be specified for - get. If the ETag matches the existing entity tag, or if * was provided, then no content will be - returned. - :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ManagedPrivateEndpointResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ManagedPrivateEndpointResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ManagedPrivateEndpointResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore - - def delete( - self, - resource_group_name, # type: str - factory_name, # type: str - managed_virtual_network_name, # type: str - managed_private_endpoint_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Deletes a managed private endpoint. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. - :type managed_virtual_network_name: str - :param managed_private_endpoint_name: Managed private endpoint name. - :type managed_private_endpoint_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_network_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_network_operations.py 
deleted file mode 100644 index fa043ca3e59..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_network_operations.py +++ /dev/null @@ -1,262 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class ManagedVirtualNetworkOperations(object): - """ManagedVirtualNetworkOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_factory( - self, - resource_group_name, # type: str - factory_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["models.ManagedVirtualNetworkListResponse"] - """Lists managed Virtual Networks. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
- :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ManagedVirtualNetworkListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.ManagedVirtualNetworkListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize('ManagedVirtualNetworkListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks'} # type: ignore - - def create_or_update( - self, - resource_group_name, # type: str - factory_name, # type: str - managed_virtual_network_name, # type: str - properties, # type: "models.ManagedVirtualNetwork" - if_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.ManagedVirtualNetworkResource" - """Creates or updates a managed Virtual Network. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. - :type managed_virtual_network_name: str - :param properties: Managed Virtual Network properties. 
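The managed-virtual-network `create_or_update`, continued below, takes a ManagedVirtualNetwork properties object with no required fields and wraps it into a ManagedVirtualNetworkResource before the PUT. A sketch — the `managed_virtual_network` attribute name is an assumption:

network = client.managed_virtual_network.create_or_update(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    managed_virtual_network_name="exampleManagedVirtualNetwork",
    properties=models.ManagedVirtualNetwork(),  # vnet_id and alias are read-only
)
for net in client.managed_virtual_network.list_by_factory(
        "exampleResourceGroup", "exampleFactoryName"):
    print(net.name)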
- :type properties: ~data_factory_management_client.models.ManagedVirtualNetwork - :param if_match: ETag of the managed Virtual Network entity. Should only be specified for - update, for which it should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ManagedVirtualNetworkResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ManagedVirtualNetworkResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - managed_virtual_network = models.ManagedVirtualNetworkResource(properties=properties) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(managed_virtual_network, 'ManagedVirtualNetworkResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ManagedVirtualNetworkResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}'} # type: ignore - - def get( - self, - resource_group_name, # type: str - factory_name, # type: str - managed_virtual_network_name, # type: str - if_none_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) 
-> "models.ManagedVirtualNetworkResource" - """Gets a managed Virtual Network. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. - :type managed_virtual_network_name: str - :param if_none_match: ETag of the managed Virtual Network entity. Should only be specified for - get. If the ETag matches the existing entity tag, or if * was provided, then no content will be - returned. - :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ManagedVirtualNetworkResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ManagedVirtualNetworkResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ManagedVirtualNetworkResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operation_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operation_operations.py deleted file mode 100644 index c5cf3d43f6d..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operation_operations.py +++ /dev/null @@ -1,106 +0,0 @@ -# coding=utf-8 -# 
-------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class OperationOperations(object): - """OperationOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list( - self, - **kwargs # type: Any - ): - # type: (...) -> Iterable["models.OperationListResponse"] - """Lists the available Azure Data Factory API operations. 
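For orientation, a minimal usage sketch of the paged list() operation defined below, assuming the public DataFactoryManagementClient and an azure-identity credential (the CLI itself constructs the vendored equivalent through its client factories):

    from azure.identity import DefaultAzureCredential
    from azure.mgmt.datafactory import DataFactoryManagementClient

    # Assumed construction; the subscription id is a placeholder.
    client = DataFactoryManagementClient(DefaultAzureCredential(), "<subscription-id>")

    # list() returns an ItemPaged: each iteration step deserializes an
    # OperationListResponse page and follows next_link until exhausted.
    for operation in client.operations.list():
        print(operation.name)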
- - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either OperationListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.OperationListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.OperationListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list.metadata['url'] # type: ignore - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize('OperationListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': '/providers/Microsoft.DataFactory/operations'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_operations.py deleted file mode 100644 index d82f423f2cb..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_operations.py +++ /dev/null @@ -1,414 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. 
import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class PipelineOperations(object): - """PipelineOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_factory( - self, - resource_group_name, # type: str - factory_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["models.PipelineListResponse"] - """Lists pipelines. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either PipelineListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.PipelineListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize('PipelineListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run(request, 
stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines'} # type: ignore - - def create_or_update( - self, - resource_group_name, # type: str - factory_name, # type: str - pipeline_name, # type: str - pipeline, # type: "models.PipelineResource" - if_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.PipelineResource" - """Creates or updates a pipeline. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param pipeline_name: The pipeline name. - :type pipeline_name: str - :param pipeline: Pipeline resource definition. - :type pipeline: ~data_factory_management_client.models.PipelineResource - :param if_match: ETag of the pipeline entity. Should only be specified for update, for which - it should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: PipelineResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.PipelineResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(pipeline, 'PipelineResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response 
= pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('PipelineResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} # type: ignore - - def get( - self, - resource_group_name, # type: str - factory_name, # type: str - pipeline_name, # type: str - if_none_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Optional["models.PipelineResource"] - """Gets a pipeline. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param pipeline_name: The pipeline name. - :type pipeline_name: str - :param if_none_match: ETag of the pipeline entity. Should only be specified for get. If the - ETag matches the existing entity tag, or if * was provided, then no content will be returned. - :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: PipelineResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.PipelineResource or None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.PipelineResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 304]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('PipelineResource', pipeline_response) - - if cls: - return cls(pipeline_response, 
deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} # type: ignore - - def delete( - self, - resource_group_name, # type: str - factory_name, # type: str - pipeline_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Deletes a pipeline. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param pipeline_name: The pipeline name. - :type pipeline_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} # type: ignore - - def create_run( - self, - resource_group_name, # type: str - factory_name, # type: str - pipeline_name, # type: str - reference_pipeline_run_id=None, # type: Optional[str] - is_recovery=None, # type: Optional[bool] - start_activity_name=None, # type: Optional[str] - start_from_failure=None, # type: Optional[bool] - parameters=None, # type: Optional[Dict[str, object]] - **kwargs # type: Any - ): - # type: (...) -> "models.CreateRunResponse" - """Creates a run of a pipeline. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param pipeline_name: The pipeline name. - :type pipeline_name: str - :param reference_pipeline_run_id: The pipeline run identifier. 
If run ID is specified the - parameters of the specified run will be used to create a new run. - :type reference_pipeline_run_id: str - :param is_recovery: Recovery mode flag. If recovery mode is set to true, the specified - referenced pipeline run and the new run will be grouped under the same groupId. - :type is_recovery: bool - :param start_activity_name: In recovery mode, the rerun will start from this activity. If not - specified, all activities will run. - :type start_activity_name: str - :param start_from_failure: In recovery mode, if set to true, the rerun will start from failed - activities. The property will be used only if startActivityName is not specified. - :type start_from_failure: bool - :param parameters: Parameters of the pipeline run. These parameters will be used only if the - runId is not specified. - :type parameters: dict[str, object] - :keyword callable cls: A custom type or function that will be passed the direct response - :return: CreateRunResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.CreateRunResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.CreateRunResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_run.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - if reference_pipeline_run_id is not None: - query_parameters['referencePipelineRunId'] = self._serialize.query("reference_pipeline_run_id", reference_pipeline_run_id, 'str') - if is_recovery is not None: - query_parameters['isRecovery'] = self._serialize.query("is_recovery", is_recovery, 'bool') - if start_activity_name is not None: - query_parameters['startActivityName'] = self._serialize.query("start_activity_name", start_activity_name, 'str') - if start_from_failure is not None: - query_parameters['startFromFailure'] = self._serialize.query("start_from_failure", start_from_failure, 'bool') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - if parameters is not None: - body_content = self._serialize.body(parameters, '{object}') - else: - body_content = None - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) 
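The optional query parameters serialized just above implement run recovery. A hedged sketch of a recovery rerun, following the flattened signature of this deleted create_run (client as constructed in the earlier sketch; "pipelines" is the public SDK's operation-group attribute name):

    # Rerun a failed pipeline run, reusing its parameters and grouping the old
    # and new runs under one groupId.
    run = client.pipelines.create_run(
        resource_group_name="exampleResourceGroup",
        factory_name="exampleFactoryName",
        pipeline_name="examplePipeline",
        reference_pipeline_run_id="<failed-run-id>",  # reuse that run's parameters
        is_recovery=True,            # group both runs under the same groupId
        start_from_failure=True,     # honoured only when start_activity_name is unset
    )
    print(run.run_id)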
- response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('CreateRunResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_run.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}/createRun'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_run_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_run_operations.py deleted file mode 100644 index 75634fde5ac..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_run_operations.py +++ /dev/null @@ -1,250 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -import datetime -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class PipelineRunOperations(object): - """PipelineRunOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def query_by_factory( - self, - resource_group_name, # type: str - factory_name, # type: str - last_updated_after, # type: datetime.datetime - last_updated_before, # type: datetime.datetime - continuation_token_parameter=None, # type: Optional[str] - filters=None, # type: Optional[List["models.RunQueryFilter"]] - order_by=None, # type: Optional[List["models.RunQueryOrderBy"]] - **kwargs # type: Any - ): - # type: (...) -> "models.PipelineRunsQueryResponse" - """Query pipeline runs in the factory based on input filter conditions. - - :param resource_group_name: The resource group name. 
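A hedged sketch of the query surface documented below, following the flattened signature of this deleted method (the public SDK packages the same fields into a RunFilterParameters object instead; client as before):

    import datetime

    from azure.mgmt.datafactory.models import RunQueryFilter

    now = datetime.datetime.utcnow()
    runs = client.pipeline_runs.query_by_factory(
        resource_group_name="exampleResourceGroup",
        factory_name="exampleFactoryName",
        last_updated_after=now - datetime.timedelta(days=1),
        last_updated_before=now,
        # Filter to a single pipeline; operand and operator values are model enums.
        filters=[RunQueryFilter(operand="PipelineName", operator="Equals",
                                values=["examplePipeline"])],
    )
    for run in runs.value:
        print(run.run_id, run.status)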
- :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param last_updated_after: The time at or after which the run event was updated in 'ISO 8601' - format. - :type last_updated_after: ~datetime.datetime - :param last_updated_before: The time at or before which the run event was updated in 'ISO 8601' - format. - :type last_updated_before: ~datetime.datetime - :param continuation_token_parameter: The continuation token for getting the next page of - results. Null for first page. - :type continuation_token_parameter: str - :param filters: List of filters. - :type filters: list[~data_factory_management_client.models.RunQueryFilter] - :param order_by: List of OrderBy option. - :type order_by: list[~data_factory_management_client.models.RunQueryOrderBy] - :keyword callable cls: A custom type or function that will be passed the direct response - :return: PipelineRunsQueryResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.PipelineRunsQueryResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRunsQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.query_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('PipelineRunsQueryResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryPipelineRuns'} # type: 
ignore - - def get( - self, - resource_group_name, # type: str - factory_name, # type: str - run_id, # type: str - **kwargs # type: Any - ): - # type: (...) -> "models.PipelineRun" - """Get a pipeline run by its run ID. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param run_id: The pipeline run identifier. - :type run_id: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: PipelineRun, or the result of cls(response) - :rtype: ~data_factory_management_client.models.PipelineRun - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRun"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'runId': self._serialize.url("run_id", run_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('PipelineRun', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}'} # type: ignore - - def cancel( - self, - resource_group_name, # type: str - factory_name, # type: str - run_id, # type: str - is_recursive=None, # type: Optional[bool] - **kwargs # type: Any - ): - # type: (...) -> None - """Cancel a pipeline run by its run ID. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param run_id: The pipeline run identifier. - :type run_id: str - :param is_recursive: If true, cancel all the Child pipelines that are triggered by the current - pipeline. 
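A hedged companion sketch for cancel() and its is_recursive flag (client and operation-group attribute assumed as in the earlier sketches):

    # Cancel a run and, recursively, any child runs it triggered.
    client.pipeline_runs.cancel(
        resource_group_name="exampleResourceGroup",
        factory_name="exampleFactoryName",
        run_id="<run-id>",
        is_recursive=True,
    )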
- :type is_recursive: bool - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.cancel.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'runId': self._serialize.url("run_id", run_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if is_recursive is not None: - query_parameters['isRecursive'] = self._serialize.query("is_recursive", is_recursive, 'bool') - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/cancel'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_operations.py deleted file mode 100644 index 142f32f2c31..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_operations.py +++ /dev/null @@ -1,895 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.mgmt.core.exceptions import ARMErrorFormat -from azure.mgmt.core.polling.arm_polling import ARMPolling - -from .. 
import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class TriggerOperations(object): - """TriggerOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_factory( - self, - resource_group_name, # type: str - factory_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["models.TriggerListResponse"] - """Lists triggers. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either TriggerListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.TriggerListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize('TriggerListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run(request, 
stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers'} # type: ignore - - def query_by_factory( - self, - resource_group_name, # type: str - factory_name, # type: str - continuation_token_parameter=None, # type: Optional[str] - parent_trigger_name=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.TriggerQueryResponse" - """Query triggers. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param continuation_token_parameter: The continuation token for getting the next page of - results. Null for first page. - :type continuation_token_parameter: str - :param parent_trigger_name: The name of the parent TumblingWindowTrigger to get the child rerun - triggers. - :type parent_trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerQueryResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.TriggerQueryResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - filter_parameters = models.TriggerFilterParameters(continuation_token=continuation_token_parameter, parent_trigger_name=parent_trigger_name) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.query_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(filter_parameters, 'TriggerFilterParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise 
HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('TriggerQueryResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/querytriggers'} # type: ignore - - def create_or_update( - self, - resource_group_name, # type: str - factory_name, # type: str - trigger_name, # type: str - properties, # type: "models.Trigger" - if_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.TriggerResource" - """Creates or updates a trigger. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :param properties: Properties of the trigger. - :type properties: ~data_factory_management_client.models.Trigger - :param if_match: ETag of the trigger entity. Should only be specified for update, for which it - should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.TriggerResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - trigger = models.TriggerResource(properties=properties) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(trigger, 'TriggerResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - 
map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('TriggerResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore - - def get( - self, - resource_group_name, # type: str - factory_name, # type: str - trigger_name, # type: str - if_none_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Optional["models.TriggerResource"] - """Gets a trigger. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :param if_none_match: ETag of the trigger entity. Should only be specified for get. If the ETag - matches the existing entity tag, or if * was provided, then no content will be returned. - :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.TriggerResource or None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 304]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('TriggerResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore - - def delete( - self, - resource_group_name, # type: str - factory_name, # type: str - trigger_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Deletes a trigger. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore - - def _subscribe_to_event_initial( - self, - resource_group_name, # type: str - factory_name, # type: str - trigger_name, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> Optional["models.TriggerSubscriptionOperationStatus"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self._subscribe_to_event_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - _subscribe_to_event_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore - - def begin_subscribe_to_event( - self, - resource_group_name, # type: str - factory_name, # type: str - trigger_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller["models.TriggerSubscriptionOperationStatus"] - """Subscribe event trigger to events. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
- :return: An instance of LROPoller that returns either TriggerSubscriptionOperationStatus or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[~data_factory_management_client.models.TriggerSubscriptionOperationStatus] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = self._subscribe_to_event_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_subscribe_to_event.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore - - def get_event_subscription_status( - self, - resource_group_name, # type: str - factory_name, # type: str - trigger_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "models.TriggerSubscriptionOperationStatus" - """Get a trigger's event subscription status. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. 
- :type trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerSubscriptionOperationStatus, or the result of cls(response) - :rtype: ~data_factory_management_client.models.TriggerSubscriptionOperationStatus - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get_event_subscription_status.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get_event_subscription_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/getEventSubscriptionStatus'} # type: ignore - - def _unsubscribe_from_event_initial( - self, - resource_group_name, # type: str - factory_name, # type: str - trigger_name, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> Optional["models.TriggerSubscriptionOperationStatus"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self._unsubscribe_from_event_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - _unsubscribe_from_event_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore - - def begin_unsubscribe_from_event( - self, - resource_group_name, # type: str - factory_name, # type: str - trigger_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller["models.TriggerSubscriptionOperationStatus"] - """Unsubscribe event trigger from events. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
- :return: An instance of LROPoller that returns either TriggerSubscriptionOperationStatus or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[~data_factory_management_client.models.TriggerSubscriptionOperationStatus] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = self._unsubscribe_from_event_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_unsubscribe_from_event.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore - - def _start_initial( - self, - resource_group_name, # type: str - factory_name, # type: str - trigger_name, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> None - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self._start_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start'} # type: ignore - - def begin_start( - self, - resource_group_name, # type: str - factory_name, # type: str - trigger_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] - """Starts a trigger. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
- :return: An instance of LROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = self._start_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - if cls: - return cls(pipeline_response, None, {}) - - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start'} # type: ignore - - def _stop_initial( - self, - resource_group_name, # type: str - factory_name, # type: str - trigger_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self._stop_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop'} # type: ignore - - def begin_stop( - 
self, - resource_group_name, # type: str - factory_name, # type: str - trigger_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] - """Stops a trigger. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of LROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = self._stop_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - if cls: - return cls(pipeline_response, None, {}) - - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_run_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_run_operations.py deleted file mode 100644 index 3290d8196ab..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_run_operations.py +++ /dev/null @@ -1,248 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- -import datetime -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class TriggerRunOperations(object): - """TriggerRunOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def rerun( - self, - resource_group_name, # type: str - factory_name, # type: str - trigger_name, # type: str - run_id, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Rerun single trigger instance by runId. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :param run_id: The pipeline run identifier. 
- :type run_id: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.rerun.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - 'runId': self._serialize.url("run_id", run_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - rerun.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/rerun'} # type: ignore - - def cancel( - self, - resource_group_name, # type: str - factory_name, # type: str - trigger_name, # type: str - run_id, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Cancel a single trigger instance by runId. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :param run_id: The pipeline run identifier. 
- :type run_id: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.cancel.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - 'runId': self._serialize.url("run_id", run_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/cancel'} # type: ignore - - def query_by_factory( - self, - resource_group_name, # type: str - factory_name, # type: str - last_updated_after, # type: datetime.datetime - last_updated_before, # type: datetime.datetime - continuation_token_parameter=None, # type: Optional[str] - filters=None, # type: Optional[List["models.RunQueryFilter"]] - order_by=None, # type: Optional[List["models.RunQueryOrderBy"]] - **kwargs # type: Any - ): - # type: (...) -> "models.TriggerRunsQueryResponse" - """Query trigger runs. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param last_updated_after: The time at or after which the run event was updated in 'ISO 8601' - format. - :type last_updated_after: ~datetime.datetime - :param last_updated_before: The time at or before which the run event was updated in 'ISO 8601' - format. - :type last_updated_before: ~datetime.datetime - :param continuation_token_parameter: The continuation token for getting the next page of - results. Null for first page. - :type continuation_token_parameter: str - :param filters: List of filters. - :type filters: list[~data_factory_management_client.models.RunQueryFilter] - :param order_by: List of OrderBy option. 
- :type order_by: list[~data_factory_management_client.models.RunQueryOrderBy] - :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerRunsQueryResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.TriggerRunsQueryResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerRunsQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.query_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('TriggerRunsQueryResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryTriggerRuns'} # type: ignore diff --git a/src/datafactory/gen.zip b/src/datafactory/gen.zip deleted file mode 100644 index 296cd2dfd07..00000000000 Binary files a/src/datafactory/gen.zip and /dev/null differ diff --git a/src/datafactory/linter_exclusions.yml b/src/datafactory/linter_exclusions.yml deleted file mode 100644 index cdfa831be54..00000000000 --- a/src/datafactory/linter_exclusions.yml +++ /dev/null @@ -1,5 +0,0 @@ -datafactory get-git-hub-access-token: - parameters: - git_hub_access_token_base_url: - rule_exclusions: - - option_length_too_long diff --git a/src/datafactory/report.md b/src/datafactory/report.md index 1d9bdfb4cf1..0f562e875d9 100644 --- a/src/datafactory/report.md +++ b/src/datafactory/report.md @@ -19,6 +19,9 @@ |az 
datafactory activity-run|ActivityRuns|[commands](#CommandsInActivityRuns)| |az datafactory trigger|Triggers|[commands](#CommandsInTriggers)| |az datafactory trigger-run|TriggerRuns|[commands](#CommandsInTriggerRuns)| +|az datafactory private-end-point-connection|privateEndPointConnections|[commands](#CommandsInprivateEndPointConnections)| +|az datafactory private-endpoint-connection|PrivateEndpointConnection|[commands](#CommandsInPrivateEndpointConnection)| +|az datafactory private-link-resource|privateLinkResources|[commands](#CommandsInprivateLinkResources)| ## COMMANDS ### Commands in `az datafactory` group @@ -62,6 +65,7 @@ |[az datafactory integration-runtime get-monitoring-data](#IntegrationRuntimesGetMonitoringData)|GetMonitoringData|[Parameters](#ParametersIntegrationRuntimesGetMonitoringData)|[Example](#ExamplesIntegrationRuntimesGetMonitoringData)| |[az datafactory integration-runtime get-status](#IntegrationRuntimesGetStatus)|GetStatus|[Parameters](#ParametersIntegrationRuntimesGetStatus)|[Example](#ExamplesIntegrationRuntimesGetStatus)| |[az datafactory integration-runtime list-auth-key](#IntegrationRuntimesListAuthKeys)|ListAuthKeys|[Parameters](#ParametersIntegrationRuntimesListAuthKeys)|[Example](#ExamplesIntegrationRuntimesListAuthKeys)| +|[az datafactory integration-runtime list-outbound-network-dependency-endpoint](#IntegrationRuntimesListOutboundNetworkDependenciesEndpoints)|ListOutboundNetworkDependenciesEndpoints|[Parameters](#ParametersIntegrationRuntimesListOutboundNetworkDependenciesEndpoints)|[Example](#ExamplesIntegrationRuntimesListOutboundNetworkDependenciesEndpoints)| |[az datafactory integration-runtime regenerate-auth-key](#IntegrationRuntimesRegenerateAuthKey)|RegenerateAuthKey|[Parameters](#ParametersIntegrationRuntimesRegenerateAuthKey)|[Example](#ExamplesIntegrationRuntimesRegenerateAuthKey)| |[az datafactory integration-runtime remove-link](#IntegrationRuntimesRemoveLinks)|RemoveLinks|[Parameters](#ParametersIntegrationRuntimesRemoveLinks)|[Example](#ExamplesIntegrationRuntimesRemoveLinks)| |[az datafactory integration-runtime start](#IntegrationRuntimesStart)|Start|[Parameters](#ParametersIntegrationRuntimesStart)|[Example](#ExamplesIntegrationRuntimesStart)| @@ -103,6 +107,24 @@ |[az datafactory pipeline-run cancel](#PipelineRunsCancel)|Cancel|[Parameters](#ParametersPipelineRunsCancel)|[Example](#ExamplesPipelineRunsCancel)| |[az datafactory pipeline-run query-by-factory](#PipelineRunsQueryByFactory)|QueryByFactory|[Parameters](#ParametersPipelineRunsQueryByFactory)|[Example](#ExamplesPipelineRunsQueryByFactory)| +### Commands in `az datafactory private-end-point-connection` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory private-end-point-connection list](#privateEndPointConnectionsListByFactory)|ListByFactory|[Parameters](#ParametersprivateEndPointConnectionsListByFactory)|[Example](#ExamplesprivateEndPointConnectionsListByFactory)| + +### Commands in `az datafactory private-endpoint-connection` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory private-endpoint-connection show](#PrivateEndpointConnectionGet)|Get|[Parameters](#ParametersPrivateEndpointConnectionGet)|[Example](#ExamplesPrivateEndpointConnectionGet)| +|[az datafactory private-endpoint-connection 
create](#PrivateEndpointConnectionCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersPrivateEndpointConnectionCreateOrUpdate#Create)|[Example](#ExamplesPrivateEndpointConnectionCreateOrUpdate#Create)|
+|[az datafactory private-endpoint-connection update](#PrivateEndpointConnectionCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersPrivateEndpointConnectionCreateOrUpdate#Update)|Not Found|
+|[az datafactory private-endpoint-connection delete](#PrivateEndpointConnectionDelete)|Delete|[Parameters](#ParametersPrivateEndpointConnectionDelete)|[Example](#ExamplesPrivateEndpointConnectionDelete)|
+
+### Commands in `az datafactory private-link-resource` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az datafactory private-link-resource show](#privateLinkResourcesGet)|Get|[Parameters](#ParametersprivateLinkResourcesGet)|[Example](#ExamplesprivateLinkResourcesGet)|
+
 ### Commands in `az datafactory trigger` group
 |CLI Command|Operation Swagger name|Parameters|Examples|
 |---------|------------|--------|-----------|
@@ -176,9 +198,15 @@ az datafactory create --location "East US" --name "exampleFactoryName" --resourc
 |**--if-match**|string|ETag of the factory entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match|
 |**--location**|string|The resource location.|location|location|
 |**--tags**|dictionary|The resource tags.|tags|tags|
-|**--factory-vsts-configuration**|object|Factory's VSTS repo information.|factory_vsts_configuration|FactoryVSTSConfiguration|
-|**--factory-git-hub-configuration**|object|Factory's GitHub repo information.|factory_git_hub_configuration|FactoryGitHubConfiguration|
+|**--repo-configuration**|object|Git repo information of the factory.|repo_configuration|repoConfiguration|
 |**--global-parameters**|dictionary|List of parameters for factory.|global_parameters|globalParameters|
+|**--public-network-access**|choice|Whether or not public network access is allowed for the data factory.|public_network_access|publicNetworkAccess|
+|**--key-name**|string|The name of the key in Azure Key Vault to use as Customer Managed Key.|key_name|keyName|
+|**--vault-base-url**|string|The URL of the Azure Key Vault used for CMK.|vault_base_url|vaultBaseUrl|
+|**--key-version**|string|The version of the key used for CMK. If not provided, latest version will be used.|key_version|keyVersion|
+|**--identity**|object|User assigned identity to use to authenticate to customer's key vault. 
If not provided Managed Service Identity will be used.|identity|identity| +|**--type**|choice|The identity type.|type|type| +|**--user-assigned-identities**|dictionary|List of user assigned identities for the factory.|user_assigned_identities|userAssignedIdentities| #### Command `az datafactory update` @@ -193,6 +221,8 @@ az datafactory update --name "exampleFactoryName" --tags exampleTag="exampleValu |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--tags**|dictionary|The resource tags.|tags|tags| +|**--type**|choice|The identity type.|type|type| +|**--user-assigned-identities**|dictionary|List of user assigned identities for the factory.|user_assigned_identities|userAssignedIdentities| #### Command `az datafactory delete` @@ -211,17 +241,17 @@ az datafactory delete --name "exampleFactoryName" --resource-group "exampleResou ##### Example ``` az datafactory configure-factory-repo --factory-resource-id "/subscriptions/12345678-1234-1234-1234-12345678abc/resourc\ -eGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName" \ ---factory-vsts-configuration account-name="ADF" collaboration-branch="master" last-commit-id="" project-name="project" \ -repository-name="repo" root-folder="/" tenant-id="" --location "East US" +eGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName" --repo-configuration \ +"{\\"type\\":\\"FactoryVSTSConfiguration\\",\\"accountName\\":\\"ADF\\",\\"collaborationBranch\\":\\"master\\",\\"lastC\ +ommitId\\":\\"\\",\\"projectName\\":\\"project\\",\\"repositoryName\\":\\"repo\\",\\"rootFolder\\":\\"/\\",\\"tenantId\ +\\":\\"\\"}" --location "East US" ``` ##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--location**|string|The location identifier.|location|locationId| |**--factory-resource-id**|string|The factory resource id.|factory_resource_id|factoryResourceId| -|**--factory-vsts-configuration**|object|Factory's VSTS repo information.|factory_vsts_configuration|FactoryVSTSConfiguration| -|**--factory-git-hub-configuration**|object|Factory's GitHub repo information.|factory_git_hub_configuration|FactoryGitHubConfiguration| +|**--repo-configuration**|object|Git repo information of the factory.|repo_configuration|repoConfiguration| #### Command `az datafactory get-data-plane-access` @@ -257,6 +287,7 @@ az datafactory get-git-hub-access-token --name "exampleFactoryName" --git-hub-ac |**--git-hub-access-code**|string|GitHub access code.|git_hub_access_code|gitHubAccessCode| |**--git-hub-access-token-base-url**|string|GitHub access token base URL.|git_hub_access_token_base_url|gitHubAccessTokenBaseUrl| |**--git-hub-client-id**|string|GitHub application client ID.|git_hub_client_id|gitHubClientId| +|**--git-hub-client-secret**|object|GitHub bring your own app client secret information.|git_hub_client_secret|gitHubClientSecret| ### group `az datafactory activity-run` #### Command `az datafactory activity-run query-by-pipeline-run` @@ -423,6 +454,7 @@ az datafactory integration-runtime linked-integration-runtime create --name "bfa |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| |**--if-match**|string|ETag of the integration runtime entity. 
Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| |**--description**|string|Integration runtime description.|managed_description|description| +|**--managed-virtual-network**|object|Managed Virtual Network reference.|managed_managed_virtual_network|managedVirtualNetwork| |**--compute-properties**|object|The compute resource for managed integration runtime.|managed_compute_properties|computeProperties| |**--ssis-properties**|object|SSIS properties for managed integration runtime.|managed_ssis_properties|ssisProperties| @@ -529,6 +561,20 @@ az datafactory integration-runtime list-auth-key --factory-name "exampleFactoryN |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| +#### Command `az datafactory integration-runtime list-outbound-network-dependency-endpoint` + +##### Example +``` +az datafactory integration-runtime list-outbound-network-dependency-endpoint --factory-name "exampleFactoryName" \ +--name "exampleIntegrationRuntime" --resource-group "exampleResourceGroup" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| +|**--factory-name**|string|The factory name.|factory_name|factoryName| +|**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| + #### Command `az datafactory integration-runtime regenerate-auth-key` ##### Example @@ -823,7 +869,7 @@ et\\"}],\\"outputs\\":[{\\"type\\":\\"DatasetReference\\",\\"parameters\\":{\\"M "typeProperties\\":{\\"dataIntegrationUnits\\":32,\\"sink\\":{\\"type\\":\\"BlobSink\\"},\\"source\\":{\\"type\\":\\"Bl\ obSource\\"}}}],\\"isSequential\\":true,\\"items\\":{\\"type\\":\\"Expression\\",\\"value\\":\\"@pipeline().parameters.\ OutputBlobNameList\\"}}}]" --parameters "{\\"OutputBlobNameList\\":{\\"type\\":\\"Array\\"}}" --duration "0.00:10:00" \ ---name "examplePipeline" --resource-group "exampleResourceGroup" +--pipeline-name "examplePipeline" --resource-group "exampleResourceGroup" ``` ##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| @@ -840,7 +886,7 @@ OutputBlobNameList\\"}}}]" --parameters "{\\"OutputBlobNameList\\":{\\"type\\":\ |**--annotations**|array|List of tags that can be used for describing the Pipeline.|annotations|annotations| |**--run-dimensions**|dictionary|Dimensions emitted by Pipeline.|run_dimensions|runDimensions| |**--duration**|any|TimeSpan value, after which an Azure Monitoring Metric is fired.|duration|duration| -|**--folder-name**|string|The name of the folder that this Pipeline is in.|folder_name|name| +|**--name**|string|The name of the folder that this Pipeline is in.|name|name| #### Command `az datafactory pipeline delete` @@ -924,6 +970,91 @@ operator="Equals" values="examplePipeline" --last-updated-after "2018-06-16T00:3 |**--filters**|array|List of filters.|filters|filters| |**--order-by**|array|List of OrderBy option.|order_by|orderBy| +### group `az datafactory private-end-point-connection` +#### Command `az datafactory private-end-point-connection list` + +##### Example +``` +az datafactory private-end-point-connection list --factory-name "exampleFactoryName" --resource-group \ +"exampleResourceGroup" +``` +##### Parameters 
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--factory-name**|string|The factory name.|factory_name|factoryName|
+
+### group `az datafactory private-endpoint-connection`
+#### Command `az datafactory private-endpoint-connection show`
+
+##### Example
+```
+az datafactory private-endpoint-connection show --factory-name "exampleFactoryName" --name "connection" \
+--resource-group "exampleResourceGroup"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--factory-name**|string|The factory name.|factory_name|factoryName|
+|**--private-endpoint-connection-name**|string|The private endpoint connection name.|private_endpoint_connection_name|privateEndpointConnectionName|
+|**--if-none-match**|string|ETag of the private endpoint connection entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match|
+
+#### Command `az datafactory private-endpoint-connection create`
+
+##### Example
+```
+az datafactory private-endpoint-connection create --factory-name "exampleFactoryName" --name "connection" \
+--private-link-service-connection-state description="Approved by admin." actions-required="" status="Approved" \
+--resource-group "exampleResourceGroup"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--factory-name**|string|The factory name.|factory_name|factoryName|
+|**--private-endpoint-connection-name**|string|The private endpoint connection name.|private_endpoint_connection_name|privateEndpointConnectionName|
+|**--if-match**|string|ETag of the private endpoint connection entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match|
+|**--private-link-service-connection-state**|object|The state of a private link connection.|private_link_service_connection_state|privateLinkServiceConnectionState|
+
+#### Command `az datafactory private-endpoint-connection update`
+
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--factory-name**|string|The factory name.|factory_name|factoryName|
+|**--private-endpoint-connection-name**|string|The private endpoint connection name.|private_endpoint_connection_name|privateEndpointConnectionName|
+|**--if-match**|string|ETag of the private endpoint connection entity. 
Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match|
+|**--private-link-service-connection-state**|object|The state of a private link connection.|private_link_service_connection_state|privateLinkServiceConnectionState|
+
+#### Command `az datafactory private-endpoint-connection delete`
+
+##### Example
+```
+az datafactory private-endpoint-connection delete --factory-name "exampleFactoryName" --name "connection" \
+--resource-group "exampleResourceGroup"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--factory-name**|string|The factory name.|factory_name|factoryName|
+|**--private-endpoint-connection-name**|string|The private endpoint connection name.|private_endpoint_connection_name|privateEndpointConnectionName|
+
+### group `az datafactory private-link-resource`
+#### Command `az datafactory private-link-resource show`
+
+##### Example
+```
+az datafactory private-link-resource show --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--factory-name**|string|The factory name.|factory_name|factoryName|
+
 ### group `az datafactory trigger`
 #### Command `az datafactory trigger list`