diff --git a/sdk/synapse/azure-synapse-accesscontrol/CHANGELOG.md b/sdk/synapse/azure-synapse-accesscontrol/CHANGELOG.md index 18048a1f5127..85595cf9f42e 100644 --- a/sdk/synapse/azure-synapse-accesscontrol/CHANGELOG.md +++ b/sdk/synapse/azure-synapse-accesscontrol/CHANGELOG.md @@ -1,5 +1,10 @@ # Release History +## 0.6.0 (2021-03-09) + +* Internal bugfixes (re-generated with the latest generator) +* Drop Python 3.5 support + ## 0.5.0 (2021-02-09) * Update to API version 2020-08-01 diff --git a/sdk/synapse/azure-synapse-accesscontrol/README.md b/sdk/synapse/azure-synapse-accesscontrol/README.md index 84d1ca78544a..5238d56b16d7 100644 --- a/sdk/synapse/azure-synapse-accesscontrol/README.md +++ b/sdk/synapse/azure-synapse-accesscontrol/README.md @@ -1,7 +1,7 @@ # Microsoft Azure SDK for Python This is the Microsoft Azure Synapse AccessControl Client Library. -This package has been tested with Python 2.7, 3.5, 3.6, 3.7 and 3.8. +This package has been tested with Python 2.7, 3.6, 3.7, 3.8 and 3.9. For a more complete view of Azure libraries, see the [azure sdk python release](https://aka.ms/azsdk/python/all). diff --git a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_configuration.py b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_configuration.py index e0483341c9c0..98760fa803af 100644 --- a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_configuration.py +++ b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_configuration.py @@ -49,7 +49,7 @@ def __init__( self.endpoint = endpoint self.api_version = "2020-08-01-preview" self.credential_scopes = kwargs.pop('credential_scopes', ['https://dev.azuresynapse.net/.default']) - kwargs.setdefault('sdk_moniker', 'synapse/{}'.format(VERSION)) + kwargs.setdefault('sdk_moniker', 'synapse-accesscontrol/{}'.format(VERSION)) self._configure(**kwargs) def _configure( diff --git a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_version.py b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_version.py index c4551baee432..9d17420e1c80 100644 --- a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_version.py +++ b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated.
# -------------------------------------------------------------------------- -VERSION = "0.5.0" +VERSION = "0.6.0" diff --git a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/_configuration.py b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/_configuration.py index 45c6ee3bdbac..0066c4e742b7 100644 --- a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/_configuration.py +++ b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/_configuration.py @@ -46,7 +46,7 @@ def __init__( self.endpoint = endpoint self.api_version = "2020-08-01-preview" self.credential_scopes = kwargs.pop('credential_scopes', ['https://dev.azuresynapse.net/.default']) - kwargs.setdefault('sdk_moniker', 'synapse/{}'.format(VERSION)) + kwargs.setdefault('sdk_moniker', 'synapse-accesscontrol/{}'.format(VERSION)) self._configure(**kwargs) def _configure( diff --git a/sdk/synapse/azure-synapse-accesscontrol/setup.py b/sdk/synapse/azure-synapse-accesscontrol/setup.py index 0075458edebd..f03ccf83e79a 100644 --- a/sdk/synapse/azure-synapse-accesscontrol/setup.py +++ b/sdk/synapse/azure-synapse-accesscontrol/setup.py @@ -66,7 +66,6 @@ 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', diff --git a/sdk/synapse/azure-synapse-artifacts/CHANGELOG.md b/sdk/synapse/azure-synapse-artifacts/CHANGELOG.md index 94e692bf2377..b4cddc36faa6 100644 --- a/sdk/synapse/azure-synapse-artifacts/CHANGELOG.md +++ b/sdk/synapse/azure-synapse-artifacts/CHANGELOG.md @@ -1,5 +1,16 @@ # Release History +## 0.5.0 (2021-03-09) + +** Features ** + +- Add library operations +- Change create_or_update_sql_script, delete_sql_script and rename_sql_script to long-running operations + +** Breaking changes ** + +- Drop Python 3.5 support + ## 0.4.0 (2020-12-08) ** Features ** diff --git a/sdk/synapse/azure-synapse-artifacts/README.md b/sdk/synapse/azure-synapse-artifacts/README.md index e68c078fc3d3..66c14aaf08be 100644 --- a/sdk/synapse/azure-synapse-artifacts/README.md +++ b/sdk/synapse/azure-synapse-artifacts/README.md @@ -1,7 +1,7 @@ # Microsoft Azure SDK for Python This is the Microsoft Azure Synapse Artifacts Client Library. -This package has been tested with Python 2.7, 3.5, 3.6, 3.7 and 3.8. +This package has been tested with Python 2.7, 3.6, 3.7, 3.8 and 3.9. For a more complete view of Azure libraries, see the [azure sdk python release](https://aka.ms/azsdk/python/all). diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_artifacts_client.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_artifacts_client.py index 0f2fb3378bcc..5c93dad0d85a 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_artifacts_client.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_artifacts_client.py @@ -33,6 +33,7 @@ from .operations import SqlPoolsOperations from .operations import BigDataPoolsOperations from .operations import IntegrationRuntimesOperations +from .operations import LibraryOperations from .operations import WorkspaceGitRepoManagementOperations from .
import models @@ -70,6 +71,8 @@ class ArtifactsClient(object): :vartype big_data_pools: azure.synapse.artifacts.operations.BigDataPoolsOperations :ivar integration_runtimes: IntegrationRuntimesOperations operations :vartype integration_runtimes: azure.synapse.artifacts.operations.IntegrationRuntimesOperations + :ivar library: LibraryOperations operations + :vartype library: azure.synapse.artifacts.operations.LibraryOperations :ivar workspace_git_repo_management: WorkspaceGitRepoManagementOperations operations :vartype workspace_git_repo_management: azure.synapse.artifacts.operations.WorkspaceGitRepoManagementOperations :param credential: Credential needed for the client to connect to Azure. @@ -125,6 +128,8 @@ def __init__( self._client, self._config, self._serialize, self._deserialize) self.integration_runtimes = IntegrationRuntimesOperations( self._client, self._config, self._serialize, self._deserialize) + self.library = LibraryOperations( + self._client, self._config, self._serialize, self._deserialize) self.workspace_git_repo_management = WorkspaceGitRepoManagementOperations( self._client, self._config, self._serialize, self._deserialize) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_configuration.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_configuration.py index 5d0aff821595..bf76a1848b51 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_configuration.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_configuration.py @@ -49,7 +49,7 @@ def __init__( self.endpoint = endpoint self.api_version = "2019-06-01-preview" self.credential_scopes = kwargs.pop('credential_scopes', ['https://dev.azuresynapse.net/.default']) - kwargs.setdefault('sdk_moniker', 'synapse/{}'.format(VERSION)) + kwargs.setdefault('sdk_moniker', 'synapse-artifacts/{}'.format(VERSION)) self._configure(**kwargs) def _configure( diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_version.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_version.py index c8c0d6c52c29..c4551baee432 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_version.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -VERSION = "0.4.0" +VERSION = "0.5.0" diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/_artifacts_client.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/_artifacts_client.py index 8770ed43b42e..8bc04b0ad0e5 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/_artifacts_client.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/_artifacts_client.py @@ -31,6 +31,7 @@ from .operations import SqlPoolsOperations from .operations import BigDataPoolsOperations from .operations import IntegrationRuntimesOperations +from .operations import LibraryOperations from .operations import WorkspaceGitRepoManagementOperations from .. 
import models @@ -68,6 +69,8 @@ class ArtifactsClient(object): :vartype big_data_pools: azure.synapse.artifacts.aio.operations.BigDataPoolsOperations :ivar integration_runtimes: IntegrationRuntimesOperations operations :vartype integration_runtimes: azure.synapse.artifacts.aio.operations.IntegrationRuntimesOperations + :ivar library: LibraryOperations operations + :vartype library: azure.synapse.artifacts.aio.operations.LibraryOperations :ivar workspace_git_repo_management: WorkspaceGitRepoManagementOperations operations :vartype workspace_git_repo_management: azure.synapse.artifacts.aio.operations.WorkspaceGitRepoManagementOperations :param credential: Credential needed for the client to connect to Azure. @@ -122,6 +125,8 @@ def __init__( self._client, self._config, self._serialize, self._deserialize) self.integration_runtimes = IntegrationRuntimesOperations( self._client, self._config, self._serialize, self._deserialize) + self.library = LibraryOperations( + self._client, self._config, self._serialize, self._deserialize) self.workspace_git_repo_management = WorkspaceGitRepoManagementOperations( self._client, self._config, self._serialize, self._deserialize) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/_configuration.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/_configuration.py index 9c9e4c5fa938..be104a2a2e15 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/_configuration.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/_configuration.py @@ -46,7 +46,7 @@ def __init__( self.endpoint = endpoint self.api_version = "2019-06-01-preview" self.credential_scopes = kwargs.pop('credential_scopes', ['https://dev.azuresynapse.net/.default']) - kwargs.setdefault('sdk_moniker', 'synapse/{}'.format(VERSION)) + kwargs.setdefault('sdk_moniker', 'synapse-artifacts/{}'.format(VERSION)) self._configure(**kwargs) def _configure( diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/__init__.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/__init__.py index 74faadba8788..6b54fe6c25ae 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/__init__.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/__init__.py @@ -21,6 +21,7 @@ from ._sql_pools_operations import SqlPoolsOperations from ._big_data_pools_operations import BigDataPoolsOperations from ._integration_runtimes_operations import IntegrationRuntimesOperations +from ._library_operations import LibraryOperations from ._workspace_git_repo_management_operations import WorkspaceGitRepoManagementOperations __all__ = [ @@ -39,5 +40,6 @@ 'SqlPoolsOperations', 'BigDataPoolsOperations', 'IntegrationRuntimesOperations', + 'LibraryOperations', 'WorkspaceGitRepoManagementOperations', ] diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_big_data_pools_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_big_data_pools_operations.py index f554ea0f7b21..9e4b9ed7bfb4 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_big_data_pools_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_big_data_pools_operations.py @@ -79,7 +79,7 @@ async def list( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = 
self._deserialize(_models.ErrorContract, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorContract, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('BigDataPoolResourceInfoListResult', pipeline_response) @@ -134,7 +134,7 @@ async def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.ErrorContract, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorContract, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('BigDataPoolResourceInfo', pipeline_response) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_data_flow_debug_session_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_data_flow_debug_session_operations.py index dede00786c79..ac9613055e4d 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_data_flow_debug_session_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_data_flow_debug_session_operations.py @@ -81,7 +81,7 @@ async def _create_data_flow_debug_session_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) response_headers = {} @@ -109,8 +109,8 @@ async def begin_create_data_flow_debug_session( :type request: ~azure.synapse.artifacts.models.CreateDataFlowDebugSessionRequest :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the AsyncLROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either CreateDataFlowDebugSessionResponse or the result of cls(response) @@ -219,7 +219,7 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error) @@ -278,7 +278,7 @@ async def add_data_flow( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('AddDataFlowToDebugSessionResponse', pipeline_response) @@ -337,7 +337,7 @@ async def delete_data_flow_debug_session( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if cls: @@ -384,7 +384,7 @@ async def _execute_command_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) response_headers = {} @@ -412,8 +412,8 @@ async def begin_execute_command( :type request: ~azure.synapse.artifacts.models.DataFlowDebugCommandRequest :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the AsyncLROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either DataFlowDebugCommandResponse or the result of cls(response) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_data_flow_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_data_flow_operations.py index 6ef91eade4dd..7cc6d2aa363a 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_data_flow_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_data_flow_operations.py @@ -88,7 +88,7 @@ async def _create_or_update_data_flow_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = None @@ -119,8 +119,8 @@ async def begin_create_or_update_data_flow( :type if_match: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the AsyncLROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either DataFlowResource or the result of cls(response) @@ -222,7 +222,7 @@ async def get_data_flow( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('DataFlowResource', pipeline_response) @@ -268,7 +268,7 @@ async def _delete_data_flow_initial( if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if cls: @@ -287,8 +287,8 @@ async def begin_delete_data_flow( :type data_flow_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the AsyncLROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) @@ -378,7 +378,7 @@ async def _rename_data_flow_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if cls: @@ -400,8 +400,8 @@ async def begin_rename_data_flow( :type new_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the AsyncLROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) @@ -509,7 +509,7 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_dataset_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_dataset_operations.py index 071601917130..85faff37328f 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_dataset_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_dataset_operations.py @@ -102,7 +102,7 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error) @@ -159,7 +159,7 @@ async def _create_or_update_dataset_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = None @@ -190,8 +190,8 @@ async def begin_create_or_update_dataset( :type if_match: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
- :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the AsyncLROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either DatasetResource or the result of cls(response) @@ -293,7 +293,7 @@ async def get_dataset( if response.status_code not in [200, 304]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = None @@ -341,7 +341,7 @@ async def _delete_dataset_initial( if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if cls: @@ -360,8 +360,8 @@ async def begin_delete_dataset( :type dataset_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the AsyncLROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) @@ -451,7 +451,7 @@ async def _rename_dataset_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if cls: @@ -473,8 +473,8 @@ async def begin_rename_dataset( :type new_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the AsyncLROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_integration_runtimes_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_integration_runtimes_operations.py index 29eb7dcbb884..83febf0eb605 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_integration_runtimes_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_integration_runtimes_operations.py @@ -79,7 +79,7 @@ async def list( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.ErrorContract, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorContract, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('IntegrationRuntimeListResponse', pipeline_response) @@ -134,7 +134,7 @@ async def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.ErrorContract, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorContract, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_library_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_library_operations.py new file mode 100644 index 000000000000..781a29e35314 --- /dev/null +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_library_operations.py @@ -0,0 +1,633 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, IO, Optional, TypeVar, Union +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.polling.async_base_polling import AsyncLROBasePolling + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class LibraryOperations: + """LibraryOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.synapse.artifacts.models + :param client: Client for service requests. 
+ :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs + ) -> AsyncIterable["_models.LibraryListResponse"]: + """Lists Library. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either LibraryListResponse or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.synapse.artifacts.models.LibraryListResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.LibraryListResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('LibraryListResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/libraries'} # type: ignore + + async def _flush_initial( + self, + library_name: str, + **kwargs + ) -> Optional["_models.LibraryResourceInfo"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.LibraryResourceInfo"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + # Construct 
URL + url = self._flush_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'libraryName': self._serialize.url("library_name", library_name, 'str', max_length=100, min_length=0), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error) + + deserialized = None + if response.status_code == 202: + deserialized = self._deserialize('LibraryResourceInfo', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _flush_initial.metadata = {'url': '/libraries/{libraryName}/flush'} # type: ignore + + async def begin_flush( + self, + library_name: str, + **kwargs + ) -> AsyncLROPoller["_models.LibraryResourceInfo"]: + """Flush Library. + + :param library_name: file name to upload. Minimum length of the filename should be 1 excluding + the extension length. + :type library_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: Pass in True if you'd like the AsyncLROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.LibraryResourceInfo"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._flush_initial( + library_name=library_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('LibraryResourceInfo', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'libraryName': self._serialize.url("library_name", library_name, 'str', max_length=100, min_length=0), + } + + if polling is True: polling_method = AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_flush.metadata = {'url': '/libraries/{libraryName}/flush'} # type: ignore + + async def get_operation_result( + self, + operation_id: str, + **kwargs + ) -> Union["_models.LibraryResource", "_models.OperationResult"]: + """Get Operation result for Library. + + :param operation_id: operation id for which status is requested. 
+ :type operation_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: LibraryResource or OperationResult, or the result of cls(response) + :rtype: ~azure.synapse.artifacts.models.LibraryResource or ~azure.synapse.artifacts.models.OperationResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[Union["_models.LibraryResource", "_models.OperationResult"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get_operation_result.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'operationId': self._serialize.url("operation_id", operation_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error) + + if response.status_code == 200: + deserialized = self._deserialize('LibraryResource', pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize('OperationResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_operation_result.metadata = {'url': '/libraryOperationResults/{operationId}'} # type: ignore + + async def _delete_initial( + self, + library_name: str, + **kwargs + ) -> Optional["_models.LibraryResourceInfo"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.LibraryResourceInfo"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + # Construct URL + url = self._delete_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'libraryName': self._serialize.url("library_name", library_name, 'str', max_length=100, min_length=0), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, 
**kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 409]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error) + + deserialized = None + if response.status_code == 202: + deserialized = self._deserialize('LibraryResourceInfo', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _delete_initial.metadata = {'url': '/libraries/{libraryName}'} # type: ignore + + async def begin_delete( + self, + library_name: str, + **kwargs + ) -> AsyncLROPoller["_models.LibraryResourceInfo"]: + """Delete Library. + + :param library_name: file name to upload. Minimum length of the filename should be 1 excluding + the extension length. + :type library_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: Pass in True if you'd like the AsyncLROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.LibraryResourceInfo"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( + library_name=library_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('LibraryResourceInfo', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'libraryName': self._serialize.url("library_name", library_name, 'str', max_length=100, min_length=0), + } + + if polling is True: polling_method = AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete.metadata = {'url': '/libraries/{libraryName}'} # type: ignore + + async def get( + self, + library_name: str, + **kwargs + ) -> Optional["_models.LibraryResource"]: + """Get Library. + + :param library_name: file name to upload. Minimum length of the filename should be 1 excluding + the extension length. 
+ :type library_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: LibraryResource, or the result of cls(response) + :rtype: ~azure.synapse.artifacts.models.LibraryResource or None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.LibraryResource"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'libraryName': self._serialize.url("library_name", library_name, 'str', max_length=100, min_length=0), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 304]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('LibraryResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/libraries/{libraryName}'} # type: ignore + + async def _create_initial( + self, + library_name: str, + **kwargs + ) -> Optional["_models.LibraryResourceInfo"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.LibraryResourceInfo"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + # Construct URL + url = self._create_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'libraryName': self._serialize.url("library_name", library_name, 'str', max_length=100, min_length=0), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + 
error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error) + + deserialized = None + if response.status_code == 202: + deserialized = self._deserialize('LibraryResourceInfo', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _create_initial.metadata = {'url': '/libraries/{libraryName}'} # type: ignore + + async def begin_create( + self, + library_name: str, + **kwargs + ) -> AsyncLROPoller["_models.LibraryResourceInfo"]: + """Creates a library with the library name. + + :param library_name: file name to upload. Minimum length of the filename should be 1 excluding + the extension length. + :type library_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: Pass in True if you'd like the AsyncLROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.LibraryResourceInfo"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_initial( + library_name=library_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('LibraryResourceInfo', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'libraryName': self._serialize.url("library_name", library_name, 'str', max_length=100, min_length=0), + } + + if polling is True: polling_method = AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create.metadata = {'url': '/libraries/{libraryName}'} # type: ignore + + async def append( + self, + library_name: str, + content: IO, + x_ms_blob_condition_appendpos: Optional[int] = None, + **kwargs + ) -> None: + """Append the content to the library resource created using the create operation. The maximum + content size is 4MiB. Content larger than 4MiB must be appended in 4MiB chunks. + + :param library_name: file name to upload. 
Minimum length of the filename should be 1 excluding + the extension length. + :type library_name: str + :param content: Library file chunk. + :type content: IO + :param x_ms_blob_condition_appendpos: Set this header to a byte offset at which the block is + expected to be appended. The request succeeds only if the current offset matches this value. + Otherwise, the request fails with the AppendPositionConditionNotMet error (HTTP status code 412 + – Precondition Failed). + :type x_ms_blob_condition_appendpos: long + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + content_type = kwargs.pop("content_type", "application/octet-stream") + accept = "application/json" + + # Construct URL + url = self.append.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'libraryName': self._serialize.url("library_name", library_name, 'str', max_length=100, min_length=0), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if x_ms_blob_condition_appendpos is not None: + header_parameters['x-ms-blob-condition-appendpos'] = self._serialize.header("x_ms_blob_condition_appendpos", x_ms_blob_condition_appendpos, 'long') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content_kwargs['stream_content'] = content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error) + + if cls: + return cls(pipeline_response, None, {}) + + append.metadata = {'url': '/libraries/{libraryName}'} # type: ignore diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_linked_service_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_linked_service_operations.py index 429cb9630de3..03717566bbfc 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_linked_service_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_linked_service_operations.py @@ -102,7 +102,7 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.CloudError, response) + error = 
self._deserialize.failsafe_deserialize(_models.CloudError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error) @@ -159,7 +159,7 @@ async def _create_or_update_linked_service_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = None @@ -190,8 +190,8 @@ async def begin_create_or_update_linked_service( :type if_match: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the AsyncLROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either LinkedServiceResource or the result of cls(response) @@ -294,7 +294,7 @@ async def get_linked_service( if response.status_code not in [200, 304]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = None @@ -342,7 +342,7 @@ async def _delete_linked_service_initial( if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if cls: @@ -361,8 +361,8 @@ async def begin_delete_linked_service( :type linked_service_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the AsyncLROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) @@ -452,7 +452,7 @@ async def _rename_linked_service_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if cls: @@ -474,8 +474,8 @@ async def begin_rename_linked_service( :type new_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the AsyncLROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_notebook_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_notebook_operations.py index 0fb56b1c1b3a..901a24f1a6b6 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_notebook_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_notebook_operations.py @@ -102,7 +102,7 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error) @@ -173,7 +173,7 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error) @@ -228,7 +228,7 @@ async def _create_or_update_notebook_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = None @@ -259,8 +259,8 @@ async def begin_create_or_update_notebook( :type if_match: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
- :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the AsyncLROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either NotebookResource or the result of cls(response) @@ -362,7 +362,7 @@ async def get_notebook( if response.status_code not in [200, 304]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = None @@ -410,7 +410,7 @@ async def _delete_notebook_initial( if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if cls: @@ -429,8 +429,8 @@ async def begin_delete_notebook( :type notebook_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the AsyncLROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) @@ -520,7 +520,7 @@ async def _rename_notebook_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if cls: @@ -542,8 +542,8 @@ async def begin_rename_notebook( :type new_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the AsyncLROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
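The recurring `self._deserialize(...)` to `self._deserialize.failsafe_deserialize(...)` substitution throughout these hunks changes how error bodies are handled: if the payload does not actually match the `CloudError` schema, deserialization no longer raises and masks the HTTP failure. Roughly, and only as an illustration of the contract (the real implementation lives in msrest, not here):

```python
# Illustrative sketch of the failsafe contract, not msrest's actual code:
# best-effort deserialization that never raises, so the HttpResponseError
# built from the response is still the exception the caller sees.
def failsafe_deserialize(deserializer, target_type, response):
    try:
        return deserializer(target_type, response)
    except Exception:
        # Malformed or unexpected error body: return None and let the
        # HTTP-level error propagate without a secondary failure.
        return None
```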
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_pipeline_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_pipeline_operations.py index a2cfbaabe55c..43357b829546 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_pipeline_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_pipeline_operations.py @@ -102,7 +102,7 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error) @@ -157,7 +157,7 @@ async def _create_or_update_pipeline_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = None @@ -188,8 +188,8 @@ async def begin_create_or_update_pipeline( :type if_match: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the AsyncLROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either PipelineResource or the result of cls(response) @@ -291,7 +291,7 @@ async def get_pipeline( if response.status_code not in [200, 304]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = None @@ -339,7 +339,7 @@ async def _delete_pipeline_initial( if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if cls: @@ -358,8 +358,8 @@ async def begin_delete_pipeline( :type pipeline_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
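As the rewritten docstrings say, `polling=True` now selects `AsyncLROBasePolling` rather than ARM polling. Note that these generated methods default to `polling=False` (visible in the `kwargs.pop('polling', False)` lines later in this diff), so callers who want the poller to actually poll must opt in. A sketch of a call site; the method name and `polling` keyword come from the diff, while the `client.pipeline` attribute name is assumed:

```python
# Hypothetical call site for one of the begin_* LRO methods in this diff.
async def replace_pipeline(client, name, resource):
    poller = await client.pipeline.begin_create_or_update_pipeline(
        name,
        resource,
        polling=True,  # opt in to AsyncLROBasePolling; the default is no polling
    )
    return await poller.result()  # waits for the LRO and returns the resource
```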
- :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the AsyncLROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) @@ -449,7 +449,7 @@ async def _rename_pipeline_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if cls: @@ -471,8 +471,8 @@ async def begin_rename_pipeline( :type new_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the AsyncLROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) @@ -594,7 +594,7 @@ async def create_pipeline_run( if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('CreateRunResponse', pipeline_response) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_pipeline_run_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_pipeline_run_operations.py index 631c23488031..3139a758ee09 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_pipeline_run_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_pipeline_run_operations.py @@ -87,7 +87,7 @@ async def query_pipeline_runs_by_workspace( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('PipelineRunsQueryResponse', pipeline_response) @@ -142,7 +142,7 @@ async def get_pipeline_run( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) 
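Unlike the `begin_*` methods, the pipeline-run operations in the surrounding hunks are plain async calls. A sketch of the typical round trip; the `run_id` attribute of `CreateRunResponse` and the `client.pipeline`/`client.pipeline_run` attribute names are assumptions:

```python
# Hypothetical end-to-end: start a run, inspect it, cancel it if still going.
async def run_and_maybe_cancel(client, pipeline_name):
    created = await client.pipeline.create_pipeline_run(pipeline_name)
    run = await client.pipeline_run.get_pipeline_run(created.run_id)
    if run.status not in ("Succeeded", "Failed"):
        await client.pipeline_run.cancel_pipeline_run(created.run_id)
    return run
```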
deserialized = self._deserialize('PipelineRun', pipeline_response) @@ -209,7 +209,7 @@ async def query_activity_runs( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('ActivityRunsQueryResponse', pipeline_response) @@ -270,7 +270,7 @@ async def cancel_pipeline_run( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if cls: diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_spark_job_definition_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_spark_job_definition_operations.py index a7e901faad24..40cc0bfe8800 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_spark_job_definition_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_spark_job_definition_operations.py @@ -102,7 +102,7 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error) @@ -113,28 +113,14 @@ async def get_next(next_link=None): ) get_spark_job_definitions_by_workspace.metadata = {'url': '/sparkJobDefinitions'} # type: ignore - async def create_or_update_spark_job_definition( + async def _create_or_update_spark_job_definition_initial( self, spark_job_definition_name: str, properties: "_models.SparkJobDefinition", if_match: Optional[str] = None, **kwargs - ) -> "_models.SparkJobDefinitionResource": - """Creates or updates a Spark Job Definition. - - :param spark_job_definition_name: The spark job definition name. - :type spark_job_definition_name: str - :param properties: Properties of spark job definition. - :type properties: ~azure.synapse.artifacts.models.SparkJobDefinition - :param if_match: ETag of the Spark Job Definition entity. Should only be specified for update, - for which it should match existing entity or can be * for unconditional update. 
- :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: SparkJobDefinitionResource, or the result of cls(response) - :rtype: ~azure.synapse.artifacts.models.SparkJobDefinitionResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.SparkJobDefinitionResource"] + ) -> Optional["_models.SparkJobDefinitionResource"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.SparkJobDefinitionResource"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -146,7 +132,7 @@ async def create_or_update_spark_job_definition( accept = "application/json" # Construct URL - url = self.create_or_update_spark_job_definition.metadata['url'] # type: ignore + url = self._create_or_update_spark_job_definition_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'sparkJobDefinitionName': self._serialize.url("spark_job_definition_name", spark_job_definition_name, 'str'), @@ -171,18 +157,91 @@ async def create_or_update_spark_job_definition( pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) - deserialized = self._deserialize('SparkJobDefinitionResource', pipeline_response) + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('SparkJobDefinitionResource', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update_spark_job_definition.metadata = {'url': '/sparkJobDefinitions/{sparkJobDefinitionName}'} # type: ignore + _create_or_update_spark_job_definition_initial.metadata = {'url': '/sparkJobDefinitions/{sparkJobDefinitionName}'} # type: ignore + + async def begin_create_or_update_spark_job_definition( + self, + spark_job_definition_name: str, + properties: "_models.SparkJobDefinition", + if_match: Optional[str] = None, + **kwargs + ) -> AsyncLROPoller["_models.SparkJobDefinitionResource"]: + """Creates or updates a Spark Job Definition. + + :param spark_job_definition_name: The spark job definition name. + :type spark_job_definition_name: str + :param properties: Properties of spark job definition. + :type properties: ~azure.synapse.artifacts.models.SparkJobDefinition + :param if_match: ETag of the Spark Job Definition entity. Should only be specified for update, + for which it should match existing entity or can be * for unconditional update. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: Pass in True if you'd like the AsyncLROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. 
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either SparkJobDefinitionResource or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.synapse.artifacts.models.SparkJobDefinitionResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.SparkJobDefinitionResource"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_spark_job_definition_initial( + spark_job_definition_name=spark_job_definition_name, + properties=properties, + if_match=if_match, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('SparkJobDefinitionResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'sparkJobDefinitionName': self._serialize.url("spark_job_definition_name", spark_job_definition_name, 'str'), + } + + if polling is True: polling_method = AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update_spark_job_definition.metadata = {'url': '/sparkJobDefinitions/{sparkJobDefinitionName}'} # type: ignore async def get_spark_job_definition( self, @@ -235,7 +294,7 @@ async def get_spark_job_definition( if response.status_code not in [200, 304]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = None @@ -248,20 +307,11 @@ async def get_spark_job_definition( return deserialized get_spark_job_definition.metadata = {'url': '/sparkJobDefinitions/{sparkJobDefinitionName}'} # type: ignore - async def delete_spark_job_definition( + async def _delete_spark_job_definition_initial( self, spark_job_definition_name: str, **kwargs ) -> None: - """Deletes a Spark Job Definition. - - :param spark_job_definition_name: The spark job definition name. 
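The rename visible here, from a public `create_or_update_spark_job_definition` to a private `_..._initial` plus a public `begin_` wrapper, converts the operation to a long-running one, which is a breaking change for existing callers. Migration looks roughly like this (the `client.spark_job_definition` attribute name is assumed):

```python
# Before: the call returned SparkJobDefinitionResource directly.
#     resource = await client.spark_job_definition.create_or_update_spark_job_definition(
#         name, properties)
# After: the call returns an AsyncLROPoller; await result() for the resource.
async def upsert_spark_job_definition(client, name, properties):
    poller = await client.spark_job_definition.begin_create_or_update_spark_job_definition(
        name, properties, polling=True
    )
    return await poller.result()
```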
- :type spark_job_definition_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError @@ -271,7 +321,7 @@ async def delete_spark_job_definition( accept = "application/json" # Construct URL - url = self.delete_spark_job_definition.metadata['url'] # type: ignore + url = self._delete_spark_job_definition_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'sparkJobDefinitionName': self._serialize.url("spark_job_definition_name", spark_job_definition_name, 'str'), @@ -290,15 +340,74 @@ async def delete_spark_job_definition( pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200, 204]: + if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if cls: return cls(pipeline_response, None, {}) - delete_spark_job_definition.metadata = {'url': '/sparkJobDefinitions/{sparkJobDefinitionName}'} # type: ignore + _delete_spark_job_definition_initial.metadata = {'url': '/sparkJobDefinitions/{sparkJobDefinitionName}'} # type: ignore + + async def begin_delete_spark_job_definition( + self, + spark_job_definition_name: str, + **kwargs + ) -> AsyncLROPoller[None]: + """Deletes a Spark Job Definition. + + :param spark_job_definition_name: The spark job definition name. + :type spark_job_definition_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: Pass in True if you'd like the AsyncLROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
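The `continuation_token` keyword documented here pairs with the `AsyncLROPoller.from_continuation_token` branch in the generated body: a poller's state can be serialized and a new poller rebuilt later. A sketch under the assumption that the caller persists the token somewhere durable:

```python
# Hypothetical save/restore of a delete poller across process restarts.
async def start_delete(client, name):
    poller = await client.spark_job_definition.begin_delete_spark_job_definition(
        name, polling=True
    )
    return poller.continuation_token()  # persist this opaque string

async def resume_delete(client, name, token):
    poller = await client.spark_job_definition.begin_delete_spark_job_definition(
        name, continuation_token=token, polling=True
    )
    await poller.result()  # resumes from the saved state instead of re-issuing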
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_spark_job_definition_initial( + spark_job_definition_name=spark_job_definition_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'sparkJobDefinitionName': self._serialize.url("spark_job_definition_name", spark_job_definition_name, 'str'), + } + + if polling is True: polling_method = AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete_spark_job_definition.metadata = {'url': '/sparkJobDefinitions/{sparkJobDefinitionName}'} # type: ignore async def _execute_spark_job_definition_initial( self, @@ -335,7 +444,7 @@ async def _execute_spark_job_definition_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if response.status_code == 200: @@ -361,8 +470,8 @@ async def begin_execute_spark_job_definition( :type spark_job_definition_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the AsyncLROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either SparkBatchJob or the result of cls(response) @@ -455,7 +564,7 @@ async def _rename_spark_job_definition_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if cls: @@ -477,8 +586,8 @@ async def begin_rename_spark_job_definition( :type new_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the AsyncLROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) @@ -567,7 +676,7 @@ async def _debug_spark_job_definition_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if response.status_code == 200: @@ -593,8 +702,8 @@ async def begin_debug_spark_job_definition( :type properties: ~azure.synapse.artifacts.models.SparkJobDefinition :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the AsyncLROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
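`begin_execute_spark_job_definition` and `begin_debug_spark_job_definition` both resolve to a `SparkBatchJob`, per the return annotations here. A minimal sketch of kicking off an execution and reading the resulting batch job; the client attribute name and the model's `id`/`state` fields are assumptions:

```python
# Hypothetical: execute a saved Spark job definition and report its state.
async def execute_definition(client, name):
    poller = await client.spark_job_definition.begin_execute_spark_job_definition(
        name, polling=True
    )
    batch_job = await poller.result()  # a SparkBatchJob model
    print(batch_job.id, batch_job.state)
    return batch_job
```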
:return: An instance of AsyncLROPoller that returns either SparkBatchJob or the result of cls(response) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_sql_pools_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_sql_pools_operations.py index fec9c44b2c96..409abfaddf79 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_sql_pools_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_sql_pools_operations.py @@ -79,7 +79,7 @@ async def list( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.ErrorContract, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorContract, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('SqlPoolInfoListResult', pipeline_response) @@ -134,7 +134,7 @@ async def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.ErrorContract, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorContract, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('SqlPool', pipeline_response) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_sql_script_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_sql_script_operations.py index ef49112a9d66..9db76a8cc779 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_sql_script_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_sql_script_operations.py @@ -102,7 +102,7 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error) @@ -113,28 +113,14 @@ async def get_next(next_link=None): ) get_sql_scripts_by_workspace.metadata = {'url': '/sqlScripts'} # type: ignore - async def create_or_update_sql_script( + async def _create_or_update_sql_script_initial( self, sql_script_name: str, sql_script: "_models.SqlScriptResource", if_match: Optional[str] = None, **kwargs - ) -> "_models.SqlScriptResource": - """Creates or updates a Sql Script. - - :param sql_script_name: The sql script name. - :type sql_script_name: str - :param sql_script: Sql Script resource definition. - :type sql_script: ~azure.synapse.artifacts.models.SqlScriptResource - :param if_match: ETag of the SQL script entity. Should only be specified for update, for which - it should match existing entity or can be * for unconditional update. 
- :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: SqlScriptResource, or the result of cls(response) - :rtype: ~azure.synapse.artifacts.models.SqlScriptResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.SqlScriptResource"] + ) -> Optional["_models.SqlScriptResource"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.SqlScriptResource"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -144,7 +130,7 @@ async def create_or_update_sql_script( accept = "application/json" # Construct URL - url = self.create_or_update_sql_script.metadata['url'] # type: ignore + url = self._create_or_update_sql_script_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'sqlScriptName': self._serialize.url("sql_script_name", sql_script_name, 'str'), @@ -169,18 +155,91 @@ async def create_or_update_sql_script( pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) - deserialized = self._deserialize('SqlScriptResource', pipeline_response) + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('SqlScriptResource', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update_sql_script.metadata = {'url': '/sqlScripts/{sqlScriptName}'} # type: ignore + _create_or_update_sql_script_initial.metadata = {'url': '/sqlScripts/{sqlScriptName}'} # type: ignore + + async def begin_create_or_update_sql_script( + self, + sql_script_name: str, + sql_script: "_models.SqlScriptResource", + if_match: Optional[str] = None, + **kwargs + ) -> AsyncLROPoller["_models.SqlScriptResource"]: + """Creates or updates a Sql Script. + + :param sql_script_name: The sql script name. + :type sql_script_name: str + :param sql_script: Sql Script resource definition. + :type sql_script: ~azure.synapse.artifacts.models.SqlScriptResource + :param if_match: ETag of the SQL script entity. Should only be specified for update, for which + it should match existing entity or can be * for unconditional update. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: Pass in True if you'd like the AsyncLROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
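The `if_match` parameter documented above enables optimistic concurrency: pass the ETag captured from a previous read so the update fails with 412 if someone else changed the script in the meantime, or `"*"` for an unconditional update. A sketch; the `etag` attribute on the fetched resource and the `client.sql_script` attribute name are assumptions:

```python
# Hypothetical read-modify-write guarded by ETag.
async def safe_update_sql_script(client, name, new_definition):
    current = await client.sql_script.get_sql_script(name)
    poller = await client.sql_script.begin_create_or_update_sql_script(
        name,
        new_definition,
        if_match=current.etag,  # or "*" to overwrite unconditionally
        polling=True,
    )
    return await poller.result()
```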
+ :return: An instance of AsyncLROPoller that returns either SqlScriptResource or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.synapse.artifacts.models.SqlScriptResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.SqlScriptResource"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_sql_script_initial( + sql_script_name=sql_script_name, + sql_script=sql_script, + if_match=if_match, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('SqlScriptResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'sqlScriptName': self._serialize.url("sql_script_name", sql_script_name, 'str'), + } + + if polling is True: polling_method = AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update_sql_script.metadata = {'url': '/sqlScripts/{sqlScriptName}'} # type: ignore async def get_sql_script( self, @@ -232,7 +291,7 @@ async def get_sql_script( if response.status_code not in [200, 304]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = None @@ -245,20 +304,11 @@ async def get_sql_script( return deserialized get_sql_script.metadata = {'url': '/sqlScripts/{sqlScriptName}'} # type: ignore - async def delete_sql_script( + async def _delete_sql_script_initial( self, sql_script_name: str, **kwargs ) -> None: - """Deletes a Sql Script. - - :param sql_script_name: The sql script name. 
- :type sql_script_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError @@ -268,7 +318,7 @@ async def delete_sql_script( accept = "application/json" # Construct URL - url = self.delete_sql_script.metadata['url'] # type: ignore + url = self._delete_sql_script_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'sqlScriptName': self._serialize.url("sql_script_name", sql_script_name, 'str'), @@ -287,15 +337,74 @@ async def delete_sql_script( pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200, 204]: + if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if cls: return cls(pipeline_response, None, {}) - delete_sql_script.metadata = {'url': '/sqlScripts/{sqlScriptName}'} # type: ignore + _delete_sql_script_initial.metadata = {'url': '/sqlScripts/{sqlScriptName}'} # type: ignore + + async def begin_delete_sql_script( + self, + sql_script_name: str, + **kwargs + ) -> AsyncLROPoller[None]: + """Deletes a Sql Script. + + :param sql_script_name: The sql script name. + :type sql_script_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: Pass in True if you'd like the AsyncLROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_sql_script_initial( + sql_script_name=sql_script_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'sqlScriptName': self._serialize.url("sql_script_name", sql_script_name, 'str'), + } + + if polling is True: polling_method = AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete_sql_script.metadata = {'url': '/sqlScripts/{sqlScriptName}'} # type: ignore async def _rename_sql_script_initial( self, @@ -340,7 +449,7 @@ async def _rename_sql_script_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if cls: @@ -362,8 +471,8 @@ async def begin_rename_sql_script( :type new_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the AsyncLROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
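`begin_rename_sql_script` follows the same pattern as the other renames in this diff: it takes the current name plus `new_name` and resolves to `None`. A brief sketch (client attribute name assumed):

```python
# Hypothetical rename; the poller carries no payload on success.
async def rename_sql_script(client, old_name, new_name):
    poller = await client.sql_script.begin_rename_sql_script(
        old_name, new_name, polling=True
    )
    await poller.wait()  # nothing to return for this LRO
```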
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_trigger_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_trigger_operations.py index df533ac0cf0c..a4d2e7806dd4 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_trigger_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_trigger_operations.py @@ -102,7 +102,7 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error) @@ -159,7 +159,7 @@ async def _create_or_update_trigger_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = None @@ -190,8 +190,8 @@ async def begin_create_or_update_trigger( :type if_match: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the AsyncLROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either TriggerResource or the result of cls(response) @@ -293,7 +293,7 @@ async def get_trigger( if response.status_code not in [200, 304]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = None @@ -341,7 +341,7 @@ async def _delete_trigger_initial( if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if cls: @@ -360,8 +360,8 @@ async def begin_delete_trigger( :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
- :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the AsyncLROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) @@ -443,7 +443,7 @@ async def _subscribe_trigger_to_events_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = None @@ -467,8 +467,8 @@ async def begin_subscribe_trigger_to_events( :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the AsyncLROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either TriggerSubscriptionOperationStatus or the result of cls(response) @@ -562,7 +562,7 @@ async def get_event_subscription_status( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) @@ -608,7 +608,7 @@ async def _unsubscribe_trigger_from_events_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = None @@ -632,8 +632,8 @@ async def begin_unsubscribe_trigger_from_events( :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the AsyncLROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
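Subscribing a trigger to events is an LRO that resolves to a `TriggerSubscriptionOperationStatus`, and `get_event_subscription_status` lets you re-check afterwards. A sketch; the `client.trigger` attribute name and the `status` field on the model are assumptions:

```python
# Hypothetical: subscribe an event trigger, then re-read subscription status.
async def subscribe_trigger(client, trigger_name):
    poller = await client.trigger.begin_subscribe_trigger_to_events(
        trigger_name, polling=True
    )
    op_status = await poller.result()  # TriggerSubscriptionOperationStatus
    print(op_status.status)
    return await client.trigger.get_event_subscription_status(trigger_name)
```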
:return: An instance of AsyncLROPoller that returns either TriggerSubscriptionOperationStatus or the result of cls(response) @@ -718,7 +718,7 @@ async def _start_trigger_initial( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if cls: @@ -737,8 +737,8 @@ async def begin_start_trigger( :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the AsyncLROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) @@ -820,7 +820,7 @@ async def _stop_trigger_initial( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if cls: @@ -839,8 +839,8 @@ async def begin_stop_trigger( :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the AsyncLROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
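Starting and stopping triggers use the same `begin_*` poller pattern and resolve to `None`. A sketch of bouncing a trigger (client attribute name assumed):

```python
# Hypothetical stop-then-start cycle for a trigger.
async def bounce_trigger(client, trigger_name):
    stop = await client.trigger.begin_stop_trigger(trigger_name, polling=True)
    await stop.wait()
    start = await client.trigger.begin_start_trigger(trigger_name, polling=True)
    await start.wait()
```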
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_trigger_run_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_trigger_run_operations.py index 484be6295186..e44d9fab567e 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_trigger_run_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_trigger_run_operations.py @@ -87,7 +87,7 @@ async def rerun_trigger_instance( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if cls: @@ -143,7 +143,7 @@ async def cancel_trigger_instance( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if cls: @@ -199,7 +199,7 @@ async def query_trigger_runs_by_workspace( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('TriggerRunsQueryResponse', pipeline_response) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_workspace_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_workspace_operations.py index 117e554f5846..aae72abfcfc8 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_workspace_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_workspace_operations.py @@ -79,7 +79,7 @@ async def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.ErrorContract, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorContract, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('Workspace', pipeline_response) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/__init__.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/__init__.py index c169b82fbb3f..b132229bdfa9 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/__init__.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/__init__.py @@ -112,6 +112,7 @@ from ._models_py3 import CassandraTableDataset from ._models_py3 import ChainingTrigger from ._models_py3 import CloudError + from ._models_py3 import CloudErrorAutoGenerated from ._models_py3 import CommonDataServiceForAppsEntityDataset from ._models_py3 import CommonDataServiceForAppsLinkedService from ._models_py3 import CommonDataServiceForAppsSink @@ -203,6 +204,7 @@ from ._models_py3 import DrillLinkedService from ._models_py3 import DrillSource from ._models_py3 import 
DrillTableDataset + from ._models_py3 import DynamicExecutorAllocation from ._models_py3 import DynamicsAXLinkedService from ._models_py3 import DynamicsAXResourceDataset from ._models_py3 import DynamicsAXSource @@ -311,7 +313,12 @@ from ._models_py3 import JsonSink from ._models_py3 import JsonSource from ._models_py3 import JsonWriteSettings + from ._models_py3 import LibraryInfo + from ._models_py3 import LibraryListResponse from ._models_py3 import LibraryRequirements + from ._models_py3 import LibraryResource + from ._models_py3 import LibraryResourceInfo + from ._models_py3 import LibraryResourceProperties from ._models_py3 import LinkedIntegrationRuntimeKeyAuthorization from ._models_py3 import LinkedIntegrationRuntimeRbacAuthorization from ._models_py3 import LinkedIntegrationRuntimeType @@ -373,6 +380,7 @@ from ._models_py3 import Office365Dataset from ._models_py3 import Office365LinkedService from ._models_py3 import Office365Source + from ._models_py3 import OperationResult from ._models_py3 import OracleLinkedService from ._models_py3 import OraclePartitionSettings from ._models_py3 import OracleServiceCloudLinkedService @@ -710,6 +718,7 @@ from ._models import CassandraTableDataset # type: ignore from ._models import ChainingTrigger # type: ignore from ._models import CloudError # type: ignore + from ._models import CloudErrorAutoGenerated # type: ignore from ._models import CommonDataServiceForAppsEntityDataset # type: ignore from ._models import CommonDataServiceForAppsLinkedService # type: ignore from ._models import CommonDataServiceForAppsSink # type: ignore @@ -801,6 +810,7 @@ from ._models import DrillLinkedService # type: ignore from ._models import DrillSource # type: ignore from ._models import DrillTableDataset # type: ignore + from ._models import DynamicExecutorAllocation # type: ignore from ._models import DynamicsAXLinkedService # type: ignore from ._models import DynamicsAXResourceDataset # type: ignore from ._models import DynamicsAXSource # type: ignore @@ -909,7 +919,12 @@ from ._models import JsonSink # type: ignore from ._models import JsonSource # type: ignore from ._models import JsonWriteSettings # type: ignore + from ._models import LibraryInfo # type: ignore + from ._models import LibraryListResponse # type: ignore from ._models import LibraryRequirements # type: ignore + from ._models import LibraryResource # type: ignore + from ._models import LibraryResourceInfo # type: ignore + from ._models import LibraryResourceProperties # type: ignore from ._models import LinkedIntegrationRuntimeKeyAuthorization # type: ignore from ._models import LinkedIntegrationRuntimeRbacAuthorization # type: ignore from ._models import LinkedIntegrationRuntimeType # type: ignore @@ -971,6 +986,7 @@ from ._models import Office365Dataset # type: ignore from ._models import Office365LinkedService # type: ignore from ._models import Office365Source # type: ignore + from ._models import OperationResult # type: ignore from ._models import OracleLinkedService # type: ignore from ._models import OraclePartitionSettings # type: ignore from ._models import OracleServiceCloudLinkedService # type: ignore @@ -1411,6 +1427,7 @@ 'CassandraTableDataset', 'ChainingTrigger', 'CloudError', + 'CloudErrorAutoGenerated', 'CommonDataServiceForAppsEntityDataset', 'CommonDataServiceForAppsLinkedService', 'CommonDataServiceForAppsSink', @@ -1502,6 +1519,7 @@ 'DrillLinkedService', 'DrillSource', 'DrillTableDataset', + 'DynamicExecutorAllocation', 'DynamicsAXLinkedService', 
'DynamicsAXResourceDataset', 'DynamicsAXSource', @@ -1610,7 +1628,12 @@ 'JsonSink', 'JsonSource', 'JsonWriteSettings', + 'LibraryInfo', + 'LibraryListResponse', 'LibraryRequirements', + 'LibraryResource', + 'LibraryResourceInfo', + 'LibraryResourceProperties', 'LinkedIntegrationRuntimeKeyAuthorization', 'LinkedIntegrationRuntimeRbacAuthorization', 'LinkedIntegrationRuntimeType', @@ -1672,6 +1695,7 @@ 'Office365Dataset', 'Office365LinkedService', 'Office365Source', + 'OperationResult', 'OracleLinkedService', 'OraclePartitionSettings', 'OracleServiceCloudLinkedService', diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models.py index 040709df6e58..4ec085b56406 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models.py @@ -14,7 +14,7 @@ class Activity(msrest.serialization.Model): """A pipeline activity. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AppendVariableActivity, ControlActivity, ExecutePipelineActivity, ExecutionActivity, FilterActivity, ForEachActivity, IfConditionActivity, SetVariableActivity, SqlPoolStoredProcedureActivity, SwitchActivity, UntilActivity, ValidationActivity, WaitActivity, WebHookActivity. + sub-classes are: ControlActivity, ExecutionActivity, SqlPoolStoredProcedureActivity. All required parameters must be populated in order to send to Azure. @@ -48,7 +48,7 @@ class Activity(msrest.serialization.Model): } _subtype_map = { - 'type': {'AppendVariable': 'AppendVariableActivity', 'Container': 'ControlActivity', 'ExecutePipeline': 'ExecutePipelineActivity', 'Execution': 'ExecutionActivity', 'Filter': 'FilterActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'SetVariable': 'SetVariableActivity', 'SqlPoolStoredProcedure': 'SqlPoolStoredProcedureActivity', 'Switch': 'SwitchActivity', 'Until': 'UntilActivity', 'Validation': 'ValidationActivity', 'Wait': 'WaitActivity', 'WebHook': 'WebHookActivity'} + 'type': {'Container': 'ControlActivity', 'Execution': 'ExecutionActivity', 'SqlPoolStoredProcedure': 'SqlPoolStoredProcedureActivity'} } def __init__( @@ -1180,7 +1180,56 @@ def __init__( self.modified_datetime_end = kwargs.get('modified_datetime_end', None) -class AppendVariableActivity(Activity): +class ControlActivity(Activity): + """Base class for all control activities like IfCondition, ForEach , Until. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AppendVariableActivity, ExecutePipelineActivity, FilterActivity, ForEachActivity, IfConditionActivity, SetVariableActivity, SwitchActivity, UntilActivity, ValidationActivity, WaitActivity, WebHookActivity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. 
+ :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + } + + _subtype_map = { + 'type': {'AppendVariable': 'AppendVariableActivity', 'ExecutePipeline': 'ExecutePipelineActivity', 'Filter': 'FilterActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'SetVariable': 'SetVariableActivity', 'Switch': 'SwitchActivity', 'Until': 'UntilActivity', 'Validation': 'ValidationActivity', 'Wait': 'WaitActivity', 'WebHook': 'WebHookActivity'} + } + + def __init__( + self, + **kwargs + ): + super(ControlActivity, self).__init__(**kwargs) + self.type = 'Container' # type: str + + +class AppendVariableActivity(ControlActivity): """Append value for a Variable of type Array. All required parameters must be populated in order to send to Azure. @@ -5986,16 +6035,20 @@ class BigDataPoolResourceInfo(TrackedResource): :type auto_pause: ~azure.synapse.artifacts.models.AutoPauseProperties :param is_compute_isolation_enabled: Whether compute isolation is required or not. :type is_compute_isolation_enabled: bool - :param have_library_requirements_changed: Whether library requirements changed. - :type have_library_requirements_changed: bool :param session_level_packages_enabled: Whether session level packages enabled. :type session_level_packages_enabled: bool + :param cache_size: The cache size. + :type cache_size: int + :param dynamic_executor_allocation: Dynamic Executor Allocation. + :type dynamic_executor_allocation: ~azure.synapse.artifacts.models.DynamicExecutorAllocation :param spark_events_folder: The Spark events folder. :type spark_events_folder: str :param node_count: The number of nodes in the Big Data pool. :type node_count: int :param library_requirements: Library version requirements. :type library_requirements: ~azure.synapse.artifacts.models.LibraryRequirements + :param custom_libraries: List of custom libraries/packages associated with the spark pool. + :type custom_libraries: list[~azure.synapse.artifacts.models.LibraryInfo] :param spark_config_properties: Spark configuration file to specify additional properties. :type spark_config_properties: ~azure.synapse.artifacts.models.LibraryRequirements :param spark_version: The Apache Spark version. @@ -6008,6 +6061,8 @@ class BigDataPoolResourceInfo(TrackedResource): :param node_size_family: The kind of nodes that the Big Data pool provides. Possible values include: "None", "MemoryOptimized". :type node_size_family: str or ~azure.synapse.artifacts.models.NodeSizeFamily + :ivar last_succeeded_timestamp: The time when the Big Data pool was updated successfully. 
+ :vartype last_succeeded_timestamp: ~datetime.datetime """ _validation = { @@ -6015,6 +6070,7 @@ class BigDataPoolResourceInfo(TrackedResource): 'name': {'readonly': True}, 'type': {'readonly': True}, 'location': {'required': True}, + 'last_succeeded_timestamp': {'readonly': True}, } _attribute_map = { @@ -6028,16 +6084,19 @@ class BigDataPoolResourceInfo(TrackedResource): 'creation_date': {'key': 'properties.creationDate', 'type': 'iso-8601'}, 'auto_pause': {'key': 'properties.autoPause', 'type': 'AutoPauseProperties'}, 'is_compute_isolation_enabled': {'key': 'properties.isComputeIsolationEnabled', 'type': 'bool'}, - 'have_library_requirements_changed': {'key': 'properties.haveLibraryRequirementsChanged', 'type': 'bool'}, 'session_level_packages_enabled': {'key': 'properties.sessionLevelPackagesEnabled', 'type': 'bool'}, + 'cache_size': {'key': 'properties.cacheSize', 'type': 'int'}, + 'dynamic_executor_allocation': {'key': 'properties.dynamicExecutorAllocation', 'type': 'DynamicExecutorAllocation'}, 'spark_events_folder': {'key': 'properties.sparkEventsFolder', 'type': 'str'}, 'node_count': {'key': 'properties.nodeCount', 'type': 'int'}, 'library_requirements': {'key': 'properties.libraryRequirements', 'type': 'LibraryRequirements'}, + 'custom_libraries': {'key': 'properties.customLibraries', 'type': '[LibraryInfo]'}, 'spark_config_properties': {'key': 'properties.sparkConfigProperties', 'type': 'LibraryRequirements'}, 'spark_version': {'key': 'properties.sparkVersion', 'type': 'str'}, 'default_spark_log_folder': {'key': 'properties.defaultSparkLogFolder', 'type': 'str'}, 'node_size': {'key': 'properties.nodeSize', 'type': 'str'}, 'node_size_family': {'key': 'properties.nodeSizeFamily', 'type': 'str'}, + 'last_succeeded_timestamp': {'key': 'properties.lastSucceededTimestamp', 'type': 'iso-8601'}, } def __init__( @@ -6050,16 +6109,19 @@ def __init__( self.creation_date = kwargs.get('creation_date', None) self.auto_pause = kwargs.get('auto_pause', None) self.is_compute_isolation_enabled = kwargs.get('is_compute_isolation_enabled', None) - self.have_library_requirements_changed = kwargs.get('have_library_requirements_changed', None) self.session_level_packages_enabled = kwargs.get('session_level_packages_enabled', None) + self.cache_size = kwargs.get('cache_size', None) + self.dynamic_executor_allocation = kwargs.get('dynamic_executor_allocation', None) self.spark_events_folder = kwargs.get('spark_events_folder', None) self.node_count = kwargs.get('node_count', None) self.library_requirements = kwargs.get('library_requirements', None) + self.custom_libraries = kwargs.get('custom_libraries', None) self.spark_config_properties = kwargs.get('spark_config_properties', None) self.spark_version = kwargs.get('spark_version', None) self.default_spark_log_folder = kwargs.get('default_spark_log_folder', None) self.node_size = kwargs.get('node_size', None) self.node_size_family = kwargs.get('node_size_family', None) + self.last_succeeded_timestamp = None class BigDataPoolResourceInfoListResult(msrest.serialization.Model): @@ -6901,6 +6963,44 @@ def __init__( self.details = kwargs.get('details', None) +class CloudErrorAutoGenerated(msrest.serialization.Model): + """The object that defines the structure of an Azure Synapse error response. + + All required parameters must be populated in order to send to Azure. + + :param code: Required. Error code. + :type code: str + :param message: Required. Error message. + :type message: str + :param target: Property name/path in request associated with error. 
+ :type target: str + :param details: Array with additional error details. + :type details: list[~azure.synapse.artifacts.models.CloudErrorAutoGenerated] + """ + + _validation = { + 'code': {'required': True}, + 'message': {'required': True}, + } + + _attribute_map = { + 'code': {'key': 'error.code', 'type': 'str'}, + 'message': {'key': 'error.message', 'type': 'str'}, + 'target': {'key': 'error.target', 'type': 'str'}, + 'details': {'key': 'error.details', 'type': '[CloudErrorAutoGenerated]'}, + } + + def __init__( + self, + **kwargs + ): + super(CloudErrorAutoGenerated, self).__init__(**kwargs) + self.code = kwargs['code'] + self.message = kwargs['message'] + self.target = kwargs.get('target', None) + self.details = kwargs.get('details', None) + + class CommonDataServiceForAppsEntityDataset(Dataset): """The Common Data Service for Apps entity dataset. @@ -7377,48 +7477,6 @@ def __init__( self.query = kwargs.get('query', None) -class ControlActivity(Activity): - """Base class for all control activities like IfCondition, ForEach , Until. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - } - - def __init__( - self, - **kwargs - ): - super(ControlActivity, self).__init__(**kwargs) - self.type = 'Container' # type: str - - class CopyActivity(ExecutionActivity): """Copy activity. @@ -9205,7 +9263,45 @@ def __init__( self.dataset_parameters = kwargs.get('dataset_parameters', None) -class DataFlowResource(AzureEntityResource): +class SubResource(AzureEntityResource): + """Azure Synapse nested resource, which belongs to a workspace. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar etag: Resource Etag. 
+ :vartype etag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SubResource, self).__init__(**kwargs) + + +class DataFlowResource(SubResource): """Data flow resource type. Variables are only populated by the server, and will be ignored when sending a request. @@ -9764,7 +9860,7 @@ def __init__( self.parameters = kwargs.get('parameters', None) -class DatasetResource(AzureEntityResource): +class DatasetResource(SubResource): """Dataset resource type. Variables are only populated by the server, and will be ignored when sending a request. @@ -10957,6 +11053,25 @@ def __init__( self.additional_options = kwargs.get('additional_options', None) +class DynamicExecutorAllocation(msrest.serialization.Model): + """Dynamic Executor Allocation Properties. + + :param enabled: Indicates whether Dynamic Executor Allocation is enabled or not. + :type enabled: bool + """ + + _attribute_map = { + 'enabled': {'key': 'enabled', 'type': 'bool'}, + } + + def __init__( + self, + **kwargs + ): + super(DynamicExecutorAllocation, self).__init__(**kwargs) + self.enabled = kwargs.get('enabled', None) + + class DynamicsAXLinkedService(LinkedService): """Dynamics AX linked service. @@ -12177,7 +12292,7 @@ def __init__( self.core_count = kwargs.get('core_count', None) -class ExecutePipelineActivity(Activity): +class ExecutePipelineActivity(ControlActivity): """Execute pipeline activity. All required parameters must be populated in order to send to Azure. @@ -12276,10 +12391,12 @@ class ExecuteSSISPackageActivity(ExecutionActivity): :type package_parameters: dict[str, ~azure.synapse.artifacts.models.SSISExecutionParameter] :param project_connection_managers: The project level connection managers to execute the SSIS package. - :type project_connection_managers: dict[str, object] + :type project_connection_managers: dict[str, dict[str, + ~azure.synapse.artifacts.models.SSISExecutionParameter]] :param package_connection_managers: The package level connection managers to execute the SSIS package. - :type package_connection_managers: dict[str, object] + :type package_connection_managers: dict[str, dict[str, + ~azure.synapse.artifacts.models.SSISExecutionParameter]] :param property_overrides: The property overrides to execute the SSIS package. :type property_overrides: dict[str, ~azure.synapse.artifacts.models.SSISPropertyOverride] :param log_location: SSIS package execution log location. 
@@ -12310,8 +12427,8 @@ class ExecuteSSISPackageActivity(ExecutionActivity): 'connect_via': {'key': 'typeProperties.connectVia', 'type': 'IntegrationRuntimeReference'}, 'project_parameters': {'key': 'typeProperties.projectParameters', 'type': '{SSISExecutionParameter}'}, 'package_parameters': {'key': 'typeProperties.packageParameters', 'type': '{SSISExecutionParameter}'}, - 'project_connection_managers': {'key': 'typeProperties.projectConnectionManagers', 'type': '{object}'}, - 'package_connection_managers': {'key': 'typeProperties.packageConnectionManagers', 'type': '{object}'}, + 'project_connection_managers': {'key': 'typeProperties.projectConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, + 'package_connection_managers': {'key': 'typeProperties.packageConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, 'property_overrides': {'key': 'typeProperties.propertyOverrides', 'type': '{SSISPropertyOverride}'}, 'log_location': {'key': 'typeProperties.logLocation', 'type': 'SSISLogLocation'}, } @@ -12715,7 +12832,7 @@ def __init__( self.recursive = kwargs.get('recursive', None) -class FilterActivity(Activity): +class FilterActivity(ControlActivity): """Filter and return results from input array based on the conditions. All required parameters must be populated in order to send to Azure. @@ -12767,7 +12884,7 @@ def __init__( self.condition = kwargs['condition'] -class ForEachActivity(Activity): +class ForEachActivity(ControlActivity): """This activity is used for iterating over a collection and execute given activities. All required parameters must be populated in order to send to Azure. @@ -15730,7 +15847,7 @@ def __init__( self.query = kwargs.get('query', None) -class IfConditionActivity(Activity): +class IfConditionActivity(ControlActivity): """This activity evaluates a boolean expression and executes either the activities under the ifTrueActivities property or the ifFalseActivities property depending on the result of the expression. All required parameters must be populated in order to send to Azure. @@ -16492,7 +16609,7 @@ def __init__( self.parameters = kwargs.get('parameters', None) -class IntegrationRuntimeResource(AzureEntityResource): +class IntegrationRuntimeResource(SubResource): """Integration runtime resource type. Variables are only populated by the server, and will be ignored when sending a request. @@ -17131,6 +17248,86 @@ def __init__( self.file_pattern = kwargs.get('file_pattern', None) +class LibraryInfo(msrest.serialization.Model): + """Library/package information of a Big Data pool powered by Apache Spark. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param name: Name of the library. + :type name: str + :param path: Storage blob path of library. + :type path: str + :param container_name: Storage blob container name. + :type container_name: str + :ivar uploaded_timestamp: The last update time of the library. + :vartype uploaded_timestamp: ~datetime.datetime + :param type: Type of the library. + :type type: str + :ivar provisioning_status: Provisioning status of the library/package. + :vartype provisioning_status: str + :ivar creator_id: Creator Id of the library/package. 
+ :vartype creator_id: str + """ + + _validation = { + 'uploaded_timestamp': {'readonly': True}, + 'provisioning_status': {'readonly': True}, + 'creator_id': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'path': {'key': 'path', 'type': 'str'}, + 'container_name': {'key': 'containerName', 'type': 'str'}, + 'uploaded_timestamp': {'key': 'uploadedTimestamp', 'type': 'iso-8601'}, + 'type': {'key': 'type', 'type': 'str'}, + 'provisioning_status': {'key': 'provisioningStatus', 'type': 'str'}, + 'creator_id': {'key': 'creatorId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(LibraryInfo, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.path = kwargs.get('path', None) + self.container_name = kwargs.get('container_name', None) + self.uploaded_timestamp = None + self.type = kwargs.get('type', None) + self.provisioning_status = None + self.creator_id = None + + +class LibraryListResponse(msrest.serialization.Model): + """A list of Library resources. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of Library. + :type value: list[~azure.synapse.artifacts.models.LibraryResource] + :param next_link: The link to the next page of results, if any remaining results exist. + :type next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[LibraryResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(LibraryListResponse, self).__init__(**kwargs) + self.value = kwargs['value'] + self.next_link = kwargs.get('next_link', None) + + class LibraryRequirements(msrest.serialization.Model): """Library requirements for a Big Data pool powered by Apache Spark. @@ -17164,6 +17361,172 @@ def __init__( self.filename = kwargs.get('filename', None) +class LibraryResource(SubResource): + """Library response details. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + :param properties: Required. Library/package properties. + :type properties: ~azure.synapse.artifacts.models.LibraryResourceProperties + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'LibraryResourceProperties'}, + } + + def __init__( + self, + **kwargs + ): + super(LibraryResource, self).__init__(**kwargs) + self.properties = kwargs['properties'] + + +class LibraryResourceInfo(msrest.serialization.Model): + """Library resource info. + + Variables are only populated by the server, and will be ignored when sending a request. 
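The new library models above compose in a straightforward way: LibraryResource is a SubResource wrapper whose only required field is properties, and LibraryListResponse is the paged envelope with a required value list. A minimal construction sketch with purely illustrative values (the server-populated, read-only fields stay None on the client side):

    from azure.synapse.artifacts import models as _models

    # LibraryResourceProperties is entirely server-populated, so it takes no
    # keyword arguments; LibraryResource requires 'properties'.
    props = _models.LibraryResourceProperties()
    resource = _models.LibraryResource(properties=props)

    # LibraryListResponse requires 'value'; 'next_link' carries pagination.
    page = _models.LibraryListResponse(value=[resource])
    print(page.next_link)  # None when there are no further pages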
+ + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar record_id: record Id of the library/package. + :vartype record_id: int + :ivar state: Provisioning status of the library/package. + :vartype state: str + :ivar created: The creation time of the library/package. + :vartype created: str + :ivar changed: The last updated time of the library/package. + :vartype changed: str + :ivar type: The type of the resource. E.g. LibraryArtifact. + :vartype type: str + :ivar name: Name of the library/package. + :vartype name: str + :ivar operation_id: Operation Id of the operation performed on library/package. + :vartype operation_id: str + :ivar artifact_id: artifact Id of the library/package. + :vartype artifact_id: str + """ + + _validation = { + 'id': {'readonly': True}, + 'record_id': {'readonly': True}, + 'state': {'readonly': True}, + 'created': {'readonly': True}, + 'changed': {'readonly': True}, + 'type': {'readonly': True}, + 'name': {'readonly': True}, + 'operation_id': {'readonly': True}, + 'artifact_id': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'record_id': {'key': 'recordId', 'type': 'int'}, + 'state': {'key': 'state', 'type': 'str'}, + 'created': {'key': 'created', 'type': 'str'}, + 'changed': {'key': 'changed', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'operation_id': {'key': 'operationId', 'type': 'str'}, + 'artifact_id': {'key': 'artifactId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(LibraryResourceInfo, self).__init__(**kwargs) + self.id = None + self.record_id = None + self.state = None + self.created = None + self.changed = None + self.type = None + self.name = None + self.operation_id = None + self.artifact_id = None + + +class LibraryResourceProperties(msrest.serialization.Model): + """Library/package properties. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name: Name of the library/package. + :vartype name: str + :ivar path: Location of library/package in storage account. + :vartype path: str + :ivar container_name: Container name of the library/package. + :vartype container_name: str + :ivar uploaded_timestamp: The last update time of the library/package. + :vartype uploaded_timestamp: str + :ivar type: Type of the library/package. + :vartype type: str + :ivar provisioning_status: Provisioning status of the library/package. + :vartype provisioning_status: str + :ivar creator_id: Creator Id of the library/package. 
+ :vartype creator_id: str + """ + + _validation = { + 'name': {'readonly': True}, + 'path': {'readonly': True}, + 'container_name': {'readonly': True}, + 'uploaded_timestamp': {'readonly': True}, + 'type': {'readonly': True}, + 'provisioning_status': {'readonly': True}, + 'creator_id': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'path': {'key': 'path', 'type': 'str'}, + 'container_name': {'key': 'containerName', 'type': 'str'}, + 'uploaded_timestamp': {'key': 'uploadedTimestamp', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'provisioning_status': {'key': 'provisioningStatus', 'type': 'str'}, + 'creator_id': {'key': 'creatorId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(LibraryResourceProperties, self).__init__(**kwargs) + self.name = None + self.path = None + self.container_name = None + self.uploaded_timestamp = None + self.type = None + self.provisioning_status = None + self.creator_id = None + + class LinkedIntegrationRuntimeType(msrest.serialization.Model): """The base definition of a linked integration runtime. @@ -17351,7 +17714,7 @@ def __init__( self.parameters = kwargs.get('parameters', None) -class LinkedServiceResource(AzureEntityResource): +class LinkedServiceResource(SubResource): """Linked service resource type. Variables are only populated by the server, and will be ignored when sending a request. @@ -20240,6 +20603,47 @@ def __init__( self.output_columns = kwargs.get('output_columns', None) +class OperationResult(msrest.serialization.Model): + """Operation status for the operation. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar status: Operation status. + :vartype status: str + :param code: Error code. + :type code: str + :param message: Error message. + :type message: str + :param target: Property name/path in request associated with error. + :type target: str + :param details: Array with additional error details. + :type details: list[~azure.synapse.artifacts.models.CloudErrorAutoGenerated] + """ + + _validation = { + 'status': {'readonly': True}, + } + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'code': {'key': 'error.code', 'type': 'str'}, + 'message': {'key': 'error.message', 'type': 'str'}, + 'target': {'key': 'error.target', 'type': 'str'}, + 'details': {'key': 'error.details', 'type': '[CloudErrorAutoGenerated]'}, + } + + def __init__( + self, + **kwargs + ): + super(OperationResult, self).__init__(**kwargs) + self.status = None + self.code = kwargs.get('code', None) + self.message = kwargs.get('message', None) + self.target = kwargs.get('target', None) + self.details = kwargs.get('details', None) + + class OracleLinkedService(LinkedService): """Oracle database. @@ -21615,7 +22019,7 @@ def __init__( self.name = kwargs.get('name', None) -class PipelineResource(AzureEntityResource): +class PipelineResource(SubResource): """Pipeline resource type. Variables are only populated by the server, and will be ignored when sending a request. @@ -22314,7 +22718,41 @@ def __init__( self.id = None -class PrivateEndpointConnection(Resource): +class ProxyResource(Resource): + """The resource model definition for a Azure Resource Manager proxy resource. It will not have tags and a location. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. 
Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ProxyResource, self).__init__(**kwargs) + + +class PrivateEndpointConnection(ProxyResource): """A private endpoint connection. Variables are only populated by the server, and will be ignored when sending a request. @@ -22396,40 +22834,6 @@ def __init__( self.actions_required = None -class ProxyResource(Resource): - """The resource model definition for a Azure Resource Manager proxy resource. It will not have tags and a location. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ProxyResource, self).__init__(**kwargs) - - class PurviewConfiguration(msrest.serialization.Model): """Purview Configuration. @@ -22940,7 +23344,7 @@ def __init__( self.next_link = None -class RerunTriggerResource(AzureEntityResource): +class RerunTriggerResource(SubResource): """RerunTrigger resource type. Variables are only populated by the server, and will be ignored when sending a request. @@ -26099,7 +26503,7 @@ def __init__( self.query = kwargs.get('query', None) -class SetVariableActivity(Activity): +class SetVariableActivity(ControlActivity): """Set value for a Variable. All required parameters must be populated in order to send to Azure. @@ -26787,7 +27191,7 @@ def __init__( self.job_properties = kwargs['job_properties'] -class SparkJobDefinitionResource(AzureEntityResource): +class SparkJobDefinitionResource(SubResource): """Spark job definition resource type. Variables are only populated by the server, and will be ignored when sending a request. @@ -29131,45 +29535,7 @@ def __init__( self.type = kwargs.get('type', None) -class SubResource(AzureEntityResource): - """Azure Synapse nested resource, which belongs to a workspace. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. 
"Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar etag: Resource Etag. - :vartype etag: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(SubResource, self).__init__(**kwargs) - - -class SwitchActivity(Activity): +class SwitchActivity(ControlActivity): """This activity evaluates an expression and executes activities under the cases property that correspond to the expression evaluation expected in the equals property. All required parameters must be populated in order to send to Azure. @@ -30120,7 +30486,7 @@ def __init__( self.reference_name = kwargs['reference_name'] -class TriggerResource(AzureEntityResource): +class TriggerResource(SubResource): """Trigger resource type. Variables are only populated by the server, and will be ignored when sending a request. @@ -30425,7 +30791,7 @@ def __init__( self.size = kwargs.get('size', None) -class UntilActivity(Activity): +class UntilActivity(ControlActivity): """This activity executes inner activities until the specified boolean expression results to true or timeout is reached, whichever is earlier. All required parameters must be populated in order to send to Azure. @@ -30517,7 +30883,7 @@ def __init__( self.value = kwargs['value'] -class ValidationActivity(Activity): +class ValidationActivity(ControlActivity): """This activity verifies that an external resource exists. All required parameters must be populated in order to send to Azure. @@ -30811,7 +31177,7 @@ def __init__( self.compute_subnet_id = kwargs.get('compute_subnet_id', None) -class WaitActivity(Activity): +class WaitActivity(ControlActivity): """This activity suspends pipeline execution for the specified interval. All required parameters must be populated in order to send to Azure. @@ -31144,7 +31510,7 @@ def __init__( self.password = kwargs['password'] -class WebHookActivity(Activity): +class WebHookActivity(ControlActivity): """WebHook activity. All required parameters must be populated in order to send to Azure. @@ -31435,6 +31801,8 @@ class Workspace(TrackedResource): ~azure.synapse.artifacts.models.WorkspaceRepositoryConfiguration :param purview_configuration: Purview Configuration. :type purview_configuration: ~azure.synapse.artifacts.models.PurviewConfiguration + :ivar adla_resource_id: The ADLA resource ID. 
+ :vartype adla_resource_id: str """ _validation = { @@ -31445,6 +31813,7 @@ class Workspace(TrackedResource): 'provisioning_state': {'readonly': True}, 'workspace_uid': {'readonly': True}, 'extra_properties': {'readonly': True}, + 'adla_resource_id': {'readonly': True}, } _attribute_map = { @@ -31469,6 +31838,7 @@ class Workspace(TrackedResource): 'managed_virtual_network_settings': {'key': 'properties.managedVirtualNetworkSettings', 'type': 'ManagedVirtualNetworkSettings'}, 'workspace_repository_configuration': {'key': 'properties.workspaceRepositoryConfiguration', 'type': 'WorkspaceRepositoryConfiguration'}, 'purview_configuration': {'key': 'properties.purviewConfiguration', 'type': 'PurviewConfiguration'}, + 'adla_resource_id': {'key': 'properties.adlaResourceId', 'type': 'str'}, } def __init__( @@ -31492,6 +31862,7 @@ def __init__( self.managed_virtual_network_settings = kwargs.get('managed_virtual_network_settings', None) self.workspace_repository_configuration = kwargs.get('workspace_repository_configuration', None) self.purview_configuration = kwargs.get('purview_configuration', None) + self.adla_resource_id = None class WorkspaceIdentity(msrest.serialization.Model): @@ -31574,6 +31945,10 @@ class WorkspaceRepositoryConfiguration(msrest.serialization.Model): :type collaboration_branch: str :param root_folder: Root folder to use in the repository. :type root_folder: str + :param last_commit_id: The last commit ID. + :type last_commit_id: str + :param tenant_id: The VSTS tenant ID. + :type tenant_id: str """ _attribute_map = { @@ -31584,6 +31959,8 @@ class WorkspaceRepositoryConfiguration(msrest.serialization.Model): 'repository_name': {'key': 'repositoryName', 'type': 'str'}, 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, 'root_folder': {'key': 'rootFolder', 'type': 'str'}, + 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, } def __init__( @@ -31598,6 +31975,8 @@ def __init__( self.repository_name = kwargs.get('repository_name', None) self.collaboration_branch = kwargs.get('collaboration_branch', None) self.root_folder = kwargs.get('root_folder', None) + self.last_commit_id = kwargs.get('last_commit_id', None) + self.tenant_id = kwargs.get('tenant_id', None) class WorkspaceUpdateParameters(msrest.serialization.Model): diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models_py3.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models_py3.py index b2bc337bc3f9..a09237c67c71 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models_py3.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models_py3.py @@ -19,7 +19,7 @@ class Activity(msrest.serialization.Model): """A pipeline activity. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AppendVariableActivity, ControlActivity, ExecutePipelineActivity, ExecutionActivity, FilterActivity, ForEachActivity, IfConditionActivity, SetVariableActivity, SqlPoolStoredProcedureActivity, SwitchActivity, UntilActivity, ValidationActivity, WaitActivity, WebHookActivity. + sub-classes are: ControlActivity, ExecutionActivity, SqlPoolStoredProcedureActivity. All required parameters must be populated in order to send to Azure. 
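The reshaped sub-class list changes how polymorphic deserialization is routed: Activity now only knows the three intermediate branches, and the control-flow leaf types hang off ControlActivity's own _subtype_map, as the next hunk shows. msrest flattens nested _subtype_map entries during classification (its _flatten_subtype helper), so a payload whose discriminator is a leaf value still resolves to the leaf model. A hedged sketch of that behavior, using a placeholder AppendVariable payload:

    from msrest.serialization import Deserializer
    from azure.synapse.artifacts import models as _models

    client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)}
    deserialize = Deserializer(client_models)

    payload = {
        "name": "append-item",
        "type": "AppendVariable",  # discriminator for AppendVariableActivity
        "typeProperties": {"variableName": "items", "value": 1},
    }

    # Classification walks Activity -> ControlActivity -> AppendVariableActivity.
    activity = deserialize("Activity", payload)
    print(type(activity).__name__)  # AppendVariableActivity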
@@ -53,7 +53,7 @@ class Activity(msrest.serialization.Model): } _subtype_map = { - 'type': {'AppendVariable': 'AppendVariableActivity', 'Container': 'ControlActivity', 'ExecutePipeline': 'ExecutePipelineActivity', 'Execution': 'ExecutionActivity', 'Filter': 'FilterActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'SetVariable': 'SetVariableActivity', 'SqlPoolStoredProcedure': 'SqlPoolStoredProcedureActivity', 'Switch': 'SwitchActivity', 'Until': 'UntilActivity', 'Validation': 'ValidationActivity', 'Wait': 'WaitActivity', 'WebHook': 'WebHookActivity'} + 'type': {'Container': 'ControlActivity', 'Execution': 'ExecutionActivity', 'SqlPoolStoredProcedure': 'SqlPoolStoredProcedureActivity'} } def __init__( @@ -1333,7 +1333,62 @@ def __init__( self.modified_datetime_end = modified_datetime_end -class AppendVariableActivity(Activity): +class ControlActivity(Activity): + """Base class for all control activities like IfCondition, ForEach , Until. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AppendVariableActivity, ExecutePipelineActivity, FilterActivity, ForEachActivity, IfConditionActivity, SetVariableActivity, SwitchActivity, UntilActivity, ValidationActivity, WaitActivity, WebHookActivity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + } + + _subtype_map = { + 'type': {'AppendVariable': 'AppendVariableActivity', 'ExecutePipeline': 'ExecutePipelineActivity', 'Filter': 'FilterActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'SetVariable': 'SetVariableActivity', 'Switch': 'SwitchActivity', 'Until': 'UntilActivity', 'Validation': 'ValidationActivity', 'Wait': 'WaitActivity', 'WebHook': 'WebHookActivity'} + } + + def __init__( + self, + *, + name: str, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + **kwargs + ): + super(ControlActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.type = 'Container' # type: str + + +class AppendVariableActivity(ControlActivity): """Append value for a Variable of type Array. All required parameters must be populated in order to send to Azure. 
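For callers, the new intermediate base is source-compatible: the leaf constructors are unchanged, and the only observable difference is the extra level in the class hierarchy. A small sketch (the variable name and value are placeholders):

    from azure.synapse.artifacts import models as _models

    act = _models.AppendVariableActivity(
        name="append-item", variable_name="items", value=1
    )

    assert isinstance(act, _models.ControlActivity)  # new intermediate base
    assert isinstance(act, _models.Activity)
    print(act.type)  # 'AppendVariable', the constant filled in by the model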
@@ -6841,16 +6896,20 @@ class BigDataPoolResourceInfo(TrackedResource): :type auto_pause: ~azure.synapse.artifacts.models.AutoPauseProperties :param is_compute_isolation_enabled: Whether compute isolation is required or not. :type is_compute_isolation_enabled: bool - :param have_library_requirements_changed: Whether library requirements changed. - :type have_library_requirements_changed: bool :param session_level_packages_enabled: Whether session level packages enabled. :type session_level_packages_enabled: bool + :param cache_size: The cache size. + :type cache_size: int + :param dynamic_executor_allocation: Dynamic Executor Allocation. + :type dynamic_executor_allocation: ~azure.synapse.artifacts.models.DynamicExecutorAllocation :param spark_events_folder: The Spark events folder. :type spark_events_folder: str :param node_count: The number of nodes in the Big Data pool. :type node_count: int :param library_requirements: Library version requirements. :type library_requirements: ~azure.synapse.artifacts.models.LibraryRequirements + :param custom_libraries: List of custom libraries/packages associated with the spark pool. + :type custom_libraries: list[~azure.synapse.artifacts.models.LibraryInfo] :param spark_config_properties: Spark configuration file to specify additional properties. :type spark_config_properties: ~azure.synapse.artifacts.models.LibraryRequirements :param spark_version: The Apache Spark version. @@ -6863,6 +6922,8 @@ class BigDataPoolResourceInfo(TrackedResource): :param node_size_family: The kind of nodes that the Big Data pool provides. Possible values include: "None", "MemoryOptimized". :type node_size_family: str or ~azure.synapse.artifacts.models.NodeSizeFamily + :ivar last_succeeded_timestamp: The time when the Big Data pool was updated successfully. 
+ :vartype last_succeeded_timestamp: ~datetime.datetime """ _validation = { @@ -6870,6 +6931,7 @@ class BigDataPoolResourceInfo(TrackedResource): 'name': {'readonly': True}, 'type': {'readonly': True}, 'location': {'required': True}, + 'last_succeeded_timestamp': {'readonly': True}, } _attribute_map = { @@ -6883,16 +6945,19 @@ class BigDataPoolResourceInfo(TrackedResource): 'creation_date': {'key': 'properties.creationDate', 'type': 'iso-8601'}, 'auto_pause': {'key': 'properties.autoPause', 'type': 'AutoPauseProperties'}, 'is_compute_isolation_enabled': {'key': 'properties.isComputeIsolationEnabled', 'type': 'bool'}, - 'have_library_requirements_changed': {'key': 'properties.haveLibraryRequirementsChanged', 'type': 'bool'}, 'session_level_packages_enabled': {'key': 'properties.sessionLevelPackagesEnabled', 'type': 'bool'}, + 'cache_size': {'key': 'properties.cacheSize', 'type': 'int'}, + 'dynamic_executor_allocation': {'key': 'properties.dynamicExecutorAllocation', 'type': 'DynamicExecutorAllocation'}, 'spark_events_folder': {'key': 'properties.sparkEventsFolder', 'type': 'str'}, 'node_count': {'key': 'properties.nodeCount', 'type': 'int'}, 'library_requirements': {'key': 'properties.libraryRequirements', 'type': 'LibraryRequirements'}, + 'custom_libraries': {'key': 'properties.customLibraries', 'type': '[LibraryInfo]'}, 'spark_config_properties': {'key': 'properties.sparkConfigProperties', 'type': 'LibraryRequirements'}, 'spark_version': {'key': 'properties.sparkVersion', 'type': 'str'}, 'default_spark_log_folder': {'key': 'properties.defaultSparkLogFolder', 'type': 'str'}, 'node_size': {'key': 'properties.nodeSize', 'type': 'str'}, 'node_size_family': {'key': 'properties.nodeSizeFamily', 'type': 'str'}, + 'last_succeeded_timestamp': {'key': 'properties.lastSucceededTimestamp', 'type': 'iso-8601'}, } def __init__( @@ -6905,11 +6970,13 @@ def __init__( creation_date: Optional[datetime.datetime] = None, auto_pause: Optional["AutoPauseProperties"] = None, is_compute_isolation_enabled: Optional[bool] = None, - have_library_requirements_changed: Optional[bool] = None, session_level_packages_enabled: Optional[bool] = None, + cache_size: Optional[int] = None, + dynamic_executor_allocation: Optional["DynamicExecutorAllocation"] = None, spark_events_folder: Optional[str] = None, node_count: Optional[int] = None, library_requirements: Optional["LibraryRequirements"] = None, + custom_libraries: Optional[List["LibraryInfo"]] = None, spark_config_properties: Optional["LibraryRequirements"] = None, spark_version: Optional[str] = None, default_spark_log_folder: Optional[str] = None, @@ -6923,16 +6990,19 @@ def __init__( self.creation_date = creation_date self.auto_pause = auto_pause self.is_compute_isolation_enabled = is_compute_isolation_enabled - self.have_library_requirements_changed = have_library_requirements_changed self.session_level_packages_enabled = session_level_packages_enabled + self.cache_size = cache_size + self.dynamic_executor_allocation = dynamic_executor_allocation self.spark_events_folder = spark_events_folder self.node_count = node_count self.library_requirements = library_requirements + self.custom_libraries = custom_libraries self.spark_config_properties = spark_config_properties self.spark_version = spark_version self.default_spark_log_folder = default_spark_log_folder self.node_size = node_size self.node_size_family = node_size_family + self.last_succeeded_timestamp = None class BigDataPoolResourceInfoListResult(msrest.serialization.Model): @@ -7891,6 +7961,49 @@ def __init__( 
self.details = details +class CloudErrorAutoGenerated(msrest.serialization.Model): + """The object that defines the structure of an Azure Synapse error response. + + All required parameters must be populated in order to send to Azure. + + :param code: Required. Error code. + :type code: str + :param message: Required. Error message. + :type message: str + :param target: Property name/path in request associated with error. + :type target: str + :param details: Array with additional error details. + :type details: list[~azure.synapse.artifacts.models.CloudErrorAutoGenerated] + """ + + _validation = { + 'code': {'required': True}, + 'message': {'required': True}, + } + + _attribute_map = { + 'code': {'key': 'error.code', 'type': 'str'}, + 'message': {'key': 'error.message', 'type': 'str'}, + 'target': {'key': 'error.target', 'type': 'str'}, + 'details': {'key': 'error.details', 'type': '[CloudErrorAutoGenerated]'}, + } + + def __init__( + self, + *, + code: str, + message: str, + target: Optional[str] = None, + details: Optional[List["CloudErrorAutoGenerated"]] = None, + **kwargs + ): + super(CloudErrorAutoGenerated, self).__init__(**kwargs) + self.code = code + self.message = message + self.target = target + self.details = details + + class CommonDataServiceForAppsEntityDataset(Dataset): """The Common Data Service for Apps entity dataset. @@ -8441,54 +8554,6 @@ def __init__( self.query = query -class ControlActivity(Activity): - """Base class for all control activities like IfCondition, ForEach , Until. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - } - - def __init__( - self, - *, - name: str, - additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - **kwargs - ): - super(ControlActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type = 'Container' # type: str - - class CopyActivity(ExecutionActivity): """Copy activity. @@ -10541,7 +10606,45 @@ def __init__( self.dataset_parameters = dataset_parameters -class DataFlowResource(AzureEntityResource): +class SubResource(AzureEntityResource): + """Azure Synapse nested resource, which belongs to a workspace. 
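SubResource here replaces AzureEntityResource as the direct base for the workspace-scoped resource types (DataFlowResource, DatasetResource, LinkedServiceResource, PipelineResource, TriggerResource and so on), while itself remaining an AzureEntityResource, so existing isinstance checks continue to hold. A quick sketch of the resulting hierarchy:

    from azure.synapse.artifacts import models as _models

    # Workspace-scoped resources now share SubResource, which still derives
    # from AzureEntityResource; the MRO gains one level, nothing is removed.
    assert issubclass(_models.DatasetResource, _models.SubResource)
    assert issubclass(_models.LinkedServiceResource, _models.SubResource)
    assert issubclass(_models.SubResource, _models.AzureEntityResource)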
+ + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SubResource, self).__init__(**kwargs) + + +class DataFlowResource(SubResource): """Data flow resource type. Variables are only populated by the server, and will be ignored when sending a request. @@ -11163,7 +11266,7 @@ def __init__( self.parameters = parameters -class DatasetResource(AzureEntityResource): +class DatasetResource(SubResource): """Dataset resource type. Variables are only populated by the server, and will be ignored when sending a request. @@ -12523,6 +12626,27 @@ def __init__( self.additional_options = additional_options +class DynamicExecutorAllocation(msrest.serialization.Model): + """Dynamic Executor Allocation Properties. + + :param enabled: Indicates whether Dynamic Executor Allocation is enabled or not. + :type enabled: bool + """ + + _attribute_map = { + 'enabled': {'key': 'enabled', 'type': 'bool'}, + } + + def __init__( + self, + *, + enabled: Optional[bool] = None, + **kwargs + ): + super(DynamicExecutorAllocation, self).__init__(**kwargs) + self.enabled = enabled + + class DynamicsAXLinkedService(LinkedService): """Dynamics AX linked service. @@ -13918,7 +14042,7 @@ def __init__( self.core_count = core_count -class ExecutePipelineActivity(Activity): +class ExecutePipelineActivity(ControlActivity): """Execute pipeline activity. All required parameters must be populated in order to send to Azure. @@ -14026,10 +14150,12 @@ class ExecuteSSISPackageActivity(ExecutionActivity): :type package_parameters: dict[str, ~azure.synapse.artifacts.models.SSISExecutionParameter] :param project_connection_managers: The project level connection managers to execute the SSIS package. - :type project_connection_managers: dict[str, object] + :type project_connection_managers: dict[str, dict[str, + ~azure.synapse.artifacts.models.SSISExecutionParameter]] :param package_connection_managers: The package level connection managers to execute the SSIS package. - :type package_connection_managers: dict[str, object] + :type package_connection_managers: dict[str, dict[str, + ~azure.synapse.artifacts.models.SSISExecutionParameter]] :param property_overrides: The property overrides to execute the SSIS package. :type property_overrides: dict[str, ~azure.synapse.artifacts.models.SSISPropertyOverride] :param log_location: SSIS package execution log location. 
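The connection-manager payloads above gain real typing: instead of dict[str, object], both project_connection_managers and package_connection_managers are now a mapping from connection-manager name to property name to SSISExecutionParameter. A hedged sketch of the expected shape (the connection-manager and property names below are placeholders):

    from azure.synapse.artifacts import models as _models

    # One project-level connection manager, overriding its ConnectionString
    # property. Each leaf value must now be an SSISExecutionParameter rather
    # than an arbitrary object.
    project_connection_managers = {
        "MyOleDbConnection": {
            "ConnectionString": _models.SSISExecutionParameter(
                value="Data Source=.;Initial Catalog=mydb;Integrated Security=SSPI;"
            )
        }
    }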
@@ -14060,8 +14186,8 @@ class ExecuteSSISPackageActivity(ExecutionActivity): 'connect_via': {'key': 'typeProperties.connectVia', 'type': 'IntegrationRuntimeReference'}, 'project_parameters': {'key': 'typeProperties.projectParameters', 'type': '{SSISExecutionParameter}'}, 'package_parameters': {'key': 'typeProperties.packageParameters', 'type': '{SSISExecutionParameter}'}, - 'project_connection_managers': {'key': 'typeProperties.projectConnectionManagers', 'type': '{object}'}, - 'package_connection_managers': {'key': 'typeProperties.packageConnectionManagers', 'type': '{object}'}, + 'project_connection_managers': {'key': 'typeProperties.projectConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, + 'package_connection_managers': {'key': 'typeProperties.packageConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, 'property_overrides': {'key': 'typeProperties.propertyOverrides', 'type': '{SSISPropertyOverride}'}, 'log_location': {'key': 'typeProperties.logLocation', 'type': 'SSISLogLocation'}, } @@ -14084,8 +14210,8 @@ def __init__( execution_credential: Optional["SSISExecutionCredential"] = None, project_parameters: Optional[Dict[str, "SSISExecutionParameter"]] = None, package_parameters: Optional[Dict[str, "SSISExecutionParameter"]] = None, - project_connection_managers: Optional[Dict[str, object]] = None, - package_connection_managers: Optional[Dict[str, object]] = None, + project_connection_managers: Optional[Dict[str, Dict[str, "SSISExecutionParameter"]]] = None, + package_connection_managers: Optional[Dict[str, Dict[str, "SSISExecutionParameter"]]] = None, property_overrides: Optional[Dict[str, "SSISPropertyOverride"]] = None, log_location: Optional["SSISLogLocation"] = None, **kwargs @@ -14532,7 +14658,7 @@ def __init__( self.recursive = recursive -class FilterActivity(Activity): +class FilterActivity(ControlActivity): """Filter and return results from input array based on the conditions. All required parameters must be populated in order to send to Azure. @@ -14592,7 +14718,7 @@ def __init__( self.condition = condition -class ForEachActivity(Activity): +class ForEachActivity(ControlActivity): """This activity is used for iterating over a collection and execute given activities. All required parameters must be populated in order to send to Azure. @@ -18039,7 +18165,7 @@ def __init__( self.query = query -class IfConditionActivity(Activity): +class IfConditionActivity(ControlActivity): """This activity evaluates a boolean expression and executes either the activities under the ifTrueActivities property or the ifFalseActivities property depending on the result of the expression. All required parameters must be populated in order to send to Azure. @@ -18913,7 +19039,7 @@ def __init__( self.parameters = parameters -class IntegrationRuntimeResource(AzureEntityResource): +class IntegrationRuntimeResource(SubResource): """Integration runtime resource type. Variables are only populated by the server, and will be ignored when sending a request. @@ -19643,6 +19769,94 @@ def __init__( self.file_pattern = file_pattern +class LibraryInfo(msrest.serialization.Model): + """Library/package information of a Big Data pool powered by Apache Spark. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param name: Name of the library. + :type name: str + :param path: Storage blob path of library. + :type path: str + :param container_name: Storage blob container name. 
+ :type container_name: str + :ivar uploaded_timestamp: The last update time of the library. + :vartype uploaded_timestamp: ~datetime.datetime + :param type: Type of the library. + :type type: str + :ivar provisioning_status: Provisioning status of the library/package. + :vartype provisioning_status: str + :ivar creator_id: Creator Id of the library/package. + :vartype creator_id: str + """ + + _validation = { + 'uploaded_timestamp': {'readonly': True}, + 'provisioning_status': {'readonly': True}, + 'creator_id': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'path': {'key': 'path', 'type': 'str'}, + 'container_name': {'key': 'containerName', 'type': 'str'}, + 'uploaded_timestamp': {'key': 'uploadedTimestamp', 'type': 'iso-8601'}, + 'type': {'key': 'type', 'type': 'str'}, + 'provisioning_status': {'key': 'provisioningStatus', 'type': 'str'}, + 'creator_id': {'key': 'creatorId', 'type': 'str'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + path: Optional[str] = None, + container_name: Optional[str] = None, + type: Optional[str] = None, + **kwargs + ): + super(LibraryInfo, self).__init__(**kwargs) + self.name = name + self.path = path + self.container_name = container_name + self.uploaded_timestamp = None + self.type = type + self.provisioning_status = None + self.creator_id = None + + +class LibraryListResponse(msrest.serialization.Model): + """A list of Library resources. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of Library. + :type value: list[~azure.synapse.artifacts.models.LibraryResource] + :param next_link: The link to the next page of results, if any remaining results exist. + :type next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[LibraryResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: List["LibraryResource"], + next_link: Optional[str] = None, + **kwargs + ): + super(LibraryListResponse, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + class LibraryRequirements(msrest.serialization.Model): """Library requirements for a Big Data pool powered by Apache Spark. @@ -19679,6 +19893,174 @@ def __init__( self.filename = filename +class LibraryResource(SubResource): + """Library response details. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + :param properties: Required. Library/package properties. 
+ :type properties: ~azure.synapse.artifacts.models.LibraryResourceProperties + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'LibraryResourceProperties'}, + } + + def __init__( + self, + *, + properties: "LibraryResourceProperties", + **kwargs + ): + super(LibraryResource, self).__init__(**kwargs) + self.properties = properties + + +class LibraryResourceInfo(msrest.serialization.Model): + """Library resource info. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar record_id: record Id of the library/package. + :vartype record_id: int + :ivar state: Provisioning status of the library/package. + :vartype state: str + :ivar created: The creation time of the library/package. + :vartype created: str + :ivar changed: The last updated time of the library/package. + :vartype changed: str + :ivar type: The type of the resource. E.g. LibraryArtifact. + :vartype type: str + :ivar name: Name of the library/package. + :vartype name: str + :ivar operation_id: Operation Id of the operation performed on library/package. + :vartype operation_id: str + :ivar artifact_id: artifact Id of the library/package. + :vartype artifact_id: str + """ + + _validation = { + 'id': {'readonly': True}, + 'record_id': {'readonly': True}, + 'state': {'readonly': True}, + 'created': {'readonly': True}, + 'changed': {'readonly': True}, + 'type': {'readonly': True}, + 'name': {'readonly': True}, + 'operation_id': {'readonly': True}, + 'artifact_id': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'record_id': {'key': 'recordId', 'type': 'int'}, + 'state': {'key': 'state', 'type': 'str'}, + 'created': {'key': 'created', 'type': 'str'}, + 'changed': {'key': 'changed', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'operation_id': {'key': 'operationId', 'type': 'str'}, + 'artifact_id': {'key': 'artifactId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(LibraryResourceInfo, self).__init__(**kwargs) + self.id = None + self.record_id = None + self.state = None + self.created = None + self.changed = None + self.type = None + self.name = None + self.operation_id = None + self.artifact_id = None + + +class LibraryResourceProperties(msrest.serialization.Model): + """Library/package properties. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name: Name of the library/package. + :vartype name: str + :ivar path: Location of library/package in storage account. + :vartype path: str + :ivar container_name: Container name of the library/package. + :vartype container_name: str + :ivar uploaded_timestamp: The last update time of the library/package. + :vartype uploaded_timestamp: str + :ivar type: Type of the library/package. + :vartype type: str + :ivar provisioning_status: Provisioning status of the library/package. 
+ :vartype provisioning_status: str
+ :ivar creator_id: Creator Id of the library/package.
+ :vartype creator_id: str
+ """
+
+ _validation = {
+ 'name': {'readonly': True},
+ 'path': {'readonly': True},
+ 'container_name': {'readonly': True},
+ 'uploaded_timestamp': {'readonly': True},
+ 'type': {'readonly': True},
+ 'provisioning_status': {'readonly': True},
+ 'creator_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'path': {'key': 'path', 'type': 'str'},
+ 'container_name': {'key': 'containerName', 'type': 'str'},
+ 'uploaded_timestamp': {'key': 'uploadedTimestamp', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'provisioning_status': {'key': 'provisioningStatus', 'type': 'str'},
+ 'creator_id': {'key': 'creatorId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(LibraryResourceProperties, self).__init__(**kwargs)
+ self.name = None
+ self.path = None
+ self.container_name = None
+ self.uploaded_timestamp = None
+ self.type = None
+ self.provisioning_status = None
+ self.creator_id = None
+
+
 class LinkedIntegrationRuntimeType(msrest.serialization.Model):
 """The base definition of a linked integration runtime.
@@ -19880,7 +20262,7 @@ def __init__(
 self.parameters = parameters
 
-class LinkedServiceResource(AzureEntityResource):
+class LinkedServiceResource(SubResource):
 """Linked service resource type.
 
 Variables are only populated by the server, and will be ignored when sending a request.
@@ -23202,6 +23584,52 @@ def __init__(
 self.output_columns = output_columns
 
+class OperationResult(msrest.serialization.Model):
+ """Operation status for the operation.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar status: Operation status.
+ :vartype status: str
+ :param code: Error code.
+ :type code: str
+ :param message: Error message.
+ :type message: str
+ :param target: Property name/path in request associated with error.
+ :type target: str
+ :param details: Array with additional error details.
+ :type details: list[~azure.synapse.artifacts.models.CloudErrorAutoGenerated]
+ """
+
+ _validation = {
+ 'status': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'status': {'key': 'status', 'type': 'str'},
+ 'code': {'key': 'error.code', 'type': 'str'},
+ 'message': {'key': 'error.message', 'type': 'str'},
+ 'target': {'key': 'error.target', 'type': 'str'},
+ 'details': {'key': 'error.details', 'type': '[CloudErrorAutoGenerated]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ code: Optional[str] = None,
+ message: Optional[str] = None,
+ target: Optional[str] = None,
+ details: Optional[List["CloudErrorAutoGenerated"]] = None,
+ **kwargs
+ ):
+ super(OperationResult, self).__init__(**kwargs)
+ self.status = None
+ self.code = code
+ self.message = message
+ self.target = target
+ self.details = details
+
+
 class OracleLinkedService(LinkedService):
 """Oracle database.
@@ -24787,7 +25215,7 @@ def __init__(
 self.name = name
 
-class PipelineResource(AzureEntityResource):
+class PipelineResource(SubResource):
 """Pipeline resource type.
 
 Variables are only populated by the server, and will be ignored when sending a request.
@@ -25574,7 +26002,41 @@ def __init__(
 self.id = None
 
-class PrivateEndpointConnection(Resource):
+class ProxyResource(Resource):
+ """The resource model definition for an Azure Resource Manager proxy resource. It will not have tags and a location.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+ + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ProxyResource, self).__init__(**kwargs) + + +class PrivateEndpointConnection(ProxyResource): """A private endpoint connection. Variables are only populated by the server, and will be ignored when sending a request. @@ -25662,40 +26124,6 @@ def __init__( self.actions_required = None -class ProxyResource(Resource): - """The resource model definition for a Azure Resource Manager proxy resource. It will not have tags and a location. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ProxyResource, self).__init__(**kwargs) - - class PurviewConfiguration(msrest.serialization.Model): """Purview Configuration. @@ -26278,7 +26706,7 @@ def __init__( self.next_link = None -class RerunTriggerResource(AzureEntityResource): +class RerunTriggerResource(SubResource): """RerunTrigger resource type. Variables are only populated by the server, and will be ignored when sending a request. @@ -29915,7 +30343,7 @@ def __init__( self.query = query -class SetVariableActivity(Activity): +class SetVariableActivity(ControlActivity): """Set value for a Variable. All required parameters must be populated in order to send to Azure. @@ -30714,7 +31142,7 @@ def __init__( self.job_properties = job_properties -class SparkJobDefinitionResource(AzureEntityResource): +class SparkJobDefinitionResource(SubResource): """Spark job definition resource type. Variables are only populated by the server, and will be ignored when sending a request. @@ -33409,45 +33837,7 @@ def __init__( self.type = type -class SubResource(AzureEntityResource): - """Azure Synapse nested resource, which belongs to a workspace. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. 
"Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar etag: Resource Etag. - :vartype etag: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(SubResource, self).__init__(**kwargs) - - -class SwitchActivity(Activity): +class SwitchActivity(ControlActivity): """This activity evaluates an expression and executes activities under the cases property that correspond to the expression evaluation expected in the equals property. All required parameters must be populated in order to send to Azure. @@ -34535,7 +34925,7 @@ def __init__( self.reference_name = reference_name -class TriggerResource(AzureEntityResource): +class TriggerResource(SubResource): """Trigger resource type. Variables are only populated by the server, and will be ignored when sending a request. @@ -34864,7 +35254,7 @@ def __init__( self.size = size -class UntilActivity(Activity): +class UntilActivity(ControlActivity): """This activity executes inner activities until the specified boolean expression results to true or timeout is reached, whichever is earlier. All required parameters must be populated in order to send to Azure. @@ -34968,7 +35358,7 @@ def __init__( self.value = value -class ValidationActivity(Activity): +class ValidationActivity(ControlActivity): """This activity verifies that an external resource exists. All required parameters must be populated in order to send to Azure. @@ -35306,7 +35696,7 @@ def __init__( self.compute_subnet_id = compute_subnet_id -class WaitActivity(Activity): +class WaitActivity(ControlActivity): """This activity suspends pipeline execution for the specified interval. All required parameters must be populated in order to send to Azure. @@ -35680,7 +36070,7 @@ def __init__( self.password = password -class WebHookActivity(Activity): +class WebHookActivity(ControlActivity): """WebHook activity. All required parameters must be populated in order to send to Azure. @@ -36007,6 +36397,8 @@ class Workspace(TrackedResource): ~azure.synapse.artifacts.models.WorkspaceRepositoryConfiguration :param purview_configuration: Purview Configuration. :type purview_configuration: ~azure.synapse.artifacts.models.PurviewConfiguration + :ivar adla_resource_id: The ADLA resource ID. 
+ :vartype adla_resource_id: str """ _validation = { @@ -36017,6 +36409,7 @@ class Workspace(TrackedResource): 'provisioning_state': {'readonly': True}, 'workspace_uid': {'readonly': True}, 'extra_properties': {'readonly': True}, + 'adla_resource_id': {'readonly': True}, } _attribute_map = { @@ -36041,6 +36434,7 @@ class Workspace(TrackedResource): 'managed_virtual_network_settings': {'key': 'properties.managedVirtualNetworkSettings', 'type': 'ManagedVirtualNetworkSettings'}, 'workspace_repository_configuration': {'key': 'properties.workspaceRepositoryConfiguration', 'type': 'WorkspaceRepositoryConfiguration'}, 'purview_configuration': {'key': 'properties.purviewConfiguration', 'type': 'PurviewConfiguration'}, + 'adla_resource_id': {'key': 'properties.adlaResourceId', 'type': 'str'}, } def __init__( @@ -36080,6 +36474,7 @@ def __init__( self.managed_virtual_network_settings = managed_virtual_network_settings self.workspace_repository_configuration = workspace_repository_configuration self.purview_configuration = purview_configuration + self.adla_resource_id = None class WorkspaceIdentity(msrest.serialization.Model): @@ -36165,6 +36560,10 @@ class WorkspaceRepositoryConfiguration(msrest.serialization.Model): :type collaboration_branch: str :param root_folder: Root folder to use in the repository. :type root_folder: str + :param last_commit_id: The last commit ID. + :type last_commit_id: str + :param tenant_id: The VSTS tenant ID. + :type tenant_id: str """ _attribute_map = { @@ -36175,6 +36574,8 @@ class WorkspaceRepositoryConfiguration(msrest.serialization.Model): 'repository_name': {'key': 'repositoryName', 'type': 'str'}, 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, 'root_folder': {'key': 'rootFolder', 'type': 'str'}, + 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, } def __init__( @@ -36187,6 +36588,8 @@ def __init__( repository_name: Optional[str] = None, collaboration_branch: Optional[str] = None, root_folder: Optional[str] = None, + last_commit_id: Optional[str] = None, + tenant_id: Optional[str] = None, **kwargs ): super(WorkspaceRepositoryConfiguration, self).__init__(**kwargs) @@ -36197,6 +36600,8 @@ def __init__( self.repository_name = repository_name self.collaboration_branch = collaboration_branch self.root_folder = root_folder + self.last_commit_id = last_commit_id + self.tenant_id = tenant_id class WorkspaceUpdateParameters(msrest.serialization.Model): diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/__init__.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/__init__.py index 74faadba8788..6b54fe6c25ae 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/__init__.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/__init__.py @@ -21,6 +21,7 @@ from ._sql_pools_operations import SqlPoolsOperations from ._big_data_pools_operations import BigDataPoolsOperations from ._integration_runtimes_operations import IntegrationRuntimesOperations +from ._library_operations import LibraryOperations from ._workspace_git_repo_management_operations import WorkspaceGitRepoManagementOperations __all__ = [ @@ -39,5 +40,6 @@ 'SqlPoolsOperations', 'BigDataPoolsOperations', 'IntegrationRuntimesOperations', + 'LibraryOperations', 'WorkspaceGitRepoManagementOperations', ] diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_big_data_pools_operations.py 
b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_big_data_pools_operations.py index 2b2366e73088..e0f0ab45aed1 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_big_data_pools_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_big_data_pools_operations.py @@ -84,7 +84,7 @@ def list( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.ErrorContract, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorContract, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('BigDataPoolResourceInfoListResult', pipeline_response) @@ -140,7 +140,7 @@ def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.ErrorContract, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorContract, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('BigDataPoolResourceInfo', pipeline_response) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_data_flow_debug_session_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_data_flow_debug_session_operations.py index f180e1bbc6a3..d15f5967f89e 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_data_flow_debug_session_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_data_flow_debug_session_operations.py @@ -86,7 +86,7 @@ def _create_data_flow_debug_session_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) response_headers = {} @@ -115,8 +115,8 @@ def begin_create_data_flow_debug_session( :type request: ~azure.synapse.artifacts.models.CreateDataFlowDebugSessionRequest :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the LROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
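The reworded polling docstring above recurs across all of the begin_* operations in this package, so one usage sketch covers them all. A minimal sketch of the three accepted forms, assuming an already constructed ArtifactsClient named client and a prepared CreateDataFlowDebugSessionRequest named request (both variable names are illustrative):

    from azure.core.polling.base_polling import LROBasePolling

    # polling=True selects the default LROBasePolling; polling=False disables polling (NoPolling).
    poller = client.data_flow_debug_session.begin_create_data_flow_debug_session(request, polling=True)
    session = poller.result()

    # Or pass your own initialized polling object for a custom strategy.
    poller = client.data_flow_debug_session.begin_create_data_flow_debug_session(
        request, polling=LROBasePolling(timeout=10)
    )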
:return: An instance of LROPoller that returns either CreateDataFlowDebugSessionResponse or the result of cls(response) @@ -226,7 +226,7 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error) @@ -286,7 +286,7 @@ def add_data_flow( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('AddDataFlowToDebugSessionResponse', pipeline_response) @@ -346,7 +346,7 @@ def delete_data_flow_debug_session( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if cls: @@ -394,7 +394,7 @@ def _execute_command_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) response_headers = {} @@ -423,8 +423,8 @@ def begin_execute_command( :type request: ~azure.synapse.artifacts.models.DataFlowDebugCommandRequest :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the LROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either DataFlowDebugCommandResponse or the result of cls(response) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_data_flow_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_data_flow_operations.py index bf2d1aac1533..bde9f3913f6b 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_data_flow_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_data_flow_operations.py @@ -93,7 +93,7 @@ def _create_or_update_data_flow_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = None @@ -125,8 +125,8 @@ def begin_create_or_update_data_flow( :type if_match: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the LROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either DataFlowResource or the result of cls(response) @@ -229,7 +229,7 @@ def get_data_flow( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('DataFlowResource', pipeline_response) @@ -276,7 +276,7 @@ def _delete_data_flow_initial( if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if cls: @@ -296,8 +296,8 @@ def begin_delete_data_flow( :type data_flow_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the LROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) @@ -388,7 +388,7 @@ def _rename_data_flow_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if cls: @@ -411,8 +411,8 @@ def begin_rename_data_flow( :type new_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the LROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) @@ -521,7 +521,7 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_dataset_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_dataset_operations.py index 4c3521243d54..7c3daae90b7e 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_dataset_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_dataset_operations.py @@ -107,7 +107,7 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error) @@ -165,7 +165,7 @@ def _create_or_update_dataset_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = None @@ -197,8 +197,8 @@ def begin_create_or_update_dataset( :type if_match: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the LROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. 
:paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either DatasetResource or the result of cls(response) @@ -301,7 +301,7 @@ def get_dataset( if response.status_code not in [200, 304]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = None @@ -350,7 +350,7 @@ def _delete_dataset_initial( if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if cls: @@ -370,8 +370,8 @@ def begin_delete_dataset( :type dataset_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the LROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) @@ -462,7 +462,7 @@ def _rename_dataset_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if cls: @@ -485,8 +485,8 @@ def begin_rename_dataset( :type new_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the LROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_integration_runtimes_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_integration_runtimes_operations.py index 9e0b0a1175d0..1ed1410749b7 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_integration_runtimes_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_integration_runtimes_operations.py @@ -84,7 +84,7 @@ def list( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.ErrorContract, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorContract, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('IntegrationRuntimeListResponse', pipeline_response) @@ -140,7 +140,7 @@ def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.ErrorContract, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorContract, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_library_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_library_operations.py new file mode 100644 index 000000000000..19964a31957f --- /dev/null +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_library_operations.py @@ -0,0 +1,647 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.polling.base_polling import LROBasePolling + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, IO, Iterable, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class LibraryOperations(object): + """LibraryOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.synapse.artifacts.models + :param client: Client for service requests. 
+ :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.LibraryListResponse"] + """Lists Library. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either LibraryListResponse or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.synapse.artifacts.models.LibraryListResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.LibraryListResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('LibraryListResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/libraries'} # type: ignore + + def _flush_initial( + self, + library_name, # type: str + **kwargs # type: Any + ): + # type: (...) 
-> Optional["_models.LibraryResourceInfo"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.LibraryResourceInfo"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + # Construct URL + url = self._flush_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'libraryName': self._serialize.url("library_name", library_name, 'str', max_length=100, min_length=0), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error) + + deserialized = None + if response.status_code == 202: + deserialized = self._deserialize('LibraryResourceInfo', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _flush_initial.metadata = {'url': '/libraries/{libraryName}/flush'} # type: ignore + + def begin_flush( + self, + library_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller["_models.LibraryResourceInfo"] + """Flush Library. + + :param library_name: file name to upload. Minimum length of the filename should be 1 excluding + the extension length. + :type library_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: Pass in True if you'd like the LROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either LibraryResourceInfo or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[~azure.synapse.artifacts.models.LibraryResourceInfo]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', False) # type: Union[bool, PollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.LibraryResourceInfo"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._flush_initial(
+ library_name=library_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ deserialized = self._deserialize('LibraryResourceInfo', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+ return deserialized
+
+ path_format_arguments = {
+ 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
+ 'libraryName': self._serialize.url("library_name", library_name, 'str', max_length=100, min_length=0),
+ }
+
+ if polling is True: polling_method = LROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = NoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_flush.metadata = {'url': '/libraries/{libraryName}/flush'} # type: ignore
+
+ def get_operation_result(
+ self,
+ operation_id, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Union["_models.LibraryResource", "_models.OperationResult"]
+ """Get Operation result for Library.
+
+ :param operation_id: operation id for which status is requested.
+ :type operation_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: LibraryResource or OperationResult, or the result of cls(response) + :rtype: ~azure.synapse.artifacts.models.LibraryResource or ~azure.synapse.artifacts.models.OperationResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[Union["_models.LibraryResource", "_models.OperationResult"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get_operation_result.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'operationId': self._serialize.url("operation_id", operation_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error) + + if response.status_code == 200: + deserialized = self._deserialize('LibraryResource', pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize('OperationResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_operation_result.metadata = {'url': '/libraryOperationResults/{operationId}'} # type: ignore + + def _delete_initial( + self, + library_name, # type: str + **kwargs # type: Any + ): + # type: (...) 
-> Optional["_models.LibraryResourceInfo"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.LibraryResourceInfo"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + # Construct URL + url = self._delete_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'libraryName': self._serialize.url("library_name", library_name, 'str', max_length=100, min_length=0), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 409]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error) + + deserialized = None + if response.status_code == 202: + deserialized = self._deserialize('LibraryResourceInfo', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _delete_initial.metadata = {'url': '/libraries/{libraryName}'} # type: ignore + + def begin_delete( + self, + library_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller["_models.LibraryResourceInfo"] + """Delete Library. + + :param library_name: file name to upload. Minimum length of the filename should be 1 excluding + the extension length. + :type library_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: Pass in True if you'd like the LROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either LibraryResourceInfo or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[~azure.synapse.artifacts.models.LibraryResourceInfo]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', False) # type: Union[bool, PollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.LibraryResourceInfo"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._delete_initial(
+ library_name=library_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ deserialized = self._deserialize('LibraryResourceInfo', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+ return deserialized
+
+ path_format_arguments = {
+ 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
+ 'libraryName': self._serialize.url("library_name", library_name, 'str', max_length=100, min_length=0),
+ }
+
+ if polling is True: polling_method = LROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = NoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_delete.metadata = {'url': '/libraries/{libraryName}'} # type: ignore
+
+ def get(
+ self,
+ library_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Optional["_models.LibraryResource"]
+ """Get Library.
+
+ :param library_name: file name to upload. Minimum length of the filename should be 1 excluding
+ the extension length.
+ :type library_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: LibraryResource, or the result of cls(response) + :rtype: ~azure.synapse.artifacts.models.LibraryResource or None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.LibraryResource"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'libraryName': self._serialize.url("library_name", library_name, 'str', max_length=100, min_length=0), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 304]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('LibraryResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/libraries/{libraryName}'} # type: ignore + + def _create_initial( + self, + library_name, # type: str + **kwargs # type: Any + ): + # type: (...) 
-> Optional["_models.LibraryResourceInfo"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.LibraryResourceInfo"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + # Construct URL + url = self._create_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'libraryName': self._serialize.url("library_name", library_name, 'str', max_length=100, min_length=0), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.put(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error) + + deserialized = None + if response.status_code == 202: + deserialized = self._deserialize('LibraryResourceInfo', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _create_initial.metadata = {'url': '/libraries/{libraryName}'} # type: ignore + + def begin_create( + self, + library_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller["_models.LibraryResourceInfo"] + """Creates a library with the library name. + + :param library_name: file name to upload. Minimum length of the filename should be 1 excluding + the extension length. + :type library_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: Pass in True if you'd like the LROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either LibraryResourceInfo or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.synapse.artifacts.models.LibraryResourceInfo] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.LibraryResourceInfo"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_initial( + library_name=library_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('LibraryResourceInfo', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'libraryName': self._serialize.url("library_name", library_name, 'str', max_length=100, min_length=0), + } + + if polling is True: polling_method = LROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create.metadata = {'url': '/libraries/{libraryName}'} # type: ignore + + def append( + self, + library_name, # type: str + content, # type: IO + x_ms_blob_condition_appendpos=None, # type: Optional[int] + **kwargs # type: Any + ): + # type: (...) -> None + """Append the content to the library resource created using the create operation. The maximum + content size is 4MiB. Content larger than 4MiB must be appended in 4MiB chunks. + + :param library_name: file name to upload. Minimum length of the filename should be 1 excluding + the extension length. + :type library_name: str + :param content: Library file chunk. + :type content: IO + :param x_ms_blob_condition_appendpos: Set this header to a byte offset at which the block is + expected to be appended. The request succeeds only if the current offset matches this value. + Otherwise, the request fails with the AppendPositionConditionNotMet error (HTTP status code 412 + – Precondition Failed).
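The create + append pair documented here implies the upload loop sketched below. Same assumed client wiring as the earlier sketch; the chunk size and the appendpos bookkeeping come straight from this docstring, and the local file path is hypothetical.

```python
import io

CHUNK = 4 * 1024 * 1024  # documented per-request maximum: 4 MiB

# Create the (empty) library resource, then stream the file up in chunks.
client.library.begin_create("my_package.whl").result()

offset = 0
with open("dist/my_package.whl", "rb") as f:
    while True:
        chunk = f.read(CHUNK)
        if not chunk:
            break
        client.library.append(
            "my_package.whl",
            content=io.BytesIO(chunk),             # `content` is typed IO
            x_ms_blob_condition_appendpos=offset,  # 412 if another writer moved the offset
        )
        offset += len(chunk)
```

Depending on the service there may be a separate flush/complete step; this diff does not show one.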
+ :type x_ms_blob_condition_appendpos: long + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + content_type = kwargs.pop("content_type", "application/octet-stream") + accept = "application/json" + + # Construct URL + url = self.append.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'libraryName': self._serialize.url("library_name", library_name, 'str', max_length=100, min_length=0), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if x_ms_blob_condition_appendpos is not None: + header_parameters['x-ms-blob-condition-appendpos'] = self._serialize.header("x_ms_blob_condition_appendpos", x_ms_blob_condition_appendpos, 'long') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content_kwargs['stream_content'] = content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, response) + raise HttpResponseError(response=response, model=error) + + if cls: + return cls(pipeline_response, None, {}) + + append.metadata = {'url': '/libraries/{libraryName}'} # type: ignore diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_linked_service_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_linked_service_operations.py index e71af2e13def..23de6b964020 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_linked_service_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_linked_service_operations.py @@ -107,7 +107,7 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error) @@ -165,7 +165,7 @@ def _create_or_update_linked_service_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise 
HttpResponseError(response=response, model=error) deserialized = None @@ -197,8 +197,8 @@ def begin_create_or_update_linked_service( :type if_match: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the LROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either LinkedServiceResource or the result of cls(response) @@ -302,7 +302,7 @@ def get_linked_service( if response.status_code not in [200, 304]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = None @@ -351,7 +351,7 @@ def _delete_linked_service_initial( if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if cls: @@ -371,8 +371,8 @@ def begin_delete_linked_service( :type linked_service_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the LROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) @@ -463,7 +463,7 @@ def _rename_linked_service_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if cls: @@ -486,8 +486,8 @@ def begin_rename_linked_service( :type new_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the LROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. 
:paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_notebook_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_notebook_operations.py index 5aef9a86e8be..634d04383e55 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_notebook_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_notebook_operations.py @@ -107,7 +107,7 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error) @@ -179,7 +179,7 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error) @@ -235,7 +235,7 @@ def _create_or_update_notebook_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = None @@ -267,8 +267,8 @@ def begin_create_or_update_notebook( :type if_match: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the LROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
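The recurring change across these operation files swaps `self._deserialize(...)` for `self._deserialize.failsafe_deserialize(...)` when parsing error bodies. The intent, paraphrased below rather than copied from msrest, is that a malformed error payload must not replace the HTTP failure with a deserialization failure.

```python
# Illustrative paraphrase only; msrest's real failsafe_deserialize differs in detail.
def failsafe_deserialize(deserialize, target_model, response):
    try:
        return deserialize(target_model, response)
    except Exception:
        # Swallow the parsing failure: HttpResponseError is then raised
        # with model=None instead of being masked by a DeserializationError.
        return None
```

Either way the subsequent `raise HttpResponseError(response=response, model=error)` still fires; the change only affects how much of the CloudError model survives a bad payload.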
:return: An instance of LROPoller that returns either NotebookResource or the result of cls(response) @@ -371,7 +371,7 @@ def get_notebook( if response.status_code not in [200, 304]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = None @@ -420,7 +420,7 @@ def _delete_notebook_initial( if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if cls: @@ -440,8 +440,8 @@ def begin_delete_notebook( :type notebook_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the LROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) @@ -532,7 +532,7 @@ def _rename_notebook_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if cls: @@ -555,8 +555,8 @@ def begin_rename_notebook( :type new_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the LROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_pipeline_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_pipeline_operations.py index a1a04bb67e3d..23529c30fb8b 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_pipeline_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_pipeline_operations.py @@ -107,7 +107,7 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error) @@ -163,7 +163,7 @@ def _create_or_update_pipeline_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = None @@ -195,8 +195,8 @@ def begin_create_or_update_pipeline( :type if_match: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the LROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either PipelineResource or the result of cls(response) @@ -299,7 +299,7 @@ def get_pipeline( if response.status_code not in [200, 304]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = None @@ -348,7 +348,7 @@ def _delete_pipeline_initial( if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if cls: @@ -368,8 +368,8 @@ def begin_delete_pipeline( :type pipeline_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the LROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. 
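The reworded `polling` docstrings above describe three modes. Below is a sketch of the custom-polling mode, reusing the client from the first sketch; the `client.pipeline` attribute name and the pipeline resource are assumptions.

```python
from azure.core.polling.base_polling import LROBasePolling

# polling=True  -> LROBasePolling with the default interval
# polling=False -> NoPolling: result() returns after the initial response
# polling=<obj> -> your own initialized PollingMethod, e.g. a faster interval:
poller = client.pipeline.begin_create_or_update_pipeline(
    "my-pipeline",
    pipeline_resource,  # an assumed ~azure.synapse.artifacts.models.PipelineResource
    polling=LROBasePolling(timeout=2),  # poll every 2 seconds
)
result = poller.result()
```

For the common case, the `polling_interval` keyword shown in the docstrings is the lighter-weight knob.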
:paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) @@ -460,7 +460,7 @@ def _rename_pipeline_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if cls: @@ -483,8 +483,8 @@ def begin_rename_pipeline( :type new_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the LROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) @@ -607,7 +607,7 @@ def create_pipeline_run( if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('CreateRunResponse', pipeline_response) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_pipeline_run_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_pipeline_run_operations.py index 13336ffa86f7..bd7716395293 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_pipeline_run_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_pipeline_run_operations.py @@ -92,7 +92,7 @@ def query_pipeline_runs_by_workspace( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('PipelineRunsQueryResponse', pipeline_response) @@ -148,7 +148,7 @@ def get_pipeline_run( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('PipelineRun', pipeline_response) @@ -216,7 +216,7 @@ def query_activity_runs( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) 
raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('ActivityRunsQueryResponse', pipeline_response) @@ -278,7 +278,7 @@ def cancel_pipeline_run( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if cls: diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_spark_job_definition_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_spark_job_definition_operations.py index 276ea73532c7..9b777af9b0b3 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_spark_job_definition_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_spark_job_definition_operations.py @@ -107,7 +107,7 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error) @@ -118,29 +118,15 @@ def get_next(next_link=None): ) get_spark_job_definitions_by_workspace.metadata = {'url': '/sparkJobDefinitions'} # type: ignore - def create_or_update_spark_job_definition( + def _create_or_update_spark_job_definition_initial( self, spark_job_definition_name, # type: str properties, # type: "_models.SparkJobDefinition" if_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "_models.SparkJobDefinitionResource" - """Creates or updates a Spark Job Definition. - - :param spark_job_definition_name: The spark job definition name. - :type spark_job_definition_name: str - :param properties: Properties of spark job definition. - :type properties: ~azure.synapse.artifacts.models.SparkJobDefinition - :param if_match: ETag of the Spark Job Definition entity. Should only be specified for update, - for which it should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: SparkJobDefinitionResource, or the result of cls(response) - :rtype: ~azure.synapse.artifacts.models.SparkJobDefinitionResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.SparkJobDefinitionResource"] + # type: (...) 
-> Optional["_models.SparkJobDefinitionResource"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.SparkJobDefinitionResource"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -152,7 +138,7 @@ def create_or_update_spark_job_definition( accept = "application/json" # Construct URL - url = self.create_or_update_spark_job_definition.metadata['url'] # type: ignore + url = self._create_or_update_spark_job_definition_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'sparkJobDefinitionName': self._serialize.url("spark_job_definition_name", spark_job_definition_name, 'str'), @@ -177,18 +163,92 @@ def create_or_update_spark_job_definition( pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) - deserialized = self._deserialize('SparkJobDefinitionResource', pipeline_response) + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('SparkJobDefinitionResource', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update_spark_job_definition.metadata = {'url': '/sparkJobDefinitions/{sparkJobDefinitionName}'} # type: ignore + _create_or_update_spark_job_definition_initial.metadata = {'url': '/sparkJobDefinitions/{sparkJobDefinitionName}'} # type: ignore + + def begin_create_or_update_spark_job_definition( + self, + spark_job_definition_name, # type: str + properties, # type: "_models.SparkJobDefinition" + if_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> LROPoller["_models.SparkJobDefinitionResource"] + """Creates or updates a Spark Job Definition. + + :param spark_job_definition_name: The spark job definition name. + :type spark_job_definition_name: str + :param properties: Properties of spark job definition. + :type properties: ~azure.synapse.artifacts.models.SparkJobDefinition + :param if_match: ETag of the Spark Job Definition entity. Should only be specified for update, + for which it should match existing entity or can be * for unconditional update. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: Pass in True if you'd like the LROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either SparkJobDefinitionResource or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.synapse.artifacts.models.SparkJobDefinitionResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.SparkJobDefinitionResource"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_spark_job_definition_initial( + spark_job_definition_name=spark_job_definition_name, + properties=properties, + if_match=if_match, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('SparkJobDefinitionResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'sparkJobDefinitionName': self._serialize.url("spark_job_definition_name", spark_job_definition_name, 'str'), + } + + if polling is True: polling_method = LROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update_spark_job_definition.metadata = {'url': '/sparkJobDefinitions/{sparkJobDefinitionName}'} # type: ignore def get_spark_job_definition( self, @@ -242,7 +302,7 @@ def get_spark_job_definition( if response.status_code not in [200, 304]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = None @@ -255,21 +315,12 @@ def get_spark_job_definition( return deserialized get_spark_job_definition.metadata = {'url': '/sparkJobDefinitions/{sparkJobDefinitionName}'} # type: ignore - def delete_spark_job_definition( + def _delete_spark_job_definition_initial( self, spark_job_definition_name, # type: str **kwargs # type: Any ): # type: (...) -> None - """Deletes a Spark Job Definition. - - :param spark_job_definition_name: The spark job definition name.
- :type spark_job_definition_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError @@ -279,7 +330,7 @@ def delete_spark_job_definition( accept = "application/json" # Construct URL - url = self.delete_spark_job_definition.metadata['url'] # type: ignore + url = self._delete_spark_job_definition_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'sparkJobDefinitionName': self._serialize.url("spark_job_definition_name", spark_job_definition_name, 'str'), @@ -298,15 +349,75 @@ def delete_spark_job_definition( pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200, 204]: + if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if cls: return cls(pipeline_response, None, {}) - delete_spark_job_definition.metadata = {'url': '/sparkJobDefinitions/{sparkJobDefinitionName}'} # type: ignore + _delete_spark_job_definition_initial.metadata = {'url': '/sparkJobDefinitions/{sparkJobDefinitionName}'} # type: ignore + + def begin_delete_spark_job_definition( + self, + spark_job_definition_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller[None] + """Deletes a Spark Job Definition. + + :param spark_job_definition_name: The spark job definition name. + :type spark_job_definition_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: Pass in True if you'd like the LROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
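The `continuation_token` keyword documented above supports resuming a poller across processes. A sketch under the same assumed client wiring; `LROPoller.continuation_token()` is the azure-core API for capturing the state.

```python
poller = client.spark_job_definition.begin_delete_spark_job_definition("my-job")
token = poller.continuation_token()  # persist this string somewhere durable

# ... later, possibly in a different process ...
resumed = client.spark_job_definition.begin_delete_spark_job_definition(
    "my-job", continuation_token=token
)
resumed.wait()  # resumes the in-flight delete instead of issuing a new one
```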
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_spark_job_definition_initial( + spark_job_definition_name=spark_job_definition_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'sparkJobDefinitionName': self._serialize.url("spark_job_definition_name", spark_job_definition_name, 'str'), + } + + if polling is True: polling_method = LROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete_spark_job_definition.metadata = {'url': '/sparkJobDefinitions/{sparkJobDefinitionName}'} # type: ignore def _execute_spark_job_definition_initial( self, @@ -344,7 +455,7 @@ def _execute_spark_job_definition_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if response.status_code == 200: @@ -371,8 +482,8 @@ def begin_execute_spark_job_definition( :type spark_job_definition_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the LROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either SparkBatchJob or the result of cls(response) @@ -466,7 +577,7 @@ def _rename_spark_job_definition_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if cls: @@ -489,8 +600,8 @@ def begin_rename_spark_job_definition( :type new_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the LROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) @@ -580,7 +691,7 @@ def _debug_spark_job_definition_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if response.status_code == 200: @@ -607,8 +718,8 @@ def begin_debug_spark_job_definition( :type properties: ~azure.synapse.artifacts.models.SparkJobDefinition :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the LROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
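Unlike the rename and delete pollers, `begin_execute_spark_job_definition` and `begin_debug_spark_job_definition` resolve to a SparkBatchJob. A sketch; the attribute names on the model are assumptions.

```python
poller = client.spark_job_definition.begin_execute_spark_job_definition("my-job")
batch_job = poller.result()  # ~azure.synapse.artifacts.models.SparkBatchJob
print(batch_job.id, batch_job.state)  # assumed fields on SparkBatchJob
```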
:return: An instance of LROPoller that returns either SparkBatchJob or the result of cls(response) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_sql_pools_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_sql_pools_operations.py index 0f5d3979bd56..66ca31f240b6 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_sql_pools_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_sql_pools_operations.py @@ -84,7 +84,7 @@ def list( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.ErrorContract, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorContract, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('SqlPoolInfoListResult', pipeline_response) @@ -140,7 +140,7 @@ def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.ErrorContract, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorContract, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('SqlPool', pipeline_response) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_sql_script_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_sql_script_operations.py index c7b0112e8796..cfb4b67a47d9 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_sql_script_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_sql_script_operations.py @@ -107,7 +107,7 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error) @@ -118,29 +118,15 @@ def get_next(next_link=None): ) get_sql_scripts_by_workspace.metadata = {'url': '/sqlScripts'} # type: ignore - def create_or_update_sql_script( + def _create_or_update_sql_script_initial( self, sql_script_name, # type: str sql_script, # type: "_models.SqlScriptResource" if_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "_models.SqlScriptResource" - """Creates or updates a Sql Script. - - :param sql_script_name: The sql script name. - :type sql_script_name: str - :param sql_script: Sql Script resource definition. - :type sql_script: ~azure.synapse.artifacts.models.SqlScriptResource - :param if_match: ETag of the SQL script entity. Should only be specified for update, for which - it should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: SqlScriptResource, or the result of cls(response) - :rtype: ~azure.synapse.artifacts.models.SqlScriptResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.SqlScriptResource"] + # type: (...) 
-> Optional["_models.SqlScriptResource"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.SqlScriptResource"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } @@ -150,7 +136,7 @@ def create_or_update_sql_script( accept = "application/json" # Construct URL - url = self.create_or_update_sql_script.metadata['url'] # type: ignore + url = self._create_or_update_sql_script_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'sqlScriptName': self._serialize.url("sql_script_name", sql_script_name, 'str'), @@ -175,18 +161,92 @@ def create_or_update_sql_script( pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) - deserialized = self._deserialize('SqlScriptResource', pipeline_response) + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('SqlScriptResource', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update_sql_script.metadata = {'url': '/sqlScripts/{sqlScriptName}'} # type: ignore + _create_or_update_sql_script_initial.metadata = {'url': '/sqlScripts/{sqlScriptName}'} # type: ignore + + def begin_create_or_update_sql_script( + self, + sql_script_name, # type: str + sql_script, # type: "_models.SqlScriptResource" + if_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> LROPoller["_models.SqlScriptResource"] + """Creates or updates a Sql Script. + + :param sql_script_name: The sql script name. + :type sql_script_name: str + :param sql_script: Sql Script resource definition. + :type sql_script: ~azure.synapse.artifacts.models.SqlScriptResource + :param if_match: ETag of the SQL script entity. Should only be specified for update, for which + it should match existing entity or can be * for unconditional update. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: Pass in True if you'd like the LROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either SqlScriptResource or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.synapse.artifacts.models.SqlScriptResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.SqlScriptResource"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_sql_script_initial( + sql_script_name=sql_script_name, + sql_script=sql_script, + if_match=if_match, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('SqlScriptResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'sqlScriptName': self._serialize.url("sql_script_name", sql_script_name, 'str'), + } + + if polling is True: polling_method = LROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update_sql_script.metadata = {'url': '/sqlScripts/{sqlScriptName}'} # type: ignore def get_sql_script( self, @@ -239,7 +299,7 @@ def get_sql_script( if response.status_code not in [200, 304]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = None @@ -252,21 +312,12 @@ def get_sql_script( return deserialized get_sql_script.metadata = {'url': '/sqlScripts/{sqlScriptName}'} # type: ignore - def delete_sql_script( + def _delete_sql_script_initial( self, sql_script_name, # type: str **kwargs # type: Any ): # type: (...) -> None - """Deletes a Sql Script. - - :param sql_script_name: The sql script name.
- :type sql_script_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError @@ -276,7 +327,7 @@ def delete_sql_script( accept = "application/json" # Construct URL - url = self.delete_sql_script.metadata['url'] # type: ignore + url = self._delete_sql_script_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'sqlScriptName': self._serialize.url("sql_script_name", sql_script_name, 'str'), @@ -295,15 +346,75 @@ def delete_sql_script( pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200, 204]: + if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if cls: return cls(pipeline_response, None, {}) - delete_sql_script.metadata = {'url': '/sqlScripts/{sqlScriptName}'} # type: ignore + _delete_sql_script_initial.metadata = {'url': '/sqlScripts/{sqlScriptName}'} # type: ignore + + def begin_delete_sql_script( + self, + sql_script_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller[None] + """Deletes a Sql Script. + + :param sql_script_name: The sql script name. + :type sql_script_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: Pass in True if you'd like the LROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_sql_script_initial( + sql_script_name=sql_script_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'sqlScriptName': self._serialize.url("sql_script_name", sql_script_name, 'str'), + } + + if polling is True: polling_method = LROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete_sql_script.metadata = {'url': '/sqlScripts/{sqlScriptName}'} # type: ignore def _rename_sql_script_initial( self, @@ -349,7 +460,7 @@ def _rename_sql_script_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if cls: @@ -372,8 +483,8 @@ def begin_rename_sql_script( :type new_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the LROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
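The `cls` hook threaded through all of these pollers (popped in `begin_*`, reapplied in `get_long_running_output`) gives callers the raw pipeline response. A sketch against the delete poller above, same assumed client wiring:

```python
def keep_headers(pipeline_response, deserialized, response_headers):
    # For a delete, `deserialized` is None; return whatever result() should yield.
    return pipeline_response.http_response.headers

headers = client.sql_script.begin_delete_sql_script(
    "my-script", cls=keep_headers
).result()
```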
:return: An instance of LROPoller that returns either None or the result of cls(response) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_trigger_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_trigger_operations.py index a7be6843b9f3..9b6091fcc6d1 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_trigger_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_trigger_operations.py @@ -107,7 +107,7 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error) @@ -165,7 +165,7 @@ def _create_or_update_trigger_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = None @@ -197,8 +197,8 @@ def begin_create_or_update_trigger( :type if_match: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the LROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either TriggerResource or the result of cls(response) @@ -301,7 +301,7 @@ def get_trigger( if response.status_code not in [200, 304]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = None @@ -350,7 +350,7 @@ def _delete_trigger_initial( if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if cls: @@ -370,8 +370,8 @@ def begin_delete_trigger( :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the LROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. 
:paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) @@ -454,7 +454,7 @@ def _subscribe_trigger_to_events_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = None @@ -479,8 +479,8 @@ def begin_subscribe_trigger_to_events( :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the LROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either TriggerSubscriptionOperationStatus or the result of cls(response) @@ -575,7 +575,7 @@ def get_event_subscription_status( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) @@ -622,7 +622,7 @@ def _unsubscribe_trigger_from_events_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = None @@ -647,8 +647,8 @@ def begin_unsubscribe_trigger_from_events( :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the LROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either TriggerSubscriptionOperationStatus or the result of cls(response) @@ -734,7 +734,7 @@ def _start_trigger_initial( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if cls: @@ -754,8 +754,8 @@ def begin_start_trigger( :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the LROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) @@ -838,7 +838,7 @@ def _stop_trigger_initial( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if cls: @@ -858,8 +858,8 @@ def begin_stop_trigger( :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the LROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
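Every error path in these trigger hunks makes the same swap from `self._deserialize(...)` to `self._deserialize.failsafe_deserialize(...)`. The sketch below is an illustrative approximation of the difference, not the msrest implementation: an error body that does not match `CloudError` no longer raises a `DeserializationError` that would mask the original `HttpResponseError`.

```python
# Illustrative approximation of the failsafe pattern adopted throughout
# this diff (the real method is msrest's Deserializer.failsafe_deserialize).
def failsafe_deserialize(deserialize, model_cls, response):
    """Best-effort deserialization of an error body."""
    try:
        return deserialize(model_cls, response)
    except Exception:
        # An unparseable error payload should not hide the HTTP failure;
        # the caller still raises HttpResponseError, just with model=None.
        return None
```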
:return: An instance of LROPoller that returns either None or the result of cls(response) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_trigger_run_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_trigger_run_operations.py index 5de3475a591d..26c4543c4647 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_trigger_run_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_trigger_run_operations.py @@ -92,7 +92,7 @@ def rerun_trigger_instance( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if cls: @@ -149,7 +149,7 @@ def cancel_trigger_instance( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) if cls: @@ -206,7 +206,7 @@ def query_trigger_runs_by_workspace( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.CloudError, response) + error = self._deserialize.failsafe_deserialize(_models.CloudError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('TriggerRunsQueryResponse', pipeline_response) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_workspace_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_workspace_operations.py index 7404ce27e569..239cb33f957d 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_workspace_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_workspace_operations.py @@ -84,7 +84,7 @@ def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.ErrorContract, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorContract, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('Workspace', pipeline_response) diff --git a/sdk/synapse/azure-synapse-artifacts/setup.py b/sdk/synapse/azure-synapse-artifacts/setup.py index 07ecfb92eeaf..5b44973fac3c 100644 --- a/sdk/synapse/azure-synapse-artifacts/setup.py +++ b/sdk/synapse/azure-synapse-artifacts/setup.py @@ -66,7 +66,6 @@ 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', diff --git a/sdk/synapse/azure-synapse-managedprivateendpoints/CHANGELOG.md b/sdk/synapse/azure-synapse-managedprivateendpoints/CHANGELOG.md index 86e53635ebb0..e9b5daa8740b 100644 --- a/sdk/synapse/azure-synapse-managedprivateendpoints/CHANGELOG.md +++ b/sdk/synapse/azure-synapse-managedprivateendpoints/CHANGELOG.md @@ -1,5 +1,10 @@ # Release History +## 0.3.0 (2021-03-09) + +* Internal 
bugfixes (re-generated with latest generator) +* Stop Python 3.5 support + ## 0.2.0 (2021-02-09) * Internal bugfixes (re-generated with latest generator) diff --git a/sdk/synapse/azure-synapse-managedprivateendpoints/README.md b/sdk/synapse/azure-synapse-managedprivateendpoints/README.md index a62006d88268..b122d3f58af7 100644 --- a/sdk/synapse/azure-synapse-managedprivateendpoints/README.md +++ b/sdk/synapse/azure-synapse-managedprivateendpoints/README.md @@ -1,7 +1,7 @@ # Microsoft Azure SDK for Python This is the Microsoft Azure Synapse Managed Private Endpoints Client Library. -This package has been tested with Python 2.7, 3.5, 3.6, 3.7 and 3.8. +This package has been tested with Python 2.7, 3.6, 3.7, 3.8 and 3.9. For a more complete view of Azure libraries, see the [azure sdk python release](https://aka.ms/azsdk/python/all). diff --git a/sdk/synapse/azure-synapse-managedprivateendpoints/azure/synapse/managedprivateendpoints/_configuration.py b/sdk/synapse/azure-synapse-managedprivateendpoints/azure/synapse/managedprivateendpoints/_configuration.py index a613ef36e7a5..76004907770a 100644 --- a/sdk/synapse/azure-synapse-managedprivateendpoints/azure/synapse/managedprivateendpoints/_configuration.py +++ b/sdk/synapse/azure-synapse-managedprivateendpoints/azure/synapse/managedprivateendpoints/_configuration.py @@ -49,7 +49,7 @@ def __init__( self.endpoint = endpoint self.api_version = "2019-06-01-preview" self.credential_scopes = kwargs.pop('credential_scopes', ['https://dev.azuresynapse.net/.default']) - kwargs.setdefault('sdk_moniker', 'synapse/{}'.format(VERSION)) + kwargs.setdefault('sdk_moniker', 'synapse-managedprivateendpoints/{}'.format(VERSION)) self._configure(**kwargs) def _configure( diff --git a/sdk/synapse/azure-synapse-managedprivateendpoints/azure/synapse/managedprivateendpoints/_version.py b/sdk/synapse/azure-synapse-managedprivateendpoints/azure/synapse/managedprivateendpoints/_version.py index 035146e99a22..92721eef7dd5 100644 --- a/sdk/synapse/azure-synapse-managedprivateendpoints/azure/synapse/managedprivateendpoints/_version.py +++ b/sdk/synapse/azure-synapse-managedprivateendpoints/azure/synapse/managedprivateendpoints/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -VERSION = "0.2.0" +VERSION = "0.3.0" diff --git a/sdk/synapse/azure-synapse-managedprivateendpoints/azure/synapse/managedprivateendpoints/aio/_configuration.py b/sdk/synapse/azure-synapse-managedprivateendpoints/azure/synapse/managedprivateendpoints/aio/_configuration.py index 1ce66c7ada2f..f7e625b8b218 100644 --- a/sdk/synapse/azure-synapse-managedprivateendpoints/azure/synapse/managedprivateendpoints/aio/_configuration.py +++ b/sdk/synapse/azure-synapse-managedprivateendpoints/azure/synapse/managedprivateendpoints/aio/_configuration.py @@ -46,7 +46,7 @@ def __init__( self.endpoint = endpoint self.api_version = "2019-06-01-preview" self.credential_scopes = kwargs.pop('credential_scopes', ['https://dev.azuresynapse.net/.default']) - kwargs.setdefault('sdk_moniker', 'synapse/{}'.format(VERSION)) + kwargs.setdefault('sdk_moniker', 'synapse-managedprivateendpoints/{}'.format(VERSION)) self._configure(**kwargs) def _configure( diff --git a/sdk/synapse/azure-synapse-managedprivateendpoints/setup.py b/sdk/synapse/azure-synapse-managedprivateendpoints/setup.py index dd07ec12c20f..79e9094b81dd 100644 --- a/sdk/synapse/azure-synapse-managedprivateendpoints/setup.py +++ b/sdk/synapse/azure-synapse-managedprivateendpoints/setup.py @@ -66,7 +66,6 @@ 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', diff --git a/sdk/synapse/azure-synapse-monitoring/CHANGELOG.md b/sdk/synapse/azure-synapse-monitoring/CHANGELOG.md index 11e10e269f42..6218fc445577 100644 --- a/sdk/synapse/azure-synapse-monitoring/CHANGELOG.md +++ b/sdk/synapse/azure-synapse-monitoring/CHANGELOG.md @@ -1,5 +1,10 @@ # Release History +## 0.2.0 (2021-03-09) + +* Internal bugfixes (re-generated with latest generator) +* Stop Python 3.5 support + ## 0.1.0 (2020-12-08) * Initial Release diff --git a/sdk/synapse/azure-synapse-monitoring/README.md b/sdk/synapse/azure-synapse-monitoring/README.md index c7da308e61fa..f5f8f081b36b 100644 --- a/sdk/synapse/azure-synapse-monitoring/README.md +++ b/sdk/synapse/azure-synapse-monitoring/README.md @@ -1,7 +1,7 @@ # Microsoft Azure SDK for Python This is the Microsoft Azure Synapse Monitoring Client Library. -This package has been tested with Python 2.7, 3.5, 3.6, 3.7 and 3.8. +This package has been tested with Python 2.7, 3.6, 3.7, 3.8 and 3.9. For a more complete view of Azure libraries, see the [azure sdk python release](https://aka.ms/azsdk/python/all). 
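A note on the `sdk_moniker` edits repeated across the `_configuration.py` files: each Synapse sub-package now identifies itself distinctly in telemetry instead of all reporting the shared `synapse` moniker. The User-Agent shape sketched below is approximate; the exact assembly is azure-core's concern.

```python
# Approximate effect of the sdk_moniker change, using the monitoring
# package as the example; the User-Agent line is illustrative.
VERSION = "0.2.0"

old_moniker = 'synapse/{}'.format(VERSION)             # 'synapse/0.2.0'
new_moniker = 'synapse-monitoring/{}'.format(VERSION)  # 'synapse-monitoring/0.2.0'

# azure-core folds the moniker into the telemetry header, roughly:
#   User-Agent: azsdk-python-synapse-monitoring/0.2.0 Python/3.9.1 (...)
```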
diff --git a/sdk/synapse/azure-synapse-monitoring/azure/synapse/monitoring/_configuration.py b/sdk/synapse/azure-synapse-monitoring/azure/synapse/monitoring/_configuration.py index 0cba346428e6..5201f650911c 100644 --- a/sdk/synapse/azure-synapse-monitoring/azure/synapse/monitoring/_configuration.py +++ b/sdk/synapse/azure-synapse-monitoring/azure/synapse/monitoring/_configuration.py @@ -49,7 +49,7 @@ def __init__( self.endpoint = endpoint self.api_version = "2019-11-01-preview" self.credential_scopes = kwargs.pop('credential_scopes', ['https://dev.azuresynapse.net/.default']) - kwargs.setdefault('sdk_moniker', 'synapse/{}'.format(VERSION)) + kwargs.setdefault('sdk_moniker', 'synapse-monitoring/{}'.format(VERSION)) self._configure(**kwargs) def _configure( diff --git a/sdk/synapse/azure-synapse-monitoring/azure/synapse/monitoring/_version.py b/sdk/synapse/azure-synapse-monitoring/azure/synapse/monitoring/_version.py index eae7c95b6fbd..035146e99a22 100644 --- a/sdk/synapse/azure-synapse-monitoring/azure/synapse/monitoring/_version.py +++ b/sdk/synapse/azure-synapse-monitoring/azure/synapse/monitoring/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -VERSION = "0.1.0" +VERSION = "0.2.0" diff --git a/sdk/synapse/azure-synapse-monitoring/azure/synapse/monitoring/aio/_configuration.py b/sdk/synapse/azure-synapse-monitoring/azure/synapse/monitoring/aio/_configuration.py index 48edb9c73c29..1c5684463165 100644 --- a/sdk/synapse/azure-synapse-monitoring/azure/synapse/monitoring/aio/_configuration.py +++ b/sdk/synapse/azure-synapse-monitoring/azure/synapse/monitoring/aio/_configuration.py @@ -46,7 +46,7 @@ def __init__( self.endpoint = endpoint self.api_version = "2019-11-01-preview" self.credential_scopes = kwargs.pop('credential_scopes', ['https://dev.azuresynapse.net/.default']) - kwargs.setdefault('sdk_moniker', 'synapse/{}'.format(VERSION)) + kwargs.setdefault('sdk_moniker', 'synapse-monitoring/{}'.format(VERSION)) self._configure(**kwargs) def _configure( diff --git a/sdk/synapse/azure-synapse-monitoring/setup.py b/sdk/synapse/azure-synapse-monitoring/setup.py index b3d579bda6cd..0063fa2533e4 100644 --- a/sdk/synapse/azure-synapse-monitoring/setup.py +++ b/sdk/synapse/azure-synapse-monitoring/setup.py @@ -66,7 +66,6 @@ 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', diff --git a/sdk/synapse/azure-synapse-spark/CHANGELOG.md b/sdk/synapse/azure-synapse-spark/CHANGELOG.md index dbdeb5d66e1f..8bb49bea5d87 100644 --- a/sdk/synapse/azure-synapse-spark/CHANGELOG.md +++ b/sdk/synapse/azure-synapse-spark/CHANGELOG.md @@ -1,5 +1,10 @@ # Release History +## 0.5.0 (2021-03-09) + +* Internal bugfixes (re-generated with latest generator) +* Stop Python 3.5 support + ## 0.4.0 (2020-12-08) * Internal bugfixes (re-generated with latest generator) diff --git a/sdk/synapse/azure-synapse-spark/README.md b/sdk/synapse/azure-synapse-spark/README.md index 1ed7f54e1645..0efdc1b23548 100644 --- a/sdk/synapse/azure-synapse-spark/README.md +++ b/sdk/synapse/azure-synapse-spark/README.md @@ -1,7 +1,7 @@ # Microsoft Azure SDK for Python This is the Microsoft Azure Synapse Spark Client Library. 
-This package has been tested with Python 2.7, 3.5, 3.6, 3.7 and 3.8. +This package has been tested with Python 2.7, 3.6, 3.7, 3.8 and 3.9. For a more complete view of Azure libraries, see the [azure sdk python release](https://aka.ms/azsdk/python/all). diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_configuration.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_configuration.py index 574b3dcae0f8..7035762e3191 100644 --- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_configuration.py +++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_configuration.py @@ -60,7 +60,7 @@ def __init__( self.spark_pool_name = spark_pool_name self.livy_api_version = livy_api_version self.credential_scopes = kwargs.pop('credential_scopes', ['https://dev.azuresynapse.net/.default']) - kwargs.setdefault('sdk_moniker', 'synapse/{}'.format(VERSION)) + kwargs.setdefault('sdk_moniker', 'synapse-spark/{}'.format(VERSION)) self._configure(**kwargs) def _configure( diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_version.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_version.py index c8c0d6c52c29..c4551baee432 100644 --- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_version.py +++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -VERSION = "0.4.0" +VERSION = "0.5.0" diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/_configuration.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/_configuration.py index 3d748483e56f..7c009f22364c 100644 --- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/_configuration.py +++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/_configuration.py @@ -57,7 +57,7 @@ def __init__( self.spark_pool_name = spark_pool_name self.livy_api_version = livy_api_version self.credential_scopes = kwargs.pop('credential_scopes', ['https://dev.azuresynapse.net/.default']) - kwargs.setdefault('sdk_moniker', 'synapse/{}'.format(VERSION)) + kwargs.setdefault('sdk_moniker', 'synapse-spark/{}'.format(VERSION)) self._configure(**kwargs) def _configure( diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/models/_models.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/models/_models.py index 609f0121140b..8d7d667ceb59 100644 --- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/models/_models.py +++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/models/_models.py @@ -758,12 +758,12 @@ def __init__( class SparkStatementCancellationResult(msrest.serialization.Model): """SparkStatementCancellationResult. - :param msg: - :type msg: str + :param message: The msg property from the Livy API. The value is always "canceled". 
+ :type message: str """ _attribute_map = { - 'msg': {'key': 'msg', 'type': 'str'}, + 'message': {'key': 'msg', 'type': 'str'}, } def __init__( @@ -771,7 +771,7 @@ def __init__( **kwargs ): super(SparkStatementCancellationResult, self).__init__(**kwargs) - self.msg = kwargs.get('msg', None) + self.message = kwargs.get('message', None) class SparkStatementCollection(msrest.serialization.Model): diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/models/_models_py3.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/models/_models_py3.py index a6bbaea80bb6..7876bcb698f4 100644 --- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/models/_models_py3.py +++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/models/_models_py3.py @@ -902,22 +902,22 @@ def __init__( class SparkStatementCancellationResult(msrest.serialization.Model): """SparkStatementCancellationResult. - :param msg: - :type msg: str + :param message: The msg property from the Livy API. The value is always "canceled". + :type message: str """ _attribute_map = { - 'msg': {'key': 'msg', 'type': 'str'}, + 'message': {'key': 'msg', 'type': 'str'}, } def __init__( self, *, - msg: Optional[str] = None, + message: Optional[str] = None, **kwargs ): super(SparkStatementCancellationResult, self).__init__(**kwargs) - self.msg = msg + self.message = message class SparkStatementCollection(msrest.serialization.Model): diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/models/_spark_client_enums.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/models/_spark_client_enums.py index 19d776f2657c..a1312531d5dc 100644 --- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/models/_spark_client_enums.py +++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/models/_spark_client_enums.py @@ -75,6 +75,6 @@ class SparkSessionResultType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum) class SparkStatementLanguageType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): SPARK = "spark" - PYSPARK = "pyspark" - DOTNETSPARK = "dotnetspark" + PY_SPARK = "pyspark" + DOT_NET_SPARK = "dotnetspark" SQL = "sql" diff --git a/sdk/synapse/azure-synapse-spark/setup.py b/sdk/synapse/azure-synapse-spark/setup.py index e50e231d81cf..683e8c2bc89a 100644 --- a/sdk/synapse/azure-synapse-spark/setup.py +++ b/sdk/synapse/azure-synapse-spark/setup.py @@ -66,7 +66,6 @@ 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8',
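The Spark model and enum renames at the end of this diff are breaking at the Python surface but wire-compatible: the attribute map still serializes under the `msg` key, and the enum string values are unchanged. A minimal sketch of the caller-visible impact, with the assertions as assumptions consistent with the code above:

```python
from azure.synapse.spark.models import (
    SparkStatementCancellationResult,
    SparkStatementLanguageType,
)

# Model rename: the constructor keyword and attribute are now `message`,
# but the payload key on the wire remains 'msg'.
result = SparkStatementCancellationResult(message="canceled")
assert result.message == "canceled"

# Enum renames: PYSPARK -> PY_SPARK and DOTNETSPARK -> DOT_NET_SPARK.
# The string values are untouched, and because the enum subclasses str,
# comparisons against the raw wire value still hold.
assert SparkStatementLanguageType.PY_SPARK == "pyspark"
assert SparkStatementLanguageType.DOT_NET_SPARK == "dotnetspark"
```

Code referencing the old names (`msg=`, `SparkStatementLanguageType.PYSPARK`) will need the corresponding one-line updates when adopting these releases.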