diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index b5ff5442..f07cf44e 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -06d330f43d92c1be864d4638c672cd0723e20a51 \ No newline at end of file +9bb7950fa3390afb97abaa552934bc0a2e069de5 \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 68f7f5c9..f8da50a7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,102 @@ # Version changelog +## 0.27.0 + +### New Features + +* DBUtils implementation for Volumes ([#623](https://github.com/databricks/databricks-sdk-py/pull/623), [#634](https://github.com/databricks/databricks-sdk-py/pull/634), [#631](https://github.com/databricks/databricks-sdk-py/pull/631)). + +### Bug Fixes + +* Fixed codecov for repository ([#636](https://github.com/databricks/databricks-sdk-py/pull/636)). + +API Changes: + + * Added `ingestion_definition` field for `databricks.sdk.service.pipelines.CreatePipeline`. + * Added `ingestion_definition` field for `databricks.sdk.service.pipelines.EditPipeline`. + * Added `ingestion_definition` field for `databricks.sdk.service.pipelines.PipelineSpec`. + * Added `databricks.sdk.service.pipelines.IngestionConfig` dataclass. + * Added `databricks.sdk.service.pipelines.ManagedIngestionPipelineDefinition` dataclass. + * Added `databricks.sdk.service.pipelines.SchemaSpec` dataclass. + * Added `databricks.sdk.service.pipelines.TableSpec` dataclass. + * Changed `create()` method for [w.apps](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/apps.html) workspace-level service . New request type is `databricks.sdk.service.serving.CreateAppRequest` dataclass. + * Changed `create()` method for [w.apps](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/apps.html) workspace-level service to return `databricks.sdk.service.serving.App` dataclass. + * Removed `delete_app()` method for [w.apps](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/apps.html) workspace-level service. + * Removed `get_app()` method for [w.apps](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/apps.html) workspace-level service. + * Removed `get_app_deployment_status()` method for [w.apps](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/apps.html) workspace-level service. + * Removed `get_apps()` method for [w.apps](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/apps.html) workspace-level service. + * Removed `get_events()` method for [w.apps](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/apps.html) workspace-level service. + * Added `create_deployment()` method for [w.apps](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/apps.html) workspace-level service. + * Added `delete()` method for [w.apps](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/apps.html) workspace-level service. + * Added `get()` method for [w.apps](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/apps.html) workspace-level service. + * Added `get_deployment()` method for [w.apps](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/apps.html) workspace-level service. + * Added `get_environment()` method for [w.apps](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/apps.html) workspace-level service. + * Added `list()` method for [w.apps](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/apps.html) workspace-level service. 
+ * Added `list_deployments()` method for [w.apps](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/apps.html) workspace-level service. + * Added `stop()` method for [w.apps](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/apps.html) workspace-level service. + * Added `update()` method for [w.apps](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/apps.html) workspace-level service. + * Added `get_open_api()` method for [w.serving_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/serving_endpoints.html) workspace-level service. + * Removed `databricks.sdk.service.serving.AppEvents` dataclass. + * Removed `databricks.sdk.service.serving.AppManifest` dataclass. + * Removed `databricks.sdk.service.serving.AppServiceStatus` dataclass. + * Removed `databricks.sdk.service.serving.DeleteAppResponse` dataclass. + * Removed `databricks.sdk.service.serving.DeployAppRequest` dataclass. + * Removed `databricks.sdk.service.serving.DeploymentStatus` dataclass. + * Removed `databricks.sdk.service.serving.DeploymentStatusState` dataclass. + * Removed `databricks.sdk.service.serving.GetAppDeploymentStatusRequest` dataclass. + * Removed `databricks.sdk.service.serving.GetAppResponse` dataclass. + * Removed `databricks.sdk.service.serving.GetEventsRequest` dataclass. + * Removed `databricks.sdk.service.serving.ListAppEventsResponse` dataclass. + * Changed `apps` field for `databricks.sdk.service.serving.ListAppsResponse` to `databricks.sdk.service.serving.AppList` dataclass. + * Added `databricks.sdk.service.serving.App` dataclass. + * Added `databricks.sdk.service.serving.AppDeployment` dataclass. + * Added `databricks.sdk.service.serving.AppDeploymentState` dataclass. + * Added `databricks.sdk.service.serving.AppDeploymentStatus` dataclass. + * Added `databricks.sdk.service.serving.AppEnvironment` dataclass. + * Added `databricks.sdk.service.serving.AppState` dataclass. + * Added `databricks.sdk.service.serving.AppStatus` dataclass. + * Added `databricks.sdk.service.serving.CreateAppDeploymentRequest` dataclass. + * Added `databricks.sdk.service.serving.CreateAppRequest` dataclass. + * Added `databricks.sdk.service.serving.EnvVariable` dataclass. + * Added `databricks.sdk.service.serving.GetAppDeploymentRequest` dataclass. + * Added `databricks.sdk.service.serving.GetAppEnvironmentRequest` dataclass. + * Added `databricks.sdk.service.serving.GetOpenApiRequest` dataclass. + * Added `any` dataclass. + * Added `databricks.sdk.service.serving.ListAppDeploymentsRequest` dataclass. + * Added `databricks.sdk.service.serving.ListAppDeploymentsResponse` dataclass. + * Added `databricks.sdk.service.serving.ListAppsRequest` dataclass. + * Added `databricks.sdk.service.serving.StopAppRequest` dataclass. + * Added `any` dataclass. + * Added `databricks.sdk.service.serving.UpdateAppRequest` dataclass. + * Removed [w.csp_enablement](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/settings/csp_enablement.html) workspace-level service. + * Removed [w.esm_enablement](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/settings/esm_enablement.html) workspace-level service. + * Added [w.compliance_security_profile](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/settings/compliance_security_profile.html) workspace-level service. + * Added [w.enhanced_security_monitoring](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/settings/enhanced_security_monitoring.html) workspace-level service. 
+ * Removed `databricks.sdk.service.settings.CspEnablement` dataclass. + * Removed `databricks.sdk.service.settings.CspEnablementSetting` dataclass. + * Removed `databricks.sdk.service.settings.EsmEnablement` dataclass. + * Removed `databricks.sdk.service.settings.EsmEnablementSetting` dataclass. + * Removed `databricks.sdk.service.settings.GetCspEnablementSettingRequest` dataclass. + * Removed `databricks.sdk.service.settings.GetEsmEnablementSettingRequest` dataclass. + * Removed `databricks.sdk.service.settings.UpdateCspEnablementSettingRequest` dataclass. + * Removed `databricks.sdk.service.settings.UpdateEsmEnablementSettingRequest` dataclass. + * Added `databricks.sdk.service.settings.ComplianceSecurityProfile` dataclass. + * Added `databricks.sdk.service.settings.ComplianceSecurityProfileSetting` dataclass. + * Added `databricks.sdk.service.settings.EnhancedSecurityMonitoring` dataclass. + * Added `databricks.sdk.service.settings.EnhancedSecurityMonitoringSetting` dataclass. + * Added `databricks.sdk.service.settings.GetComplianceSecurityProfileSettingRequest` dataclass. + * Added `databricks.sdk.service.settings.GetEnhancedSecurityMonitoringSettingRequest` dataclass. + * Added `databricks.sdk.service.settings.UpdateComplianceSecurityProfileSettingRequest` dataclass. + * Added `databricks.sdk.service.settings.UpdateEnhancedSecurityMonitoringSettingRequest` dataclass. + * Added `tags` field for `databricks.sdk.service.sql.DashboardEditContent`. + * Added `tags` field for `databricks.sdk.service.sql.QueryEditContent`. + * Added `catalog` field for `databricks.sdk.service.sql.QueryOptions`. + * Added `schema` field for `databricks.sdk.service.sql.QueryOptions`. + * Added `tags` field for `databricks.sdk.service.sql.QueryPostContent`. + * Added `query` field for `databricks.sdk.service.sql.Visualization`. + +OpenAPI SHA: 9bb7950fa3390afb97abaa552934bc0a2e069de5, Date: 2024-05-02 + ## 0.26.0 * Increase cluster creation test timeout ([#617](https://github.com/databricks/databricks-sdk-py/pull/617)). diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py index df8111e8..312d538b 100755 --- a/databricks/sdk/__init__.py +++ b/databricks/sdk/__init__.py @@ -59,12 +59,12 @@ from databricks.sdk.service.settings import (AccountIpAccessListsAPI, AccountSettingsAPI, AutomaticClusterUpdateAPI, + ComplianceSecurityProfileAPI, CredentialsManagerAPI, CspEnablementAccountAPI, - CspEnablementAPI, DefaultNamespaceAPI, + EnhancedSecurityMonitoringAPI, EsmEnablementAccountAPI, - EsmEnablementAPI, IpAccessListsAPI, NetworkConnectivityAPI, PersonalComputeAPI, @@ -267,7 +267,7 @@ def alerts(self) -> AlertsAPI: @property def apps(self) -> AppsAPI: - """Lakehouse Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend Databricks services, and enable users to interact through single sign-on.""" + """Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend Databricks services, and enable users to interact through single sign-on.""" return self._apps @property diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py index c5e9521f..7f95caab 100755 --- a/databricks/sdk/service/catalog.py +++ b/databricks/sdk/service/catalog.py @@ -7592,7 +7592,8 @@ def list(self, response. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. 
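The renamed workspace settings services follow the same read pattern as the other generated settings APIs. A minimal sketch of reading the two new settings, assuming the nested `w.settings.compliance_security_profile` / `w.settings.enhanced_security_monitoring` accessors used for workspace-level settings in recent SDK releases; the field names come from the dataclasses further down in this diff:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Read the Compliance Security Profile setting for the current workspace.
csp = w.settings.compliance_security_profile.get()
print(csp.compliance_security_profile_workspace.is_enabled)
print(csp.compliance_security_profile_workspace.compliance_standards)

# Analogous read for Enhanced Security Monitoring.
esm = w.settings.enhanced_security_monitoring.get()
```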
- There is no guarantee of a specific ordering of the elements in the response. + There is no guarantee of a specific ordering of the elements in the response. The elements in the + response will not contain any aliases or tags. :param full_name: str The full three-level name of the registered model under which to list model versions diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py index 3205dfcb..a11dc6c8 100755 --- a/databricks/sdk/service/jobs.py +++ b/databricks/sdk/service/jobs.py @@ -2502,6 +2502,8 @@ def from_dict(cls, d: Dict[str, any]) -> ResolvedValues: @dataclass class Run: + """Run was retrieved successfully""" + attempt_number: Optional[int] = None """The sequence number of this run attempt for a triggered job run. The initial attempt of a run has an attempt_number of 0\. If the initial run attempt fails, and the job has a retry policy diff --git a/databricks/sdk/service/pipelines.py b/databricks/sdk/service/pipelines.py index e530f8e8..8e2a7187 100755 --- a/databricks/sdk/service/pipelines.py +++ b/databricks/sdk/service/pipelines.py @@ -60,6 +60,10 @@ class CreatePipeline: id: Optional[str] = None """Unique identifier for this pipeline.""" + ingestion_definition: Optional[ManagedIngestionPipelineDefinition] = None + """The configuration for a managed ingestion pipeline. These settings cannot be used with the + 'libraries', 'target' or 'catalog' settings.""" + libraries: Optional[List[PipelineLibrary]] = None """Libraries or code needed by this deployment.""" @@ -101,6 +105,7 @@ def as_dict(self) -> dict: if self.edition is not None: body['edition'] = self.edition if self.filters: body['filters'] = self.filters.as_dict() if self.id is not None: body['id'] = self.id + if self.ingestion_definition: body['ingestion_definition'] = self.ingestion_definition.as_dict() if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries] if self.name is not None: body['name'] = self.name if self.notifications: body['notifications'] = [v.as_dict() for v in self.notifications] @@ -126,6 +131,8 @@ def from_dict(cls, d: Dict[str, any]) -> CreatePipeline: edition=d.get('edition', None), filters=_from_dict(d, 'filters', Filters), id=d.get('id', None), + ingestion_definition=_from_dict(d, 'ingestion_definition', + ManagedIngestionPipelineDefinition), libraries=_repeated_dict(d, 'libraries', PipelineLibrary), name=d.get('name', None), notifications=_repeated_dict(d, 'notifications', Notifications), @@ -262,6 +269,10 @@ class EditPipeline: id: Optional[str] = None """Unique identifier for this pipeline.""" + ingestion_definition: Optional[ManagedIngestionPipelineDefinition] = None + """The configuration for a managed ingestion pipeline. 
These settings cannot be used with the + 'libraries', 'target' or 'catalog' settings.""" + libraries: Optional[List[PipelineLibrary]] = None """Libraries or code needed by this deployment.""" @@ -307,6 +318,7 @@ def as_dict(self) -> dict: body['expected_last_modified'] = self.expected_last_modified if self.filters: body['filters'] = self.filters.as_dict() if self.id is not None: body['id'] = self.id + if self.ingestion_definition: body['ingestion_definition'] = self.ingestion_definition.as_dict() if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries] if self.name is not None: body['name'] = self.name if self.notifications: body['notifications'] = [v.as_dict() for v in self.notifications] @@ -333,6 +345,8 @@ def from_dict(cls, d: Dict[str, any]) -> EditPipeline: expected_last_modified=d.get('expected_last_modified', None), filters=_from_dict(d, 'filters', Filters), id=d.get('id', None), + ingestion_definition=_from_dict(d, 'ingestion_definition', + ManagedIngestionPipelineDefinition), libraries=_repeated_dict(d, 'libraries', PipelineLibrary), name=d.get('name', None), notifications=_repeated_dict(d, 'notifications', Notifications), @@ -535,6 +549,27 @@ def from_dict(cls, d: Dict[str, any]) -> GetUpdateResponse: return cls(update=_from_dict(d, 'update', UpdateInfo)) +@dataclass +class IngestionConfig: + schema: Optional[SchemaSpec] = None + """Select tables from a specific source schema.""" + + table: Optional[TableSpec] = None + """Select tables from a specific source table.""" + + def as_dict(self) -> dict: + """Serializes the IngestionConfig into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.schema: body['schema'] = self.schema.as_dict() + if self.table: body['table'] = self.table.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> IngestionConfig: + """Deserializes the IngestionConfig from a dictionary.""" + return cls(schema=_from_dict(d, 'schema', SchemaSpec), table=_from_dict(d, 'table', TableSpec)) + + @dataclass class ListPipelineEventsResponse: events: Optional[List[PipelineEvent]] = None @@ -611,6 +646,35 @@ def from_dict(cls, d: Dict[str, any]) -> ListUpdatesResponse: updates=_repeated_dict(d, 'updates', UpdateInfo)) +@dataclass +class ManagedIngestionPipelineDefinition: + connection_name: Optional[str] = None + """Immutable. The Unity Catalog connection this ingestion pipeline uses to communicate with the + source. Specify either ingestion_gateway_id or connection_name.""" + + ingestion_gateway_id: Optional[str] = None + """Immutable. Identifier for the ingestion gateway used by this ingestion pipeline to communicate + with the source. Specify either ingestion_gateway_id or connection_name.""" + + objects: Optional[List[IngestionConfig]] = None + """Required. 
Settings specifying tables to replicate and the destination for the replicated tables.""" + + def as_dict(self) -> dict: + """Serializes the ManagedIngestionPipelineDefinition into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.connection_name is not None: body['connection_name'] = self.connection_name + if self.ingestion_gateway_id is not None: body['ingestion_gateway_id'] = self.ingestion_gateway_id + if self.objects: body['objects'] = [v.as_dict() for v in self.objects] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ManagedIngestionPipelineDefinition: + """Deserializes the ManagedIngestionPipelineDefinition from a dictionary.""" + return cls(connection_name=d.get('connection_name', None), + ingestion_gateway_id=d.get('ingestion_gateway_id', None), + objects=_repeated_dict(d, 'objects', IngestionConfig)) + + @dataclass class ManualTrigger: @@ -1283,6 +1347,10 @@ class PipelineSpec: id: Optional[str] = None """Unique identifier for this pipeline.""" + ingestion_definition: Optional[ManagedIngestionPipelineDefinition] = None + """The configuration for a managed ingestion pipeline. These settings cannot be used with the + 'libraries', 'target' or 'catalog' settings.""" + libraries: Optional[List[PipelineLibrary]] = None """Libraries or code needed by this deployment.""" @@ -1322,6 +1390,7 @@ def as_dict(self) -> dict: if self.edition is not None: body['edition'] = self.edition if self.filters: body['filters'] = self.filters.as_dict() if self.id is not None: body['id'] = self.id + if self.ingestion_definition: body['ingestion_definition'] = self.ingestion_definition.as_dict() if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries] if self.name is not None: body['name'] = self.name if self.notifications: body['notifications'] = [v.as_dict() for v in self.notifications] @@ -1345,6 +1414,8 @@ def from_dict(cls, d: Dict[str, any]) -> PipelineSpec: edition=d.get('edition', None), filters=_from_dict(d, 'filters', Filters), id=d.get('id', None), + ingestion_definition=_from_dict(d, 'ingestion_definition', + ManagedIngestionPipelineDefinition), libraries=_repeated_dict(d, 'libraries', PipelineLibrary), name=d.get('name', None), notifications=_repeated_dict(d, 'notifications', Notifications), @@ -1436,6 +1507,40 @@ def from_dict(cls, d: Dict[str, any]) -> PipelineTrigger: return cls(cron=_from_dict(d, 'cron', CronTrigger), manual=_from_dict(d, 'manual', ManualTrigger)) +@dataclass +class SchemaSpec: + destination_catalog: Optional[str] = None + """Required. Destination catalog to store tables.""" + + destination_schema: Optional[str] = None + """Required. Destination schema to store tables in. Tables with the same name as the source tables + are created in this destination schema. The pipeline fails If a table with the same name already + exists.""" + + source_catalog: Optional[str] = None + """The source catalog name. Might be optional depending on the type of source.""" + + source_schema: Optional[str] = None + """Required. 
Schema name in the source database.""" + + def as_dict(self) -> dict: + """Serializes the SchemaSpec into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.destination_catalog is not None: body['destination_catalog'] = self.destination_catalog + if self.destination_schema is not None: body['destination_schema'] = self.destination_schema + if self.source_catalog is not None: body['source_catalog'] = self.source_catalog + if self.source_schema is not None: body['source_schema'] = self.source_schema + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> SchemaSpec: + """Deserializes the SchemaSpec from a dictionary.""" + return cls(destination_catalog=d.get('destination_catalog', None), + destination_schema=d.get('destination_schema', None), + source_catalog=d.get('source_catalog', None), + source_schema=d.get('source_schema', None)) + + @dataclass class Sequencing: control_plane_seq_no: Optional[int] = None @@ -1603,6 +1708,49 @@ def from_dict(cls, d: Dict[str, any]) -> StopPipelineResponse: return cls() +@dataclass +class TableSpec: + destination_catalog: Optional[str] = None + """Required. Destination catalog to store table.""" + + destination_schema: Optional[str] = None + """Required. Destination schema to store table.""" + + destination_table: Optional[str] = None + """Optional. Destination table name. The pipeline fails If a table with that name already exists. + If not set, the source table name is used.""" + + source_catalog: Optional[str] = None + """Source catalog name. Might be optional depending on the type of source.""" + + source_schema: Optional[str] = None + """Schema name in the source database. Might be optional depending on the type of source.""" + + source_table: Optional[str] = None + """Required. Table name in the source database.""" + + def as_dict(self) -> dict: + """Serializes the TableSpec into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.destination_catalog is not None: body['destination_catalog'] = self.destination_catalog + if self.destination_schema is not None: body['destination_schema'] = self.destination_schema + if self.destination_table is not None: body['destination_table'] = self.destination_table + if self.source_catalog is not None: body['source_catalog'] = self.source_catalog + if self.source_schema is not None: body['source_schema'] = self.source_schema + if self.source_table is not None: body['source_table'] = self.source_table + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> TableSpec: + """Deserializes the TableSpec from a dictionary.""" + return cls(destination_catalog=d.get('destination_catalog', None), + destination_schema=d.get('destination_schema', None), + destination_table=d.get('destination_table', None), + source_catalog=d.get('source_catalog', None), + source_schema=d.get('source_schema', None), + source_table=d.get('source_table', None)) + + @dataclass class UpdateInfo: cause: Optional[UpdateInfoCause] = None @@ -1834,6 +1982,7 @@ def create(self, edition: Optional[str] = None, filters: Optional[Filters] = None, id: Optional[str] = None, + ingestion_definition: Optional[ManagedIngestionPipelineDefinition] = None, libraries: Optional[List[PipelineLibrary]] = None, name: Optional[str] = None, notifications: Optional[List[Notifications]] = None, @@ -1872,6 +2021,9 @@ def create(self, Filters on which Pipeline packages to include in the deployed graph. :param id: str (optional) Unique identifier for this pipeline. 
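Like the other generated dataclasses, the new ingestion types serialize to and from the JSON request body via `as_dict()`/`from_dict()`. A small round-trip sketch using the `IngestionConfig` and `SchemaSpec` classes defined above (the source and destination names are made up):

```python
from databricks.sdk.service.pipelines import IngestionConfig, SchemaSpec

cfg = IngestionConfig(schema=SchemaSpec(source_schema='sales',
                                        destination_catalog='main',
                                        destination_schema='sales_raw'))

body = cfg.as_dict()
# {'schema': {'destination_catalog': 'main',
#             'destination_schema': 'sales_raw',
#             'source_schema': 'sales'}}

# from_dict() restores an equal instance from the wire format.
assert IngestionConfig.from_dict(body) == cfg
```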
+ :param ingestion_definition: :class:`ManagedIngestionPipelineDefinition` (optional) + The configuration for a managed ingestion pipeline. These settings cannot be used with the + 'libraries', 'target' or 'catalog' settings. :param libraries: List[:class:`PipelineLibrary`] (optional) Libraries or code needed by this deployment. :param name: str (optional) @@ -1905,6 +2057,7 @@ def create(self, if edition is not None: body['edition'] = edition if filters is not None: body['filters'] = filters.as_dict() if id is not None: body['id'] = id + if ingestion_definition is not None: body['ingestion_definition'] = ingestion_definition.as_dict() if libraries is not None: body['libraries'] = [v.as_dict() for v in libraries] if name is not None: body['name'] = name if notifications is not None: body['notifications'] = [v.as_dict() for v in notifications] @@ -2233,6 +2386,7 @@ def update(self, expected_last_modified: Optional[int] = None, filters: Optional[Filters] = None, id: Optional[str] = None, + ingestion_definition: Optional[ManagedIngestionPipelineDefinition] = None, libraries: Optional[List[PipelineLibrary]] = None, name: Optional[str] = None, notifications: Optional[List[Notifications]] = None, @@ -2274,6 +2428,9 @@ def update(self, Filters on which Pipeline packages to include in the deployed graph. :param id: str (optional) Unique identifier for this pipeline. + :param ingestion_definition: :class:`ManagedIngestionPipelineDefinition` (optional) + The configuration for a managed ingestion pipeline. These settings cannot be used with the + 'libraries', 'target' or 'catalog' settings. :param libraries: List[:class:`PipelineLibrary`] (optional) Libraries or code needed by this deployment. :param name: str (optional) @@ -2307,6 +2464,7 @@ def update(self, if expected_last_modified is not None: body['expected_last_modified'] = expected_last_modified if filters is not None: body['filters'] = filters.as_dict() if id is not None: body['id'] = id + if ingestion_definition is not None: body['ingestion_definition'] = ingestion_definition.as_dict() if libraries is not None: body['libraries'] = [v.as_dict() for v in libraries] if name is not None: body['name'] = name if notifications is not None: body['notifications'] = [v.as_dict() for v in notifications] diff --git a/databricks/sdk/service/serving.py b/databricks/sdk/service/serving.py index 8bbdb3a0..5c9a061f 100755 --- a/databricks/sdk/service/serving.py +++ b/databricks/sdk/service/serving.py @@ -98,106 +98,190 @@ def from_dict(cls, d: Dict[str, any]) -> AnthropicConfig: @dataclass -class AppEvents: - event_name: Optional[str] = None +class App: + name: str + """The name of the app. The name must contain only lowercase alphanumeric characters and hyphens + and be between 2 and 30 characters long. It must be unique within the workspace.""" - event_time: Optional[str] = None + active_deployment: Optional[AppDeployment] = None + """The active deployment of the app.""" - event_type: Optional[str] = None + create_time: Optional[str] = None + """The creation time of the app. Formatted timestamp in ISO 6801.""" - message: Optional[str] = None + creator: Optional[str] = None + """The email of the user that created the app.""" + + description: Optional[str] = None + """The description of the app.""" - service_name: Optional[str] = None + pending_deployment: Optional[AppDeployment] = None + """The pending deployment of the app.""" + + status: Optional[AppStatus] = None + + update_time: Optional[str] = None + """The update time of the app. 
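Putting the pieces together, the new `ingestion_definition` argument on `create()`/`update()` takes a `ManagedIngestionPipelineDefinition` whose `objects` select whole schemas or individual tables to replicate. A sketch against a hypothetical Unity Catalog connection; remember that `ingestion_definition` cannot be combined with the `libraries`, `target` or `catalog` settings:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.pipelines import (IngestionConfig,
                                              ManagedIngestionPipelineDefinition,
                                              SchemaSpec, TableSpec)

w = WorkspaceClient()

definition = ManagedIngestionPipelineDefinition(
    # Specify either connection_name or ingestion_gateway_id, not both.
    connection_name='my-sqlserver-connection',
    objects=[
        # Replicate every table of a source schema into main.sales_raw.
        IngestionConfig(schema=SchemaSpec(source_schema='sales',
                                          destination_catalog='main',
                                          destination_schema='sales_raw')),
        # Replicate a single table.
        IngestionConfig(table=TableSpec(source_schema='hr',
                                        source_table='employees',
                                        destination_catalog='main',
                                        destination_schema='hr_raw')),
    ])

created = w.pipelines.create(name='managed-ingestion-demo',
                             ingestion_definition=definition)
print(created.pipeline_id)
```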
Formatted timestamp in ISO 6801.""" + + updater: Optional[str] = None + """The email of the user that last updated the app.""" + + url: Optional[str] = None + """The URL of the app once it is deployed.""" def as_dict(self) -> dict: - """Serializes the AppEvents into a dictionary suitable for use as a JSON request body.""" + """Serializes the App into a dictionary suitable for use as a JSON request body.""" body = {} - if self.event_name is not None: body['event_name'] = self.event_name - if self.event_time is not None: body['event_time'] = self.event_time - if self.event_type is not None: body['event_type'] = self.event_type - if self.message is not None: body['message'] = self.message - if self.service_name is not None: body['service_name'] = self.service_name + if self.active_deployment: body['active_deployment'] = self.active_deployment.as_dict() + if self.create_time is not None: body['create_time'] = self.create_time + if self.creator is not None: body['creator'] = self.creator + if self.description is not None: body['description'] = self.description + if self.name is not None: body['name'] = self.name + if self.pending_deployment: body['pending_deployment'] = self.pending_deployment.as_dict() + if self.status: body['status'] = self.status.as_dict() + if self.update_time is not None: body['update_time'] = self.update_time + if self.updater is not None: body['updater'] = self.updater + if self.url is not None: body['url'] = self.url return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> AppEvents: - """Deserializes the AppEvents from a dictionary.""" - return cls(event_name=d.get('event_name', None), - event_time=d.get('event_time', None), - event_type=d.get('event_type', None), - message=d.get('message', None), - service_name=d.get('service_name', None)) + def from_dict(cls, d: Dict[str, any]) -> App: + """Deserializes the App from a dictionary.""" + return cls(active_deployment=_from_dict(d, 'active_deployment', AppDeployment), + create_time=d.get('create_time', None), + creator=d.get('creator', None), + description=d.get('description', None), + name=d.get('name', None), + pending_deployment=_from_dict(d, 'pending_deployment', AppDeployment), + status=_from_dict(d, 'status', AppStatus), + update_time=d.get('update_time', None), + updater=d.get('updater', None), + url=d.get('url', None)) @dataclass -class AppManifest: - dependencies: Optional[List[Any]] = None - """Workspace dependencies.""" +class AppDeployment: + source_code_path: str + """The source code path of the deployment.""" - description: Optional[str] = None - """application description""" + create_time: Optional[str] = None + """The creation time of the deployment. Formatted timestamp in ISO 6801.""" - ingress: Optional[Any] = None - """Ingress rules for app public endpoints""" + creator: Optional[str] = None + """The email of the user creates the deployment.""" - name: Optional[str] = None - """Only a-z and dashes (-). Max length of 30.""" + deployment_id: Optional[str] = None + """The unique id of the deployment.""" + + status: Optional[AppDeploymentStatus] = None + """Status and status message of the deployment""" + + update_time: Optional[str] = None + """The update time of the deployment. 
Formatted timestamp in ISO 6801.""" + + def as_dict(self) -> dict: + """Serializes the AppDeployment into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.create_time is not None: body['create_time'] = self.create_time + if self.creator is not None: body['creator'] = self.creator + if self.deployment_id is not None: body['deployment_id'] = self.deployment_id + if self.source_code_path is not None: body['source_code_path'] = self.source_code_path + if self.status: body['status'] = self.status.as_dict() + if self.update_time is not None: body['update_time'] = self.update_time + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> AppDeployment: + """Deserializes the AppDeployment from a dictionary.""" + return cls(create_time=d.get('create_time', None), + creator=d.get('creator', None), + deployment_id=d.get('deployment_id', None), + source_code_path=d.get('source_code_path', None), + status=_from_dict(d, 'status', AppDeploymentStatus), + update_time=d.get('update_time', None)) - registry: Optional[Any] = None - """Container private registry""" - services: Optional[Any] = None - """list of app services. Restricted to one for now.""" +class AppDeploymentState(Enum): + + CANCELLED = 'CANCELLED' + FAILED = 'FAILED' + IN_PROGRESS = 'IN_PROGRESS' + STATE_UNSPECIFIED = 'STATE_UNSPECIFIED' + SUCCEEDED = 'SUCCEEDED' - version: Optional[Any] = None - """The manifest format version. Must be set to 1.""" + +@dataclass +class AppDeploymentStatus: + message: Optional[str] = None + """Message corresponding with the deployment state.""" + + state: Optional[AppDeploymentState] = None + """State of the deployment.""" def as_dict(self) -> dict: - """Serializes the AppManifest into a dictionary suitable for use as a JSON request body.""" + """Serializes the AppDeploymentStatus into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dependencies: body['dependencies'] = [v for v in self.dependencies] - if self.description is not None: body['description'] = self.description - if self.ingress: body['ingress'] = self.ingress - if self.name is not None: body['name'] = self.name - if self.registry: body['registry'] = self.registry - if self.services: body['services'] = self.services - if self.version: body['version'] = self.version + if self.message is not None: body['message'] = self.message + if self.state is not None: body['state'] = self.state.value return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> AppManifest: - """Deserializes the AppManifest from a dictionary.""" - return cls(dependencies=d.get('dependencies', None), - description=d.get('description', None), - ingress=d.get('ingress', None), - name=d.get('name', None), - registry=d.get('registry', None), - services=d.get('services', None), - version=d.get('version', None)) + def from_dict(cls, d: Dict[str, any]) -> AppDeploymentStatus: + """Deserializes the AppDeploymentStatus from a dictionary.""" + return cls(message=d.get('message', None), state=_enum(d, 'state', AppDeploymentState)) @dataclass -class AppServiceStatus: - deployment: Optional[Any] = None +class AppEnvironment: + env: Optional[List[EnvVariable]] = None - name: Optional[str] = None + def as_dict(self) -> dict: + """Serializes the AppEnvironment into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.env: body['env'] = [v.as_dict() for v in self.env] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> AppEnvironment: + """Deserializes the AppEnvironment 
from a dictionary.""" + return cls(env=_repeated_dict(d, 'env', EnvVariable)) + + +class AppState(Enum): + + CREATING = 'CREATING' + DELETED = 'DELETED' + DELETING = 'DELETING' + DEPLOYED = 'DEPLOYED' + DEPLOYING = 'DEPLOYING' + ERROR = 'ERROR' + IDLE = 'IDLE' + READY = 'READY' + RUNNING = 'RUNNING' + STARTING = 'STARTING' + STATE_UNSPECIFIED = 'STATE_UNSPECIFIED' + UPDATING = 'UPDATING' - template: Optional[Any] = None + +@dataclass +class AppStatus: + message: Optional[str] = None + """Message corresponding with the app state.""" + + state: Optional[AppState] = None + """State of the app.""" def as_dict(self) -> dict: - """Serializes the AppServiceStatus into a dictionary suitable for use as a JSON request body.""" + """Serializes the AppStatus into a dictionary suitable for use as a JSON request body.""" body = {} - if self.deployment: body['deployment'] = self.deployment - if self.name is not None: body['name'] = self.name - if self.template: body['template'] = self.template + if self.message is not None: body['message'] = self.message + if self.state is not None: body['state'] = self.state.value return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> AppServiceStatus: - """Deserializes the AppServiceStatus from a dictionary.""" - return cls(deployment=d.get('deployment', None), - name=d.get('name', None), - template=d.get('template', None)) + def from_dict(cls, d: Dict[str, any]) -> AppStatus: + """Deserializes the AppStatus from a dictionary.""" + return cls(message=d.get('message', None), state=_enum(d, 'state', AppState)) @dataclass @@ -351,6 +435,49 @@ def from_dict(cls, d: Dict[str, any]) -> CohereConfig: return cls(cohere_api_key=d.get('cohere_api_key', None)) +@dataclass +class CreateAppDeploymentRequest: + source_code_path: str + """The source code path of the deployment.""" + + app_name: Optional[str] = None + """The name of the app.""" + + def as_dict(self) -> dict: + """Serializes the CreateAppDeploymentRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.app_name is not None: body['app_name'] = self.app_name + if self.source_code_path is not None: body['source_code_path'] = self.source_code_path + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CreateAppDeploymentRequest: + """Deserializes the CreateAppDeploymentRequest from a dictionary.""" + return cls(app_name=d.get('app_name', None), source_code_path=d.get('source_code_path', None)) + + +@dataclass +class CreateAppRequest: + name: str + """The name of the app. The name must contain only lowercase alphanumeric characters and hyphens + and be between 2 and 30 characters long. 
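The old free-form service/deployment status fields are replaced by the typed `AppState`/`AppStatus` pair above, so callers can branch on the enum rather than parsing strings. A short sketch with a hypothetical app name:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.serving import AppState

w = WorkspaceClient()

app = w.apps.get(name='my-custom-app')
if app.status and app.status.state == AppState.ERROR:
    print(f'app failed: {app.status.message}')
elif app.status and app.status.state == AppState.RUNNING:
    print(f'app is serving at {app.url}')
```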
It must be unique within the workspace.""" + + description: Optional[str] = None + """The description of the app.""" + + def as_dict(self) -> dict: + """Serializes the CreateAppRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.description is not None: body['description'] = self.description + if self.name is not None: body['name'] = self.name + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CreateAppRequest: + """Deserializes the CreateAppRequest from a dictionary.""" + return cls(description=d.get('description', None), name=d.get('name', None)) + + @dataclass class CreateServingEndpoint: name: str @@ -433,22 +560,6 @@ def from_dict(cls, d: Dict[str, any]) -> DataframeSplitInput: return cls(columns=d.get('columns', None), data=d.get('data', None), index=d.get('index', None)) -@dataclass -class DeleteAppResponse: - name: Optional[str] = None - - def as_dict(self) -> dict: - """Serializes the DeleteAppResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.name is not None: body['name'] = self.name - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> DeleteAppResponse: - """Deserializes the DeleteAppResponse from a dictionary.""" - return cls(name=d.get('name', None)) - - @dataclass class DeleteResponse: @@ -463,68 +574,6 @@ def from_dict(cls, d: Dict[str, any]) -> DeleteResponse: return cls() -@dataclass -class DeployAppRequest: - manifest: AppManifest - """Manifest that specifies the application requirements""" - - resources: Optional[Any] = None - """Information passed at app deployment time to fulfill app dependencies""" - - def as_dict(self) -> dict: - """Serializes the DeployAppRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.manifest: body['manifest'] = self.manifest.as_dict() - if self.resources: body['resources'] = self.resources - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> DeployAppRequest: - """Deserializes the DeployAppRequest from a dictionary.""" - return cls(manifest=_from_dict(d, 'manifest', AppManifest), resources=d.get('resources', None)) - - -@dataclass -class DeploymentStatus: - container_logs: Optional[List[Any]] = None - """Container logs.""" - - deployment_id: Optional[str] = None - """description""" - - extra_info: Optional[str] = None - """Supplementary information about pod""" - - state: Optional[DeploymentStatusState] = None - """State: one of DEPLOYING,SUCCESS, FAILURE, DEPLOYMENT_STATE_UNSPECIFIED""" - - def as_dict(self) -> dict: - """Serializes the DeploymentStatus into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.container_logs: body['container_logs'] = [v for v in self.container_logs] - if self.deployment_id is not None: body['deployment_id'] = self.deployment_id - if self.extra_info is not None: body['extra_info'] = self.extra_info - if self.state is not None: body['state'] = self.state.value - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> DeploymentStatus: - """Deserializes the DeploymentStatus from a dictionary.""" - return cls(container_logs=d.get('container_logs', None), - deployment_id=d.get('deployment_id', None), - extra_info=d.get('extra_info', None), - state=_enum(d, 'state', DeploymentStatusState)) - - -class DeploymentStatusState(Enum): - """State: one of DEPLOYING,SUCCESS, FAILURE, DEPLOYMENT_STATE_UNSPECIFIED""" - - DEPLOYING = 'DEPLOYING' - DEPLOYMENT_STATE_UNSPECIFIED = 
'DEPLOYMENT_STATE_UNSPECIFIED' - FAILURE = 'FAILURE' - SUCCESS = 'SUCCESS' - - @dataclass class EmbeddingsV1ResponseEmbeddingElement: embedding: Optional[List[float]] = None @@ -771,6 +820,28 @@ def from_dict(cls, d: Dict[str, any]) -> EndpointTag: return cls(key=d.get('key', None), value=d.get('value', None)) +@dataclass +class EnvVariable: + name: Optional[str] = None + + value: Optional[str] = None + + value_from: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the EnvVariable into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.name is not None: body['name'] = self.name + if self.value is not None: body['value'] = self.value + if self.value_from is not None: body['value_from'] = self.value_from + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> EnvVariable: + """Deserializes the EnvVariable from a dictionary.""" + return cls(name=d.get('name', None), value=d.get('value', None), value_from=d.get('value_from', None)) + + @dataclass class ExportMetricsResponse: @@ -925,31 +996,19 @@ def from_dict(cls, d: Dict[str, any]) -> FoundationModel: @dataclass -class GetAppResponse: - current_services: Optional[List[AppServiceStatus]] = None - - name: Optional[str] = None - - pending_services: Optional[List[AppServiceStatus]] = None - - url: Optional[str] = None +class GetOpenApiResponse: + """The response is an OpenAPI spec in JSON format that typically includes fields like openapi, + info, servers and paths, etc.""" def as_dict(self) -> dict: - """Serializes the GetAppResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the GetOpenApiResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.current_services: body['current_services'] = [v.as_dict() for v in self.current_services] - if self.name is not None: body['name'] = self.name - if self.pending_services: body['pending_services'] = [v.as_dict() for v in self.pending_services] - if self.url is not None: body['url'] = self.url return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> GetAppResponse: - """Deserializes the GetAppResponse from a dictionary.""" - return cls(current_services=_repeated_dict(d, 'current_services', AppServiceStatus), - name=d.get('name', None), - pending_services=_repeated_dict(d, 'pending_services', AppServiceStatus), - url=d.get('url', None)) + def from_dict(cls, d: Dict[str, any]) -> GetOpenApiResponse: + """Deserializes the GetOpenApiResponse from a dictionary.""" + return cls() @dataclass @@ -971,40 +1030,45 @@ def from_dict(cls, d: Dict[str, any]) -> GetServingEndpointPermissionLevelsRespo @dataclass -class ListAppEventsResponse: - events: Optional[List[AppEvents]] = None - """App events""" +class ListAppDeploymentsResponse: + app_deployments: Optional[List[AppDeployment]] = None + """Deployment history of the app.""" + + next_page_token: Optional[str] = None + """Pagination token to request the next page of apps.""" def as_dict(self) -> dict: - """Serializes the ListAppEventsResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the ListAppDeploymentsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.events: body['events'] = [v.as_dict() for v in self.events] + if self.app_deployments: body['app_deployments'] = [v.as_dict() for v in self.app_deployments] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body @classmethod - def from_dict(cls, d: 
Dict[str, any]) -> ListAppEventsResponse: - """Deserializes the ListAppEventsResponse from a dictionary.""" - return cls(events=_repeated_dict(d, 'events', AppEvents)) + def from_dict(cls, d: Dict[str, any]) -> ListAppDeploymentsResponse: + """Deserializes the ListAppDeploymentsResponse from a dictionary.""" + return cls(app_deployments=_repeated_dict(d, 'app_deployments', AppDeployment), + next_page_token=d.get('next_page_token', None)) @dataclass class ListAppsResponse: - apps: Optional[List[Any]] = None - """Available apps.""" + apps: Optional[List[App]] = None next_page_token: Optional[str] = None + """Pagination token to request the next page of apps.""" def as_dict(self) -> dict: """Serializes the ListAppsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.apps: body['apps'] = [v for v in self.apps] + if self.apps: body['apps'] = [v.as_dict() for v in self.apps] if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, any]) -> ListAppsResponse: """Deserializes the ListAppsResponse from a dictionary.""" - return cls(apps=d.get('apps', None), next_page_token=d.get('next_page_token', None)) + return cls(apps=_repeated_dict(d, 'apps', App), next_page_token=d.get('next_page_token', None)) @dataclass @@ -2228,6 +2292,26 @@ def from_dict(cls, d: Dict[str, any]) -> ServingEndpointPermissionsRequest: serving_endpoint_id=d.get('serving_endpoint_id', None)) +@dataclass +class StopAppRequest: + name: Optional[str] = None + """The name of the app.""" + + +@dataclass +class StopAppResponse: + + def as_dict(self) -> dict: + """Serializes the StopAppResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> StopAppResponse: + """Deserializes the StopAppResponse from a dictionary.""" + return cls() + + @dataclass class TrafficConfig: routes: Optional[List[Route]] = None @@ -2245,6 +2329,28 @@ def from_dict(cls, d: Dict[str, any]) -> TrafficConfig: return cls(routes=_repeated_dict(d, 'routes', Route)) +@dataclass +class UpdateAppRequest: + name: str + """The name of the app. The name must contain only lowercase alphanumeric characters and hyphens + and be between 2 and 30 characters long. 
It must be unique within the workspace.""" + + description: Optional[str] = None + """The description of the app.""" + + def as_dict(self) -> dict: + """Serializes the UpdateAppRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.description is not None: body['description'] = self.description + if self.name is not None: body['name'] = self.name + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> UpdateAppRequest: + """Deserializes the UpdateAppRequest from a dictionary.""" + return cls(description=d.get('description', None), name=d.get('name', None)) + + @dataclass class V1ResponseChoiceElement: finish_reason: Optional[str] = None @@ -2283,118 +2389,304 @@ def from_dict(cls, d: Dict[str, any]) -> V1ResponseChoiceElement: class AppsAPI: - """Lakehouse Apps run directly on a customer’s Databricks instance, integrate with their data, use and - extend Databricks services, and enable users to interact through single sign-on.""" + """Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend + Databricks services, and enable users to interact through single sign-on.""" def __init__(self, api_client): self._api = api_client - def create(self, manifest: AppManifest, *, resources: Optional[Any] = None) -> DeploymentStatus: - """Create and deploy an application. + def wait_get_app_idle(self, + name: str, + timeout=timedelta(minutes=20), + callback: Optional[Callable[[App], None]] = None) -> App: + deadline = time.time() + timeout.total_seconds() + target_states = (AppState.IDLE, ) + failure_states = (AppState.ERROR, ) + status_message = 'polling...' + attempt = 1 + while time.time() < deadline: + poll = self.get(name=name) + status = poll.status.state + status_message = f'current status: {status}' + if poll.status: + status_message = poll.status.message + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f'failed to reach IDLE, got {status}: {status_message}' + raise OperationFailed(msg) + prefix = f"name={name}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f'timed out after {timeout}: {status_message}') + + def wait_get_deployment_app_succeeded( + self, + app_name: str, + deployment_id: str, + timeout=timedelta(minutes=20), + callback: Optional[Callable[[AppDeployment], None]] = None) -> AppDeployment: + deadline = time.time() + timeout.total_seconds() + target_states = (AppDeploymentState.SUCCEEDED, ) + failure_states = (AppDeploymentState.FAILED, ) + status_message = 'polling...' 
+ attempt = 1 + while time.time() < deadline: + poll = self.get_deployment(app_name=app_name, deployment_id=deployment_id) + status = poll.status.state + status_message = f'current status: {status}' + if poll.status: + status_message = poll.status.message + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f'failed to reach SUCCEEDED, got {status}: {status_message}' + raise OperationFailed(msg) + prefix = f"app_name={app_name}, deployment_id={deployment_id}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f'timed out after {timeout}: {status_message}') + + def create(self, name: str, *, description: Optional[str] = None) -> Wait[App]: + """Create an App. + + Creates a new app. + + :param name: str + The name of the app. The name must contain only lowercase alphanumeric characters and hyphens and be + between 2 and 30 characters long. It must be unique within the workspace. + :param description: str (optional) + The description of the app. + + :returns: + Long-running operation waiter for :class:`App`. + See :method:wait_get_app_idle for more details. + """ + body = {} + if description is not None: body['description'] = description + if name is not None: body['name'] = name + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + op_response = self._api.do('POST', '/api/2.0/preview/apps', body=body, headers=headers) + return Wait(self.wait_get_app_idle, response=App.from_dict(op_response), name=op_response['name']) + + def create_and_wait(self, + name: str, + *, + description: Optional[str] = None, + timeout=timedelta(minutes=20)) -> App: + return self.create(description=description, name=name).result(timeout=timeout) + + def create_deployment(self, app_name: str, source_code_path: str) -> Wait[AppDeployment]: + """Create an App Deployment. - Creates and deploys an application. + Creates an app deployment for the app with the supplied name. - :param manifest: :class:`AppManifest` - Manifest that specifies the application requirements - :param resources: Any (optional) - Information passed at app deployment time to fulfill app dependencies + :param app_name: str + The name of the app. + :param source_code_path: str + The source code path of the deployment. - :returns: :class:`DeploymentStatus` + :returns: + Long-running operation waiter for :class:`AppDeployment`. + See :method:wait_get_deployment_app_succeeded for more details. 
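The reworked `create*` methods return `Wait` objects for long-running operations; callers can block on the waiter, use the `*_and_wait` convenience helpers, or re-poll an existing app with a progress callback. A brief sketch (the app name is hypothetical):

```python
from datetime import timedelta
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Kick off creation; result() blocks until the app reaches IDLE or raises on ERROR.
waiter = w.apps.create(name='my-custom-app', description='demo')
app = waiter.result(timeout=timedelta(minutes=10))

# The same thing in one call:
#   app = w.apps.create_and_wait(name='my-custom-app', description='demo')

# Re-poll an existing app, logging state transitions along the way.
app = w.apps.wait_get_app_idle(
    name='my-custom-app',
    callback=lambda a: print(a.status.state if a.status else None))
```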
""" body = {} - if manifest is not None: body['manifest'] = manifest.as_dict() - if resources is not None: body['resources'] = resources + if source_code_path is not None: body['source_code_path'] = source_code_path headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - res = self._api.do('POST', '/api/2.0/preview/apps/deployments', body=body, headers=headers) - return DeploymentStatus.from_dict(res) + op_response = self._api.do('POST', + f'/api/2.0/preview/apps/{app_name}/deployments', + body=body, + headers=headers) + return Wait(self.wait_get_deployment_app_succeeded, + response=AppDeployment.from_dict(op_response), + app_name=app_name, + deployment_id=op_response['deployment_id']) + + def create_deployment_and_wait(self, app_name: str, source_code_path: str, + timeout=timedelta(minutes=20)) -> AppDeployment: + return self.create_deployment(app_name=app_name, + source_code_path=source_code_path).result(timeout=timeout) - def delete_app(self, name: str) -> DeleteAppResponse: - """Delete an application. + def delete(self, name: str): + """Delete an App. - Delete an application definition + Deletes an app. :param name: str - The name of an application. This field is required. + The name of the app. + - :returns: :class:`DeleteAppResponse` """ headers = {'Accept': 'application/json', } - res = self._api.do('DELETE', f'/api/2.0/preview/apps/instances/{name}', headers=headers) - return DeleteAppResponse.from_dict(res) + self._api.do('DELETE', f'/api/2.0/preview/apps/{name}', headers=headers) - def get_app(self, name: str) -> GetAppResponse: - """Get definition for an application. + def get(self, name: str) -> App: + """Get an App. - Get an application definition + Retrieves information for the app with the supplied name. :param name: str - The name of an application. This field is required. + The name of the app. - :returns: :class:`GetAppResponse` + :returns: :class:`App` """ headers = {'Accept': 'application/json', } - res = self._api.do('GET', f'/api/2.0/preview/apps/instances/{name}', headers=headers) - return GetAppResponse.from_dict(res) + res = self._api.do('GET', f'/api/2.0/preview/apps/{name}', headers=headers) + return App.from_dict(res) - def get_app_deployment_status(self, - deployment_id: str, - *, - include_app_log: Optional[str] = None) -> DeploymentStatus: - """Get deployment status for an application. + def get_deployment(self, app_name: str, deployment_id: str) -> AppDeployment: + """Get an App Deployment. - Get deployment status for an application + Retrieves information for the app deployment with the supplied name and deployment id. + :param app_name: str + The name of the app. :param deployment_id: str - The deployment id for an application. This field is required. - :param include_app_log: str (optional) - Boolean flag to include application logs + The unique id of the deployment. - :returns: :class:`DeploymentStatus` + :returns: :class:`AppDeployment` """ - query = {} - if include_app_log is not None: query['include_app_log'] = include_app_log headers = {'Accept': 'application/json', } res = self._api.do('GET', - f'/api/2.0/preview/apps/deployments/{deployment_id}', - query=query, + f'/api/2.0/preview/apps/{app_name}/deployments/{deployment_id}', headers=headers) - return DeploymentStatus.from_dict(res) + return AppDeployment.from_dict(res) - def get_apps(self) -> ListAppsResponse: - """List all applications. + def get_environment(self, name: str) -> AppEnvironment: + """Get App Environment. 
- List all available applications + Retrieves app environment. - :returns: :class:`ListAppsResponse` + :param name: str + The name of the app. + + :returns: :class:`AppEnvironment` """ headers = {'Accept': 'application/json', } - res = self._api.do('GET', '/api/2.0/preview/apps/instances', headers=headers) - return ListAppsResponse.from_dict(res) + res = self._api.do('GET', f'/api/2.0/preview/apps/{name}/environment', headers=headers) + return AppEnvironment.from_dict(res) - def get_events(self, name: str) -> ListAppEventsResponse: - """Get deployment events for an application. + def list(self, *, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[App]: + """List Apps. - Get deployment events for an application + Lists all apps in the workspace. - :param name: str - The name of an application. This field is required. + :param page_size: int (optional) + Upper bound for items returned. + :param page_token: str (optional) + Pagination token to go to the next page of apps. Requests first page if absent. + + :returns: Iterator over :class:`App` + """ + + query = {} + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json', } + + while True: + json = self._api.do('GET', '/api/2.0/preview/apps', query=query, headers=headers) + if 'apps' in json: + for v in json['apps']: + yield App.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + def list_deployments(self, + app_name: str, + *, + page_size: Optional[int] = None, + page_token: Optional[str] = None) -> Iterator[AppDeployment]: + """List App Deployments. + + Lists all app deployments for the app with the supplied name. - :returns: :class:`ListAppEventsResponse` + :param app_name: str + The name of the app. + :param page_size: int (optional) + Upper bound for items returned. + :param page_token: str (optional) + Pagination token to go to the next page of apps. Requests first page if absent. + + :returns: Iterator over :class:`AppDeployment` """ + query = {} + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token headers = {'Accept': 'application/json', } - res = self._api.do('GET', f'/api/2.0/preview/apps/{name}/events', headers=headers) - return ListAppEventsResponse.from_dict(res) + while True: + json = self._api.do('GET', + f'/api/2.0/preview/apps/{app_name}/deployments', + query=query, + headers=headers) + if 'app_deployments' in json: + for v in json['app_deployments']: + yield AppDeployment.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + def stop(self, name: str): + """Stop an App. + + Stops the active deployment of the app in the workspace. + + :param name: str + The name of the app. + + + """ + + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + self._api.do('POST', f'/api/2.0/preview/apps/{name}/stop', headers=headers) + + def update(self, name: str, *, description: Optional[str] = None) -> App: + """Update an App. + + Updates the app with the supplied name. + + :param name: str + The name of the app. The name must contain only lowercase alphanumeric characters and hyphens and be + between 2 and 30 characters long. It must be unique within the workspace. + :param description: str (optional) + The description of the app. 
+ + :returns: :class:`App` + """ + body = {} + if description is not None: body['description'] = description + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do('PATCH', f'/api/2.0/preview/apps/{name}', body=body, headers=headers) + return App.from_dict(res) class ServingEndpointsAPI: @@ -2554,6 +2846,22 @@ def get(self, name: str) -> ServingEndpointDetailed: res = self._api.do('GET', f'/api/2.0/serving-endpoints/{name}', headers=headers) return ServingEndpointDetailed.from_dict(res) + def get_open_api(self, name: str): + """Get the schema for a serving endpoint. + + Get the query schema of the serving endpoint in OpenAPI format. The schema contains information for + the supported paths, input and output format and datatypes. + + :param name: str + The name of the serving endpoint that the served model belongs to. This field is required. + + + """ + + headers = {'Accept': 'application/json', } + + self._api.do('GET', f'/api/2.0/serving-endpoints/{name}/openapi', headers=headers) + def get_permission_levels(self, serving_endpoint_id: str) -> GetServingEndpointPermissionLevelsResponse: """Get serving endpoint permission levels. diff --git a/databricks/sdk/service/settings.py b/databricks/sdk/service/settings.py index e85991e8..636f7544 100755 --- a/databricks/sdk/service/settings.py +++ b/databricks/sdk/service/settings.py @@ -214,6 +214,70 @@ def from_dict(cls, d: Dict[str, any]) -> ClusterAutoRestartMessageMaintenanceWin return cls(hours=d.get('hours', None), minutes=d.get('minutes', None)) +@dataclass +class ComplianceSecurityProfile: + """SHIELD feature: CSP""" + + compliance_standards: Optional[List[ComplianceStandard]] = None + """Set by customers when they request Compliance Security Profile (CSP)""" + + is_enabled: Optional[bool] = None + + def as_dict(self) -> dict: + """Serializes the ComplianceSecurityProfile into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.compliance_standards: + body['compliance_standards'] = [v.value for v in self.compliance_standards] + if self.is_enabled is not None: body['is_enabled'] = self.is_enabled + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ComplianceSecurityProfile: + """Deserializes the ComplianceSecurityProfile from a dictionary.""" + return cls(compliance_standards=_repeated_enum(d, 'compliance_standards', ComplianceStandard), + is_enabled=d.get('is_enabled', None)) + + +@dataclass +class ComplianceSecurityProfileSetting: + compliance_security_profile_workspace: ComplianceSecurityProfile + """SHIELD feature: CSP""" + + etag: Optional[str] = None + """etag used for versioning. The response is at least as fresh as the eTag provided. This is used + for optimistic concurrency control as a way to help prevent simultaneous writes of a setting + overwriting each other. It is strongly suggested that systems make use of the etag in the read + -> update pattern to perform setting updates in order to avoid race conditions. That is, get an + etag from a GET request, and pass it with the PATCH request to identify the setting version you + are updating.""" + + setting_name: Optional[str] = None + """Name of the corresponding setting. This field is populated in the response, but it will not be + respected even if it's set in the request body. The setting name in the path parameter will be + respected instead. 
Setting name is required to be 'default' if the setting only has one instance + per workspace.""" + + def as_dict(self) -> dict: + """Serializes the ComplianceSecurityProfileSetting into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.compliance_security_profile_workspace: + body[ + 'compliance_security_profile_workspace'] = self.compliance_security_profile_workspace.as_dict( + ) + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ComplianceSecurityProfileSetting: + """Deserializes the ComplianceSecurityProfileSetting from a dictionary.""" + return cls(compliance_security_profile_workspace=_from_dict(d, + 'compliance_security_profile_workspace', + ComplianceSecurityProfile), + etag=d.get('etag', None), + setting_name=d.get('setting_name', None)) + + class ComplianceStandard(Enum): """Compliance stardard for SHIELD customers""" @@ -437,32 +501,6 @@ def from_dict(cls, d: Dict[str, any]) -> CreateTokenResponse: token_value=d.get('token_value', None)) -@dataclass -class CspEnablement: - """Compliance Security Profile (CSP) - one of the features in ESC product Tracks if the feature is - enabled.""" - - compliance_standards: Optional[List[ComplianceStandard]] = None - """Set by customers when they request Compliance Security Profile (CSP) Invariants are enforced in - Settings policy.""" - - is_enabled: Optional[bool] = None - - def as_dict(self) -> dict: - """Serializes the CspEnablement into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.compliance_standards: - body['compliance_standards'] = [v.value for v in self.compliance_standards] - if self.is_enabled is not None: body['is_enabled'] = self.is_enabled - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> CspEnablement: - """Deserializes the CspEnablement from a dictionary.""" - return cls(compliance_standards=_repeated_enum(d, 'compliance_standards', ComplianceStandard), - is_enabled=d.get('is_enabled', None)) - - @dataclass class CspEnablementAccount: """Account level policy for CSP""" @@ -524,43 +562,6 @@ def from_dict(cls, d: Dict[str, any]) -> CspEnablementAccountSetting: setting_name=d.get('setting_name', None)) -@dataclass -class CspEnablementSetting: - csp_enablement_workspace: CspEnablement - """Compliance Security Profile (CSP) - one of the features in ESC product Tracks if the feature is - enabled.""" - - etag: Optional[str] = None - """etag used for versioning. The response is at least as fresh as the eTag provided. This is used - for optimistic concurrency control as a way to help prevent simultaneous writes of a setting - overwriting each other. It is strongly suggested that systems make use of the etag in the read - -> update pattern to perform setting updates in order to avoid race conditions. That is, get an - etag from a GET request, and pass it with the PATCH request to identify the setting version you - are updating.""" - - setting_name: Optional[str] = None - """Name of the corresponding setting. This field is populated in the response, but it will not be - respected even if it's set in the request body. The setting name in the path parameter will be - respected instead. 
Setting name is required to be 'default' if the setting only has one instance - per workspace.""" - - def as_dict(self) -> dict: - """Serializes the CspEnablementSetting into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.csp_enablement_workspace: - body['csp_enablement_workspace'] = self.csp_enablement_workspace.as_dict() - if self.etag is not None: body['etag'] = self.etag - if self.setting_name is not None: body['setting_name'] = self.setting_name - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> CspEnablementSetting: - """Deserializes the CspEnablementSetting from a dictionary.""" - return cls(csp_enablement_workspace=_from_dict(d, 'csp_enablement_workspace', CspEnablement), - etag=d.get('etag', None), - setting_name=d.get('setting_name', None)) - - @dataclass class DefaultNamespaceSetting: """This represents the setting configuration for the default namespace in the Databricks workspace. @@ -704,46 +705,27 @@ def from_dict(cls, d: Dict[str, any]) -> DeleteRestrictWorkspaceAdminsSettingRes @dataclass -class EsmEnablement: - """Enhanced Security Monitoring (ESM) - one of the features in ESC product Tracks if the feature is - enabled.""" +class EnhancedSecurityMonitoring: + """SHIELD feature: ESM""" is_enabled: Optional[bool] = None def as_dict(self) -> dict: - """Serializes the EsmEnablement into a dictionary suitable for use as a JSON request body.""" + """Serializes the EnhancedSecurityMonitoring into a dictionary suitable for use as a JSON request body.""" body = {} if self.is_enabled is not None: body['is_enabled'] = self.is_enabled return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> EsmEnablement: - """Deserializes the EsmEnablement from a dictionary.""" + def from_dict(cls, d: Dict[str, any]) -> EnhancedSecurityMonitoring: + """Deserializes the EnhancedSecurityMonitoring from a dictionary.""" return cls(is_enabled=d.get('is_enabled', None)) @dataclass -class EsmEnablementAccount: - """Account level policy for ESM""" - - is_enforced: Optional[bool] = None - - def as_dict(self) -> dict: - """Serializes the EsmEnablementAccount into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.is_enforced is not None: body['is_enforced'] = self.is_enforced - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> EsmEnablementAccount: - """Deserializes the EsmEnablementAccount from a dictionary.""" - return cls(is_enforced=d.get('is_enforced', None)) - - -@dataclass -class EsmEnablementAccountSetting: - esm_enablement_account: EsmEnablementAccount - """Account level policy for ESM""" +class EnhancedSecurityMonitoringSetting: + enhanced_security_monitoring_workspace: EnhancedSecurityMonitoring + """SHIELD feature: ESM""" etag: Optional[str] = None """etag used for versioning. The response is at least as fresh as the eTag provided. 
This is used @@ -760,26 +742,47 @@ class EsmEnablementAccountSetting: per workspace.""" def as_dict(self) -> dict: - """Serializes the EsmEnablementAccountSetting into a dictionary suitable for use as a JSON request body.""" + """Serializes the EnhancedSecurityMonitoringSetting into a dictionary suitable for use as a JSON request body.""" body = {} - if self.esm_enablement_account: body['esm_enablement_account'] = self.esm_enablement_account.as_dict() + if self.enhanced_security_monitoring_workspace: + body[ + 'enhanced_security_monitoring_workspace'] = self.enhanced_security_monitoring_workspace.as_dict( + ) if self.etag is not None: body['etag'] = self.etag if self.setting_name is not None: body['setting_name'] = self.setting_name return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> EsmEnablementAccountSetting: - """Deserializes the EsmEnablementAccountSetting from a dictionary.""" - return cls(esm_enablement_account=_from_dict(d, 'esm_enablement_account', EsmEnablementAccount), + def from_dict(cls, d: Dict[str, any]) -> EnhancedSecurityMonitoringSetting: + """Deserializes the EnhancedSecurityMonitoringSetting from a dictionary.""" + return cls(enhanced_security_monitoring_workspace=_from_dict( + d, 'enhanced_security_monitoring_workspace', EnhancedSecurityMonitoring), etag=d.get('etag', None), setting_name=d.get('setting_name', None)) @dataclass -class EsmEnablementSetting: - esm_enablement_workspace: EsmEnablement - """Enhanced Security Monitoring (ESM) - one of the features in ESC product Tracks if the feature is - enabled.""" +class EsmEnablementAccount: + """Account level policy for ESM""" + + is_enforced: Optional[bool] = None + + def as_dict(self) -> dict: + """Serializes the EsmEnablementAccount into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.is_enforced is not None: body['is_enforced'] = self.is_enforced + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> EsmEnablementAccount: + """Deserializes the EsmEnablementAccount from a dictionary.""" + return cls(is_enforced=d.get('is_enforced', None)) + + +@dataclass +class EsmEnablementAccountSetting: + esm_enablement_account: EsmEnablementAccount + """Account level policy for ESM""" etag: Optional[str] = None """etag used for versioning. The response is at least as fresh as the eTag provided. 
This is used @@ -796,18 +799,17 @@ class EsmEnablementSetting: per workspace.""" def as_dict(self) -> dict: - """Serializes the EsmEnablementSetting into a dictionary suitable for use as a JSON request body.""" + """Serializes the EsmEnablementAccountSetting into a dictionary suitable for use as a JSON request body.""" body = {} - if self.esm_enablement_workspace: - body['esm_enablement_workspace'] = self.esm_enablement_workspace.as_dict() + if self.esm_enablement_account: body['esm_enablement_account'] = self.esm_enablement_account.as_dict() if self.etag is not None: body['etag'] = self.etag if self.setting_name is not None: body['setting_name'] = self.setting_name return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> EsmEnablementSetting: - """Deserializes the EsmEnablementSetting from a dictionary.""" - return cls(esm_enablement_workspace=_from_dict(d, 'esm_enablement_workspace', EsmEnablement), + def from_dict(cls, d: Dict[str, any]) -> EsmEnablementAccountSetting: + """Deserializes the EsmEnablementAccountSetting from a dictionary.""" + return cls(esm_enablement_account=_from_dict(d, 'esm_enablement_account', EsmEnablementAccount), etag=d.get('etag', None), setting_name=d.get('setting_name', None)) @@ -1989,13 +1991,13 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateAutomaticClusterUpdateSettingRequ @dataclass -class UpdateCspEnablementAccountSettingRequest: +class UpdateComplianceSecurityProfileSettingRequest: """Details required to update a setting.""" allow_missing: bool """This should always be set to true for Settings API. Added for AIP compliance.""" - setting: CspEnablementAccountSetting + setting: ComplianceSecurityProfileSetting field_mask: str """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of @@ -2003,7 +2005,7 @@ class UpdateCspEnablementAccountSettingRequest: specify multiple fields in the field mask, use comma as the separator (no space).""" def as_dict(self) -> dict: - """Serializes the UpdateCspEnablementAccountSettingRequest into a dictionary suitable for use as a JSON request body.""" + """Serializes the UpdateComplianceSecurityProfileSettingRequest into a dictionary suitable for use as a JSON request body.""" body = {} if self.allow_missing is not None: body['allow_missing'] = self.allow_missing if self.field_mask is not None: body['field_mask'] = self.field_mask @@ -2011,21 +2013,21 @@ def as_dict(self) -> dict: return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> UpdateCspEnablementAccountSettingRequest: - """Deserializes the UpdateCspEnablementAccountSettingRequest from a dictionary.""" + def from_dict(cls, d: Dict[str, any]) -> UpdateComplianceSecurityProfileSettingRequest: + """Deserializes the UpdateComplianceSecurityProfileSettingRequest from a dictionary.""" return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), - setting=_from_dict(d, 'setting', CspEnablementAccountSetting)) + setting=_from_dict(d, 'setting', ComplianceSecurityProfileSetting)) @dataclass -class UpdateCspEnablementSettingRequest: +class UpdateCspEnablementAccountSettingRequest: """Details required to update a setting.""" allow_missing: bool """This should always be set to true for Settings API. Added for AIP compliance.""" - setting: CspEnablementSetting + setting: CspEnablementAccountSetting field_mask: str """Field mask is required to be passed into the PATCH request. 
Field mask specifies which fields of @@ -2033,7 +2035,7 @@ class UpdateCspEnablementSettingRequest: specify multiple fields in the field mask, use comma as the separator (no space).""" def as_dict(self) -> dict: - """Serializes the UpdateCspEnablementSettingRequest into a dictionary suitable for use as a JSON request body.""" + """Serializes the UpdateCspEnablementAccountSettingRequest into a dictionary suitable for use as a JSON request body.""" body = {} if self.allow_missing is not None: body['allow_missing'] = self.allow_missing if self.field_mask is not None: body['field_mask'] = self.field_mask @@ -2041,11 +2043,11 @@ def as_dict(self) -> dict: return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> UpdateCspEnablementSettingRequest: - """Deserializes the UpdateCspEnablementSettingRequest from a dictionary.""" + def from_dict(cls, d: Dict[str, any]) -> UpdateCspEnablementAccountSettingRequest: + """Deserializes the UpdateCspEnablementAccountSettingRequest from a dictionary.""" return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), - setting=_from_dict(d, 'setting', CspEnablementSetting)) + setting=_from_dict(d, 'setting', CspEnablementAccountSetting)) @dataclass @@ -2086,13 +2088,13 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateDefaultNamespaceSettingRequest: @dataclass -class UpdateEsmEnablementAccountSettingRequest: +class UpdateEnhancedSecurityMonitoringSettingRequest: """Details required to update a setting.""" allow_missing: bool """This should always be set to true for Settings API. Added for AIP compliance.""" - setting: EsmEnablementAccountSetting + setting: EnhancedSecurityMonitoringSetting field_mask: str """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of @@ -2100,7 +2102,7 @@ class UpdateEsmEnablementAccountSettingRequest: specify multiple fields in the field mask, use comma as the separator (no space).""" def as_dict(self) -> dict: - """Serializes the UpdateEsmEnablementAccountSettingRequest into a dictionary suitable for use as a JSON request body.""" + """Serializes the UpdateEnhancedSecurityMonitoringSettingRequest into a dictionary suitable for use as a JSON request body.""" body = {} if self.allow_missing is not None: body['allow_missing'] = self.allow_missing if self.field_mask is not None: body['field_mask'] = self.field_mask @@ -2108,21 +2110,21 @@ def as_dict(self) -> dict: return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> UpdateEsmEnablementAccountSettingRequest: - """Deserializes the UpdateEsmEnablementAccountSettingRequest from a dictionary.""" + def from_dict(cls, d: Dict[str, any]) -> UpdateEnhancedSecurityMonitoringSettingRequest: + """Deserializes the UpdateEnhancedSecurityMonitoringSettingRequest from a dictionary.""" return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), - setting=_from_dict(d, 'setting', EsmEnablementAccountSetting)) + setting=_from_dict(d, 'setting', EnhancedSecurityMonitoringSetting)) @dataclass -class UpdateEsmEnablementSettingRequest: +class UpdateEsmEnablementAccountSettingRequest: """Details required to update a setting.""" allow_missing: bool """This should always be set to true for Settings API. Added for AIP compliance.""" - setting: EsmEnablementSetting + setting: EsmEnablementAccountSetting field_mask: str """Field mask is required to be passed into the PATCH request. 
Field mask specifies which fields of @@ -2130,7 +2132,7 @@ class UpdateEsmEnablementSettingRequest: specify multiple fields in the field mask, use comma as the separator (no space).""" def as_dict(self) -> dict: - """Serializes the UpdateEsmEnablementSettingRequest into a dictionary suitable for use as a JSON request body.""" + """Serializes the UpdateEsmEnablementAccountSettingRequest into a dictionary suitable for use as a JSON request body.""" body = {} if self.allow_missing is not None: body['allow_missing'] = self.allow_missing if self.field_mask is not None: body['field_mask'] = self.field_mask @@ -2138,11 +2140,11 @@ def as_dict(self) -> dict: return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> UpdateEsmEnablementSettingRequest: - """Deserializes the UpdateEsmEnablementSettingRequest from a dictionary.""" + def from_dict(cls, d: Dict[str, any]) -> UpdateEsmEnablementAccountSettingRequest: + """Deserializes the UpdateEsmEnablementAccountSettingRequest from a dictionary.""" return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), - setting=_from_dict(d, 'setting', EsmEnablementSetting)) + setting=_from_dict(d, 'setting', EsmEnablementAccountSetting)) @dataclass @@ -2568,43 +2570,7 @@ def update(self, allow_missing: bool, setting: AutomaticClusterUpdateSetting, return AutomaticClusterUpdateSetting.from_dict(res) -class CredentialsManagerAPI: - """Credentials manager interacts with with Identity Providers to to perform token exchanges using stored - credentials and refresh tokens.""" - - def __init__(self, api_client): - self._api = api_client - - def exchange_token(self, partition_id: PartitionId, token_type: List[TokenType], - scopes: List[str]) -> ExchangeTokenResponse: - """Exchange token. - - Exchange tokens with an Identity Provider to get a new access token. It allows specifying scopes to - determine token permissions. - - :param partition_id: :class:`PartitionId` - The partition of Credentials store - :param token_type: List[:class:`TokenType`] - A list of token types being requested - :param scopes: List[str] - Array of scopes for the token request. - - :returns: :class:`ExchangeTokenResponse` - """ - body = {} - if partition_id is not None: body['partitionId'] = partition_id.as_dict() - if scopes is not None: body['scopes'] = [v for v in scopes] - if token_type is not None: body['tokenType'] = [v.value for v in token_type] - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - - res = self._api.do('POST', - '/api/2.0/credentials-manager/exchange-tokens/token', - body=body, - headers=headers) - return ExchangeTokenResponse.from_dict(res) - - -class CspEnablementAPI: +class ComplianceSecurityProfileAPI: """Controls whether to enable the compliance security profile for the current workspace. Enabling it on a workspace is permanent. By default, it is turned off. @@ -2613,7 +2579,7 @@ class CspEnablementAPI: def __init__(self, api_client): self._api = api_client - def get(self, *, etag: Optional[str] = None) -> CspEnablementSetting: + def get(self, *, etag: Optional[str] = None) -> ComplianceSecurityProfileSetting: """Get the compliance security profile setting. Gets the compliance security profile setting. @@ -2625,7 +2591,7 @@ def get(self, *, etag: Optional[str] = None) -> CspEnablementSetting: to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. 
- :returns: :class:`CspEnablementSetting` + :returns: :class:`ComplianceSecurityProfileSetting` """ query = {} @@ -2636,10 +2602,10 @@ def get(self, *, etag: Optional[str] = None) -> CspEnablementSetting: '/api/2.0/settings/types/shield_csp_enablement_ws_db/names/default', query=query, headers=headers) - return CspEnablementSetting.from_dict(res) + return ComplianceSecurityProfileSetting.from_dict(res) - def update(self, allow_missing: bool, setting: CspEnablementSetting, - field_mask: str) -> CspEnablementSetting: + def update(self, allow_missing: bool, setting: ComplianceSecurityProfileSetting, + field_mask: str) -> ComplianceSecurityProfileSetting: """Update the compliance security profile setting. Updates the compliance security profile setting for the workspace. A fresh etag needs to be provided @@ -2649,13 +2615,13 @@ def update(self, allow_missing: bool, setting: CspEnablementSetting, :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. - :param setting: :class:`CspEnablementSetting` + :param setting: :class:`ComplianceSecurityProfileSetting` :param field_mask: str Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the setting payload will be updated. The field mask needs to be supplied as single string. To specify multiple fields in the field mask, use comma as the separator (no space). - :returns: :class:`CspEnablementSetting` + :returns: :class:`ComplianceSecurityProfileSetting` """ body = {} if allow_missing is not None: body['allow_missing'] = allow_missing @@ -2667,7 +2633,43 @@ def update(self, allow_missing: bool, setting: CspEnablementSetting, '/api/2.0/settings/types/shield_csp_enablement_ws_db/names/default', body=body, headers=headers) - return CspEnablementSetting.from_dict(res) + return ComplianceSecurityProfileSetting.from_dict(res) + + +class CredentialsManagerAPI: + """Credentials manager interacts with with Identity Providers to to perform token exchanges using stored + credentials and refresh tokens.""" + + def __init__(self, api_client): + self._api = api_client + + def exchange_token(self, partition_id: PartitionId, token_type: List[TokenType], + scopes: List[str]) -> ExchangeTokenResponse: + """Exchange token. + + Exchange tokens with an Identity Provider to get a new access token. It allows specifying scopes to + determine token permissions. + + :param partition_id: :class:`PartitionId` + The partition of Credentials store + :param token_type: List[:class:`TokenType`] + A list of token types being requested + :param scopes: List[str] + Array of scopes for the token request. + + :returns: :class:`ExchangeTokenResponse` + """ + body = {} + if partition_id is not None: body['partitionId'] = partition_id.as_dict() + if scopes is not None: body['scopes'] = [v for v in scopes] + if token_type is not None: body['tokenType'] = [v.value for v in token_type] + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do('POST', + '/api/2.0/credentials-manager/exchange-tokens/token', + body=body, + headers=headers) + return ExchangeTokenResponse.from_dict(res) class CspEnablementAccountAPI: @@ -2846,7 +2848,7 @@ def update(self, allow_missing: bool, setting: DefaultNamespaceSetting, return DefaultNamespaceSetting.from_dict(res) -class EsmEnablementAPI: +class EnhancedSecurityMonitoringAPI: """Controls whether enhanced security monitoring is enabled for the current workspace. 
If the compliance security profile is enabled, this is automatically enabled. By default, it is disabled. However, if the compliance security profile is enabled, this is automatically enabled. @@ -2857,7 +2859,7 @@ class EsmEnablementAPI: def __init__(self, api_client): self._api = api_client - def get(self, *, etag: Optional[str] = None) -> EsmEnablementSetting: + def get(self, *, etag: Optional[str] = None) -> EnhancedSecurityMonitoringSetting: """Get the enhanced security monitoring setting. Gets the enhanced security monitoring setting. @@ -2869,7 +2871,7 @@ def get(self, *, etag: Optional[str] = None) -> EsmEnablementSetting: to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - :returns: :class:`EsmEnablementSetting` + :returns: :class:`EnhancedSecurityMonitoringSetting` """ query = {} @@ -2880,10 +2882,10 @@ def get(self, *, etag: Optional[str] = None) -> EsmEnablementSetting: '/api/2.0/settings/types/shield_esm_enablement_ws_db/names/default', query=query, headers=headers) - return EsmEnablementSetting.from_dict(res) + return EnhancedSecurityMonitoringSetting.from_dict(res) - def update(self, allow_missing: bool, setting: EsmEnablementSetting, - field_mask: str) -> EsmEnablementSetting: + def update(self, allow_missing: bool, setting: EnhancedSecurityMonitoringSetting, + field_mask: str) -> EnhancedSecurityMonitoringSetting: """Update the enhanced security monitoring setting. Updates the enhanced security monitoring setting for the workspace. A fresh etag needs to be provided @@ -2893,13 +2895,13 @@ def update(self, allow_missing: bool, setting: EsmEnablementSetting, :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. - :param setting: :class:`EsmEnablementSetting` + :param setting: :class:`EnhancedSecurityMonitoringSetting` :param field_mask: str Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the setting payload will be updated. The field mask needs to be supplied as single string. To specify multiple fields in the field mask, use comma as the separator (no space). 
- :returns: :class:`EsmEnablementSetting` + :returns: :class:`EnhancedSecurityMonitoringSetting` """ body = {} if allow_missing is not None: body['allow_missing'] = allow_missing @@ -2911,7 +2913,7 @@ def update(self, allow_missing: bool, setting: EsmEnablementSetting, '/api/2.0/settings/types/shield_esm_enablement_ws_db/names/default', body=body, headers=headers) - return EsmEnablementSetting.from_dict(res) + return EnhancedSecurityMonitoringSetting.from_dict(res) class EsmEnablementAccountAPI: @@ -3599,9 +3601,9 @@ def __init__(self, api_client): self._api = api_client self._automatic_cluster_update = AutomaticClusterUpdateAPI(self._api) - self._csp_enablement = CspEnablementAPI(self._api) + self._compliance_security_profile = ComplianceSecurityProfileAPI(self._api) self._default_namespace = DefaultNamespaceAPI(self._api) - self._esm_enablement = EsmEnablementAPI(self._api) + self._enhanced_security_monitoring = EnhancedSecurityMonitoringAPI(self._api) self._restrict_workspace_admins = RestrictWorkspaceAdminsAPI(self._api) @property @@ -3610,9 +3612,9 @@ def automatic_cluster_update(self) -> AutomaticClusterUpdateAPI: return self._automatic_cluster_update @property - def csp_enablement(self) -> CspEnablementAPI: + def compliance_security_profile(self) -> ComplianceSecurityProfileAPI: """Controls whether to enable the compliance security profile for the current workspace.""" - return self._csp_enablement + return self._compliance_security_profile @property def default_namespace(self) -> DefaultNamespaceAPI: @@ -3620,9 +3622,9 @@ def default_namespace(self) -> DefaultNamespaceAPI: return self._default_namespace @property - def esm_enablement(self) -> EsmEnablementAPI: + def enhanced_security_monitoring(self) -> EnhancedSecurityMonitoringAPI: """Controls whether enhanced security monitoring is enabled for the current workspace.""" - return self._esm_enablement + return self._enhanced_security_monitoring @property def restrict_workspace_admins(self) -> RestrictWorkspaceAdminsAPI: diff --git a/databricks/sdk/service/sql.py b/databricks/sdk/service/sql.py index 7bd0cc7d..bfef44af 100755 --- a/databricks/sdk/service/sql.py +++ b/databricks/sdk/service/sql.py @@ -360,6 +360,7 @@ def from_dict(cls, d: Dict[str, any]) -> ChannelInfo: class ChannelName(Enum): + """Name of the channel""" CHANNEL_NAME_CURRENT = 'CHANNEL_NAME_CURRENT' CHANNEL_NAME_CUSTOM = 'CHANNEL_NAME_CUSTOM' @@ -769,12 +770,15 @@ class DashboardEditContent: """Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as viewer" behavior) or `"owner"` (signifying "run as owner" behavior)""" + tags: Optional[List[str]] = None + def as_dict(self) -> dict: """Serializes the DashboardEditContent into a dictionary suitable for use as a JSON request body.""" body = {} if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id if self.name is not None: body['name'] = self.name if self.run_as_role is not None: body['run_as_role'] = self.run_as_role.value + if self.tags: body['tags'] = [v for v in self.tags] return body @classmethod @@ -782,7 +786,8 @@ def from_dict(cls, d: Dict[str, any]) -> DashboardEditContent: """Deserializes the DashboardEditContent from a dictionary.""" return cls(dashboard_id=d.get('dashboard_id', None), name=d.get('name', None), - run_as_role=_enum(d, 'run_as_role', RunAsRole)) + run_as_role=_enum(d, 'run_as_role', RunAsRole), + tags=d.get('tags', None)) @dataclass @@ -2394,6 +2399,8 @@ class QueryEditContent: """Sets the **Run as** role for the object. 
Must be set to one of `"viewer"` (signifying "run as viewer" behavior) or `"owner"` (signifying "run as owner" behavior)""" + tags: Optional[List[str]] = None + def as_dict(self) -> dict: """Serializes the QueryEditContent into a dictionary suitable for use as a JSON request body.""" body = {} @@ -2404,6 +2411,7 @@ def as_dict(self) -> dict: if self.query is not None: body['query'] = self.query if self.query_id is not None: body['query_id'] = self.query_id if self.run_as_role is not None: body['run_as_role'] = self.run_as_role.value + if self.tags: body['tags'] = [v for v in self.tags] return body @classmethod @@ -2415,7 +2423,8 @@ def from_dict(cls, d: Dict[str, any]) -> QueryEditContent: options=d.get('options', None), query=d.get('query', None), query_id=d.get('query_id', None), - run_as_role=_enum(d, 'run_as_role', RunAsRole)) + run_as_role=_enum(d, 'run_as_role', RunAsRole), + tags=d.get('tags', None)) @dataclass @@ -2765,24 +2774,34 @@ def from_dict(cls, d: Dict[str, any]) -> QueryMetrics: @dataclass class QueryOptions: + catalog: Optional[str] = None + """The name of the catalog to execute this query in.""" + moved_to_trash_at: Optional[str] = None """The timestamp when this query was moved to trash. Only present when the `is_archived` property is `true`. Trashed items are deleted after thirty days.""" parameters: Optional[List[Parameter]] = None + schema: Optional[str] = None + """The name of the schema to execute this query in.""" + def as_dict(self) -> dict: """Serializes the QueryOptions into a dictionary suitable for use as a JSON request body.""" body = {} + if self.catalog is not None: body['catalog'] = self.catalog if self.moved_to_trash_at is not None: body['moved_to_trash_at'] = self.moved_to_trash_at if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters] + if self.schema is not None: body['schema'] = self.schema return body @classmethod def from_dict(cls, d: Dict[str, any]) -> QueryOptions: """Deserializes the QueryOptions from a dictionary.""" - return cls(moved_to_trash_at=d.get('moved_to_trash_at', None), - parameters=_repeated_dict(d, 'parameters', Parameter)) + return cls(catalog=d.get('catalog', None), + moved_to_trash_at=d.get('moved_to_trash_at', None), + parameters=_repeated_dict(d, 'parameters', Parameter), + schema=d.get('schema', None)) @dataclass @@ -2814,6 +2833,8 @@ class QueryPostContent: """Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as viewer" behavior) or `"owner"` (signifying "run as owner" behavior)""" + tags: Optional[List[str]] = None + def as_dict(self) -> dict: """Serializes the QueryPostContent into a dictionary suitable for use as a JSON request body.""" body = {} @@ -2824,6 +2845,7 @@ def as_dict(self) -> dict: if self.parent is not None: body['parent'] = self.parent if self.query is not None: body['query'] = self.query if self.run_as_role is not None: body['run_as_role'] = self.run_as_role.value + if self.tags: body['tags'] = [v for v in self.tags] return body @classmethod @@ -2835,7 +2857,8 @@ def from_dict(cls, d: Dict[str, any]) -> QueryPostContent: options=d.get('options', None), parent=d.get('parent', None), query=d.get('query', None), - run_as_role=_enum(d, 'run_as_role', RunAsRole)) + run_as_role=_enum(d, 'run_as_role', RunAsRole), + tags=d.get('tags', None)) class QueryStatementType(Enum): @@ -3572,6 +3595,8 @@ class Visualization: """The options object varies widely from one visualization type to the next and is unsupported. 
Databricks does not recommend modifying visualization settings in JSON.""" + query: Optional[Query] = None + type: Optional[str] = None """The type of visualization: chart, table, pivot table, and so on.""" @@ -3585,6 +3610,7 @@ def as_dict(self) -> dict: if self.id is not None: body['id'] = self.id if self.name is not None: body['name'] = self.name if self.options: body['options'] = self.options + if self.query: body['query'] = self.query.as_dict() if self.type is not None: body['type'] = self.type if self.updated_at is not None: body['updated_at'] = self.updated_at return body @@ -3597,6 +3623,7 @@ def from_dict(cls, d: Dict[str, any]) -> Visualization: id=d.get('id', None), name=d.get('name', None), options=d.get('options', None), + query=_from_dict(d, 'query', Query), type=d.get('type', None), updated_at=d.get('updated_at', None)) @@ -4286,7 +4313,8 @@ def update(self, dashboard_id: str, *, name: Optional[str] = None, - run_as_role: Optional[RunAsRole] = None) -> Dashboard: + run_as_role: Optional[RunAsRole] = None, + tags: Optional[List[str]] = None) -> Dashboard: """Change a dashboard definition. Modify this dashboard definition. This operation only affects attributes of the dashboard object. It @@ -4300,12 +4328,14 @@ def update(self, :param run_as_role: :class:`RunAsRole` (optional) Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as viewer" behavior) or `"owner"` (signifying "run as owner" behavior) + :param tags: List[str] (optional) :returns: :class:`Dashboard` """ body = {} if name is not None: body['name'] = name if run_as_role is not None: body['run_as_role'] = run_as_role.value + if tags is not None: body['tags'] = [v for v in tags] headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } res = self._api.do('POST', @@ -4453,7 +4483,8 @@ def create(self, options: Optional[Any] = None, parent: Optional[str] = None, query: Optional[str] = None, - run_as_role: Optional[RunAsRole] = None) -> Query: + run_as_role: Optional[RunAsRole] = None, + tags: Optional[List[str]] = None) -> Query: """Create a new query definition. Creates a new query definition. Queries created with this endpoint belong to the authenticated user @@ -4485,6 +4516,7 @@ def create(self, :param run_as_role: :class:`RunAsRole` (optional) Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as viewer" behavior) or `"owner"` (signifying "run as owner" behavior) + :param tags: List[str] (optional) :returns: :class:`Query` """ @@ -4496,6 +4528,7 @@ def create(self, if parent is not None: body['parent'] = parent if query is not None: body['query'] = query if run_as_role is not None: body['run_as_role'] = run_as_role.value + if tags is not None: body['tags'] = [v for v in tags] headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } res = self._api.do('POST', '/api/2.0/preview/sql/queries', body=body, headers=headers) @@ -4615,7 +4648,8 @@ def update(self, name: Optional[str] = None, options: Optional[Any] = None, query: Optional[str] = None, - run_as_role: Optional[RunAsRole] = None) -> Query: + run_as_role: Optional[RunAsRole] = None, + tags: Optional[List[str]] = None) -> Query: """Change a query definition. Modify this query definition. @@ -4641,6 +4675,7 @@ def update(self, :param run_as_role: :class:`RunAsRole` (optional) Sets the **Run as** role for the object. 
Must be set to one of `"viewer"` (signifying "run as viewer" behavior) or `"owner"` (signifying "run as owner" behavior) + :param tags: List[str] (optional) :returns: :class:`Query` """ @@ -4651,6 +4686,7 @@ def update(self, if options is not None: body['options'] = options if query is not None: body['query'] = query if run_as_role is not None: body['run_as_role'] = run_as_role.value + if tags is not None: body['tags'] = [v for v in tags] headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } res = self._api.do('POST', f'/api/2.0/preview/sql/queries/{query_id}', body=body, headers=headers) @@ -4767,6 +4803,7 @@ def update(self, description: Optional[str] = None, name: Optional[str] = None, options: Optional[Any] = None, + query: Optional[Query] = None, type: Optional[str] = None, updated_at: Optional[str] = None) -> Visualization: """Edit existing visualization. @@ -4781,6 +4818,7 @@ def update(self, :param options: Any (optional) The options object varies widely from one visualization type to the next and is unsupported. Databricks does not recommend modifying visualization settings in JSON. + :param query: :class:`Query` (optional) :param type: str (optional) The type of visualization: chart, table, pivot table, and so on. :param updated_at: str (optional) @@ -4792,6 +4830,7 @@ def update(self, if description is not None: body['description'] = description if name is not None: body['name'] = name if options is not None: body['options'] = options + if query is not None: body['query'] = query.as_dict() if type is not None: body['type'] = type if updated_at is not None: body['updated_at'] = updated_at headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } diff --git a/databricks/sdk/version.py b/databricks/sdk/version.py index 826d20e8..cf7b6d65 100644 --- a/databricks/sdk/version.py +++ b/databricks/sdk/version.py @@ -1 +1 @@ -__version__ = '0.26.0' +__version__ = '0.27.0' diff --git a/docs/account/iam/groups.rst b/docs/account/iam/groups.rst index be1af3c8..4efb37ac 100644 --- a/docs/account/iam/groups.rst +++ b/docs/account/iam/groups.rst @@ -143,6 +143,36 @@ .. py:method:: patch(id: str [, operations: Optional[List[Patch]], schemas: Optional[List[PatchSchema]]]) + + Usage: + + .. code-block:: + + import time + from databricks.sdk import WorkspaceClient + from databricks.sdk.service import iam + + w = WorkspaceClient() + + group = w.groups.create(display_name=f'sdk-{time.time_ns()}-group') + user = w.users.create( + display_name=f'sdk-{time.time_ns()}-user', user_name=f'sdk-{time.time_ns()}@example.com') + + w.groups.patch( + id=group.id, + operations=[iam.Patch( + op=iam.PatchOp.ADD, + value={"members": [{ + "value": user.id, + }]}, + )], + schemas=[iam.PatchSchema.URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP], + ) + + # cleanup + w.users.delete(id=user.id) + w.groups.delete(id=group.id) + Update group details. Partially updates the details of a group. diff --git a/docs/account/iam/workspace_assignment.rst b/docs/account/iam/workspace_assignment.rst index 878ad847..1ce06996 100644 --- a/docs/account/iam/workspace_assignment.rst +++ b/docs/account/iam/workspace_assignment.rst @@ -82,9 +82,9 @@ workspace_id = os.environ["DUMMY_WORKSPACE_ID"] - a.workspace_assignment.update(workspace_id=workspace_id, - principal_id=spn_id, - permissions=[iam.WorkspacePermission.USER]) + _ = a.workspace_assignment.update(workspace_id=workspace_id, + principal_id=spn_id, + permissions=[iam.WorkspacePermission.USER]) Create or update permissions assignment. 
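The reworked ``w.apps`` service earlier in this diff replaces the single-shot deploy call with ``create``/``create_deployment`` methods that return long-running-operation waiters, plus token-paginated ``list`` and ``list_deployments`` iterators. A minimal usage sketch based only on the method signatures shown here; the app name and workspace source path are placeholders, and the flow assumes the Apps preview is enabled in the workspace:

.. code-block::

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Create the app and block until it settles (create_and_wait wraps the Wait[App] waiter).
    app = w.apps.create_and_wait(name='sdk-demo-app', description='example app')

    # Deploy source code that already lives in the workspace and wait for the deployment to succeed.
    deployment = w.apps.create_deployment_and_wait(
        app_name='sdk-demo-app',
        source_code_path='/Workspace/Users/someone@example.com/sdk-demo-app')

    # list_deployments follows next_page_token internally, so callers just iterate.
    for d in w.apps.list_deployments(app_name='sdk-demo-app', page_size=10):
        print(d.deployment_id)

    # Stop the active deployment, then remove the app.
    w.apps.stop(name='sdk-demo-app')
    w.apps.delete(name='sdk-demo-app')

The ``_and_wait`` variants simply call ``.result()`` on the returned ``Wait``; callers that want progress reporting can call ``create_deployment(...)`` directly and pass a callback to ``wait_get_deployment_app_succeeded``.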
diff --git a/docs/dbdataclasses/pipelines.rst b/docs/dbdataclasses/pipelines.rst index 9c927508..993a5fba 100644 --- a/docs/dbdataclasses/pipelines.rst +++ b/docs/dbdataclasses/pipelines.rst @@ -89,6 +89,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: IngestionConfig + :members: + :undoc-members: + .. autoclass:: ListPipelineEventsResponse :members: :undoc-members: @@ -101,6 +105,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: ManagedIngestionPipelineDefinition + :members: + :undoc-members: + .. autoclass:: ManualTrigger :members: :undoc-members: @@ -243,6 +251,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: SchemaSpec + :members: + :undoc-members: + .. autoclass:: Sequencing :members: :undoc-members: @@ -287,6 +299,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: TableSpec + :members: + :undoc-members: + .. autoclass:: UpdateInfo :members: :undoc-members: diff --git a/docs/dbdataclasses/serving.rst b/docs/dbdataclasses/serving.rst index c65496c4..9f2a1743 100644 --- a/docs/dbdataclasses/serving.rst +++ b/docs/dbdataclasses/serving.rst @@ -32,15 +32,78 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: AppEvents +.. autoclass:: App :members: :undoc-members: -.. autoclass:: AppManifest +.. autoclass:: AppDeployment :members: :undoc-members: -.. autoclass:: AppServiceStatus +.. py:class:: AppDeploymentState + + .. py:attribute:: CANCELLED + :value: "CANCELLED" + + .. py:attribute:: FAILED + :value: "FAILED" + + .. py:attribute:: IN_PROGRESS + :value: "IN_PROGRESS" + + .. py:attribute:: STATE_UNSPECIFIED + :value: "STATE_UNSPECIFIED" + + .. py:attribute:: SUCCEEDED + :value: "SUCCEEDED" + +.. autoclass:: AppDeploymentStatus + :members: + :undoc-members: + +.. autoclass:: AppEnvironment + :members: + :undoc-members: + +.. py:class:: AppState + + .. py:attribute:: CREATING + :value: "CREATING" + + .. py:attribute:: DELETED + :value: "DELETED" + + .. py:attribute:: DELETING + :value: "DELETING" + + .. py:attribute:: DEPLOYED + :value: "DEPLOYED" + + .. py:attribute:: DEPLOYING + :value: "DEPLOYING" + + .. py:attribute:: ERROR + :value: "ERROR" + + .. py:attribute:: IDLE + :value: "IDLE" + + .. py:attribute:: READY + :value: "READY" + + .. py:attribute:: RUNNING + :value: "RUNNING" + + .. py:attribute:: STARTING + :value: "STARTING" + + .. py:attribute:: STATE_UNSPECIFIED + :value: "STATE_UNSPECIFIED" + + .. py:attribute:: UPDATING + :value: "UPDATING" + +.. autoclass:: AppStatus :members: :undoc-members: @@ -81,50 +144,30 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CreateServingEndpoint +.. autoclass:: CreateAppDeploymentRequest :members: :undoc-members: -.. autoclass:: DatabricksModelServingConfig +.. autoclass:: CreateAppRequest :members: :undoc-members: -.. autoclass:: DataframeSplitInput - :members: - :undoc-members: - -.. autoclass:: DeleteAppResponse +.. autoclass:: CreateServingEndpoint :members: :undoc-members: -.. autoclass:: DeleteResponse +.. autoclass:: DatabricksModelServingConfig :members: :undoc-members: -.. autoclass:: DeployAppRequest +.. autoclass:: DataframeSplitInput :members: :undoc-members: -.. 
autoclass:: DeploymentStatus +.. autoclass:: DeleteResponse :members: :undoc-members: -.. py:class:: DeploymentStatusState - - State: one of DEPLOYING,SUCCESS, FAILURE, DEPLOYMENT_STATE_UNSPECIFIED - - .. py:attribute:: DEPLOYING - :value: "DEPLOYING" - - .. py:attribute:: DEPLOYMENT_STATE_UNSPECIFIED - :value: "DEPLOYMENT_STATE_UNSPECIFIED" - - .. py:attribute:: FAILURE - :value: "FAILURE" - - .. py:attribute:: SUCCESS - :value: "SUCCESS" - .. autoclass:: EmbeddingsV1ResponseEmbeddingElement :members: :undoc-members: @@ -183,6 +226,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: EnvVariable + :members: + :undoc-members: + .. autoclass:: ExportMetricsResponse :members: :undoc-members: @@ -224,7 +271,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: GetAppResponse +.. autoclass:: GetOpenApiResponse :members: :undoc-members: @@ -232,7 +279,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: ListAppEventsResponse +.. autoclass:: ListAppDeploymentsResponse :members: :undoc-members: @@ -452,10 +499,22 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: StopAppRequest + :members: + :undoc-members: + +.. autoclass:: StopAppResponse + :members: + :undoc-members: + .. autoclass:: TrafficConfig :members: :undoc-members: +.. autoclass:: UpdateAppRequest + :members: + :undoc-members: + .. autoclass:: V1ResponseChoiceElement :members: :undoc-members: diff --git a/docs/dbdataclasses/settings.rst b/docs/dbdataclasses/settings.rst index da4a258f..54274999 100644 --- a/docs/dbdataclasses/settings.rst +++ b/docs/dbdataclasses/settings.rst @@ -80,6 +80,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: ComplianceSecurityProfile + :members: + :undoc-members: + +.. autoclass:: ComplianceSecurityProfileSetting + :members: + :undoc-members: + .. py:class:: ComplianceStandard Compliance stardard for SHIELD customers @@ -159,10 +167,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CspEnablement - :members: - :undoc-members: - .. autoclass:: CspEnablementAccount :members: :undoc-members: @@ -171,10 +175,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CspEnablementSetting - :members: - :undoc-members: - .. autoclass:: DefaultNamespaceSetting :members: :undoc-members: @@ -199,19 +199,19 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: EsmEnablement +.. autoclass:: EnhancedSecurityMonitoring :members: :undoc-members: -.. autoclass:: EsmEnablementAccount +.. autoclass:: EnhancedSecurityMonitoringSetting :members: :undoc-members: -.. autoclass:: EsmEnablementAccountSetting +.. autoclass:: EsmEnablementAccount :members: :undoc-members: -.. autoclass:: EsmEnablementSetting +.. autoclass:: EsmEnablementAccountSetting :members: :undoc-members: @@ -461,11 +461,11 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: UpdateCspEnablementAccountSettingRequest +.. 
autoclass:: UpdateComplianceSecurityProfileSettingRequest :members: :undoc-members: -.. autoclass:: UpdateCspEnablementSettingRequest +.. autoclass:: UpdateCspEnablementAccountSettingRequest :members: :undoc-members: @@ -473,11 +473,11 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: UpdateEsmEnablementAccountSettingRequest +.. autoclass:: UpdateEnhancedSecurityMonitoringSettingRequest :members: :undoc-members: -.. autoclass:: UpdateEsmEnablementSettingRequest +.. autoclass:: UpdateEsmEnablementAccountSettingRequest :members: :undoc-members: diff --git a/docs/dbdataclasses/sql.rst b/docs/dbdataclasses/sql.rst index adf3ced5..fe1469a3 100644 --- a/docs/dbdataclasses/sql.rst +++ b/docs/dbdataclasses/sql.rst @@ -64,6 +64,8 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: ChannelName + Name of the channel + .. py:attribute:: CHANNEL_NAME_CURRENT :value: "CHANNEL_NAME_CURRENT" diff --git a/docs/workspace/catalog/model_versions.rst b/docs/workspace/catalog/model_versions.rst index 9dc39287..017a6aa1 100644 --- a/docs/workspace/catalog/model_versions.rst +++ b/docs/workspace/catalog/model_versions.rst @@ -82,7 +82,8 @@ response. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - There is no guarantee of a specific ordering of the elements in the response. + There is no guarantee of a specific ordering of the elements in the response. The elements in the + response will not contain any aliases or tags. :param full_name: str The full three-level name of the registered model under which to list model versions diff --git a/docs/workspace/files/dbfs.rst b/docs/workspace/files/dbfs.rst index f0c57e49..c52d11bc 100644 --- a/docs/workspace/files/dbfs.rst +++ b/docs/workspace/files/dbfs.rst @@ -62,33 +62,9 @@ :returns: :class:`CreateResponse` - .. py:method:: delete(path: str [, recursive: Optional[bool]]) + .. py:method:: delete(path: str [, recursive: bool = False]) - Delete a file/directory. - - Delete the file or directory (optionally recursively delete all files in the directory). This call - throws an exception with `IO_ERROR` if the path is a non-empty directory and `recursive` is set to - `false` or on other similar errors. - - When you delete a large number of files, the delete operation is done in increments. The call returns - a response after approximately 45 seconds with an error message (503 Service Unavailable) asking you - to re-invoke the delete operation until the directory structure is fully deleted. - - For operations that delete more than 10K files, we discourage using the DBFS REST API, but advise you - to perform such operations in the context of a cluster, using the [File system utility - (dbutils.fs)](/dev-tools/databricks-utils.html#dbutils-fs). `dbutils.fs` covers the functional scope - of the DBFS REST API, but from notebooks. Running such operations using notebooks provides better - control and manageability, such as selective deletes, and the possibility to automate periodic delete - jobs. - - :param path: str - The path of the file or directory to delete. The path should be the absolute DBFS path. - :param recursive: bool (optional) - Whether or not to recursively delete the directory's contents. Deleting empty directories can be - done without providing the recursive flag. - - - + Delete file or directory on DBFS .. 
py:method:: download(path: str) -> BinaryIO @@ -142,24 +118,14 @@ When calling list on a large directory, the list operation will time out after approximately 60 seconds. + :param path: the DBFS or UC Volume path to list :param recursive: traverse deep into directory tree :returns iterator of metadata for every file .. py:method:: mkdirs(path: str) - Create a directory. - - Creates the given directory and necessary parent directories if they do not exist. If a file (not a - directory) exists at any prefix of the input path, this call throws an exception with - `RESOURCE_ALREADY_EXISTS`. **Note**: If this operation fails, it might have succeeded in creating some - of the necessary parent directories. - - :param path: str - The path of the new directory. The path should be the absolute DBFS path. - - - + Create directory on DBFS .. py:method:: move(source_path: str, destination_path: str) @@ -182,7 +148,7 @@ Move files between local and DBFS systems - .. py:method:: open(path: str [, read: bool = False, write: bool = False, overwrite: bool = False]) -> _DbfsIO + .. py:method:: open(path: str [, read: bool = False, write: bool = False, overwrite: bool = False]) -> BinaryIO .. py:method:: put(path: str [, contents: Optional[str], overwrite: Optional[bool]]) diff --git a/docs/workspace/iam/groups.rst b/docs/workspace/iam/groups.rst index 0dd76485..ef32112c 100644 --- a/docs/workspace/iam/groups.rst +++ b/docs/workspace/iam/groups.rst @@ -143,6 +143,36 @@ .. py:method:: patch(id: str [, operations: Optional[List[Patch]], schemas: Optional[List[PatchSchema]]]) + + Usage: + + .. code-block:: + + import time + from databricks.sdk import WorkspaceClient + from databricks.sdk.service import iam + + w = WorkspaceClient() + + group = w.groups.create(display_name=f'sdk-{time.time_ns()}-group') + user = w.users.create( + display_name=f'sdk-{time.time_ns()}-user', user_name=f'sdk-{time.time_ns()}@example.com') + + w.groups.patch( + id=group.id, + operations=[iam.Patch( + op=iam.PatchOp.ADD, + value={"members": [{ + "value": user.id, + }]}, + )], + schemas=[iam.PatchSchema.URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP], + ) + + # cleanup + w.users.delete(id=user.id) + w.groups.delete(id=group.id) + Update group details. Partially updates the details of a group. diff --git a/docs/workspace/pipelines/pipelines.rst b/docs/workspace/pipelines/pipelines.rst index 4fc29038..1237c2be 100644 --- a/docs/workspace/pipelines/pipelines.rst +++ b/docs/workspace/pipelines/pipelines.rst @@ -15,7 +15,7 @@ also enforce data quality with Delta Live Tables expectations. Expectations allow you to define expected data quality and specify how to handle records that fail those expectations. - .. py:method:: create( [, allow_duplicate_names: Optional[bool], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], dry_run: Optional[bool], edition: Optional[str], filters: Optional[Filters], id: Optional[str], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]]) -> CreatePipelineResponse + .. 
py:method:: create( [, allow_duplicate_names: Optional[bool], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], dry_run: Optional[bool], edition: Optional[str], filters: Optional[Filters], id: Optional[str], ingestion_definition: Optional[ManagedIngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]]) -> CreatePipelineResponse Usage: @@ -78,6 +78,9 @@ Filters on which Pipeline packages to include in the deployed graph. :param id: str (optional) Unique identifier for this pipeline. + :param ingestion_definition: :class:`ManagedIngestionPipelineDefinition` (optional) + The configuration for a managed ingestion pipeline. These settings cannot be used with the + 'libraries', 'target' or 'catalog' settings. :param libraries: List[:class:`PipelineLibrary`] (optional) Libraries or code needed by this deployment. :param name: str (optional) @@ -366,7 +369,7 @@ .. py:method:: stop_and_wait(pipeline_id: str, timeout: datetime.timedelta = 0:20:00) -> GetPipelineResponse - .. py:method:: update(pipeline_id: str [, allow_duplicate_names: Optional[bool], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], edition: Optional[str], expected_last_modified: Optional[int], filters: Optional[Filters], id: Optional[str], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]]) + .. py:method:: update(pipeline_id: str [, allow_duplicate_names: Optional[bool], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], edition: Optional[str], expected_last_modified: Optional[int], filters: Optional[Filters], id: Optional[str], ingestion_definition: Optional[ManagedIngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]]) Usage: @@ -445,6 +448,9 @@ Filters on which Pipeline packages to include in the deployed graph. :param id: str (optional) Unique identifier for this pipeline. + :param ingestion_definition: :class:`ManagedIngestionPipelineDefinition` (optional) + The configuration for a managed ingestion pipeline. These settings cannot be used with the + 'libraries', 'target' or 'catalog' settings. :param libraries: List[:class:`PipelineLibrary`] (optional) Libraries or code needed by this deployment. 
:param name: str (optional) diff --git a/docs/workspace/serving/apps.rst b/docs/workspace/serving/apps.rst index bd2f6bed..06e4d040 100644 --- a/docs/workspace/serving/apps.rst +++ b/docs/workspace/serving/apps.rst @@ -1,81 +1,159 @@ -``w.apps``: Databricks Apps -=========================== +``w.apps``: Apps +================ .. currentmodule:: databricks.sdk.service.serving .. py:class:: AppsAPI - Lakehouse Apps run directly on a customer’s Databricks instance, integrate with their data, use and - extend Databricks services, and enable users to interact through single sign-on. + Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend + Databricks services, and enable users to interact through single sign-on. - .. py:method:: create(manifest: AppManifest [, resources: Optional[Any]]) -> DeploymentStatus + .. py:method:: create(name: str [, description: Optional[str]]) -> Wait[App] - Create and deploy an application. + Create an App. - Creates and deploys an application. + Creates a new app. - :param manifest: :class:`AppManifest` - Manifest that specifies the application requirements - :param resources: Any (optional) - Information passed at app deployment time to fulfill app dependencies + :param name: str + The name of the app. The name must contain only lowercase alphanumeric characters and hyphens and be + between 2 and 30 characters long. It must be unique within the workspace. + :param description: str (optional) + The description of the app. - :returns: :class:`DeploymentStatus` + :returns: + Long-running operation waiter for :class:`App`. + See :method:wait_get_app_idle for more details. - .. py:method:: delete_app(name: str) -> DeleteAppResponse + .. py:method:: create_and_wait(name: str [, description: Optional[str], timeout: datetime.timedelta = 0:20:00]) -> App + + + .. py:method:: create_deployment(app_name: str, source_code_path: str) -> Wait[AppDeployment] - Delete an application. + Create an App Deployment. + + Creates an app deployment for the app with the supplied name. + + :param app_name: str + The name of the app. + :param source_code_path: str + The source code path of the deployment. - Delete an application definition + :returns: + Long-running operation waiter for :class:`AppDeployment`. + See :method:wait_get_deployment_app_succeeded for more details. + + + .. py:method:: create_deployment_and_wait(app_name: str, source_code_path: str, timeout: datetime.timedelta = 0:20:00) -> AppDeployment + + + .. py:method:: delete(name: str) + + Delete an App. + + Deletes an app. :param name: str - The name of an application. This field is required. + The name of the app. + - :returns: :class:`DeleteAppResponse` - .. py:method:: get_app(name: str) -> GetAppResponse + .. py:method:: get(name: str) -> App - Get definition for an application. + Get an App. - Get an application definition + Retrieves information for the app with the supplied name. :param name: str - The name of an application. This field is required. + The name of the app. - :returns: :class:`GetAppResponse` + :returns: :class:`App` - .. py:method:: get_app_deployment_status(deployment_id: str [, include_app_log: Optional[str]]) -> DeploymentStatus + .. py:method:: get_deployment(app_name: str, deployment_id: str) -> AppDeployment - Get deployment status for an application. + Get an App Deployment. - Get deployment status for an application + Retrieves information for the app deployment with the supplied name and deployment id. + :param app_name: str + The name of the app. 
:param deployment_id: str - The deployment id for an application. This field is required. - :param include_app_log: str (optional) - Boolean flag to include application logs + The unique id of the deployment. + + :returns: :class:`AppDeployment` + + + .. py:method:: get_environment(name: str) -> AppEnvironment + + Get App Environment. + + Retrieves app environment. + + :param name: str + The name of the app. - :returns: :class:`DeploymentStatus` + :returns: :class:`AppEnvironment` - .. py:method:: get_apps() -> ListAppsResponse + .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[App] - List all applications. + List Apps. - List all available applications + Lists all apps in the workspace. - :returns: :class:`ListAppsResponse` + :param page_size: int (optional) + Upper bound for items returned. + :param page_token: str (optional) + Pagination token to go to the next page of apps. Requests first page if absent. + + :returns: Iterator over :class:`App` + + + .. py:method:: list_deployments(app_name: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[AppDeployment] + + List App Deployments. + + Lists all app deployments for the app with the supplied name. + + :param app_name: str + The name of the app. + :param page_size: int (optional) + Upper bound for items returned. + :param page_token: str (optional) + Pagination token to go to the next page of apps. Requests first page if absent. + + :returns: Iterator over :class:`AppDeployment` - .. py:method:: get_events(name: str) -> ListAppEventsResponse + .. py:method:: stop(name: str) - Get deployment events for an application. + Stop an App. - Get deployment events for an application + Stops the active deployment of the app in the workspace. :param name: str - The name of an application. This field is required. + The name of the app. + - :returns: :class:`ListAppEventsResponse` - \ No newline at end of file + + + .. py:method:: update(name: str [, description: Optional[str]]) -> App + + Update an App. + + Updates the app with the supplied name. + + :param name: str + The name of the app. The name must contain only lowercase alphanumeric characters and hyphens and be + between 2 and 30 characters long. It must be unique within the workspace. + :param description: str (optional) + The description of the app. + + :returns: :class:`App` + + + .. py:method:: wait_get_app_idle(name: str, timeout: datetime.timedelta = 0:20:00, callback: Optional[Callable[[App], None]]) -> App + + + .. py:method:: wait_get_deployment_app_succeeded(app_name: str, deployment_id: str, timeout: datetime.timedelta = 0:20:00, callback: Optional[Callable[[AppDeployment], None]]) -> AppDeployment diff --git a/docs/workspace/serving/serving_endpoints.rst b/docs/workspace/serving/serving_endpoints.rst index 141dd170..4a90c78e 100644 --- a/docs/workspace/serving/serving_endpoints.rst +++ b/docs/workspace/serving/serving_endpoints.rst @@ -87,6 +87,19 @@ :returns: :class:`ServingEndpointDetailed` + .. py:method:: get_open_api(name: str) + + Get the schema for a serving endpoint. + + Get the query schema of the serving endpoint in OpenAPI format. The schema contains information for + the supported paths, input and output format and datatypes. + + :param name: str + The name of the serving endpoint that the served model belongs to. This field is required. + + + + .. py:method:: get_permission_levels(serving_endpoint_id: str) -> GetServingEndpointPermissionLevelsResponse Get serving endpoint permission levels. 
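Taken together, the reworked ``w.apps`` methods above form a simple lifecycle: create an app, wait for it to become idle, deploy source code, inspect it, and clean up. A minimal sketch using only the calls documented in this diff; the source code path is a hypothetical placeholder for a workspace folder that actually contains your app.

.. code-block::

    import time

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # App names: lowercase alphanumerics and hyphens, 2-30 characters, unique per workspace.
    app_name = f'sdk-{time.time_ns()}'

    app = w.apps.create_and_wait(name=app_name, description='created from the SDK')

    # '/Workspace/Users/someone@example.com/my-app' is a placeholder; point it at a folder
    # that actually holds your app's source code.
    deployment = w.apps.create_deployment_and_wait(
        app_name=app_name, source_code_path='/Workspace/Users/someone@example.com/my-app')

    # Inspect the app, its environment and its deployment history.
    print(w.apps.get(name=app_name))
    print(w.apps.get_environment(name=app_name))
    for d in w.apps.list_deployments(app_name=app_name):
        print(d)

    # cleanup
    w.apps.stop(name=app_name)
    w.apps.delete(name=app_name)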
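The new ``get_open_api()`` call on serving endpoints is a plain GET; the generated docs above do not declare a return dataclass, so the sketch below simply issues the request. The endpoint name is a placeholder for an existing serving endpoint in your workspace.

.. code-block::

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # 'my-endpoint' is a placeholder; use the name of an existing serving endpoint.
    # The response is the endpoint's query schema in OpenAPI format.
    openapi_schema = w.serving_endpoints.get_open_api(name='my-endpoint')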
diff --git a/docs/workspace/settings/compliance_security_profile.rst b/docs/workspace/settings/compliance_security_profile.rst
new file mode 100644
index 00000000..f503830b
--- /dev/null
+++ b/docs/workspace/settings/compliance_security_profile.rst
@@ -0,0 +1,46 @@
+``w.settings.compliance_security_profile``: Compliance Security Profile
+========================================================================
+.. currentmodule:: databricks.sdk.service.settings
+
+.. py:class:: ComplianceSecurityProfileAPI
+
+    Controls whether to enable the compliance security profile for the current workspace. Enabling it on a
+    workspace is permanent. By default, it is turned off.
+
+    This setting cannot be disabled once it is enabled.
+
+    .. py:method:: get( [, etag: Optional[str]]) -> ComplianceSecurityProfileSetting
+
+        Get the compliance security profile setting.
+
+        Gets the compliance security profile setting.
+
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+
+        :returns: :class:`ComplianceSecurityProfileSetting`
+
+
+    .. py:method:: update(allow_missing: bool, setting: ComplianceSecurityProfileSetting, field_mask: str) -> ComplianceSecurityProfileSetting
+
+        Update the compliance security profile setting.
+
+        Updates the compliance security profile setting for the workspace. A fresh etag needs to be provided
+        in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET`
+        request before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and
+        the request must be retried by using the fresh etag in the 409 response.
+
+        :param allow_missing: bool
+          This should always be set to true for Settings API. Added for AIP compliance.
+        :param setting: :class:`ComplianceSecurityProfileSetting`
+        :param field_mask: str
+          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
+          setting payload will be updated. The field mask needs to be supplied as single string. To specify
+          multiple fields in the field mask, use comma as the separator (no space).
+
+        :returns: :class:`ComplianceSecurityProfileSetting`
+
\ No newline at end of file
diff --git a/docs/workspace/settings/enhanced_security_monitoring.rst b/docs/workspace/settings/enhanced_security_monitoring.rst
new file mode 100644
index 00000000..fe766897
--- /dev/null
+++ b/docs/workspace/settings/enhanced_security_monitoring.rst
@@ -0,0 +1,48 @@
+``w.settings.enhanced_security_monitoring``: Enhanced Security Monitoring
+=========================================================================
+.. currentmodule:: databricks.sdk.service.settings
+
+.. py:class:: EnhancedSecurityMonitoringAPI
+
+    Controls whether enhanced security monitoring is enabled for the current workspace. By default, it is
+    disabled. If the compliance security profile is enabled, enhanced security monitoring is automatically
+    enabled as well.
+ + If the compliance security profile is disabled, you can enable or disable this setting and it is not + permanent. + + .. py:method:: get( [, etag: Optional[str]]) -> EnhancedSecurityMonitoringSetting + + Get the enhanced security monitoring setting. + + Gets the enhanced security monitoring setting. + + :param etag: str (optional) + etag used for versioning. The response is at least as fresh as the eTag provided. This is used for + optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting + each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern + to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET + request, and pass it with the DELETE request to identify the rule set version you are deleting. + + :returns: :class:`EnhancedSecurityMonitoringSetting` + + + .. py:method:: update(allow_missing: bool, setting: EnhancedSecurityMonitoringSetting, field_mask: str) -> EnhancedSecurityMonitoringSetting + + Update the enhanced security monitoring setting. + + Updates the enhanced security monitoring setting for the workspace. A fresh etag needs to be provided + in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET` + request before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and + the request must be retried by using the fresh etag in the 409 response. + + :param allow_missing: bool + This should always be set to true for Settings API. Added for AIP compliance. + :param setting: :class:`EnhancedSecurityMonitoringSetting` + :param field_mask: str + Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the + setting payload will be updated. The field mask needs to be supplied as single string. To specify + multiple fields in the field mask, use comma as the separator (no space). + + :returns: :class:`EnhancedSecurityMonitoringSetting` + \ No newline at end of file diff --git a/docs/workspace/settings/index.rst b/docs/workspace/settings/index.rst index a873f2d1..5b56652e 100644 --- a/docs/workspace/settings/index.rst +++ b/docs/workspace/settings/index.rst @@ -11,9 +11,9 @@ Manage security settings for Accounts and Workspaces ip_access_lists settings automatic_cluster_update - csp_enablement + compliance_security_profile default_namespace - esm_enablement + enhanced_security_monitoring restrict_workspace_admins token_management tokens diff --git a/docs/workspace/settings/settings.rst b/docs/workspace/settings/settings.rst index 0bebf375..55f47dae 100644 --- a/docs/workspace/settings/settings.rst +++ b/docs/workspace/settings/settings.rst @@ -12,8 +12,8 @@ Controls whether automatic cluster update is enabled for the current workspace. By default, it is turned off. - .. py:property:: csp_enablement - :type: CspEnablementAPI + .. py:property:: compliance_security_profile + :type: ComplianceSecurityProfileAPI Controls whether to enable the compliance security profile for the current workspace. Enabling it on a workspace is permanent. By default, it is turned off. @@ -34,8 +34,8 @@ This setting requires a restart of clusters and SQL warehouses to take effect. Additionally, the default namespace only applies when using Unity Catalog-enabled compute. - .. py:property:: esm_enablement - :type: EsmEnablementAPI + .. 
py:property:: enhanced_security_monitoring + :type: EnhancedSecurityMonitoringAPI Controls whether enhanced security monitoring is enabled for the current workspace. If the compliance security profile is enabled, this is automatically enabled. By default, it is disabled. However, if the diff --git a/docs/workspace/sql/dashboards.rst b/docs/workspace/sql/dashboards.rst index 8d13bd68..a59e625f 100644 --- a/docs/workspace/sql/dashboards.rst +++ b/docs/workspace/sql/dashboards.rst @@ -167,7 +167,7 @@ - .. py:method:: update(dashboard_id: str [, name: Optional[str], run_as_role: Optional[RunAsRole]]) -> Dashboard + .. py:method:: update(dashboard_id: str [, name: Optional[str], run_as_role: Optional[RunAsRole], tags: Optional[List[str]]]) -> Dashboard Change a dashboard definition. @@ -182,6 +182,7 @@ :param run_as_role: :class:`RunAsRole` (optional) Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as viewer" behavior) or `"owner"` (signifying "run as owner" behavior) + :param tags: List[str] (optional) :returns: :class:`Dashboard` \ No newline at end of file diff --git a/docs/workspace/sql/queries.rst b/docs/workspace/sql/queries.rst index 83e0c033..d15de54f 100644 --- a/docs/workspace/sql/queries.rst +++ b/docs/workspace/sql/queries.rst @@ -8,7 +8,7 @@ SQL warehouse, query text, name, description, tags, parameters, and visualizations. Queries can be scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create. - .. py:method:: create( [, data_source_id: Optional[str], description: Optional[str], name: Optional[str], options: Optional[Any], parent: Optional[str], query: Optional[str], run_as_role: Optional[RunAsRole]]) -> Query + .. py:method:: create( [, data_source_id: Optional[str], description: Optional[str], name: Optional[str], options: Optional[Any], parent: Optional[str], query: Optional[str], run_as_role: Optional[RunAsRole], tags: Optional[List[str]]]) -> Query Usage: @@ -62,6 +62,7 @@ :param run_as_role: :class:`RunAsRole` (optional) Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as viewer" behavior) or `"owner"` (signifying "run as owner" behavior) + :param tags: List[str] (optional) :returns: :class:`Query` @@ -158,7 +159,7 @@ - .. py:method:: update(query_id: str [, data_source_id: Optional[str], description: Optional[str], name: Optional[str], options: Optional[Any], query: Optional[str], run_as_role: Optional[RunAsRole]]) -> Query + .. py:method:: update(query_id: str [, data_source_id: Optional[str], description: Optional[str], name: Optional[str], options: Optional[Any], query: Optional[str], run_as_role: Optional[RunAsRole], tags: Optional[List[str]]]) -> Query Usage: @@ -212,6 +213,7 @@ :param run_as_role: :class:`RunAsRole` (optional) Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as viewer" behavior) or `"owner"` (signifying "run as owner" behavior) + :param tags: List[str] (optional) :returns: :class:`Query` \ No newline at end of file diff --git a/docs/workspace/sql/query_visualizations.rst b/docs/workspace/sql/query_visualizations.rst index 7ef5b1cd..53888cee 100644 --- a/docs/workspace/sql/query_visualizations.rst +++ b/docs/workspace/sql/query_visualizations.rst @@ -36,7 +36,7 @@ - .. py:method:: update(id: str [, created_at: Optional[str], description: Optional[str], name: Optional[str], options: Optional[Any], type: Optional[str], updated_at: Optional[str]]) -> Visualization + .. 
py:method:: update(id: str [, created_at: Optional[str], description: Optional[str], name: Optional[str], options: Optional[Any], query: Optional[Query], type: Optional[str], updated_at: Optional[str]]) -> Visualization Edit existing visualization. @@ -50,6 +50,7 @@ :param options: Any (optional) The options object varies widely from one visualization type to the next and is unsupported. Databricks does not recommend modifying visualization settings in JSON. + :param query: :class:`Query` (optional) :param type: str (optional) The type of visualization: chart, table, pivot table, and so on. :param updated_at: str (optional) diff --git a/examples/workspace_assignment/update_workspace_assignment_on_aws.py b/examples/workspace_assignment/update_workspace_assignment_on_aws.py index 48a26a40..f12e8589 100755 --- a/examples/workspace_assignment/update_workspace_assignment_on_aws.py +++ b/examples/workspace_assignment/update_workspace_assignment_on_aws.py @@ -12,6 +12,6 @@ workspace_id = os.environ["DUMMY_WORKSPACE_ID"] -a.workspace_assignment.update(workspace_id=workspace_id, - principal_id=spn_id, - permissions=[iam.WorkspacePermission.USER]) +_ = a.workspace_assignment.update(workspace_id=workspace_id, + principal_id=spn_id, + permissions=[iam.WorkspacePermission.USER])
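Both new settings services introduced above (``w.settings.compliance_security_profile`` and ``w.settings.enhanced_security_monitoring``) share the same read-then-PATCH shape: ``get()`` returns the current setting with a fresh etag, and ``update()`` sends it back with ``allow_missing=True`` and a field mask. A minimal sketch; the field mask value is a placeholder, since the setting dataclass fields are not listed in this diff, and a real client would re-read and retry on a 409.

.. code-block::

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Read the current setting; the returned object carries the etag used for optimistic concurrency.
    current = w.settings.enhanced_security_monitoring.get()

    # '<field.to.update>' is a placeholder, not a real field path -- consult the
    # EnhancedSecurityMonitoringSetting dataclass for the fields you intend to change.
    # If the setting was updated concurrently, this call fails with 409 and should be
    # retried with a freshly fetched etag.
    updated = w.settings.enhanced_security_monitoring.update(
        allow_missing=True, setting=current, field_mask='<field.to.update>')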
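The ``tags`` parameter added to the SQL queries and dashboards methods above is a plain list of strings that can be set on create and replaced on update. A short sketch; it assumes ``w.data_sources.list()`` and ``w.queries.delete()`` (neither part of this diff) are available and that the first listed data source is usable.

.. code-block::

    import time

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    srcs = w.data_sources.list()

    # Create a query with tags attached.
    query = w.queries.create(name=f'sdk-{time.time_ns()}',
                             data_source_id=srcs[0].id,
                             description='tagged example query',
                             query='SELECT 1',
                             tags=['sdk-example', 'team:data-eng'])

    # Replace the tags later; update() accepts the same optional list (dashboards.update does too).
    query = w.queries.update(query_id=query.id, tags=['sdk-example', 'env:prod'])

    # cleanup
    w.queries.delete(query_id=query.id)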