diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index ed18d818..fef6f268 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -37e2bbe0cbcbbbe78a06a018d4fab06314a26a40 \ No newline at end of file +f98c07f9c71f579de65d2587bb0292f83d10e55d \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index c37d866d..22e000b1 100755 --- a/.gitattributes +++ b/.gitattributes @@ -1,6 +1,7 @@ databricks/sdk/__init__.py linguist-generated=true databricks/sdk/errors/overrides.py linguist-generated=true databricks/sdk/errors/platform.py linguist-generated=true +databricks/sdk/service/apps.py linguist-generated=true databricks/sdk/service/billing.py linguist-generated=true databricks/sdk/service/catalog.py linguist-generated=true databricks/sdk/service/compute.py linguist-generated=true diff --git a/CHANGELOG.md b/CHANGELOG.md index 341770ca..278eec3e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,203 @@ # Version changelog +## [Release] Release v0.30.0 + +### New Features and Improvements + + * Add DataPlane support ([#700](https://github.com/databricks/databricks-sdk-py/pull/700)). + * Support partners in SDK ([#648](https://github.com/databricks/databricks-sdk-py/pull/648)). + + +### Bug Fixes + + * Check trailing slash in host url ([#681](https://github.com/databricks/databricks-sdk-py/pull/681)). + * Decrease runtime of recursive workspace listing test ([#721](https://github.com/databricks/databricks-sdk-py/pull/721)). + * Fix test_get_workspace_client and test_runtime_auth_from_jobs ([#719](https://github.com/databricks/databricks-sdk-py/pull/719)). + * Infer Azure tenant ID if not set ([#638](https://github.com/databricks/databricks-sdk-py/pull/638)). + + +### Internal Changes + + * Add Release tag and Workflow fix ([#704](https://github.com/databricks/databricks-sdk-py/pull/704)). + * Add apps package in docgen ([#722](https://github.com/databricks/databricks-sdk-py/pull/722)). 
+ * Fix processing of `quoted` titles ([#712](https://github.com/databricks/databricks-sdk-py/pull/712)). + * Improve Changelog by grouping changes ([#703](https://github.com/databricks/databricks-sdk-py/pull/703)). + * Move PR message validation to a separate workflow ([#707](https://github.com/databricks/databricks-sdk-py/pull/707)). + * Test that Jobs API endpoints are pinned to 2.1 ([#714](https://github.com/databricks/databricks-sdk-py/pull/714)). + * Trigger the validate workflow in the merge queue ([#709](https://github.com/databricks/databricks-sdk-py/pull/709)). + * Update OpenAPI spec ([#715](https://github.com/databricks/databricks-sdk-py/pull/715)). + + +### Other Changes + + * Add Windows WorkFlow ([#692](https://github.com/databricks/databricks-sdk-py/pull/692)). + * Fix auth tests for windows. ([#697](https://github.com/databricks/databricks-sdk-py/pull/697)). + * Fix for cancelled workflow ([#701](https://github.com/databricks/databricks-sdk-py/pull/701)). + * Fix test_core for windows ([#702](https://github.com/databricks/databricks-sdk-py/pull/702)). + * Fix test_local_io for windows ([#695](https://github.com/databricks/databricks-sdk-py/pull/695)). + * Remove duplicate ubuntu tests ([#693](https://github.com/databricks/databricks-sdk-py/pull/693)). + * fix windows path ([#660](https://github.com/databricks/databricks-sdk-py/pull/660)) ([#673](https://github.com/databricks/databricks-sdk-py/pull/673)). + + +### API Changes: + + * Added `databricks.sdk.service.apps` package. + * Added [a.usage_dashboards](https://databricks-sdk-py.readthedocs.io/en/latest/account/usage_dashboards.html) account-level service. 
+ * Added [w.alerts_legacy](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/alerts_legacy.html) workspace-level service, [w.queries_legacy](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/queries_legacy.html) workspace-level service and [w.query_visualizations_legacy](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/query_visualizations_legacy.html) workspace-level service. + * Added [w.genie](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/genie.html) workspace-level service. + * Added [w.notification_destinations](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/notification_destinations.html) workspace-level service. + * Added `update()` method for [w.clusters](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/clusters.html) workspace-level service. + * Added `list_visualizations()` method for [w.queries](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/queries.html) workspace-level service. + * Added `databricks.sdk.service.catalog.GetBindingsSecurableType` and `databricks.sdk.service.catalog.UpdateBindingsSecurableType` dataclasses. 
+ * Added `databricks.sdk.service.billing.ActionConfiguration`, `databricks.sdk.service.billing.ActionConfigurationType`, `databricks.sdk.service.billing.AlertConfiguration`, `databricks.sdk.service.billing.AlertConfigurationQuantityType`, `databricks.sdk.service.billing.AlertConfigurationTimePeriod`, `databricks.sdk.service.billing.AlertConfigurationTriggerType`, `databricks.sdk.service.billing.BudgetConfiguration`, `databricks.sdk.service.billing.BudgetConfigurationFilter`, `databricks.sdk.service.billing.BudgetConfigurationFilterClause`, `databricks.sdk.service.billing.BudgetConfigurationFilterOperator`, `databricks.sdk.service.billing.BudgetConfigurationFilterTagClause`, `databricks.sdk.service.billing.BudgetConfigurationFilterWorkspaceIdClause`, `databricks.sdk.service.billing.CreateBillingUsageDashboardRequest`, `databricks.sdk.service.billing.CreateBillingUsageDashboardResponse`, `databricks.sdk.service.billing.CreateBudgetConfigurationBudget`, `databricks.sdk.service.billing.CreateBudgetConfigurationBudgetActionConfigurations`, `databricks.sdk.service.billing.CreateBudgetConfigurationBudgetAlertConfigurations`, `databricks.sdk.service.billing.CreateBudgetConfigurationRequest`, `databricks.sdk.service.billing.CreateBudgetConfigurationResponse`, `databricks.sdk.service.billing.DeleteBudgetConfigurationRequest`, `any`, `databricks.sdk.service.billing.GetBillingUsageDashboardRequest`, `databricks.sdk.service.billing.GetBillingUsageDashboardResponse`, `databricks.sdk.service.billing.GetBudgetConfigurationRequest`, `databricks.sdk.service.billing.GetBudgetConfigurationResponse`, `databricks.sdk.service.billing.ListBudgetConfigurationsRequest`, `databricks.sdk.service.billing.ListBudgetConfigurationsResponse`, `databricks.sdk.service.billing.UpdateBudgetConfigurationBudget`, `databricks.sdk.service.billing.UpdateBudgetConfigurationRequest`, `databricks.sdk.service.billing.UpdateBudgetConfigurationResponse` and `databricks.sdk.service.billing.UsageDashboardType` 
dataclasses. + * Added `databricks.sdk.service.compute.ListClustersFilterBy`, `databricks.sdk.service.compute.ListClustersSortBy`, `databricks.sdk.service.compute.ListClustersSortByDirection`, `databricks.sdk.service.compute.ListClustersSortByField`, `databricks.sdk.service.compute.UpdateCluster`, `databricks.sdk.service.compute.UpdateClusterResource` and `any` dataclasses. + * Added `databricks.sdk.service.dashboards.ExecuteMessageQueryRequest`, `databricks.sdk.service.dashboards.GenieAttachment`, `databricks.sdk.service.dashboards.GenieConversation`, `databricks.sdk.service.dashboards.GenieCreateConversationMessageRequest`, `databricks.sdk.service.dashboards.GenieGetConversationMessageRequest`, `databricks.sdk.service.dashboards.GenieGetMessageQueryResultRequest`, `databricks.sdk.service.dashboards.GenieGetMessageQueryResultResponse`, `databricks.sdk.service.dashboards.GenieMessage`, `databricks.sdk.service.dashboards.GenieStartConversationMessageRequest`, `databricks.sdk.service.dashboards.GenieStartConversationResponse`, `databricks.sdk.service.dashboards.MessageError`, `databricks.sdk.service.dashboards.MessageErrorType`, `databricks.sdk.service.dashboards.MessageStatus`, `databricks.sdk.service.dashboards.QueryAttachment`, `databricks.sdk.service.dashboards.Result` and `databricks.sdk.service.dashboards.TextAttachment` dataclasses. + * Added `any`, `databricks.sdk.service.iam.MigratePermissionsRequest` and `databricks.sdk.service.iam.MigratePermissionsResponse` dataclasses. + * Added `databricks.sdk.service.oauth2.ListCustomAppIntegrationsRequest` and `databricks.sdk.service.oauth2.ListPublishedAppIntegrationsRequest` dataclasses. + * Added `databricks.sdk.service.pipelines.IngestionPipelineDefinition` and `databricks.sdk.service.pipelines.PipelineStateInfoHealth` dataclasses. + * Added `databricks.sdk.service.serving.GoogleCloudVertexAiConfig` dataclass. 
+ * Added `databricks.sdk.service.settings.Config`, `databricks.sdk.service.settings.CreateNotificationDestinationRequest`, `databricks.sdk.service.settings.DeleteNotificationDestinationRequest`, `databricks.sdk.service.settings.DestinationType`, `databricks.sdk.service.settings.EmailConfig`, `any`, `databricks.sdk.service.settings.GenericWebhookConfig`, `databricks.sdk.service.settings.GetNotificationDestinationRequest`, `databricks.sdk.service.settings.ListNotificationDestinationsRequest`, `databricks.sdk.service.settings.ListNotificationDestinationsResponse`, `databricks.sdk.service.settings.ListNotificationDestinationsResult`, `databricks.sdk.service.settings.MicrosoftTeamsConfig`, `databricks.sdk.service.settings.NotificationDestination`, `databricks.sdk.service.settings.PagerdutyConfig`, `databricks.sdk.service.settings.SlackConfig` and `databricks.sdk.service.settings.UpdateNotificationDestinationRequest` dataclasses. + * Added `databricks.sdk.service.sql.AlertCondition`, `databricks.sdk.service.sql.AlertConditionOperand`, `databricks.sdk.service.sql.AlertConditionThreshold`, `databricks.sdk.service.sql.AlertOperandColumn`, `databricks.sdk.service.sql.AlertOperandValue`, `databricks.sdk.service.sql.AlertOperator`, `databricks.sdk.service.sql.ClientCallContext`, `databricks.sdk.service.sql.ContextFilter`, `databricks.sdk.service.sql.CreateAlertRequest`, `databricks.sdk.service.sql.CreateAlertRequestAlert`, `databricks.sdk.service.sql.CreateQueryRequest`, `databricks.sdk.service.sql.CreateQueryRequestQuery`, `databricks.sdk.service.sql.CreateQueryVisualizationsLegacyRequest`, `databricks.sdk.service.sql.CreateVisualizationRequest`, `databricks.sdk.service.sql.CreateVisualizationRequestVisualization`, `databricks.sdk.service.sql.DatePrecision`, `databricks.sdk.service.sql.DateRange`, `databricks.sdk.service.sql.DateRangeValue`, `databricks.sdk.service.sql.DateRangeValueDynamicDateRange`, `databricks.sdk.service.sql.DateValue`, 
`databricks.sdk.service.sql.DateValueDynamicDate`, `databricks.sdk.service.sql.DeleteAlertsLegacyRequest`, `databricks.sdk.service.sql.DeleteQueriesLegacyRequest`, `databricks.sdk.service.sql.DeleteQueryVisualizationsLegacyRequest`, `databricks.sdk.service.sql.DeleteVisualizationRequest`, `any`, `databricks.sdk.service.sql.EncodedText`, `databricks.sdk.service.sql.EncodedTextEncoding`, `databricks.sdk.service.sql.EnumValue`, `databricks.sdk.service.sql.GetAlertsLegacyRequest`, `databricks.sdk.service.sql.GetQueriesLegacyRequest`, `databricks.sdk.service.sql.LegacyAlert`, `databricks.sdk.service.sql.LegacyAlertState`, `databricks.sdk.service.sql.LegacyQuery`, `databricks.sdk.service.sql.LegacyVisualization`, `databricks.sdk.service.sql.LifecycleState`, `databricks.sdk.service.sql.ListAlertsRequest`, `databricks.sdk.service.sql.ListAlertsResponse`, `databricks.sdk.service.sql.ListAlertsResponseAlert`, `databricks.sdk.service.sql.ListQueriesLegacyRequest`, `databricks.sdk.service.sql.ListQueryObjectsResponse`, `databricks.sdk.service.sql.ListQueryObjectsResponseQuery`, `databricks.sdk.service.sql.ListVisualizationsForQueryRequest`, `databricks.sdk.service.sql.ListVisualizationsForQueryResponse`, `databricks.sdk.service.sql.NumericValue`, `databricks.sdk.service.sql.QueryBackedValue`, `databricks.sdk.service.sql.QueryParameter`, `databricks.sdk.service.sql.QuerySource`, `databricks.sdk.service.sql.QuerySourceDriverInfo`, `databricks.sdk.service.sql.QuerySourceEntryPoint`, `databricks.sdk.service.sql.QuerySourceJobManager`, `databricks.sdk.service.sql.QuerySourceTrigger`, `databricks.sdk.service.sql.RestoreQueriesLegacyRequest`, `databricks.sdk.service.sql.RunAsMode`, `databricks.sdk.service.sql.ServerlessChannelInfo`, `databricks.sdk.service.sql.StatementResponse`, `databricks.sdk.service.sql.TextValue`, `databricks.sdk.service.sql.TrashAlertRequest`, `databricks.sdk.service.sql.TrashQueryRequest`, `databricks.sdk.service.sql.UpdateAlertRequest`, 
`databricks.sdk.service.sql.UpdateAlertRequestAlert`, `databricks.sdk.service.sql.UpdateQueryRequest`, `databricks.sdk.service.sql.UpdateQueryRequestQuery`, `databricks.sdk.service.sql.UpdateVisualizationRequest` and `databricks.sdk.service.sql.UpdateVisualizationRequestVisualization` dataclasses. + * Added `force` field for `databricks.sdk.service.catalog.DeleteSchemaRequest`. + * Added `max_results` and `page_token` fields for `databricks.sdk.service.catalog.GetBindingsRequest`. + * Added `include_aliases` field for `databricks.sdk.service.catalog.GetByAliasRequest`. + * Added `include_aliases` field for `databricks.sdk.service.catalog.GetModelVersionRequest`. + * Added `include_aliases` field for `databricks.sdk.service.catalog.GetRegisteredModelRequest`. + * Added `max_results` and `page_token` fields for `databricks.sdk.service.catalog.ListSystemSchemasRequest`. + * Added `next_page_token` field for `databricks.sdk.service.catalog.ListSystemSchemasResponse`. + * Added `aliases` field for `databricks.sdk.service.catalog.ModelVersionInfo`. + * Added `next_page_token` field for `databricks.sdk.service.catalog.WorkspaceBindingsResponse`. + * Added `version` field for `databricks.sdk.service.compute.GetPolicyFamilyRequest`. + * Added `filter_by`, `page_size`, `page_token` and `sort_by` fields for `databricks.sdk.service.compute.ListClustersRequest`. + * Added `next_page_token` and `prev_page_token` fields for `databricks.sdk.service.compute.ListClustersResponse`. + * Added `page_token` field for `databricks.sdk.service.jobs.GetRunRequest`. + * Added `iterations`, `next_page_token` and `prev_page_token` fields for `databricks.sdk.service.jobs.Run`. + * Added `create_time`, `created_by`, `creator_username` and `scopes` fields for `databricks.sdk.service.oauth2.GetCustomAppIntegrationOutput`. + * Added `next_page_token` field for `databricks.sdk.service.oauth2.GetCustomAppIntegrationsOutput`. 
+ * Added `create_time` and `created_by` fields for `databricks.sdk.service.oauth2.GetPublishedAppIntegrationOutput`. + * Added `next_page_token` field for `databricks.sdk.service.oauth2.GetPublishedAppIntegrationsOutput`. + * Added `enable_local_disk_encryption` field for `databricks.sdk.service.pipelines.PipelineCluster`. + * Added `whl` field for `databricks.sdk.service.pipelines.PipelineLibrary`. + * Added `health` field for `databricks.sdk.service.pipelines.PipelineStateInfo`. + * Added `ai21labs_api_key_plaintext` field for `databricks.sdk.service.serving.Ai21LabsConfig`. + * Added `aws_access_key_id_plaintext` and `aws_secret_access_key_plaintext` fields for `databricks.sdk.service.serving.AmazonBedrockConfig`. + * Added `anthropic_api_key_plaintext` field for `databricks.sdk.service.serving.AnthropicConfig`. + * Added `cohere_api_base` and `cohere_api_key_plaintext` fields for `databricks.sdk.service.serving.CohereConfig`. + * Added `databricks_api_token_plaintext` field for `databricks.sdk.service.serving.DatabricksModelServingConfig`. + * Added `google_cloud_vertex_ai_config` field for `databricks.sdk.service.serving.ExternalModel`. + * Added `microsoft_entra_client_secret_plaintext` and `openai_api_key_plaintext` fields for `databricks.sdk.service.serving.OpenAiConfig`. + * Added `palm_api_key_plaintext` field for `databricks.sdk.service.serving.PaLmConfig`. + * Added `expiration_time` field for `databricks.sdk.service.sharing.CreateRecipient`. + * Added `next_page_token` field for `databricks.sdk.service.sharing.GetRecipientSharePermissionsResponse`. + * Added `next_page_token` field for `databricks.sdk.service.sharing.ListProviderSharesResponse`. + * Added `max_results` and `page_token` fields for `databricks.sdk.service.sharing.ListProvidersRequest`. + * Added `next_page_token` field for `databricks.sdk.service.sharing.ListProvidersResponse`. + * Added `max_results` and `page_token` fields for `databricks.sdk.service.sharing.ListRecipientsRequest`. 
+ * Added `next_page_token` field for `databricks.sdk.service.sharing.ListRecipientsResponse`. + * Added `max_results` and `page_token` fields for `databricks.sdk.service.sharing.ListSharesRequest`. + * Added `next_page_token` field for `databricks.sdk.service.sharing.ListSharesResponse`. + * Added `max_results` and `page_token` fields for `databricks.sdk.service.sharing.SharePermissionsRequest`. + * Added `expiration_time` field for `databricks.sdk.service.sharing.UpdateRecipient`. + * Added `max_results` and `page_token` fields for `databricks.sdk.service.sharing.UpdateSharePermissions`. + * Added `condition`, `create_time`, `custom_body`, `custom_subject`, `display_name`, `lifecycle_state`, `owner_user_name`, `parent_path`, `query_id`, `seconds_to_retrigger`, `trigger_time` and `update_time` fields for `databricks.sdk.service.sql.Alert`. + * Added `id` field for `databricks.sdk.service.sql.GetAlertRequest`. + * Added `id` field for `databricks.sdk.service.sql.GetQueryRequest`. + * Added `page_token` field for `databricks.sdk.service.sql.ListQueriesRequest`. + * Added `apply_auto_limit`, `catalog`, `create_time`, `display_name`, `last_modifier_user_name`, `lifecycle_state`, `owner_user_name`, `parameters`, `parent_path`, `query_text`, `run_as_mode`, `schema`, `update_time` and `warehouse_id` fields for `databricks.sdk.service.sql.Query`. + * Added `context_filter` field for `databricks.sdk.service.sql.QueryFilter`. + * Added `query_source` field for `databricks.sdk.service.sql.QueryInfo`. + * Added `create_time`, `display_name`, `query_id`, `serialized_options`, `serialized_query_plan` and `update_time` fields for `databricks.sdk.service.sql.Visualization`. + * Changed `create()` method for [a.budgets](https://databricks-sdk-py.readthedocs.io/en/latest/account/budgets.html) account-level service to return `databricks.sdk.service.billing.CreateBudgetConfigurationResponse` dataclass. 
+ * Changed `create()` method for [a.budgets](https://databricks-sdk-py.readthedocs.io/en/latest/account/budgets.html) account-level service . New request type is `databricks.sdk.service.billing.CreateBudgetConfigurationRequest` dataclass. + * Changed `delete()` method for [a.budgets](https://databricks-sdk-py.readthedocs.io/en/latest/account/budgets.html) account-level service . New request type is `databricks.sdk.service.billing.DeleteBudgetConfigurationRequest` dataclass. + * Changed `delete()` method for [a.budgets](https://databricks-sdk-py.readthedocs.io/en/latest/account/budgets.html) account-level service to return `any` dataclass. + * Changed `get()` method for [a.budgets](https://databricks-sdk-py.readthedocs.io/en/latest/account/budgets.html) account-level service . New request type is `databricks.sdk.service.billing.GetBudgetConfigurationRequest` dataclass. + * Changed `get()` method for [a.budgets](https://databricks-sdk-py.readthedocs.io/en/latest/account/budgets.html) account-level service to return `databricks.sdk.service.billing.GetBudgetConfigurationResponse` dataclass. + * Changed `list()` method for [a.budgets](https://databricks-sdk-py.readthedocs.io/en/latest/account/budgets.html) account-level service to return `databricks.sdk.service.billing.ListBudgetConfigurationsResponse` dataclass. + * Changed `list()` method for [a.budgets](https://databricks-sdk-py.readthedocs.io/en/latest/account/budgets.html) account-level service to require request of `databricks.sdk.service.billing.ListBudgetConfigurationsRequest` dataclass. + * Changed `update()` method for [a.budgets](https://databricks-sdk-py.readthedocs.io/en/latest/account/budgets.html) account-level service to return `databricks.sdk.service.billing.UpdateBudgetConfigurationResponse` dataclass. + * Changed `update()` method for [a.budgets](https://databricks-sdk-py.readthedocs.io/en/latest/account/budgets.html) account-level service . 
New request type is `databricks.sdk.service.billing.UpdateBudgetConfigurationRequest` dataclass. + * Changed `create()` method for [a.custom_app_integration](https://databricks-sdk-py.readthedocs.io/en/latest/account/custom_app_integration.html) account-level service with new required argument order. + * Changed `list()` method for [a.custom_app_integration](https://databricks-sdk-py.readthedocs.io/en/latest/account/custom_app_integration.html) account-level service to require request of `databricks.sdk.service.oauth2.ListCustomAppIntegrationsRequest` dataclass. + * Changed `list()` method for [a.published_app_integration](https://databricks-sdk-py.readthedocs.io/en/latest/account/published_app_integration.html) account-level service to require request of `databricks.sdk.service.oauth2.ListPublishedAppIntegrationsRequest` dataclass. + * Changed `delete()` method for [a.workspace_assignment](https://databricks-sdk-py.readthedocs.io/en/latest/account/workspace_assignment.html) account-level service to return `any` dataclass. + * Changed `update()` method for [a.workspace_assignment](https://databricks-sdk-py.readthedocs.io/en/latest/account/workspace_assignment.html) account-level service with new required argument order. + * Changed `create()` method for [w.alerts](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/alerts.html) workspace-level service . New request type is `databricks.sdk.service.sql.CreateAlertRequest` dataclass. + * Changed `delete()` method for [w.alerts](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/alerts.html) workspace-level service to return `any` dataclass. + * Changed `delete()` method for [w.alerts](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/alerts.html) workspace-level service . New request type is `databricks.sdk.service.sql.TrashAlertRequest` dataclass. 
+ * Changed `get()` method for [w.alerts](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/alerts.html) workspace-level service with new required argument order. + * Changed `list()` method for [w.alerts](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/alerts.html) workspace-level service to return `databricks.sdk.service.sql.ListAlertsResponse` dataclass. + * Changed `list()` method for [w.alerts](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/alerts.html) workspace-level service to require request of `databricks.sdk.service.sql.ListAlertsRequest` dataclass. + * Changed `update()` method for [w.alerts](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/alerts.html) workspace-level service to return `databricks.sdk.service.sql.Alert` dataclass. + * Changed `update()` method for [w.alerts](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/alerts.html) workspace-level service . New request type is `databricks.sdk.service.sql.UpdateAlertRequest` dataclass. + * Changed `create()` and `edit()` methods for [w.cluster_policies](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/cluster_policies.html) workspace-level service with new required argument order. + * Changed `get()` method for [w.model_versions](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/model_versions.html) workspace-level service to return `databricks.sdk.service.catalog.ModelVersionInfo` dataclass. + * Changed `migrate_permissions()` method for [w.permission_migration](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/permission_migration.html) workspace-level service . New request type is `databricks.sdk.service.iam.MigratePermissionsRequest` dataclass. + * Changed `migrate_permissions()` method for [w.permission_migration](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/permission_migration.html) workspace-level service to return `databricks.sdk.service.iam.MigratePermissionsResponse` dataclass. 
+ * Changed `create()` method for [w.queries](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/queries.html) workspace-level service . New request type is `databricks.sdk.service.sql.CreateQueryRequest` dataclass. + * Changed `delete()` method for [w.queries](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/queries.html) workspace-level service to return `any` dataclass. + * Changed `delete()` method for [w.queries](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/queries.html) workspace-level service . New request type is `databricks.sdk.service.sql.TrashQueryRequest` dataclass. + * Changed `get()` method for [w.queries](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/queries.html) workspace-level service with new required argument order. + * Changed `list()` method for [w.queries](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/queries.html) workspace-level service to return `databricks.sdk.service.sql.ListQueryObjectsResponse` dataclass. + * Changed `update()` method for [w.queries](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/queries.html) workspace-level service . New request type is `databricks.sdk.service.sql.UpdateQueryRequest` dataclass. + * Changed `create()` method for [w.query_visualizations](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/query_visualizations.html) workspace-level service . New request type is `databricks.sdk.service.sql.CreateVisualizationRequest` dataclass. + * Changed `delete()` method for [w.query_visualizations](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/query_visualizations.html) workspace-level service to return `any` dataclass. + * Changed `delete()` method for [w.query_visualizations](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/query_visualizations.html) workspace-level service . New request type is `databricks.sdk.service.sql.DeleteVisualizationRequest` dataclass. 
+ * Changed `update()` method for [w.query_visualizations](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/query_visualizations.html) workspace-level service . New request type is `databricks.sdk.service.sql.UpdateVisualizationRequest` dataclass. + * Changed `list()` method for [w.shares](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/shares.html) workspace-level service to require request of `databricks.sdk.service.sharing.ListSharesRequest` dataclass. + * Changed `execute_statement()` and `get_statement()` methods for [w.statement_execution](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/statement_execution.html) workspace-level service to return `databricks.sdk.service.sql.StatementResponse` dataclass. + * Changed `securable_type` field for `databricks.sdk.service.catalog.GetBindingsRequest` to `databricks.sdk.service.catalog.GetBindingsSecurableType` dataclass. + * Changed `securable_type` field for `databricks.sdk.service.catalog.UpdateWorkspaceBindingsParameters` to `databricks.sdk.service.catalog.UpdateBindingsSecurableType` dataclass. + * Changed `name` field for `databricks.sdk.service.compute.CreatePolicy` to no longer be required. + * Changed `name` field for `databricks.sdk.service.compute.EditPolicy` to no longer be required. + * Changed `policy_family_id` field for `databricks.sdk.service.compute.GetPolicyFamilyRequest` to `str` dataclass. + * Changed `policy_families` field for `databricks.sdk.service.compute.ListPolicyFamiliesResponse` to no longer be required. + * Changed `definition`, `description`, `name` and `policy_family_id` fields for `databricks.sdk.service.compute.PolicyFamily` to no longer be required. + * Changed `permissions` field for `databricks.sdk.service.iam.UpdateWorkspaceAssignments` to no longer be required. + * Changed `access_control_list` field for `databricks.sdk.service.jobs.CreateJob` to `databricks.sdk.service.jobs.JobAccessControlRequestList` dataclass. 
+ * Changed `access_control_list` field for `databricks.sdk.service.jobs.SubmitRun` to `databricks.sdk.service.jobs.JobAccessControlRequestList` dataclass. + * Changed `name` and `redirect_urls` fields for `databricks.sdk.service.oauth2.CreateCustomAppIntegration` to no longer be required. + * Changed `ingestion_definition` field for `databricks.sdk.service.pipelines.CreatePipeline` to `databricks.sdk.service.pipelines.IngestionPipelineDefinition` dataclass. + * Changed `ingestion_definition` field for `databricks.sdk.service.pipelines.EditPipeline` to `databricks.sdk.service.pipelines.IngestionPipelineDefinition` dataclass. + * Changed `ingestion_definition` field for `databricks.sdk.service.pipelines.PipelineSpec` to `databricks.sdk.service.pipelines.IngestionPipelineDefinition` dataclass. + * Changed `ai21labs_api_key` field for `databricks.sdk.service.serving.Ai21LabsConfig` to no longer be required. + * Changed `aws_access_key_id` and `aws_secret_access_key` fields for `databricks.sdk.service.serving.AmazonBedrockConfig` to no longer be required. + * Changed `anthropic_api_key` field for `databricks.sdk.service.serving.AnthropicConfig` to no longer be required. + * Changed `cohere_api_key` field for `databricks.sdk.service.serving.CohereConfig` to no longer be required. + * Changed `databricks_api_token` field for `databricks.sdk.service.serving.DatabricksModelServingConfig` to no longer be required. + * Changed `palm_api_key` field for `databricks.sdk.service.serving.PaLmConfig` to no longer be required. + * Changed `tags` field for `databricks.sdk.service.sql.Query` to `databricks.sdk.service.sql.List` dataclass. + * Changed `user_ids` and `warehouse_ids` fields for `databricks.sdk.service.sql.QueryFilter` to `databricks.sdk.service.sql.List` dataclass. + * Changed `results` field for `databricks.sdk.service.sql.QueryList` to `databricks.sdk.service.sql.LegacyQueryList` dataclass. 
+ * Changed `visualization` field for `databricks.sdk.service.sql.Widget` to `databricks.sdk.service.sql.LegacyVisualization` dataclass. + * Removed [w.apps](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/apps.html) workspace-level service. + * Removed `restore()` method for [w.queries](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/queries.html) workspace-level service. + * Removed `databricks.sdk.service.marketplace.FilterType`, `databricks.sdk.service.marketplace.ProviderIconFile`, `databricks.sdk.service.marketplace.ProviderIconType`, `databricks.sdk.service.marketplace.ProviderListingSummaryInfo`, `databricks.sdk.service.marketplace.SortBy` and `databricks.sdk.service.marketplace.VisibilityFilter` dataclasses. + * Removed `databricks.sdk.service.billing.Budget`, `databricks.sdk.service.billing.BudgetAlert`, `databricks.sdk.service.billing.BudgetList`, `databricks.sdk.service.billing.BudgetWithStatus`, `databricks.sdk.service.billing.BudgetWithStatusStatusDailyItem`, `databricks.sdk.service.billing.DeleteBudgetRequest`, `any`, `databricks.sdk.service.billing.GetBudgetRequest`, `any`, `databricks.sdk.service.billing.WrappedBudget` and `databricks.sdk.service.billing.WrappedBudgetWithStatus` dataclasses. + * Removed `any`, `databricks.sdk.service.iam.PermissionMigrationRequest` and `databricks.sdk.service.iam.PermissionMigrationResponse` dataclasses. + * Removed `databricks.sdk.service.pipelines.ManagedIngestionPipelineDefinition` dataclass. 
+ * Removed `databricks.sdk.service.serving.App`, `databricks.sdk.service.serving.AppDeployment`, `databricks.sdk.service.serving.AppDeploymentArtifacts`, `databricks.sdk.service.serving.AppDeploymentMode`, `databricks.sdk.service.serving.AppDeploymentState`, `databricks.sdk.service.serving.AppDeploymentStatus`, `databricks.sdk.service.serving.AppEnvironment`, `databricks.sdk.service.serving.AppState`, `databricks.sdk.service.serving.AppStatus`, `databricks.sdk.service.serving.CreateAppDeploymentRequest`, `databricks.sdk.service.serving.CreateAppRequest`, `databricks.sdk.service.serving.DeleteAppRequest`, `databricks.sdk.service.serving.EnvVariable`, `databricks.sdk.service.serving.GetAppDeploymentRequest`, `databricks.sdk.service.serving.GetAppEnvironmentRequest`, `databricks.sdk.service.serving.GetAppRequest`, `databricks.sdk.service.serving.ListAppDeploymentsRequest`, `databricks.sdk.service.serving.ListAppDeploymentsResponse`, `databricks.sdk.service.serving.ListAppsRequest`, `databricks.sdk.service.serving.ListAppsResponse`, `databricks.sdk.service.serving.StartAppRequest`, `databricks.sdk.service.serving.StopAppRequest`, `any` and `databricks.sdk.service.serving.UpdateAppRequest` dataclasses. + * Removed `databricks.sdk.service.sql.CreateQueryVisualizationRequest`, `databricks.sdk.service.sql.DeleteAlertRequest`, `databricks.sdk.service.sql.DeleteQueryRequest`, `databricks.sdk.service.sql.DeleteQueryVisualizationRequest`, `databricks.sdk.service.sql.ExecuteStatementResponse`, `databricks.sdk.service.sql.GetStatementResponse`, `databricks.sdk.service.sql.RestoreQueryRequest`, `databricks.sdk.service.sql.StatementId`, `databricks.sdk.service.sql.UserId` and `databricks.sdk.service.sql.WarehouseId` dataclasses. + * Removed `databricks.sdk.service.compute.PolicyFamilyId` dataclass. + * Removed `can_use_client` field for `databricks.sdk.service.compute.ListClustersRequest`. 
+ * Removed `is_ascending` and `sort_by` fields for `databricks.sdk.service.marketplace.ListListingsRequest`. + * Removed `provider_summary` field for `databricks.sdk.service.marketplace.Listing`. + * Removed `filters` field for `databricks.sdk.service.marketplace.ListingSetting`. + * Removed `metastore_id` field for `databricks.sdk.service.marketplace.ListingSummary`. + * Removed `is_ascending` and `sort_by` fields for `databricks.sdk.service.marketplace.SearchListingsRequest`. + * Removed `created_at`, `last_triggered_at`, `name`, `options`, `parent`, `query`, `rearm`, `updated_at` and `user` fields for `databricks.sdk.service.sql.Alert`. + * Removed `alert_id` field for `databricks.sdk.service.sql.GetAlertRequest`. + * Removed `query_id` field for `databricks.sdk.service.sql.GetQueryRequest`. + * Removed `order`, `page` and `q` fields for `databricks.sdk.service.sql.ListQueriesRequest`. + * Removed `include_metrics` field for `databricks.sdk.service.sql.ListQueryHistoryRequest`. + * Removed `can_edit`, `created_at`, `data_source_id`, `is_archived`, `is_draft`, `is_favorite`, `is_safe`, `last_modified_by`, `last_modified_by_id`, `latest_query_data_id`, `name`, `options`, `parent`, `permission_tier`, `query`, `query_hash`, `run_as_role`, `updated_at`, `user`, `user_id` and `visualizations` fields for `databricks.sdk.service.sql.Query`. + * Removed `statement_ids` field for `databricks.sdk.service.sql.QueryFilter`. + * Removed `can_subscribe_to_live_query` field for `databricks.sdk.service.sql.QueryInfo`. + * Removed `metadata_time_ms`, `planning_time_ms` and `query_execution_time_ms` fields for `databricks.sdk.service.sql.QueryMetrics`. + * Removed `created_at`, `description`, `name`, `options`, `query` and `updated_at` fields for `databricks.sdk.service.sql.Visualization`. 
+ +OpenAPI SHA: f98c07f9c71f579de65d2587bb0292f83d10e55d, Date: 2024-08-12 + ## 0.29.0 ### Breaking Changes diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py index 7603678e..48fe1beb 100755 --- a/databricks/sdk/__init__.py +++ b/databricks/sdk/__init__.py @@ -5,8 +5,9 @@ from databricks.sdk.mixins.compute import ClustersExt from databricks.sdk.mixins.files import DbfsExt from databricks.sdk.mixins.workspace import WorkspaceExt +from databricks.sdk.service.apps import AppsAPI from databricks.sdk.service.billing import (BillableUsageAPI, BudgetsAPI, - LogDeliveryAPI) + LogDeliveryAPI, UsageDashboardsAPI) from databricks.sdk.service.catalog import (AccountMetastoreAssignmentsAPI, AccountMetastoresAPI, AccountStorageCredentialsAPI, @@ -55,7 +56,7 @@ NetworksAPI, PrivateAccessAPI, StorageAPI, VpcEndpointsAPI, Workspace, WorkspacesAPI) -from databricks.sdk.service.serving import (AppsAPI, ServingEndpointsAPI, +from databricks.sdk.service.serving import (ServingEndpointsAPI, ServingEndpointsDataPlaneAPI) from databricks.sdk.service.settings import (AccountIpAccessListsAPI, AccountSettingsAPI, @@ -793,6 +794,7 @@ def __init__(self, self._settings = AccountSettingsAPI(self._api_client) self._storage = StorageAPI(self._api_client) self._storage_credentials = AccountStorageCredentialsAPI(self._api_client) + self._usage_dashboards = UsageDashboardsAPI(self._api_client) self._users = AccountUsersAPI(self._api_client) self._vpc_endpoints = VpcEndpointsAPI(self._api_client) self._workspace_assignment = WorkspaceAssignmentAPI(self._api_client) @@ -907,6 +909,11 @@ def storage_credentials(self) -> AccountStorageCredentialsAPI: """These APIs manage storage credentials for a particular metastore.""" return self._storage_credentials + @property + def usage_dashboards(self) -> UsageDashboardsAPI: + """These APIs manage usage dashboards for this account.""" + return self._usage_dashboards + @property def users(self) -> AccountUsersAPI: """User identities recognized by 
@dataclass
class App:
    name: str
    """The name of the app. The name must contain only lowercase alphanumeric characters and hyphens.
    It must be unique within the workspace."""

    active_deployment: Optional[AppDeployment] = None
    """The active deployment of the app."""

    create_time: Optional[str] = None
    """The creation time of the app. Formatted timestamp in ISO 8601."""

    creator: Optional[str] = None
    """The email of the user that created the app."""

    description: Optional[str] = None
    """The description of the app."""

    pending_deployment: Optional[AppDeployment] = None
    """The pending deployment of the app."""

    service_principal_id: Optional[int] = None

    service_principal_name: Optional[str] = None

    status: Optional[AppStatus] = None

    update_time: Optional[str] = None
    """The update time of the app. Formatted timestamp in ISO 8601."""

    updater: Optional[str] = None
    """The email of the user that last updated the app."""

    url: Optional[str] = None
    """The URL of the app once it is deployed."""

    def as_dict(self) -> dict:
        """Serializes the App into a dictionary suitable for use as a JSON request body."""
        body = {}
        if self.active_deployment: body['active_deployment'] = self.active_deployment.as_dict()
        if self.create_time is not None: body['create_time'] = self.create_time
        if self.creator is not None: body['creator'] = self.creator
        if self.description is not None: body['description'] = self.description
        if self.name is not None: body['name'] = self.name
        if self.pending_deployment: body['pending_deployment'] = self.pending_deployment.as_dict()
        if self.service_principal_id is not None: body['service_principal_id'] = self.service_principal_id
        if self.service_principal_name is not None:
            body['service_principal_name'] = self.service_principal_name
        if self.status: body['status'] = self.status.as_dict()
        if self.update_time is not None: body['update_time'] = self.update_time
        if self.updater is not None: body['updater'] = self.updater
        if self.url is not None: body['url'] = self.url
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> App:
        """Deserializes the App from a dictionary."""
        return cls(active_deployment=_from_dict(d, 'active_deployment', AppDeployment),
                   create_time=d.get('create_time', None),
                   creator=d.get('creator', None),
                   description=d.get('description', None),
                   name=d.get('name', None),
                   pending_deployment=_from_dict(d, 'pending_deployment', AppDeployment),
                   service_principal_id=d.get('service_principal_id', None),
                   service_principal_name=d.get('service_principal_name', None),
                   status=_from_dict(d, 'status', AppStatus),
                   update_time=d.get('update_time', None),
                   updater=d.get('updater', None),
                   url=d.get('url', None))


@dataclass
class AppAccessControlRequest:
    group_name: Optional[str] = None
    """name of the group"""

    permission_level: Optional[AppPermissionLevel] = None
    """Permission level"""

    service_principal_name: Optional[str] = None
    """application ID of a service principal"""

    user_name: Optional[str] = None
    """name of the user"""

    def as_dict(self) -> dict:
        """Serializes the AppAccessControlRequest into a dictionary suitable for use as a JSON request body."""
        body = {}
        if self.group_name is not None: body['group_name'] = self.group_name
        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
        if self.service_principal_name is not None:
            body['service_principal_name'] = self.service_principal_name
        if self.user_name is not None: body['user_name'] = self.user_name
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> AppAccessControlRequest:
        """Deserializes the AppAccessControlRequest from a dictionary."""
        return cls(group_name=d.get('group_name', None),
                   permission_level=_enum(d, 'permission_level', AppPermissionLevel),
                   service_principal_name=d.get('service_principal_name', None),
                   user_name=d.get('user_name', None))


@dataclass
class AppAccessControlResponse:
    all_permissions: Optional[List[AppPermission]] = None
    """All permissions."""

    display_name: Optional[str] = None
    """Display name of the user or service principal."""

    group_name: Optional[str] = None
    """name of the group"""

    service_principal_name: Optional[str] = None
    """Name of the service principal."""

    user_name: Optional[str] = None
    """name of the user"""

    def as_dict(self) -> dict:
        """Serializes the AppAccessControlResponse into a dictionary suitable for use as a JSON request body."""
        body = {}
        if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions]
        if self.display_name is not None: body['display_name'] = self.display_name
        if self.group_name is not None: body['group_name'] = self.group_name
        if self.service_principal_name is not None:
            body['service_principal_name'] = self.service_principal_name
        if self.user_name is not None: body['user_name'] = self.user_name
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> AppAccessControlResponse:
        """Deserializes the AppAccessControlResponse from a dictionary."""
        return cls(all_permissions=_repeated_dict(d, 'all_permissions', AppPermission),
                   display_name=d.get('display_name', None),
                   group_name=d.get('group_name', None),
                   service_principal_name=d.get('service_principal_name', None),
                   user_name=d.get('user_name', None))


@dataclass
class AppDeployment:
    source_code_path: str
    """The workspace file system path of the source code used to create the app deployment. This is
    different from `deployment_artifacts.source_code_path`, which is the path used by the deployed
    app. The former refers to the original source code location of the app in the workspace during
    deployment creation, whereas the latter provides a system generated stable snapshotted source
    code path used by the deployment."""

    create_time: Optional[str] = None
    """The creation time of the deployment. Formatted timestamp in ISO 8601."""

    creator: Optional[str] = None
    """The email of the user that created the deployment."""

    deployment_artifacts: Optional[AppDeploymentArtifacts] = None
    """The deployment artifacts for an app."""

    deployment_id: Optional[str] = None
    """The unique id of the deployment."""

    mode: Optional[AppDeploymentMode] = None
    """The mode in which the deployment will manage the source code."""

    status: Optional[AppDeploymentStatus] = None
    """Status and status message of the deployment"""

    update_time: Optional[str] = None
    """The update time of the deployment. Formatted timestamp in ISO 8601."""

    def as_dict(self) -> dict:
        """Serializes the AppDeployment into a dictionary suitable for use as a JSON request body."""
        body = {}
        if self.create_time is not None: body['create_time'] = self.create_time
        if self.creator is not None: body['creator'] = self.creator
        if self.deployment_artifacts: body['deployment_artifacts'] = self.deployment_artifacts.as_dict()
        if self.deployment_id is not None: body['deployment_id'] = self.deployment_id
        if self.mode is not None: body['mode'] = self.mode.value
        if self.source_code_path is not None: body['source_code_path'] = self.source_code_path
        if self.status: body['status'] = self.status.as_dict()
        if self.update_time is not None: body['update_time'] = self.update_time
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> AppDeployment:
        """Deserializes the AppDeployment from a dictionary."""
        return cls(create_time=d.get('create_time', None),
                   creator=d.get('creator', None),
                   deployment_artifacts=_from_dict(d, 'deployment_artifacts', AppDeploymentArtifacts),
                   deployment_id=d.get('deployment_id', None),
                   mode=_enum(d, 'mode', AppDeploymentMode),
                   source_code_path=d.get('source_code_path', None),
                   status=_from_dict(d, 'status', AppDeploymentStatus),
                   update_time=d.get('update_time', None))


@dataclass
class AppDeploymentArtifacts:
    source_code_path: Optional[str] = None
    """The snapshotted workspace file system path of the source code loaded by the deployed app."""

    def as_dict(self) -> dict:
        """Serializes the AppDeploymentArtifacts into a dictionary suitable for use as a JSON request body."""
        body = {}
        if self.source_code_path is not None: body['source_code_path'] = self.source_code_path
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> AppDeploymentArtifacts:
        """Deserializes the AppDeploymentArtifacts from a dictionary."""
        return cls(source_code_path=d.get('source_code_path', None))
class AppDeploymentMode(Enum):

    AUTO_SYNC = 'AUTO_SYNC'
    SNAPSHOT = 'SNAPSHOT'


class AppDeploymentState(Enum):

    FAILED = 'FAILED'
    IN_PROGRESS = 'IN_PROGRESS'
    STOPPED = 'STOPPED'
    SUCCEEDED = 'SUCCEEDED'


@dataclass
class AppDeploymentStatus:
    message: Optional[str] = None
    """Message corresponding with the deployment state."""

    state: Optional[AppDeploymentState] = None
    """State of the deployment."""

    def as_dict(self) -> dict:
        """Serializes the AppDeploymentStatus into a dictionary suitable for use as a JSON request body."""
        body = {}
        if self.message is not None: body['message'] = self.message
        if self.state is not None: body['state'] = self.state.value
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> AppDeploymentStatus:
        """Deserializes the AppDeploymentStatus from a dictionary."""
        return cls(message=d.get('message', None), state=_enum(d, 'state', AppDeploymentState))


@dataclass
class AppPermission:
    inherited: Optional[bool] = None

    inherited_from_object: Optional[List[str]] = None

    permission_level: Optional[AppPermissionLevel] = None
    """Permission level"""

    def as_dict(self) -> dict:
        """Serializes the AppPermission into a dictionary suitable for use as a JSON request body."""
        body = {}
        if self.inherited is not None: body['inherited'] = self.inherited
        # list(...) instead of an identity comprehension still emits a fresh copy.
        if self.inherited_from_object: body['inherited_from_object'] = list(self.inherited_from_object)
        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> AppPermission:
        """Deserializes the AppPermission from a dictionary."""
        return cls(inherited=d.get('inherited', None),
                   inherited_from_object=d.get('inherited_from_object', None),
                   permission_level=_enum(d, 'permission_level', AppPermissionLevel))


class AppPermissionLevel(Enum):
    """Permission level"""

    CAN_MANAGE = 'CAN_MANAGE'
    CAN_USE = 'CAN_USE'


@dataclass
class AppPermissions:
    access_control_list: Optional[List[AppAccessControlResponse]] = None

    object_id: Optional[str] = None

    object_type: Optional[str] = None

    def as_dict(self) -> dict:
        """Serializes the AppPermissions into a dictionary suitable for use as a JSON request body."""
        body = {}
        if self.access_control_list:
            body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
        if self.object_id is not None: body['object_id'] = self.object_id
        if self.object_type is not None: body['object_type'] = self.object_type
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> AppPermissions:
        """Deserializes the AppPermissions from a dictionary."""
        return cls(access_control_list=_repeated_dict(d, 'access_control_list', AppAccessControlResponse),
                   object_id=d.get('object_id', None),
                   object_type=d.get('object_type', None))


@dataclass
class AppPermissionsDescription:
    description: Optional[str] = None

    permission_level: Optional[AppPermissionLevel] = None
    """Permission level"""

    def as_dict(self) -> dict:
        """Serializes the AppPermissionsDescription into a dictionary suitable for use as a JSON request body."""
        body = {}
        if self.description is not None: body['description'] = self.description
        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> AppPermissionsDescription:
        """Deserializes the AppPermissionsDescription from a dictionary."""
        return cls(description=d.get('description', None),
                   permission_level=_enum(d, 'permission_level', AppPermissionLevel))


@dataclass
class AppPermissionsRequest:
    access_control_list: Optional[List[AppAccessControlRequest]] = None

    app_name: Optional[str] = None
    """The app for which to get or manage permissions."""

    def as_dict(self) -> dict:
        """Serializes the AppPermissionsRequest into a dictionary suitable for use as a JSON request body."""
        body = {}
        if self.access_control_list:
            body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
        if self.app_name is not None: body['app_name'] = self.app_name
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> AppPermissionsRequest:
        """Deserializes the AppPermissionsRequest from a dictionary."""
        return cls(access_control_list=_repeated_dict(d, 'access_control_list', AppAccessControlRequest),
                   app_name=d.get('app_name', None))


class AppState(Enum):

    CREATING = 'CREATING'
    DELETED = 'DELETED'
    DELETING = 'DELETING'
    ERROR = 'ERROR'
    IDLE = 'IDLE'
    RUNNING = 'RUNNING'
    STARTING = 'STARTING'


@dataclass
class AppStatus:
    message: Optional[str] = None
    """Message corresponding with the app state."""

    state: Optional[AppState] = None
    """State of the app."""

    def as_dict(self) -> dict:
        """Serializes the AppStatus into a dictionary suitable for use as a JSON request body."""
        body = {}
        if self.message is not None: body['message'] = self.message
        if self.state is not None: body['state'] = self.state.value
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> AppStatus:
        """Deserializes the AppStatus from a dictionary."""
        return cls(message=d.get('message', None), state=_enum(d, 'state', AppState))
@dataclass
class CreateAppDeploymentRequest:
    source_code_path: str
    """The workspace file system path of the source code used to create the app deployment. This is
    different from `deployment_artifacts.source_code_path`, which is the path used by the deployed
    app. The former refers to the original source code location of the app in the workspace during
    deployment creation, whereas the latter provides a system generated stable snapshotted source
    code path used by the deployment."""

    app_name: Optional[str] = None
    """The name of the app."""

    mode: Optional[AppDeploymentMode] = None
    """The mode in which the deployment will manage the source code."""

    def as_dict(self) -> dict:
        """Serializes the CreateAppDeploymentRequest into a dictionary suitable for use as a JSON request body."""
        body = {}
        if self.app_name is not None: body['app_name'] = self.app_name
        if self.mode is not None: body['mode'] = self.mode.value
        if self.source_code_path is not None: body['source_code_path'] = self.source_code_path
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> CreateAppDeploymentRequest:
        """Deserializes the CreateAppDeploymentRequest from a dictionary."""
        return cls(app_name=d.get('app_name', None),
                   mode=_enum(d, 'mode', AppDeploymentMode),
                   source_code_path=d.get('source_code_path', None))


@dataclass
class CreateAppRequest:
    name: str
    """The name of the app. The name must contain only lowercase alphanumeric characters and hyphens.
    It must be unique within the workspace."""

    description: Optional[str] = None
    """The description of the app."""

    def as_dict(self) -> dict:
        """Serializes the CreateAppRequest into a dictionary suitable for use as a JSON request body."""
        body = {}
        if self.description is not None: body['description'] = self.description
        if self.name is not None: body['name'] = self.name
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> CreateAppRequest:
        """Deserializes the CreateAppRequest from a dictionary."""
        return cls(description=d.get('description', None), name=d.get('name', None))


@dataclass
class DeleteResponse:

    def as_dict(self) -> dict:
        """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body."""
        body = {}
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> DeleteResponse:
        """Deserializes the DeleteResponse from a dictionary."""
        return cls()


@dataclass
class GetAppPermissionLevelsResponse:
    permission_levels: Optional[List[AppPermissionsDescription]] = None
    """Specific permission levels"""

    def as_dict(self) -> dict:
        """Serializes the GetAppPermissionLevelsResponse into a dictionary suitable for use as a JSON request body."""
        body = {}
        if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> GetAppPermissionLevelsResponse:
        """Deserializes the GetAppPermissionLevelsResponse from a dictionary."""
        return cls(permission_levels=_repeated_dict(d, 'permission_levels', AppPermissionsDescription))


@dataclass
class ListAppDeploymentsResponse:
    app_deployments: Optional[List[AppDeployment]] = None
    """Deployment history of the app."""

    next_page_token: Optional[str] = None
    """Pagination token to request the next page of app deployments."""

    def as_dict(self) -> dict:
        """Serializes the ListAppDeploymentsResponse into a dictionary suitable for use as a JSON request body."""
        body = {}
        if self.app_deployments: body['app_deployments'] = [v.as_dict() for v in self.app_deployments]
        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> ListAppDeploymentsResponse:
        """Deserializes the ListAppDeploymentsResponse from a dictionary."""
        return cls(app_deployments=_repeated_dict(d, 'app_deployments', AppDeployment),
                   next_page_token=d.get('next_page_token', None))


@dataclass
class ListAppsResponse:
    apps: Optional[List[App]] = None

    next_page_token: Optional[str] = None
    """Pagination token to request the next page of apps."""

    def as_dict(self) -> dict:
        """Serializes the ListAppsResponse into a dictionary suitable for use as a JSON request body."""
        body = {}
        if self.apps: body['apps'] = [v.as_dict() for v in self.apps]
        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> ListAppsResponse:
        """Deserializes the ListAppsResponse from a dictionary."""
        return cls(apps=_repeated_dict(d, 'apps', App), next_page_token=d.get('next_page_token', None))


@dataclass
class StartAppRequest:
    name: Optional[str] = None
    """The name of the app."""


@dataclass
class StopAppRequest:
    name: Optional[str] = None
    """The name of the app."""


@dataclass
class StopAppResponse:

    def as_dict(self) -> dict:
        """Serializes the StopAppResponse into a dictionary suitable for use as a JSON request body."""
        body = {}
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> StopAppResponse:
        """Deserializes the StopAppResponse from a dictionary."""
        return cls()


@dataclass
class UpdateAppRequest:
    name: str
    """The name of the app. The name must contain only lowercase alphanumeric characters and hyphens.
    It must be unique within the workspace."""

    description: Optional[str] = None
    """The description of the app."""

    def as_dict(self) -> dict:
        """Serializes the UpdateAppRequest into a dictionary suitable for use as a JSON request body."""
        body = {}
        if self.description is not None: body['description'] = self.description
        if self.name is not None: body['name'] = self.name
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> UpdateAppRequest:
        """Deserializes the UpdateAppRequest from a dictionary."""
        return cls(description=d.get('description', None), name=d.get('name', None))
+ attempt = 1 + while time.time() < deadline: + poll = self.get(name=name) + status = poll.status.state + status_message = f'current status: {status}' + if poll.status: + status_message = poll.status.message + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f'failed to reach IDLE, got {status}: {status_message}' + raise OperationFailed(msg) + prefix = f"name={name}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f'timed out after {timeout}: {status_message}') + + def wait_get_deployment_app_succeeded( + self, + app_name: str, + deployment_id: str, + timeout=timedelta(minutes=20), + callback: Optional[Callable[[AppDeployment], None]] = None) -> AppDeployment: + deadline = time.time() + timeout.total_seconds() + target_states = (AppDeploymentState.SUCCEEDED, ) + failure_states = (AppDeploymentState.FAILED, ) + status_message = 'polling...' 
+ attempt = 1 + while time.time() < deadline: + poll = self.get_deployment(app_name=app_name, deployment_id=deployment_id) + status = poll.status.state + status_message = f'current status: {status}' + if poll.status: + status_message = poll.status.message + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f'failed to reach SUCCEEDED, got {status}: {status_message}' + raise OperationFailed(msg) + prefix = f"app_name={app_name}, deployment_id={deployment_id}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f'timed out after {timeout}: {status_message}') + + def create(self, name: str, *, description: Optional[str] = None) -> Wait[App]: + """Create an app. + + Creates a new app. + + :param name: str + The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It + must be unique within the workspace. + :param description: str (optional) + The description of the app. + + :returns: + Long-running operation waiter for :class:`App`. + See :method:wait_get_app_idle for more details. + """ + body = {} + if description is not None: body['description'] = description + if name is not None: body['name'] = name + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + op_response = self._api.do('POST', '/api/2.0/preview/apps', body=body, headers=headers) + return Wait(self.wait_get_app_idle, response=App.from_dict(op_response), name=op_response['name']) + + def create_and_wait(self, + name: str, + *, + description: Optional[str] = None, + timeout=timedelta(minutes=20)) -> App: + return self.create(description=description, name=name).result(timeout=timeout) + + def delete(self, name: str): + """Delete an app. + + Deletes an app. + + :param name: str + The name of the app. 
+ + + """ + + headers = {'Accept': 'application/json', } + + self._api.do('DELETE', f'/api/2.0/preview/apps/{name}', headers=headers) + + def deploy(self, + app_name: str, + source_code_path: str, + *, + mode: Optional[AppDeploymentMode] = None) -> Wait[AppDeployment]: + """Create an app deployment. + + Creates an app deployment for the app with the supplied name. + + :param app_name: str + The name of the app. + :param source_code_path: str + The workspace file system path of the source code used to create the app deployment. This is + different from `deployment_artifacts.source_code_path`, which is the path used by the deployed app. + The former refers to the original source code location of the app in the workspace during deployment + creation, whereas the latter provides a system generated stable snapshotted source code path used by + the deployment. + :param mode: :class:`AppDeploymentMode` (optional) + The mode of which the deployment will manage the source code. + + :returns: + Long-running operation waiter for :class:`AppDeployment`. + See :method:wait_get_deployment_app_succeeded for more details. + """ + body = {} + if mode is not None: body['mode'] = mode.value + if source_code_path is not None: body['source_code_path'] = source_code_path + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + op_response = self._api.do('POST', + f'/api/2.0/preview/apps/{app_name}/deployments', + body=body, + headers=headers) + return Wait(self.wait_get_deployment_app_succeeded, + response=AppDeployment.from_dict(op_response), + app_name=app_name, + deployment_id=op_response['deployment_id']) + + def deploy_and_wait( + self, + app_name: str, + source_code_path: str, + *, + mode: Optional[AppDeploymentMode] = None, + timeout=timedelta(minutes=20)) -> AppDeployment: + return self.deploy(app_name=app_name, mode=mode, + source_code_path=source_code_path).result(timeout=timeout) + + def get(self, name: str) -> App: + """Get an app. 
+ + Retrieves information for the app with the supplied name. + + :param name: str + The name of the app. + + :returns: :class:`App` + """ + + headers = {'Accept': 'application/json', } + + res = self._api.do('GET', f'/api/2.0/preview/apps/{name}', headers=headers) + return App.from_dict(res) + + def get_deployment(self, app_name: str, deployment_id: str) -> AppDeployment: + """Get an app deployment. + + Retrieves information for the app deployment with the supplied name and deployment id. + + :param app_name: str + The name of the app. + :param deployment_id: str + The unique id of the deployment. + + :returns: :class:`AppDeployment` + """ + + headers = {'Accept': 'application/json', } + + res = self._api.do('GET', + f'/api/2.0/preview/apps/{app_name}/deployments/{deployment_id}', + headers=headers) + return AppDeployment.from_dict(res) + + def get_permission_levels(self, app_name: str) -> GetAppPermissionLevelsResponse: + """Get app permission levels. + + Gets the permission levels that a user can have on an object. + + :param app_name: str + The app for which to get or manage permissions. + + :returns: :class:`GetAppPermissionLevelsResponse` + """ + + headers = {'Accept': 'application/json', } + + res = self._api.do('GET', f'/api/2.0/permissions/apps/{app_name}/permissionLevels', headers=headers) + return GetAppPermissionLevelsResponse.from_dict(res) + + def get_permissions(self, app_name: str) -> AppPermissions: + """Get app permissions. + + Gets the permissions of an app. Apps can inherit permissions from their root object. + + :param app_name: str + The app for which to get or manage permissions. + + :returns: :class:`AppPermissions` + """ + + headers = {'Accept': 'application/json', } + + res = self._api.do('GET', f'/api/2.0/permissions/apps/{app_name}', headers=headers) + return AppPermissions.from_dict(res) + + def list(self, *, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[App]: + """List apps. 
+ + Lists all apps in the workspace. + + :param page_size: int (optional) + Upper bound for items returned. + :param page_token: str (optional) + Pagination token to go to the next page of apps. Requests first page if absent. + + :returns: Iterator over :class:`App` + """ + + query = {} + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json', } + + while True: + json = self._api.do('GET', '/api/2.0/preview/apps', query=query, headers=headers) + if 'apps' in json: + for v in json['apps']: + yield App.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + def list_deployments(self, + app_name: str, + *, + page_size: Optional[int] = None, + page_token: Optional[str] = None) -> Iterator[AppDeployment]: + """List app deployments. + + Lists all app deployments for the app with the supplied name. + + :param app_name: str + The name of the app. + :param page_size: int (optional) + Upper bound for items returned. + :param page_token: str (optional) + Pagination token to go to the next page of apps. Requests first page if absent. + + :returns: Iterator over :class:`AppDeployment` + """ + + query = {} + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json', } + + while True: + json = self._api.do('GET', + f'/api/2.0/preview/apps/{app_name}/deployments', + query=query, + headers=headers) + if 'app_deployments' in json: + for v in json['app_deployments']: + yield AppDeployment.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + def set_permissions( + self, + app_name: str, + *, + access_control_list: Optional[List[AppAccessControlRequest]] = None) -> AppPermissions: + """Set app permissions. 
+ + Sets permissions on an app. Apps can inherit permissions from their root object. + + :param app_name: str + The app for which to get or manage permissions. + :param access_control_list: List[:class:`AppAccessControlRequest`] (optional) + + :returns: :class:`AppPermissions` + """ + body = {} + if access_control_list is not None: + body['access_control_list'] = [v.as_dict() for v in access_control_list] + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do('PUT', f'/api/2.0/permissions/apps/{app_name}', body=body, headers=headers) + return AppPermissions.from_dict(res) + + def start(self, name: str) -> Wait[AppDeployment]: + """Start an app. + + Start the last active deployment of the app in the workspace. + + :param name: str + The name of the app. + + :returns: + Long-running operation waiter for :class:`AppDeployment`. + See :method:wait_get_deployment_app_succeeded for more details. + """ + + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + op_response = self._api.do('POST', f'/api/2.0/preview/apps/{name}/start', headers=headers) + return Wait(self.wait_get_deployment_app_succeeded, + response=AppDeployment.from_dict(op_response), + app_name=name, + deployment_id=op_response['deployment_id']) + + def start_and_wait(self, name: str, timeout=timedelta(minutes=20)) -> AppDeployment: + return self.start(name=name).result(timeout=timeout) + + def stop(self, name: str): + """Stop an app. + + Stops the active deployment of the app in the workspace. + + :param name: str + The name of the app. + + + """ + + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + self._api.do('POST', f'/api/2.0/preview/apps/{name}/stop', headers=headers) + + def update(self, name: str, *, description: Optional[str] = None) -> App: + """Update an app. + + Updates the app with the supplied name. + + :param name: str + The name of the app. 
The name must contain only lowercase alphanumeric characters and hyphens. It + must be unique within the workspace. + :param description: str (optional) + The description of the app. + + :returns: :class:`App` + """ + body = {} + if description is not None: body['description'] = description + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do('PATCH', f'/api/2.0/preview/apps/{name}', body=body, headers=headers) + return App.from_dict(res) + + def update_permissions( + self, + app_name: str, + *, + access_control_list: Optional[List[AppAccessControlRequest]] = None) -> AppPermissions: + """Update app permissions. + + Updates the permissions on an app. Apps can inherit permissions from their root object. + + :param app_name: str + The app for which to get or manage permissions. + :param access_control_list: List[:class:`AppAccessControlRequest`] (optional) + + :returns: :class:`AppPermissions` + """ + body = {} + if access_control_list is not None: + body['access_control_list'] = [v.as_dict() for v in access_control_list] + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do('PATCH', f'/api/2.0/permissions/apps/{app_name}', body=body, headers=headers) + return AppPermissions.from_dict(res) diff --git a/databricks/sdk/service/billing.py b/databricks/sdk/service/billing.py index d2ef50bc..cfb7ba0b 100755 --- a/databricks/sdk/service/billing.py +++ b/databricks/sdk/service/billing.py @@ -249,6 +249,46 @@ def from_dict(cls, d: Dict[str, any]) -> BudgetConfigurationFilterWorkspaceIdCla values=d.get('values', None)) +@dataclass +class CreateBillingUsageDashboardRequest: + dashboard_type: Optional[UsageDashboardType] = None + """Workspace level usage dashboard shows usage data for the specified workspace ID. 
Global level + usage dashboard shows usage data for all workspaces in the account.""" + + workspace_id: Optional[int] = None + """The workspace ID of the workspace in which the usage dashboard is created.""" + + def as_dict(self) -> dict: + """Serializes the CreateBillingUsageDashboardRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.dashboard_type is not None: body['dashboard_type'] = self.dashboard_type.value + if self.workspace_id is not None: body['workspace_id'] = self.workspace_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CreateBillingUsageDashboardRequest: + """Deserializes the CreateBillingUsageDashboardRequest from a dictionary.""" + return cls(dashboard_type=_enum(d, 'dashboard_type', UsageDashboardType), + workspace_id=d.get('workspace_id', None)) + + +@dataclass +class CreateBillingUsageDashboardResponse: + dashboard_id: Optional[str] = None + """The unique id of the usage dashboard.""" + + def as_dict(self) -> dict: + """Serializes the CreateBillingUsageDashboardResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CreateBillingUsageDashboardResponse: + """Deserializes the CreateBillingUsageDashboardResponse from a dictionary.""" + return cls(dashboard_id=d.get('dashboard_id', None)) + + @dataclass class CreateBudgetConfigurationBudget: account_id: Optional[str] = None @@ -529,6 +569,27 @@ def from_dict(cls, d: Dict[str, any]) -> DownloadResponse: return cls(contents=d.get('contents', None)) +@dataclass +class GetBillingUsageDashboardResponse: + dashboard_id: Optional[str] = None + """The unique id of the usage dashboard.""" + + dashboard_url: Optional[str] = None + """The URL of the usage dashboard.""" + + def as_dict(self) -> dict: + """Serializes the GetBillingUsageDashboardResponse into a dictionary 
suitable for use as a JSON request body.""" + body = {} + if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id + if self.dashboard_url is not None: body['dashboard_url'] = self.dashboard_url + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> GetBillingUsageDashboardResponse: + """Deserializes the GetBillingUsageDashboardResponse from a dictionary.""" + return cls(dashboard_id=d.get('dashboard_id', None), dashboard_url=d.get('dashboard_url', None)) + + @dataclass class GetBudgetConfigurationResponse: budget: Optional[BudgetConfiguration] = None @@ -898,6 +959,12 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateLogDeliveryConfigurationStatusReq status=_enum(d, 'status', LogDeliveryConfigStatus)) +class UsageDashboardType(Enum): + + USAGE_DASHBOARD_TYPE_GLOBAL = 'USAGE_DASHBOARD_TYPE_GLOBAL' + USAGE_DASHBOARD_TYPE_WORKSPACE = 'USAGE_DASHBOARD_TYPE_WORKSPACE' + + @dataclass class WrappedCreateLogDeliveryConfiguration: log_delivery_configuration: Optional[CreateLogDeliveryConfigurationParams] = None @@ -1290,3 +1357,67 @@ def patch_status(self, log_delivery_configuration_id: str, status: LogDeliveryCo f'/api/2.0/accounts/{self._api.account_id}/log-delivery/{log_delivery_configuration_id}', body=body, headers=headers) + + +class UsageDashboardsAPI: + """These APIs manage usage dashboards for this account. Usage dashboards enable you to gain insights into + your usage with pre-built dashboards: visualize breakdowns, analyze tag attributions, and identify cost + drivers.""" + + def __init__(self, api_client): + self._api = api_client + + def create(self, + *, + dashboard_type: Optional[UsageDashboardType] = None, + workspace_id: Optional[int] = None) -> CreateBillingUsageDashboardResponse: + """Create new usage dashboard. + + Create a usage dashboard specified by workspaceId, accountId, and dashboard type. 
+ + :param dashboard_type: :class:`UsageDashboardType` (optional) + Workspace level usage dashboard shows usage data for the specified workspace ID. Global level usage + dashboard shows usage data for all workspaces in the account. + :param workspace_id: int (optional) + The workspace ID of the workspace in which the usage dashboard is created. + + :returns: :class:`CreateBillingUsageDashboardResponse` + """ + body = {} + if dashboard_type is not None: body['dashboard_type'] = dashboard_type.value + if workspace_id is not None: body['workspace_id'] = workspace_id + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do('POST', + f'/api/2.0/accounts/{self._api.account_id}/dashboard', + body=body, + headers=headers) + return CreateBillingUsageDashboardResponse.from_dict(res) + + def get(self, + *, + dashboard_type: Optional[UsageDashboardType] = None, + workspace_id: Optional[int] = None) -> GetBillingUsageDashboardResponse: + """Get usage dashboard. + + Get a usage dashboard specified by workspaceId, accountId, and dashboard type. + + :param dashboard_type: :class:`UsageDashboardType` (optional) + Workspace level usage dashboard shows usage data for the specified workspace ID. Global level usage + dashboard shows usage data for all workspaces in the account. + :param workspace_id: int (optional) + The workspace ID of the workspace in which the usage dashboard is created. 
+ + :returns: :class:`GetBillingUsageDashboardResponse` + """ + + query = {} + if dashboard_type is not None: query['dashboard_type'] = dashboard_type.value + if workspace_id is not None: query['workspace_id'] = workspace_id + headers = {'Accept': 'application/json', } + + res = self._api.do('GET', + f'/api/2.0/accounts/{self._api.account_id}/dashboard', + query=query, + headers=headers) + return GetBillingUsageDashboardResponse.from_dict(res) diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py index c6da9b8c..0e81d239 100755 --- a/databricks/sdk/service/catalog.py +++ b/databricks/sdk/service/catalog.py @@ -2789,19 +2789,25 @@ def from_dict(cls, d: Dict[str, any]) -> ListStorageCredentialsResponse: @dataclass class ListSystemSchemasResponse: + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. + __page_token__ should be set to this value for the next request (for the next page of results).""" + schemas: Optional[List[SystemSchemaInfo]] = None """An array of system schema information objects.""" def as_dict(self) -> dict: """Serializes the ListSystemSchemasResponse into a dictionary suitable for use as a JSON request body.""" body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token if self.schemas: body['schemas'] = [v.as_dict() for v in self.schemas] return body @classmethod def from_dict(cls, d: Dict[str, any]) -> ListSystemSchemasResponse: """Deserializes the ListSystemSchemasResponse from a dictionary.""" - return cls(schemas=_repeated_dict(d, 'schemas', SystemSchemaInfo)) + return cls(next_page_token=d.get('next_page_token', None), + schemas=_repeated_dict(d, 'schemas', SystemSchemaInfo)) @dataclass @@ -3026,6 +3032,9 @@ class MetastoreInfoDeltaSharingScope(Enum): @dataclass class ModelVersionInfo: + aliases: Optional[List[RegisteredModelAlias]] = None + """List of aliases associated with the model version""" + 
browse_only: Optional[bool] = None """Indicates whether the principal is limited to retrieving metadata for the associated object through the BROWSE privilege when include_browse is enabled in the request.""" @@ -3086,6 +3095,7 @@ class ModelVersionInfo: def as_dict(self) -> dict: """Serializes the ModelVersionInfo into a dictionary suitable for use as a JSON request body.""" body = {} + if self.aliases: body['aliases'] = [v.as_dict() for v in self.aliases] if self.browse_only is not None: body['browse_only'] = self.browse_only if self.catalog_name is not None: body['catalog_name'] = self.catalog_name if self.comment is not None: body['comment'] = self.comment @@ -3110,7 +3120,8 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> ModelVersionInfo: """Deserializes the ModelVersionInfo from a dictionary.""" - return cls(browse_only=d.get('browse_only', None), + return cls(aliases=_repeated_dict(d, 'aliases', RegisteredModelAlias), + browse_only=d.get('browse_only', None), catalog_name=d.get('catalog_name', None), comment=d.get('comment', None), created_at=d.get('created_at', None), @@ -5790,16 +5801,22 @@ class WorkspaceBindingsResponse: bindings: Optional[List[WorkspaceBinding]] = None """List of workspace bindings""" + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. 
+ __page_token__ should be set to this value for the next request (for the next page of results).""" + def as_dict(self) -> dict: """Serializes the WorkspaceBindingsResponse into a dictionary suitable for use as a JSON request body.""" body = {} if self.bindings: body['bindings'] = [v.as_dict() for v in self.bindings] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, any]) -> WorkspaceBindingsResponse: """Deserializes the WorkspaceBindingsResponse from a dictionary.""" - return cls(bindings=_repeated_dict(d, 'bindings', WorkspaceBinding)) + return cls(bindings=_repeated_dict(d, 'bindings', WorkspaceBinding), + next_page_token=d.get('next_page_token', None)) class AccountMetastoreAssignmentsAPI: @@ -7339,7 +7356,8 @@ def get(self, full_name: str, version: int, *, - include_browse: Optional[bool] = None) -> RegisteredModelInfo: + include_aliases: Optional[bool] = None, + include_browse: Optional[bool] = None) -> ModelVersionInfo: """Get a Model Version. Get a model version. 
@@ -7352,14 +7370,17 @@ def get(self, The three-level (fully qualified) name of the model version :param version: int The integer version number of the model version + :param include_aliases: bool (optional) + Whether to include aliases associated with the model version in the response :param include_browse: bool (optional) Whether to include model versions in the response for which the principal can only access selective metadata for - :returns: :class:`RegisteredModelInfo` + :returns: :class:`ModelVersionInfo` """ query = {} + if include_aliases is not None: query['include_aliases'] = include_aliases if include_browse is not None: query['include_browse'] = include_browse headers = {'Accept': 'application/json', } @@ -7367,9 +7388,13 @@ def get(self, f'/api/2.1/unity-catalog/models/{full_name}/versions/{version}', query=query, headers=headers) - return RegisteredModelInfo.from_dict(res) + return ModelVersionInfo.from_dict(res) - def get_by_alias(self, full_name: str, alias: str) -> ModelVersionInfo: + def get_by_alias(self, + full_name: str, + alias: str, + *, + include_aliases: Optional[bool] = None) -> ModelVersionInfo: """Get Model Version By Alias. Get a model version by alias. 
@@ -7382,14 +7407,19 @@ def get_by_alias(self, full_name: str, alias: str) -> ModelVersionInfo: The three-level (fully qualified) name of the registered model :param alias: str The name of the alias + :param include_aliases: bool (optional) + Whether to include aliases associated with the model version in the response :returns: :class:`ModelVersionInfo` """ + query = {} + if include_aliases is not None: query['include_aliases'] = include_aliases headers = {'Accept': 'application/json', } res = self._api.do('GET', f'/api/2.1/unity-catalog/models/{full_name}/aliases/{alias}', + query=query, headers=headers) return ModelVersionInfo.from_dict(res) @@ -7985,7 +8015,11 @@ def delete_alias(self, full_name: str, alias: str): self._api.do('DELETE', f'/api/2.1/unity-catalog/models/{full_name}/aliases/{alias}', headers=headers) - def get(self, full_name: str, *, include_browse: Optional[bool] = None) -> RegisteredModelInfo: + def get(self, + full_name: str, + *, + include_aliases: Optional[bool] = None, + include_browse: Optional[bool] = None) -> RegisteredModelInfo: """Get a Registered Model. Get a registered model. 
@@ -7996,6 +8030,8 @@ def get(self, full_name: str, *, include_browse: Optional[bool] = None) -> Regis :param full_name: str The three-level (fully qualified) name of the registered model + :param include_aliases: bool (optional) + Whether to include registered model aliases in the response :param include_browse: bool (optional) Whether to include registered models in the response for which the principal can only access selective metadata for @@ -8004,6 +8040,7 @@ def get(self, full_name: str, *, include_browse: Optional[bool] = None) -> Regis """ query = {} + if include_aliases is not None: query['include_aliases'] = include_aliases if include_browse is not None: query['include_browse'] = include_browse headers = {'Accept': 'application/json', } @@ -8650,7 +8687,11 @@ def enable(self, metastore_id: str, schema_name: str): f'/api/2.1/unity-catalog/metastores/{metastore_id}/systemschemas/{schema_name}', headers=headers) - def list(self, metastore_id: str) -> Iterator[SystemSchemaInfo]: + def list(self, + metastore_id: str, + *, + max_results: Optional[int] = None, + page_token: Optional[str] = None) -> Iterator[SystemSchemaInfo]: """List system schemas. Gets an array of system schemas for a metastore. The caller must be an account admin or a metastore @@ -8658,17 +8699,33 @@ def list(self, metastore_id: str) -> Iterator[SystemSchemaInfo]: :param metastore_id: str The ID for the metastore in which the system schema resides. + :param max_results: int (optional) + Maximum number of schemas to return. - When set to 0, the page length is set to a server configured + value (recommended); - When set to a value greater than 0, the page length is the minimum of this + value and a server configured value; - When set to a value less than 0, an invalid parameter error + is returned; - If not set, all the schemas are returned (not recommended). + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. 
:returns: Iterator over :class:`SystemSchemaInfo` """ + query = {} + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token headers = {'Accept': 'application/json', } - json = self._api.do('GET', - f'/api/2.1/unity-catalog/metastores/{metastore_id}/systemschemas', - headers=headers) - parsed = ListSystemSchemasResponse.from_dict(json).schemas - return parsed if parsed is not None else [] + while True: + json = self._api.do('GET', + f'/api/2.1/unity-catalog/metastores/{metastore_id}/systemschemas', + query=query, + headers=headers) + if 'schemas' in json: + for v in json['schemas']: + yield SystemSchemaInfo.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] class TableConstraintsAPI: @@ -9214,8 +9271,12 @@ def get(self, name: str) -> CurrentWorkspaceBindings: headers=headers) return CurrentWorkspaceBindings.from_dict(res) - def get_bindings(self, securable_type: GetBindingsSecurableType, - securable_name: str) -> WorkspaceBindingsResponse: + def get_bindings(self, + securable_type: GetBindingsSecurableType, + securable_name: str, + *, + max_results: Optional[int] = None, + page_token: Optional[str] = None) -> Iterator[WorkspaceBinding]: """Get securable workspace bindings. Gets workspace bindings of the securable. The caller must be a metastore admin or an owner of the @@ -9225,16 +9286,33 @@ def get_bindings(self, securable_type: GetBindingsSecurableType, The type of the securable to bind to a workspace. :param securable_name: str The name of the securable. + :param max_results: int (optional) + Maximum number of workspace bindings to return. 
- When set to 0, the page length is set to a server + configured value (recommended); - When set to a value greater than 0, the page length is the minimum + of this value and a server configured value; - When set to a value less than 0, an invalid parameter + error is returned; - If not set, all the workspace bindings are returned (not recommended). + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. - :returns: :class:`WorkspaceBindingsResponse` + :returns: Iterator over :class:`WorkspaceBinding` """ + query = {} + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token headers = {'Accept': 'application/json', } - res = self._api.do('GET', - f'/api/2.1/unity-catalog/bindings/{securable_type.value}/{securable_name}', - headers=headers) - return WorkspaceBindingsResponse.from_dict(res) + while True: + json = self._api.do('GET', + f'/api/2.1/unity-catalog/bindings/{securable_type.value}/{securable_name}', + query=query, + headers=headers) + if 'bindings' in json: + for v in json['bindings']: + yield WorkspaceBinding.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] def update(self, name: str, diff --git a/databricks/sdk/service/compute.py b/databricks/sdk/service/compute.py index bbfda789..148ce44e 100755 --- a/databricks/sdk/service/compute.py +++ b/databricks/sdk/service/compute.py @@ -2106,10 +2106,6 @@ def from_dict(cls, d: Dict[str, any]) -> CreateInstancePoolResponse: @dataclass class CreatePolicy: - name: str - """Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and - 100 characters.""" - definition: Optional[str] = None """Policy definition document expressed in [Databricks Cluster Policy Definition Language]. 
@@ -2126,6 +2122,10 @@ class CreatePolicy: """Max number of clusters per user that can be active using this policy. If not present, there is no max limit.""" + name: Optional[str] = None + """Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and + 100 characters.""" + policy_family_definition_overrides: Optional[str] = None """Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON document must be passed as a string and cannot be embedded in the requests. @@ -2891,10 +2891,6 @@ class EditPolicy: policy_id: str """The ID of the policy to update.""" - name: str - """Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and - 100 characters.""" - definition: Optional[str] = None """Policy definition document expressed in [Databricks Cluster Policy Definition Language]. @@ -2911,6 +2907,10 @@ class EditPolicy: """Max number of clusters per user that can be active using this policy. If not present, there is no max limit.""" + name: Optional[str] = None + """Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and + 100 characters.""" + policy_family_definition_overrides: Optional[str] = None """Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON document must be passed as a string and cannot be embedded in the requests. @@ -4784,13 +4784,13 @@ def from_dict(cls, d: Dict[str, any]) -> ListPoliciesResponse: @dataclass class ListPolicyFamiliesResponse: - policy_families: List[PolicyFamily] - """List of policy families.""" - next_page_token: Optional[str] = None """A token that can be used to get the next page of results. 
If not present, there are no more results to show.""" + policy_families: Optional[List[PolicyFamily]] = None + """List of policy families.""" + def as_dict(self) -> dict: """Serializes the ListPolicyFamiliesResponse into a dictionary suitable for use as a JSON request body.""" body = {} @@ -4812,6 +4812,7 @@ class ListSortColumn(Enum): class ListSortOrder(Enum): + """A generic ordering enum for list-based queries.""" ASC = 'ASC' DESC = 'DESC' @@ -5138,6 +5139,8 @@ def from_dict(cls, d: Dict[str, any]) -> PinClusterResponse: @dataclass class Policy: + """Describes a Cluster Policy entity.""" + created_at_timestamp: Optional[int] = None """Creation time. The timestamp (in millisecond) when this Cluster Policy was created.""" @@ -5179,7 +5182,11 @@ class Policy: [Databricks Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html""" policy_family_id: Optional[str] = None - """ID of the policy family.""" + """ID of the policy family. The cluster policy's policy definition inherits the policy family's + policy definition. + + Cannot be used with `definition`. Use `policy_family_definition_overrides` instead to customize + the policy definition.""" policy_id: Optional[str] = None """Canonical unique identifier for the Cluster Policy.""" @@ -5219,20 +5226,20 @@ def from_dict(cls, d: Dict[str, any]) -> Policy: @dataclass class PolicyFamily: - policy_family_id: str - """ID of the policy family.""" - - name: str - """Name of the policy family.""" - - description: str - """Human-readable description of the purpose of the policy family.""" - - definition: str + definition: Optional[str] = None """Policy definition document expressed in [Databricks Cluster Policy Definition Language]. 
[Databricks Cluster Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html""" + description: Optional[str] = None + """Human-readable description of the purpose of the policy family.""" + + name: Optional[str] = None + """Name of the policy family.""" + + policy_family_id: Optional[str] = None + """Unique identifier for the policy family.""" + def as_dict(self) -> dict: """Serializes the PolicyFamily into a dictionary suitable for use as a JSON request body.""" body = {} @@ -5872,6 +5879,260 @@ def from_dict(cls, d: Dict[str, any]) -> UnpinClusterResponse: return cls() +@dataclass +class UpdateCluster: + cluster_id: str + """ID of the cluster.""" + + update_mask: str + """Specifies which fields of the cluster will be updated. This is required in the POST request. The + update mask should be supplied as a single string. To specify multiple fields, separate them + with commas (no spaces). To delete a field from a cluster configuration, add it to the + `update_mask` string but omit it from the `cluster` object.""" + + cluster: Optional[UpdateClusterResource] = None + """The cluster to be updated.""" + + def as_dict(self) -> dict: + """Serializes the UpdateCluster into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.cluster: body['cluster'] = self.cluster.as_dict() + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.update_mask is not None: body['update_mask'] = self.update_mask + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> UpdateCluster: + """Deserializes the UpdateCluster from a dictionary.""" + return cls(cluster=_from_dict(d, 'cluster', UpdateClusterResource), + cluster_id=d.get('cluster_id', None), + update_mask=d.get('update_mask', None)) + + +@dataclass +class UpdateClusterResource: + autoscale: Optional[AutoScale] = None + """Parameters needed in order to automatically scale clusters up and down based on load. 
Note: + autoscaling works best with DB runtime versions 3.0 or later.""" + + autotermination_minutes: Optional[int] = None + """Automatically terminates the cluster after it is inactive for this time in minutes. If not set, + this cluster will not be automatically terminated. If specified, the threshold must be between + 10 and 10000 minutes. Users can also set this value to 0 to explicitly disable automatic + termination.""" + + aws_attributes: Optional[AwsAttributes] = None + """Attributes related to clusters running on Amazon Web Services. If not specified at cluster + creation, a set of default values will be used.""" + + azure_attributes: Optional[AzureAttributes] = None + """Attributes related to clusters running on Microsoft Azure. If not specified at cluster creation, + a set of default values will be used.""" + + cluster_log_conf: Optional[ClusterLogConf] = None + """The configuration for delivering spark logs to a long-term storage destination. Two kinds of + destinations (dbfs and s3) are supported. Only one destination can be specified for one cluster. + If the conf is given, the logs will be delivered to the destination every `5 mins`. The + destination of driver logs is `$destination/$clusterId/driver`, while the destination of + executor logs is `$destination/$clusterId/executor`.""" + + cluster_name: Optional[str] = None + """Cluster name requested by the user. This doesn't have to be unique. If not specified at + creation, the cluster name will be an empty string.""" + + custom_tags: Optional[Dict[str, str]] = None + """Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS + instances and EBS volumes) with these tags in addition to `default_tags`. 
Notes: + + - Currently, Databricks allows at most 45 custom tags + + - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster + tags""" + + data_security_mode: Optional[DataSecurityMode] = None + """Data security mode decides what data governance model to use when accessing data from a cluster. + + * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features + are not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively + used by a single user specified in `single_user_name`. Most programming languages, cluster + features and data governance features are available in this mode. * `USER_ISOLATION`: A secure + cluster that can be shared by multiple users. Cluster users are fully isolated so that they + cannot see each other's data and credentials. Most data governance features are supported in + this mode. But programming languages and cluster features might be limited. + + The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for + future Databricks Runtime versions: + + * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. * + `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high + concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy + Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that + doesn’t have UC nor passthrough enabled.""" + + docker_image: Optional[DockerImage] = None + + driver_instance_pool_id: Optional[str] = None + """The optional ID of the instance pool for the driver of the cluster belongs. The pool cluster + uses the instance pool with id (instance_pool_id) if the driver pool is not assigned.""" + + driver_node_type_id: Optional[str] = None + """The node type of the Spark driver. 
Note that this field is optional; if unset, the driver node + type will be set as the same value as `node_type_id` defined above.""" + + enable_elastic_disk: Optional[bool] = None + """Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk + space when its Spark workers are running low on disk space. This feature requires specific AWS + permissions to function correctly - refer to the User Guide for more details.""" + + enable_local_disk_encryption: Optional[bool] = None + """Whether to enable LUKS on cluster VMs' local disks""" + + gcp_attributes: Optional[GcpAttributes] = None + """Attributes related to clusters running on Google Cloud Platform. If not specified at cluster + creation, a set of default values will be used.""" + + init_scripts: Optional[List[InitScriptInfo]] = None + """The configuration for storing init scripts. Any number of destinations can be specified. The + scripts are executed sequentially in the order provided. If `cluster_log_conf` is specified, + init script logs are sent to `//init_scripts`.""" + + instance_pool_id: Optional[str] = None + """The optional ID of the instance pool to which the cluster belongs.""" + + node_type_id: Optional[str] = None + """This field encodes, through a single value, the resources available to each of the Spark nodes + in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or + compute intensive workloads. A list of available node types can be retrieved by using the + :method:clusters/listNodeTypes API call.""" + + num_workers: Optional[int] = None + """Number of worker nodes that this cluster should have. A cluster has one Spark Driver and + `num_workers` Executors for a total of `num_workers` + 1 Spark nodes. + + Note: When reading the properties of a cluster, this field reflects the desired number of + workers rather than the actual current number of workers. 
For instance, if a cluster is resized + from 5 to 10 workers, this field will immediately be updated to reflect the target size of 10 + workers, whereas the workers listed in `spark_info` will gradually increase from 5 to 10 as the + new nodes are provisioned.""" + + policy_id: Optional[str] = None + """The ID of the cluster policy used to create the cluster if applicable.""" + + runtime_engine: Optional[RuntimeEngine] = None + """Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime + engine is inferred from spark_version.""" + + single_user_name: Optional[str] = None + """Single user name if data_security_mode is `SINGLE_USER`""" + + spark_conf: Optional[Dict[str, str]] = None + """An object containing a set of optional, user-specified Spark configuration key-value pairs. + Users can also pass in a string of extra JVM options to the driver and the executors via + `spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions` respectively.""" + + spark_env_vars: Optional[Dict[str, str]] = None + """An object containing a set of optional, user-specified environment variable key-value pairs. + Please note that key-value pair of the form (X,Y) will be exported as is (i.e., `export X='Y'`) + while launching the driver and workers. + + In order to specify an additional set of `SPARK_DAEMON_JAVA_OPTS`, we recommend appending them + to `$SPARK_DAEMON_JAVA_OPTS` as shown in the example below. This ensures that all default + databricks managed environmental variables are included as well. + + Example Spark environment variables: `{"SPARK_WORKER_MEMORY": "28000m", "SPARK_LOCAL_DIRS": + "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS + -Dspark.shuffle.service.enabled=true"}`""" + + spark_version: Optional[str] = None + """The Spark version of the cluster, e.g. `3.3.x-scala2.11`. 
A list of available Spark versions can + be retrieved by using the :method:clusters/sparkVersions API call.""" + + ssh_public_keys: Optional[List[str]] = None + """SSH public key contents that will be added to each Spark node in this cluster. The corresponding + private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can + be specified.""" + + workload_type: Optional[WorkloadType] = None + + def as_dict(self) -> dict: + """Serializes the UpdateClusterResource into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.autoscale: body['autoscale'] = self.autoscale.as_dict() + if self.autotermination_minutes is not None: + body['autotermination_minutes'] = self.autotermination_minutes + if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict() + if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict() + if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf.as_dict() + if self.cluster_name is not None: body['cluster_name'] = self.cluster_name + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode.value + if self.docker_image: body['docker_image'] = self.docker_image.as_dict() + if self.driver_instance_pool_id is not None: + body['driver_instance_pool_id'] = self.driver_instance_pool_id + if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id + if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk + if self.enable_local_disk_encryption is not None: + body['enable_local_disk_encryption'] = self.enable_local_disk_encryption + if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict() + if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts] + if self.instance_pool_id is not None: body['instance_pool_id'] = 
self.instance_pool_id + if self.node_type_id is not None: body['node_type_id'] = self.node_type_id + if self.num_workers is not None: body['num_workers'] = self.num_workers + if self.policy_id is not None: body['policy_id'] = self.policy_id + if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine.value + if self.single_user_name is not None: body['single_user_name'] = self.single_user_name + if self.spark_conf: body['spark_conf'] = self.spark_conf + if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars + if self.spark_version is not None: body['spark_version'] = self.spark_version + if self.ssh_public_keys: body['ssh_public_keys'] = [v for v in self.ssh_public_keys] + if self.workload_type: body['workload_type'] = self.workload_type.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> UpdateClusterResource: + """Deserializes the UpdateClusterResource from a dictionary.""" + return cls(autoscale=_from_dict(d, 'autoscale', AutoScale), + autotermination_minutes=d.get('autotermination_minutes', None), + aws_attributes=_from_dict(d, 'aws_attributes', AwsAttributes), + azure_attributes=_from_dict(d, 'azure_attributes', AzureAttributes), + cluster_log_conf=_from_dict(d, 'cluster_log_conf', ClusterLogConf), + cluster_name=d.get('cluster_name', None), + custom_tags=d.get('custom_tags', None), + data_security_mode=_enum(d, 'data_security_mode', DataSecurityMode), + docker_image=_from_dict(d, 'docker_image', DockerImage), + driver_instance_pool_id=d.get('driver_instance_pool_id', None), + driver_node_type_id=d.get('driver_node_type_id', None), + enable_elastic_disk=d.get('enable_elastic_disk', None), + enable_local_disk_encryption=d.get('enable_local_disk_encryption', None), + gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes), + init_scripts=_repeated_dict(d, 'init_scripts', InitScriptInfo), + instance_pool_id=d.get('instance_pool_id', None), + node_type_id=d.get('node_type_id', None), + 
num_workers=d.get('num_workers', None), + policy_id=d.get('policy_id', None), + runtime_engine=_enum(d, 'runtime_engine', RuntimeEngine), + single_user_name=d.get('single_user_name', None), + spark_conf=d.get('spark_conf', None), + spark_env_vars=d.get('spark_env_vars', None), + spark_version=d.get('spark_version', None), + ssh_public_keys=d.get('ssh_public_keys', None), + workload_type=_from_dict(d, 'workload_type', WorkloadType)) + + +@dataclass +class UpdateClusterResponse: + + def as_dict(self) -> dict: + """Serializes the UpdateClusterResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> UpdateClusterResponse: + """Deserializes the UpdateClusterResponse from a dictionary.""" + return cls() + + @dataclass class UpdateResponse: @@ -5960,21 +6221,18 @@ def __init__(self, api_client): self._api = api_client def create(self, - name: str, *, definition: Optional[str] = None, description: Optional[str] = None, libraries: Optional[List[Library]] = None, max_clusters_per_user: Optional[int] = None, + name: Optional[str] = None, policy_family_definition_overrides: Optional[str] = None, policy_family_id: Optional[str] = None) -> CreatePolicyResponse: """Create a new policy. Creates a new policy with prescribed settings. - :param name: str - Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and 100 - characters. :param definition: str (optional) Policy definition document expressed in [Databricks Cluster Policy Definition Language]. @@ -5987,6 +6245,9 @@ def create(self, :param max_clusters_per_user: int (optional) Max number of clusters per user that can be active using this policy. If not present, there is no max limit. + :param name: str (optional) + Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and 100 + characters. 
:param policy_family_definition_overrides: str (optional) Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON document must be passed as a string and cannot be embedded in the requests. @@ -6036,12 +6297,12 @@ def delete(self, policy_id: str): def edit(self, policy_id: str, - name: str, *, definition: Optional[str] = None, description: Optional[str] = None, libraries: Optional[List[Library]] = None, max_clusters_per_user: Optional[int] = None, + name: Optional[str] = None, policy_family_definition_overrides: Optional[str] = None, policy_family_id: Optional[str] = None): """Update a cluster policy. @@ -6051,9 +6312,6 @@ def edit(self, :param policy_id: str The ID of the policy to update. - :param name: str - Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and 100 - characters. :param definition: str (optional) Policy definition document expressed in [Databricks Cluster Policy Definition Language]. @@ -6066,6 +6324,9 @@ def edit(self, :param max_clusters_per_user: int (optional) Max number of clusters per user that can be active using this policy. If not present, there is no max limit. + :param name: str (optional) + Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and 100 + characters. :param policy_family_definition_overrides: str (optional) Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON document must be passed as a string and cannot be embedded in the requests. @@ -6103,7 +6364,7 @@ def get(self, policy_id: str) -> Policy: Get a cluster policy entity. Creation and editing is available to admins only. :param policy_id: str - Canonical unique identifier for the cluster policy. + Canonical unique identifier for the Cluster Policy. 
:returns: :class:`Policy` """ @@ -7275,6 +7536,57 @@ def unpin(self, cluster_id: str): self._api.do('POST', '/api/2.1/clusters/unpin', body=body, headers=headers) + def update(self, + cluster_id: str, + update_mask: str, + *, + cluster: Optional[UpdateClusterResource] = None) -> Wait[ClusterDetails]: + """Update cluster configuration (partial). + + Updates the configuration of a cluster to match the partial set of attributes and size. Denote which + fields to update using the `update_mask` field in the request body. A cluster can be updated if it is + in a `RUNNING` or `TERMINATED` state. If a cluster is updated while in a `RUNNING` state, it will be + restarted so that the new attributes can take effect. If a cluster is updated while in a `TERMINATED` + state, it will remain `TERMINATED`. The updated attributes will take effect the next time the cluster + is started using the `clusters/start` API. Attempts to update a cluster in any other state will be + rejected with an `INVALID_STATE` error code. Clusters created by the Databricks Jobs service cannot be + updated. + + :param cluster_id: str + ID of the cluster. + :param update_mask: str + Specifies which fields of the cluster will be updated. This is required in the POST request. The + update mask should be supplied as a single string. To specify multiple fields, separate them with + commas (no spaces). To delete a field from a cluster configuration, add it to the `update_mask` + string but omit it from the `cluster` object. + :param cluster: :class:`UpdateClusterResource` (optional) + The cluster to be updated. + + :returns: + Long-running operation waiter for :class:`ClusterDetails`. + See :method:wait_get_cluster_running for more details. 
+ """ + body = {} + if cluster is not None: body['cluster'] = cluster.as_dict() + if cluster_id is not None: body['cluster_id'] = cluster_id + if update_mask is not None: body['update_mask'] = update_mask + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + op_response = self._api.do('POST', '/api/2.1/clusters/update', body=body, headers=headers) + return Wait(self.wait_get_cluster_running, + response=UpdateClusterResponse.from_dict(op_response), + cluster_id=cluster_id) + + def update_and_wait( + self, + cluster_id: str, + update_mask: str, + *, + cluster: Optional[UpdateClusterResource] = None, + timeout=timedelta(minutes=20)) -> ClusterDetails: + return self.update(cluster=cluster, cluster_id=cluster_id, + update_mask=update_mask).result(timeout=timeout) + def update_permissions( self, cluster_id: str, @@ -8286,19 +8598,27 @@ class PolicyFamiliesAPI: def __init__(self, api_client): self._api = api_client - def get(self, policy_family_id: str) -> PolicyFamily: + def get(self, policy_family_id: str, *, version: Optional[int] = None) -> PolicyFamily: """Get policy family information. - Retrieve the information for an policy family based on its identifier. + Retrieve the information for an policy family based on its identifier and version :param policy_family_id: str + The family ID about which to retrieve information. + :param version: int (optional) + The version number for the family to fetch. Defaults to the latest version. 
:returns: :class:`PolicyFamily` """ + query = {} + if version is not None: query['version'] = version headers = {'Accept': 'application/json', } - res = self._api.do('GET', f'/api/2.0/policy-families/{policy_family_id}', headers=headers) + res = self._api.do('GET', + f'/api/2.0/policy-families/{policy_family_id}', + query=query, + headers=headers) return PolicyFamily.from_dict(res) def list(self, @@ -8307,10 +8627,11 @@ def list(self, page_token: Optional[str] = None) -> Iterator[PolicyFamily]: """List policy families. - Retrieve a list of policy families. This API is paginated. + Returns the list of policy definition types available to use at their latest version. This API is + paginated. :param max_results: int (optional) - The max number of policy families to return. + Maximum number of policy families to return. :param page_token: str (optional) A token that can be used to get the next page of results. diff --git a/databricks/sdk/service/dashboards.py b/databricks/sdk/service/dashboards.py index bf571dd4..28ddca56 100755 --- a/databricks/sdk/service/dashboards.py +++ b/databricks/sdk/service/dashboards.py @@ -208,7 +208,6 @@ def from_dict(cls, d: Dict[str, any]) -> Dashboard: class DashboardView(Enum): DASHBOARD_VIEW_BASIC = 'DASHBOARD_VIEW_BASIC' - DASHBOARD_VIEW_FULL = 'DASHBOARD_VIEW_FULL' @dataclass @@ -381,7 +380,9 @@ class GenieMessage: """MesssageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data sources. * `ASKING_AI`: Waiting for the LLM to respond to the users question. * `EXECUTING_QUERY`: Executing AI provided SQL query. Get the SQL query result by calling - [getMessageQueryResult](:method:genie/getMessageQueryResult) API. * `FAILED`: Generating a + [getMessageQueryResult](:method:genie/getMessageQueryResult) API. **Important: The message + status will stay in the `EXECUTING_QUERY` until a client calls + [getMessageQueryResult](:method:genie/getMessageQueryResult)**. 
* `FAILED`: Generating a response or the executing the query failed. Please see `error` field. * `COMPLETED`: Message processing is completed. Results are in the `attachments` field. Get the SQL query result by calling [getMessageQueryResult](:method:genie/getMessageQueryResult) API. * `SUBMITTED`: Message @@ -612,7 +613,9 @@ class MessageStatus(Enum): """MesssageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data sources. * `ASKING_AI`: Waiting for the LLM to respond to the users question. * `EXECUTING_QUERY`: Executing AI provided SQL query. Get the SQL query result by calling - [getMessageQueryResult](:method:genie/getMessageQueryResult) API. * `FAILED`: Generating a + [getMessageQueryResult](:method:genie/getMessageQueryResult) API. **Important: The message + status will stay in the `EXECUTING_QUERY` until a client calls + [getMessageQueryResult](:method:genie/getMessageQueryResult)**. * `FAILED`: Generating a response or the executing the query failed. Please see `error` field. * `COMPLETED`: Message processing is completed. Results are in the `attachments` field. Get the SQL query result by calling [getMessageQueryResult](:method:genie/getMessageQueryResult) API. 
* `SUBMITTED`: Message @@ -721,6 +724,8 @@ class QueryAttachment: description: Optional[str] = None """Description of the query""" + id: Optional[str] = None + instruction_id: Optional[str] = None """If the query was created on an instruction (trusted asset) we link to the id""" @@ -741,6 +746,7 @@ def as_dict(self) -> dict: """Serializes the QueryAttachment into a dictionary suitable for use as a JSON request body.""" body = {} if self.description is not None: body['description'] = self.description + if self.id is not None: body['id'] = self.id if self.instruction_id is not None: body['instruction_id'] = self.instruction_id if self.instruction_title is not None: body['instruction_title'] = self.instruction_title if self.last_updated_timestamp is not None: @@ -753,6 +759,7 @@ def as_dict(self) -> dict: def from_dict(cls, d: Dict[str, any]) -> QueryAttachment: """Deserializes the QueryAttachment from a dictionary.""" return cls(description=d.get('description', None), + id=d.get('id', None), instruction_id=d.get('instruction_id', None), instruction_title=d.get('instruction_title', None), last_updated_timestamp=d.get('last_updated_timestamp', None), @@ -960,16 +967,19 @@ class TextAttachment: content: Optional[str] = None """AI generated message""" + id: Optional[str] = None + def as_dict(self) -> dict: """Serializes the TextAttachment into a dictionary suitable for use as a JSON request body.""" body = {} if self.content is not None: body['content'] = self.content + if self.id is not None: body['id'] = self.id return body @classmethod def from_dict(cls, d: Dict[str, any]) -> TextAttachment: """Deserializes the TextAttachment from a dictionary.""" - return cls(content=d.get('content', None)) + return cls(content=d.get('content', None), id=d.get('id', None)) @dataclass @@ -1505,8 +1515,7 @@ def list(self, The flag to include dashboards located in the trash. If unspecified, only active dashboards will be returned. 
:param view: :class:`DashboardView` (optional) - Indicates whether to include all metadata from the dashboard in the response. If unset, the response - defaults to `DASHBOARD_VIEW_BASIC` which only includes summary metadata from the dashboard. + `DASHBOARD_VIEW_BASIC`only includes summary metadata from the dashboard. :returns: Iterator over :class:`Dashboard` """ diff --git a/databricks/sdk/service/iam.py b/databricks/sdk/service/iam.py index b5cf9184..2b027fe6 100755 --- a/databricks/sdk/service/iam.py +++ b/databricks/sdk/service/iam.py @@ -830,6 +830,7 @@ class PermissionLevel(Enum): CAN_MANAGE_PRODUCTION_VERSIONS = 'CAN_MANAGE_PRODUCTION_VERSIONS' CAN_MANAGE_RUN = 'CAN_MANAGE_RUN' CAN_MANAGE_STAGING_VERSIONS = 'CAN_MANAGE_STAGING_VERSIONS' + CAN_MONITOR = 'CAN_MONITOR' CAN_QUERY = 'CAN_QUERY' CAN_READ = 'CAN_READ' CAN_RESTART = 'CAN_RESTART' @@ -890,9 +891,9 @@ class PermissionsRequest: """The id of the request object.""" request_object_type: Optional[str] = None - """The type of the request object. Can be one of the following: authorization, clusters, - cluster-policies, directories, experiments, files, instance-pools, jobs, notebooks, pipelines, - registered-models, repos, serving-endpoints, or warehouses.""" + """The type of the request object. Can be one of the following: alerts, authorization, clusters, + cluster-policies, dbsql-dashboards, directories, experiments, files, instance-pools, jobs, + notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses.""" def as_dict(self) -> dict: """Serializes the PermissionsRequest into a dictionary suitable for use as a JSON request body.""" @@ -1139,8 +1140,10 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateRuleSetRequest: @dataclass class UpdateWorkspaceAssignments: permissions: Optional[List[WorkspacePermission]] = None - """Array of permissions assignments to update on the workspace. 
Note that excluding this field will - have the same effect as providing an empty list which will result in the deletion of all + """Array of permissions assignments to update on the workspace. Valid values are "USER" and "ADMIN" + (case-sensitive). If both "USER" and "ADMIN" are provided, "ADMIN" takes precedence. Other + values will be ignored. Note that excluding this field, or providing unsupported values, will + have the same effect as providing an empty list, which will result in the deletion of all permissions for the principal.""" principal_id: Optional[int] = None @@ -2539,6 +2542,8 @@ class PermissionsAPI: """Permissions API are used to create read, write, edit, update and manage access for various users on different objects and endpoints. + * **[Apps permissions](:service:apps)** — Manage which users can manage or use apps. + * **[Cluster permissions](:service:clusters)** — Manage which users can manage, restart, or attach to clusters. @@ -2574,7 +2579,7 @@ class PermissionsAPI: * **[Token permissions](:service:tokenmanagement)** — Manage which users can create or use tokens. * **[Workspace object permissions](:service:workspace)** — Manage which users can read, run, edit, or - manage directories, files, and notebooks. + manage alerts, dbsql-dashboards, directories, files, notebooks and queries. For the mapping of the required permissions for specific actions or abilities and other important information, see [Access Control]. @@ -2594,9 +2599,9 @@ def get(self, request_object_type: str, request_object_id: str) -> ObjectPermiss object. :param request_object_type: str - The type of the request object. Can be one of the following: authorization, clusters, - cluster-policies, directories, experiments, files, instance-pools, jobs, notebooks, pipelines, - registered-models, repos, serving-endpoints, or warehouses. + The type of the request object. 
Can be one of the following: alerts, authorization, clusters, + cluster-policies, dbsql-dashboards, directories, experiments, files, instance-pools, jobs, + notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses. :param request_object_id: str The id of the request object. @@ -2642,9 +2647,9 @@ def set(self, object. :param request_object_type: str - The type of the request object. Can be one of the following: authorization, clusters, - cluster-policies, directories, experiments, files, instance-pools, jobs, notebooks, pipelines, - registered-models, repos, serving-endpoints, or warehouses. + The type of the request object. Can be one of the following: alerts, authorization, clusters, + cluster-policies, dbsql-dashboards, directories, experiments, files, instance-pools, jobs, + notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses. :param request_object_id: str The id of the request object. :param access_control_list: List[:class:`AccessControlRequest`] (optional) @@ -2673,9 +2678,9 @@ def update(self, root object. :param request_object_type: str - The type of the request object. Can be one of the following: authorization, clusters, - cluster-policies, directories, experiments, files, instance-pools, jobs, notebooks, pipelines, - registered-models, repos, serving-endpoints, or warehouses. + The type of the request object. Can be one of the following: alerts, authorization, clusters, + cluster-policies, dbsql-dashboards, directories, experiments, files, instance-pools, jobs, + notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses. :param request_object_id: str The id of the request object. :param access_control_list: List[:class:`AccessControlRequest`] (optional) @@ -3382,9 +3387,11 @@ def update(self, :param principal_id: int The ID of the user, service principal, or group. 
:param permissions: List[:class:`WorkspacePermission`] (optional) - Array of permissions assignments to update on the workspace. Note that excluding this field will - have the same effect as providing an empty list which will result in the deletion of all permissions - for the principal. + Array of permissions assignments to update on the workspace. Valid values are "USER" and "ADMIN" + (case-sensitive). If both "USER" and "ADMIN" are provided, "ADMIN" takes precedence. Other values + will be ignored. Note that excluding this field, or providing unsupported values, will have the same + effect as providing an empty list, which will result in the deletion of all permissions for the + principal. :returns: :class:`PermissionAssignment` """ diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py index cf677fd0..6e5b34ad 100755 --- a/databricks/sdk/service/jobs.py +++ b/databricks/sdk/service/jobs.py @@ -15,7 +15,7 @@ _LOG = logging.getLogger('databricks.sdk') -from databricks.sdk.service import compute, iam +from databricks.sdk.service import compute # all definitions in this file are in alphabetical order @@ -469,7 +469,7 @@ def from_dict(cls, d: Dict[str, any]) -> Continuous: @dataclass class CreateJob: - access_control_list: Optional[List[iam.AccessControlRequest]] = None + access_control_list: Optional[List[JobAccessControlRequest]] = None """List of permissions to set on the job.""" continuous: Optional[Continuous] = None @@ -603,7 +603,7 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateJob: """Deserializes the CreateJob from a dictionary.""" - return cls(access_control_list=_repeated_dict(d, 'access_control_list', iam.AccessControlRequest), + return cls(access_control_list=_repeated_dict(d, 'access_control_list', JobAccessControlRequest), continuous=_from_dict(d, 'continuous', Continuous), deployment=_from_dict(d, 'deployment', JobDeployment), description=d.get('description', None), @@ -2614,6 +2614,9 
@@ class Run: Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are used, `git_source` must be defined on the job.""" + iterations: Optional[List[RunTask]] = None + """Only populated by for-each iterations. The parent for-each task is located in tasks array.""" + job_clusters: Optional[List[JobCluster]] = None """A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in @@ -2625,6 +2628,9 @@ class Run: job_parameters: Optional[List[JobParameter]] = None """Job-level parameters used in the run""" + next_page_token: Optional[str] = None + """A token that can be used to list the next page of sub-resources.""" + number_in_job: Optional[int] = None """A unique identifier for this job run. This is set to the same value as `run_id`.""" @@ -2635,6 +2641,9 @@ class Run: overriding_parameters: Optional[RunParameters] = None """The parameters used for this run.""" + prev_page_token: Optional[str] = None + """A token that can be used to list the previous page of sub-resources.""" + queue_duration: Optional[int] = None """The time in milliseconds that the run has spent in the queue.""" @@ -2708,13 +2717,16 @@ def as_dict(self) -> dict: if self.end_time is not None: body['end_time'] = self.end_time if self.execution_duration is not None: body['execution_duration'] = self.execution_duration if self.git_source: body['git_source'] = self.git_source.as_dict() + if self.iterations: body['iterations'] = [v.as_dict() for v in self.iterations] if self.job_clusters: body['job_clusters'] = [v.as_dict() for v in self.job_clusters] if self.job_id is not None: body['job_id'] = self.job_id if self.job_parameters: body['job_parameters'] = [v.as_dict() for v in self.job_parameters] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token if self.number_in_job is not None: body['number_in_job'] = 
self.number_in_job if self.original_attempt_run_id is not None: body['original_attempt_run_id'] = self.original_attempt_run_id if self.overriding_parameters: body['overriding_parameters'] = self.overriding_parameters.as_dict() + if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token if self.queue_duration is not None: body['queue_duration'] = self.queue_duration if self.repair_history: body['repair_history'] = [v.as_dict() for v in self.repair_history] if self.run_duration is not None: body['run_duration'] = self.run_duration @@ -2743,12 +2755,15 @@ def from_dict(cls, d: Dict[str, any]) -> Run: end_time=d.get('end_time', None), execution_duration=d.get('execution_duration', None), git_source=_from_dict(d, 'git_source', GitSource), + iterations=_repeated_dict(d, 'iterations', RunTask), job_clusters=_repeated_dict(d, 'job_clusters', JobCluster), job_id=d.get('job_id', None), job_parameters=_repeated_dict(d, 'job_parameters', JobParameter), + next_page_token=d.get('next_page_token', None), number_in_job=d.get('number_in_job', None), original_attempt_run_id=d.get('original_attempt_run_id', None), overriding_parameters=_from_dict(d, 'overriding_parameters', RunParameters), + prev_page_token=d.get('prev_page_token', None), queue_duration=d.get('queue_duration', None), repair_history=_repeated_dict(d, 'repair_history', RepairHistoryItem), run_duration=d.get('run_duration', None), @@ -4187,7 +4202,7 @@ def from_dict(cls, d: Dict[str, any]) -> SqlTaskSubscription: @dataclass class SubmitRun: - access_control_list: Optional[List[iam.AccessControlRequest]] = None + access_control_list: Optional[List[JobAccessControlRequest]] = None """List of permissions to set on the job.""" email_notifications: Optional[JobEmailNotifications] = None @@ -4267,7 +4282,7 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> SubmitRun: """Deserializes the SubmitRun from a dictionary.""" - return cls(access_control_list=_repeated_dict(d, 
'access_control_list', iam.AccessControlRequest), + return cls(access_control_list=_repeated_dict(d, 'access_control_list', JobAccessControlRequest), email_notifications=_from_dict(d, 'email_notifications', JobEmailNotifications), environments=_repeated_dict(d, 'environments', JobEnvironment), git_source=_from_dict(d, 'git_source', GitSource), @@ -5156,7 +5171,7 @@ def cancel_run_and_wait(self, run_id: int, timeout=timedelta(minutes=20)) -> Run def create(self, *, - access_control_list: Optional[List[iam.AccessControlRequest]] = None, + access_control_list: Optional[List[JobAccessControlRequest]] = None, continuous: Optional[Continuous] = None, deployment: Optional[JobDeployment] = None, description: Optional[str] = None, @@ -5183,7 +5198,7 @@ def create(self, Create a new job. - :param access_control_list: List[:class:`AccessControlRequest`] (optional) + :param access_control_list: List[:class:`JobAccessControlRequest`] (optional) List of permissions to set on the job. :param continuous: :class:`Continuous` (optional) An optional continuous property for this job. The continuous property will ensure that there is @@ -5401,7 +5416,8 @@ def get_run(self, run_id: int, *, include_history: Optional[bool] = None, - include_resolved_values: Optional[bool] = None) -> Run: + include_resolved_values: Optional[bool] = None, + page_token: Optional[str] = None) -> Run: """Get a single job run. Retrieve the metadata of a run. @@ -5412,6 +5428,9 @@ def get_run(self, Whether to include the repair history in the response. :param include_resolved_values: bool (optional) Whether to include resolved parameter values in the response. + :param page_token: str (optional) + To list the next page or the previous page of job tasks, set this field to the value of the + `next_page_token` or `prev_page_token` returned in the GetJob response. 
:returns: :class:`Run` """ @@ -5419,6 +5438,7 @@ def get_run(self, query = {} if include_history is not None: query['include_history'] = include_history if include_resolved_values is not None: query['include_resolved_values'] = include_resolved_values + if page_token is not None: query['page_token'] = page_token if run_id is not None: query['run_id'] = run_id headers = {'Accept': 'application/json', } @@ -5926,7 +5946,7 @@ def set_permissions( def submit(self, *, - access_control_list: Optional[List[iam.AccessControlRequest]] = None, + access_control_list: Optional[List[JobAccessControlRequest]] = None, email_notifications: Optional[JobEmailNotifications] = None, environments: Optional[List[JobEnvironment]] = None, git_source: Optional[GitSource] = None, @@ -5945,7 +5965,7 @@ def submit(self, Runs submitted using this endpoint don’t display in the UI. Use the `jobs/runs/get` API to check the run state after the job is submitted. - :param access_control_list: List[:class:`AccessControlRequest`] (optional) + :param access_control_list: List[:class:`JobAccessControlRequest`] (optional) List of permissions to set on the job. :param email_notifications: :class:`JobEmailNotifications` (optional) An optional set of email addresses notified when the run begins or completes. 
@@ -6020,7 +6040,7 @@ def submit(self, def submit_and_wait( self, *, - access_control_list: Optional[List[iam.AccessControlRequest]] = None, + access_control_list: Optional[List[JobAccessControlRequest]] = None, email_notifications: Optional[JobEmailNotifications] = None, environments: Optional[List[JobEnvironment]] = None, git_source: Optional[GitSource] = None, diff --git a/databricks/sdk/service/pipelines.py b/databricks/sdk/service/pipelines.py index bba59811..ae76632e 100755 --- a/databricks/sdk/service/pipelines.py +++ b/databricks/sdk/service/pipelines.py @@ -63,7 +63,7 @@ class CreatePipeline: id: Optional[str] = None """Unique identifier for this pipeline.""" - ingestion_definition: Optional[ManagedIngestionPipelineDefinition] = None + ingestion_definition: Optional[IngestionPipelineDefinition] = None """The configuration for a managed ingestion pipeline. These settings cannot be used with the 'libraries', 'target' or 'catalog' settings.""" @@ -136,8 +136,7 @@ def from_dict(cls, d: Dict[str, any]) -> CreatePipeline: filters=_from_dict(d, 'filters', Filters), gateway_definition=_from_dict(d, 'gateway_definition', IngestionGatewayPipelineDefinition), id=d.get('id', None), - ingestion_definition=_from_dict(d, 'ingestion_definition', - ManagedIngestionPipelineDefinition), + ingestion_definition=_from_dict(d, 'ingestion_definition', IngestionPipelineDefinition), libraries=_repeated_dict(d, 'libraries', PipelineLibrary), name=d.get('name', None), notifications=_repeated_dict(d, 'notifications', Notifications), @@ -277,7 +276,7 @@ class EditPipeline: id: Optional[str] = None """Unique identifier for this pipeline.""" - ingestion_definition: Optional[ManagedIngestionPipelineDefinition] = None + ingestion_definition: Optional[IngestionPipelineDefinition] = None """The configuration for a managed ingestion pipeline. 
These settings cannot be used with the 'libraries', 'target' or 'catalog' settings.""" @@ -355,8 +354,7 @@ def from_dict(cls, d: Dict[str, any]) -> EditPipeline: filters=_from_dict(d, 'filters', Filters), gateway_definition=_from_dict(d, 'gateway_definition', IngestionGatewayPipelineDefinition), id=d.get('id', None), - ingestion_definition=_from_dict(d, 'ingestion_definition', - ManagedIngestionPipelineDefinition), + ingestion_definition=_from_dict(d, 'ingestion_definition', IngestionPipelineDefinition), libraries=_repeated_dict(d, 'libraries', PipelineLibrary), name=d.get('name', None), notifications=_repeated_dict(d, 'notifications', Notifications), @@ -590,7 +588,7 @@ class IngestionGatewayPipelineDefinition: """Required, Immutable. The name of the catalog for the gateway pipeline's storage location.""" gateway_storage_name: Optional[str] = None - """Required. The Unity Catalog-compatible naming for the gateway storage location. This is the + """Optional. The Unity Catalog-compatible name for the gateway storage location. This is the destination to use for the data that is extracted by the gateway. Delta Live Tables system will automatically create the storage location under the catalog and schema.""" @@ -617,6 +615,41 @@ def from_dict(cls, d: Dict[str, any]) -> IngestionGatewayPipelineDefinition: gateway_storage_schema=d.get('gateway_storage_schema', None)) +@dataclass +class IngestionPipelineDefinition: + connection_name: Optional[str] = None + """Immutable. The Unity Catalog connection this ingestion pipeline uses to communicate with the + source. Specify either ingestion_gateway_id or connection_name.""" + + ingestion_gateway_id: Optional[str] = None + """Immutable. Identifier for the ingestion gateway used by this ingestion pipeline to communicate + with the source. Specify either ingestion_gateway_id or connection_name.""" + + objects: Optional[List[IngestionConfig]] = None + """Required. 
Settings specifying tables to replicate and the destination for the replicated tables.""" + + table_configuration: Optional[TableSpecificConfig] = None + """Configuration settings to control the ingestion of tables. These settings are applied to all + tables in the pipeline.""" + + def as_dict(self) -> dict: + """Serializes the IngestionPipelineDefinition into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.connection_name is not None: body['connection_name'] = self.connection_name + if self.ingestion_gateway_id is not None: body['ingestion_gateway_id'] = self.ingestion_gateway_id + if self.objects: body['objects'] = [v.as_dict() for v in self.objects] + if self.table_configuration: body['table_configuration'] = self.table_configuration.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> IngestionPipelineDefinition: + """Deserializes the IngestionPipelineDefinition from a dictionary.""" + return cls(connection_name=d.get('connection_name', None), + ingestion_gateway_id=d.get('ingestion_gateway_id', None), + objects=_repeated_dict(d, 'objects', IngestionConfig), + table_configuration=_from_dict(d, 'table_configuration', TableSpecificConfig)) + + @dataclass class ListPipelineEventsResponse: events: Optional[List[PipelineEvent]] = None @@ -693,41 +726,6 @@ def from_dict(cls, d: Dict[str, any]) -> ListUpdatesResponse: updates=_repeated_dict(d, 'updates', UpdateInfo)) -@dataclass -class ManagedIngestionPipelineDefinition: - connection_name: Optional[str] = None - """Immutable. The Unity Catalog connection this ingestion pipeline uses to communicate with the - source. Specify either ingestion_gateway_id or connection_name.""" - - ingestion_gateway_id: Optional[str] = None - """Immutable. Identifier for the ingestion gateway used by this ingestion pipeline to communicate - with the source. 
Specify either ingestion_gateway_id or connection_name.""" - - objects: Optional[List[IngestionConfig]] = None - """Required. Settings specifying tables to replicate and the destination for the replicated tables.""" - - table_configuration: Optional[TableSpecificConfig] = None - """Configuration settings to control the ingestion of tables. These settings are applied to all - tables in the pipeline.""" - - def as_dict(self) -> dict: - """Serializes the ManagedIngestionPipelineDefinition into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.connection_name is not None: body['connection_name'] = self.connection_name - if self.ingestion_gateway_id is not None: body['ingestion_gateway_id'] = self.ingestion_gateway_id - if self.objects: body['objects'] = [v.as_dict() for v in self.objects] - if self.table_configuration: body['table_configuration'] = self.table_configuration.as_dict() - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> ManagedIngestionPipelineDefinition: - """Deserializes the ManagedIngestionPipelineDefinition from a dictionary.""" - return cls(connection_name=d.get('connection_name', None), - ingestion_gateway_id=d.get('ingestion_gateway_id', None), - objects=_repeated_dict(d, 'objects', IngestionConfig), - table_configuration=_from_dict(d, 'table_configuration', TableSpecificConfig)) - - @dataclass class ManualTrigger: @@ -1003,6 +1001,9 @@ class PipelineCluster: """The node type of the Spark driver. Note that this field is optional; if unset, the driver node type will be set as the same value as `node_type_id` defined above.""" + enable_local_disk_encryption: Optional[bool] = None + """Whether to enable local disk encryption for the cluster.""" + gcp_attributes: Optional[compute.GcpAttributes] = None """Attributes related to clusters running on Google Cloud Platform. 
If not specified at cluster creation, a set of default values will be used.""" @@ -1074,6 +1075,8 @@ def as_dict(self) -> dict: if self.driver_instance_pool_id is not None: body['driver_instance_pool_id'] = self.driver_instance_pool_id if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id + if self.enable_local_disk_encryption is not None: + body['enable_local_disk_encryption'] = self.enable_local_disk_encryption if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict() if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts] if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id @@ -1097,6 +1100,7 @@ def from_dict(cls, d: Dict[str, any]) -> PipelineCluster: custom_tags=d.get('custom_tags', None), driver_instance_pool_id=d.get('driver_instance_pool_id', None), driver_node_type_id=d.get('driver_node_type_id', None), + enable_local_disk_encryption=d.get('enable_local_disk_encryption', None), gcp_attributes=_from_dict(d, 'gcp_attributes', compute.GcpAttributes), init_scripts=_repeated_dict(d, 'init_scripts', compute.InitScriptInfo), instance_pool_id=d.get('instance_pool_id', None), @@ -1244,6 +1248,9 @@ class PipelineLibrary: notebook: Optional[NotebookLibrary] = None """The path to a notebook that defines a pipeline and is stored in the Databricks workspace.""" + whl: Optional[str] = None + """URI of the whl to be installed.""" + def as_dict(self) -> dict: """Serializes the PipelineLibrary into a dictionary suitable for use as a JSON request body.""" body = {} @@ -1251,6 +1258,7 @@ def as_dict(self) -> dict: if self.jar is not None: body['jar'] = self.jar if self.maven: body['maven'] = self.maven.as_dict() if self.notebook: body['notebook'] = self.notebook.as_dict() + if self.whl is not None: body['whl'] = self.whl return body @classmethod @@ -1259,7 +1267,8 @@ def from_dict(cls, d: Dict[str, any]) -> PipelineLibrary: return 
cls(file=_from_dict(d, 'file', FileLibrary), jar=d.get('jar', None), maven=_from_dict(d, 'maven', compute.MavenLibrary), - notebook=_from_dict(d, 'notebook', NotebookLibrary)) + notebook=_from_dict(d, 'notebook', NotebookLibrary), + whl=d.get('whl', None)) @dataclass @@ -1403,7 +1412,7 @@ class PipelineSpec: id: Optional[str] = None """Unique identifier for this pipeline.""" - ingestion_definition: Optional[ManagedIngestionPipelineDefinition] = None + ingestion_definition: Optional[IngestionPipelineDefinition] = None """The configuration for a managed ingestion pipeline. These settings cannot be used with the 'libraries', 'target' or 'catalog' settings.""" @@ -1472,8 +1481,7 @@ def from_dict(cls, d: Dict[str, any]) -> PipelineSpec: filters=_from_dict(d, 'filters', Filters), gateway_definition=_from_dict(d, 'gateway_definition', IngestionGatewayPipelineDefinition), id=d.get('id', None), - ingestion_definition=_from_dict(d, 'ingestion_definition', - ManagedIngestionPipelineDefinition), + ingestion_definition=_from_dict(d, 'ingestion_definition', IngestionPipelineDefinition), libraries=_repeated_dict(d, 'libraries', PipelineLibrary), name=d.get('name', None), notifications=_repeated_dict(d, 'notifications', Notifications), @@ -1506,6 +1514,9 @@ class PipelineStateInfo: creator_user_name: Optional[str] = None """The username of the pipeline creator.""" + health: Optional[PipelineStateInfoHealth] = None + """The health of a pipeline.""" + latest_updates: Optional[List[UpdateStateInfo]] = None """Status of the latest updates for the pipeline. 
Ordered with the newest update first.""" @@ -1527,6 +1538,7 @@ def as_dict(self) -> dict: body = {} if self.cluster_id is not None: body['cluster_id'] = self.cluster_id if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name + if self.health is not None: body['health'] = self.health.value if self.latest_updates: body['latest_updates'] = [v.as_dict() for v in self.latest_updates] if self.name is not None: body['name'] = self.name if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id @@ -1539,6 +1551,7 @@ def from_dict(cls, d: Dict[str, any]) -> PipelineStateInfo: """Deserializes the PipelineStateInfo from a dictionary.""" return cls(cluster_id=d.get('cluster_id', None), creator_user_name=d.get('creator_user_name', None), + health=_enum(d, 'health', PipelineStateInfoHealth), latest_updates=_repeated_dict(d, 'latest_updates', UpdateStateInfo), name=d.get('name', None), pipeline_id=d.get('pipeline_id', None), @@ -1546,6 +1559,13 @@ def from_dict(cls, d: Dict[str, any]) -> PipelineStateInfo: state=_enum(d, 'state', PipelineState)) +class PipelineStateInfoHealth(Enum): + """The health of a pipeline.""" + + HEALTHY = 'HEALTHY' + UNHEALTHY = 'UNHEALTHY' + + @dataclass class PipelineTrigger: cron: Optional[CronTrigger] = None @@ -1584,7 +1604,7 @@ class SchemaSpec: table_configuration: Optional[TableSpecificConfig] = None """Configuration settings to control the ingestion of tables. These settings are applied to all tables in this schema and override the table_configuration defined in the - ManagedIngestionPipelineDefinition object.""" + IngestionPipelineDefinition object.""" def as_dict(self) -> dict: """Serializes the SchemaSpec into a dictionary suitable for use as a JSON request body.""" @@ -1796,7 +1816,7 @@ class TableSpec: table_configuration: Optional[TableSpecificConfig] = None """Configuration settings to control the ingestion of tables. 
These settings override the - table_configuration defined in the ManagedIngestionPipelineDefinition object and the SchemaSpec.""" + table_configuration defined in the IngestionPipelineDefinition object and the SchemaSpec.""" def as_dict(self) -> dict: """Serializes the TableSpec into a dictionary suitable for use as a JSON request body.""" @@ -2090,7 +2110,7 @@ def create(self, filters: Optional[Filters] = None, gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None, id: Optional[str] = None, - ingestion_definition: Optional[ManagedIngestionPipelineDefinition] = None, + ingestion_definition: Optional[IngestionPipelineDefinition] = None, libraries: Optional[List[PipelineLibrary]] = None, name: Optional[str] = None, notifications: Optional[List[Notifications]] = None, @@ -2131,7 +2151,7 @@ def create(self, The definition of a gateway pipeline to support CDC. :param id: str (optional) Unique identifier for this pipeline. - :param ingestion_definition: :class:`ManagedIngestionPipelineDefinition` (optional) + :param ingestion_definition: :class:`IngestionPipelineDefinition` (optional) The configuration for a managed ingestion pipeline. These settings cannot be used with the 'libraries', 'target' or 'catalog' settings. :param libraries: List[:class:`PipelineLibrary`] (optional) @@ -2498,7 +2518,7 @@ def update(self, filters: Optional[Filters] = None, gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None, id: Optional[str] = None, - ingestion_definition: Optional[ManagedIngestionPipelineDefinition] = None, + ingestion_definition: Optional[IngestionPipelineDefinition] = None, libraries: Optional[List[PipelineLibrary]] = None, name: Optional[str] = None, notifications: Optional[List[Notifications]] = None, @@ -2542,7 +2562,7 @@ def update(self, The definition of a gateway pipeline to support CDC. :param id: str (optional) Unique identifier for this pipeline. 
- :param ingestion_definition: :class:`ManagedIngestionPipelineDefinition` (optional) + :param ingestion_definition: :class:`IngestionPipelineDefinition` (optional) The configuration for a managed ingestion pipeline. These settings cannot be used with the 'libraries', 'target' or 'catalog' settings. :param libraries: List[:class:`PipelineLibrary`] (optional) diff --git a/databricks/sdk/service/serving.py b/databricks/sdk/service/serving.py index b1c43a92..97306b07 100755 --- a/databricks/sdk/service/serving.py +++ b/databricks/sdk/service/serving.py @@ -144,233 +144,6 @@ def from_dict(cls, d: Dict[str, any]) -> AnthropicConfig: anthropic_api_key_plaintext=d.get('anthropic_api_key_plaintext', None)) -@dataclass -class App: - name: str - """The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. - It must be unique within the workspace.""" - - active_deployment: Optional[AppDeployment] = None - """The active deployment of the app.""" - - create_time: Optional[str] = None - """The creation time of the app. Formatted timestamp in ISO 6801.""" - - creator: Optional[str] = None - """The email of the user that created the app.""" - - description: Optional[str] = None - """The description of the app.""" - - pending_deployment: Optional[AppDeployment] = None - """The pending deployment of the app.""" - - service_principal_id: Optional[int] = None - - service_principal_name: Optional[str] = None - - status: Optional[AppStatus] = None - - update_time: Optional[str] = None - """The update time of the app. 
Formatted timestamp in ISO 6801.""" - - updater: Optional[str] = None - """The email of the user that last updated the app.""" - - url: Optional[str] = None - """The URL of the app once it is deployed.""" - - def as_dict(self) -> dict: - """Serializes the App into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.active_deployment: body['active_deployment'] = self.active_deployment.as_dict() - if self.create_time is not None: body['create_time'] = self.create_time - if self.creator is not None: body['creator'] = self.creator - if self.description is not None: body['description'] = self.description - if self.name is not None: body['name'] = self.name - if self.pending_deployment: body['pending_deployment'] = self.pending_deployment.as_dict() - if self.service_principal_id is not None: body['service_principal_id'] = self.service_principal_id - if self.service_principal_name is not None: - body['service_principal_name'] = self.service_principal_name - if self.status: body['status'] = self.status.as_dict() - if self.update_time is not None: body['update_time'] = self.update_time - if self.updater is not None: body['updater'] = self.updater - if self.url is not None: body['url'] = self.url - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> App: - """Deserializes the App from a dictionary.""" - return cls(active_deployment=_from_dict(d, 'active_deployment', AppDeployment), - create_time=d.get('create_time', None), - creator=d.get('creator', None), - description=d.get('description', None), - name=d.get('name', None), - pending_deployment=_from_dict(d, 'pending_deployment', AppDeployment), - service_principal_id=d.get('service_principal_id', None), - service_principal_name=d.get('service_principal_name', None), - status=_from_dict(d, 'status', AppStatus), - update_time=d.get('update_time', None), - updater=d.get('updater', None), - url=d.get('url', None)) - - -@dataclass -class AppDeployment: - source_code_path: str - 
"""The workspace file system path of the source code used to create the app deployment. This is - different from `deployment_artifacts.source_code_path`, which is the path used by the deployed - app. The former refers to the original source code location of the app in the workspace during - deployment creation, whereas the latter provides a system generated stable snapshotted source - code path used by the deployment.""" - - mode: AppDeploymentMode - """The mode of which the deployment will manage the source code.""" - - create_time: Optional[str] = None - """The creation time of the deployment. Formatted timestamp in ISO 6801.""" - - creator: Optional[str] = None - """The email of the user creates the deployment.""" - - deployment_artifacts: Optional[AppDeploymentArtifacts] = None - """The deployment artifacts for an app.""" - - deployment_id: Optional[str] = None - """The unique id of the deployment.""" - - status: Optional[AppDeploymentStatus] = None - """Status and status message of the deployment""" - - update_time: Optional[str] = None - """The update time of the deployment. 
Formatted timestamp in ISO 6801.""" - - def as_dict(self) -> dict: - """Serializes the AppDeployment into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.create_time is not None: body['create_time'] = self.create_time - if self.creator is not None: body['creator'] = self.creator - if self.deployment_artifacts: body['deployment_artifacts'] = self.deployment_artifacts.as_dict() - if self.deployment_id is not None: body['deployment_id'] = self.deployment_id - if self.mode is not None: body['mode'] = self.mode.value - if self.source_code_path is not None: body['source_code_path'] = self.source_code_path - if self.status: body['status'] = self.status.as_dict() - if self.update_time is not None: body['update_time'] = self.update_time - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> AppDeployment: - """Deserializes the AppDeployment from a dictionary.""" - return cls(create_time=d.get('create_time', None), - creator=d.get('creator', None), - deployment_artifacts=_from_dict(d, 'deployment_artifacts', AppDeploymentArtifacts), - deployment_id=d.get('deployment_id', None), - mode=_enum(d, 'mode', AppDeploymentMode), - source_code_path=d.get('source_code_path', None), - status=_from_dict(d, 'status', AppDeploymentStatus), - update_time=d.get('update_time', None)) - - -@dataclass -class AppDeploymentArtifacts: - source_code_path: Optional[str] = None - """The snapshotted workspace file system path of the source code loaded by the deployed app.""" - - def as_dict(self) -> dict: - """Serializes the AppDeploymentArtifacts into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.source_code_path is not None: body['source_code_path'] = self.source_code_path - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> AppDeploymentArtifacts: - """Deserializes the AppDeploymentArtifacts from a dictionary.""" - return cls(source_code_path=d.get('source_code_path', None)) - - -class 
AppDeploymentMode(Enum): - - AUTO_SYNC = 'AUTO_SYNC' - SNAPSHOT = 'SNAPSHOT' - - -class AppDeploymentState(Enum): - - FAILED = 'FAILED' - IN_PROGRESS = 'IN_PROGRESS' - STOPPED = 'STOPPED' - SUCCEEDED = 'SUCCEEDED' - - -@dataclass -class AppDeploymentStatus: - message: Optional[str] = None - """Message corresponding with the deployment state.""" - - state: Optional[AppDeploymentState] = None - """State of the deployment.""" - - def as_dict(self) -> dict: - """Serializes the AppDeploymentStatus into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.message is not None: body['message'] = self.message - if self.state is not None: body['state'] = self.state.value - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> AppDeploymentStatus: - """Deserializes the AppDeploymentStatus from a dictionary.""" - return cls(message=d.get('message', None), state=_enum(d, 'state', AppDeploymentState)) - - -@dataclass -class AppEnvironment: - env: Optional[List[EnvVariable]] = None - - def as_dict(self) -> dict: - """Serializes the AppEnvironment into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.env: body['env'] = [v.as_dict() for v in self.env] - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> AppEnvironment: - """Deserializes the AppEnvironment from a dictionary.""" - return cls(env=_repeated_dict(d, 'env', EnvVariable)) - - -class AppState(Enum): - - CREATING = 'CREATING' - DELETED = 'DELETED' - DELETING = 'DELETING' - ERROR = 'ERROR' - IDLE = 'IDLE' - RUNNING = 'RUNNING' - STARTING = 'STARTING' - - -@dataclass -class AppStatus: - message: Optional[str] = None - """Message corresponding with the app state.""" - - state: Optional[AppState] = None - """State of the app.""" - - def as_dict(self) -> dict: - """Serializes the AppStatus into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.message is not None: body['message'] = self.message - if 
self.state is not None: body['state'] = self.state.value - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> AppStatus: - """Deserializes the AppStatus from a dictionary.""" - return cls(message=d.get('message', None), state=_enum(d, 'state', AppState)) - - @dataclass class AutoCaptureConfigInput: catalog_name: Optional[str] = None @@ -537,59 +310,6 @@ def from_dict(cls, d: Dict[str, any]) -> CohereConfig: cohere_api_key_plaintext=d.get('cohere_api_key_plaintext', None)) -@dataclass -class CreateAppDeploymentRequest: - source_code_path: str - """The workspace file system path of the source code used to create the app deployment. This is - different from `deployment_artifacts.source_code_path`, which is the path used by the deployed - app. The former refers to the original source code location of the app in the workspace during - deployment creation, whereas the latter provides a system generated stable snapshotted source - code path used by the deployment.""" - - mode: AppDeploymentMode - """The mode of which the deployment will manage the source code.""" - - app_name: Optional[str] = None - """The name of the app.""" - - def as_dict(self) -> dict: - """Serializes the CreateAppDeploymentRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.app_name is not None: body['app_name'] = self.app_name - if self.mode is not None: body['mode'] = self.mode.value - if self.source_code_path is not None: body['source_code_path'] = self.source_code_path - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> CreateAppDeploymentRequest: - """Deserializes the CreateAppDeploymentRequest from a dictionary.""" - return cls(app_name=d.get('app_name', None), - mode=_enum(d, 'mode', AppDeploymentMode), - source_code_path=d.get('source_code_path', None)) - - -@dataclass -class CreateAppRequest: - name: str - """The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. 
- It must be unique within the workspace.""" - - description: Optional[str] = None - """The description of the app.""" - - def as_dict(self) -> dict: - """Serializes the CreateAppRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.description is not None: body['description'] = self.description - if self.name is not None: body['name'] = self.name - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> CreateAppRequest: - """Deserializes the CreateAppRequest from a dictionary.""" - return cls(description=d.get('description', None), name=d.get('name', None)) - - @dataclass class CreateServingEndpoint: name: str @@ -950,28 +670,6 @@ def from_dict(cls, d: Dict[str, any]) -> EndpointTag: return cls(key=d.get('key', None), value=d.get('value', None)) -@dataclass -class EnvVariable: - name: Optional[str] = None - - value: Optional[str] = None - - value_from: Optional[str] = None - - def as_dict(self) -> dict: - """Serializes the EnvVariable into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.name is not None: body['name'] = self.name - if self.value is not None: body['value'] = self.value - if self.value_from is not None: body['value_from'] = self.value_from - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> EnvVariable: - """Deserializes the EnvVariable from a dictionary.""" - return cls(name=d.get('name', None), value=d.get('value', None), value_from=d.get('value_from', None)) - - @dataclass class ExportMetricsResponse: contents: Optional[BinaryIO] = None @@ -1214,48 +912,6 @@ def from_dict(cls, d: Dict[str, any]) -> GoogleCloudVertexAiConfig: region=d.get('region', None)) -@dataclass -class ListAppDeploymentsResponse: - app_deployments: Optional[List[AppDeployment]] = None - """Deployment history of the app.""" - - next_page_token: Optional[str] = None - """Pagination token to request the next page of apps.""" - - def as_dict(self) -> dict: - """Serializes 
the ListAppDeploymentsResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.app_deployments: body['app_deployments'] = [v.as_dict() for v in self.app_deployments] - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> ListAppDeploymentsResponse: - """Deserializes the ListAppDeploymentsResponse from a dictionary.""" - return cls(app_deployments=_repeated_dict(d, 'app_deployments', AppDeployment), - next_page_token=d.get('next_page_token', None)) - - -@dataclass -class ListAppsResponse: - apps: Optional[List[App]] = None - - next_page_token: Optional[str] = None - """Pagination token to request the next page of apps.""" - - def as_dict(self) -> dict: - """Serializes the ListAppsResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.apps: body['apps'] = [v.as_dict() for v in self.apps] - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> ListAppsResponse: - """Deserializes the ListAppsResponse from a dictionary.""" - return cls(apps=_repeated_dict(d, 'apps', App), next_page_token=d.get('next_page_token', None)) - - @dataclass class ListEndpointsResponse: endpoints: Optional[List[ServingEndpoint]] = None @@ -2560,32 +2216,6 @@ def from_dict(cls, d: Dict[str, any]) -> ServingEndpointPermissionsRequest: serving_endpoint_id=d.get('serving_endpoint_id', None)) -@dataclass -class StartAppRequest: - name: Optional[str] = None - """The name of the app.""" - - -@dataclass -class StopAppRequest: - name: Optional[str] = None - """The name of the app.""" - - -@dataclass -class StopAppResponse: - - def as_dict(self) -> dict: - """Serializes the StopAppResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, 
any]) -> StopAppResponse: - """Deserializes the StopAppResponse from a dictionary.""" - return cls() - - @dataclass class TrafficConfig: routes: Optional[List[Route]] = None @@ -2603,28 +2233,6 @@ def from_dict(cls, d: Dict[str, any]) -> TrafficConfig: return cls(routes=_repeated_dict(d, 'routes', Route)) -@dataclass -class UpdateAppRequest: - name: str - """The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. - It must be unique within the workspace.""" - - description: Optional[str] = None - """The description of the app.""" - - def as_dict(self) -> dict: - """Serializes the UpdateAppRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.description is not None: body['description'] = self.description - if self.name is not None: body['name'] = self.name - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> UpdateAppRequest: - """Deserializes the UpdateAppRequest from a dictionary.""" - return cls(description=d.get('description', None), name=d.get('name', None)) - - @dataclass class V1ResponseChoiceElement: finish_reason: Optional[str] = None @@ -2662,333 +2270,6 @@ def from_dict(cls, d: Dict[str, any]) -> V1ResponseChoiceElement: text=d.get('text', None)) -class AppsAPI: - """Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend - Databricks services, and enable users to interact through single sign-on.""" - - def __init__(self, api_client): - self._api = api_client - - def wait_get_app_idle(self, - name: str, - timeout=timedelta(minutes=20), - callback: Optional[Callable[[App], None]] = None) -> App: - deadline = time.time() + timeout.total_seconds() - target_states = (AppState.IDLE, ) - failure_states = (AppState.ERROR, ) - status_message = 'polling...' 
- attempt = 1 - while time.time() < deadline: - poll = self.get(name=name) - status = poll.status.state - status_message = f'current status: {status}' - if poll.status: - status_message = poll.status.message - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f'failed to reach IDLE, got {status}: {status_message}' - raise OperationFailed(msg) - prefix = f"name={name}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f'timed out after {timeout}: {status_message}') - - def wait_get_deployment_app_succeeded( - self, - app_name: str, - deployment_id: str, - timeout=timedelta(minutes=20), - callback: Optional[Callable[[AppDeployment], None]] = None) -> AppDeployment: - deadline = time.time() + timeout.total_seconds() - target_states = (AppDeploymentState.SUCCEEDED, ) - failure_states = (AppDeploymentState.FAILED, ) - status_message = 'polling...' 
- attempt = 1 - while time.time() < deadline: - poll = self.get_deployment(app_name=app_name, deployment_id=deployment_id) - status = poll.status.state - status_message = f'current status: {status}' - if poll.status: - status_message = poll.status.message - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f'failed to reach SUCCEEDED, got {status}: {status_message}' - raise OperationFailed(msg) - prefix = f"app_name={app_name}, deployment_id={deployment_id}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f'timed out after {timeout}: {status_message}') - - def create(self, name: str, *, description: Optional[str] = None) -> Wait[App]: - """Create an app. - - Creates a new app. - - :param name: str - The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It - must be unique within the workspace. - :param description: str (optional) - The description of the app. - - :returns: - Long-running operation waiter for :class:`App`. - See :method:wait_get_app_idle for more details. - """ - body = {} - if description is not None: body['description'] = description - if name is not None: body['name'] = name - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - - op_response = self._api.do('POST', '/api/2.0/preview/apps', body=body, headers=headers) - return Wait(self.wait_get_app_idle, response=App.from_dict(op_response), name=op_response['name']) - - def create_and_wait(self, - name: str, - *, - description: Optional[str] = None, - timeout=timedelta(minutes=20)) -> App: - return self.create(description=description, name=name).result(timeout=timeout) - - def delete(self, name: str): - """Delete an app. - - Deletes an app. - - :param name: str - The name of the app. 
- - - """ - - headers = {'Accept': 'application/json', } - - self._api.do('DELETE', f'/api/2.0/preview/apps/{name}', headers=headers) - - def deploy(self, app_name: str, source_code_path: str, mode: AppDeploymentMode) -> Wait[AppDeployment]: - """Create an app deployment. - - Creates an app deployment for the app with the supplied name. - - :param app_name: str - The name of the app. - :param source_code_path: str - The workspace file system path of the source code used to create the app deployment. This is - different from `deployment_artifacts.source_code_path`, which is the path used by the deployed app. - The former refers to the original source code location of the app in the workspace during deployment - creation, whereas the latter provides a system generated stable snapshotted source code path used by - the deployment. - :param mode: :class:`AppDeploymentMode` - The mode of which the deployment will manage the source code. - - :returns: - Long-running operation waiter for :class:`AppDeployment`. - See :method:wait_get_deployment_app_succeeded for more details. - """ - body = {} - if mode is not None: body['mode'] = mode.value - if source_code_path is not None: body['source_code_path'] = source_code_path - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - - op_response = self._api.do('POST', - f'/api/2.0/preview/apps/{app_name}/deployments', - body=body, - headers=headers) - return Wait(self.wait_get_deployment_app_succeeded, - response=AppDeployment.from_dict(op_response), - app_name=app_name, - deployment_id=op_response['deployment_id']) - - def deploy_and_wait(self, - app_name: str, - source_code_path: str, - mode: AppDeploymentMode, - timeout=timedelta(minutes=20)) -> AppDeployment: - return self.deploy(app_name=app_name, mode=mode, - source_code_path=source_code_path).result(timeout=timeout) - - def get(self, name: str) -> App: - """Get an app. - - Retrieves information for the app with the supplied name. 
- - :param name: str - The name of the app. - - :returns: :class:`App` - """ - - headers = {'Accept': 'application/json', } - - res = self._api.do('GET', f'/api/2.0/preview/apps/{name}', headers=headers) - return App.from_dict(res) - - def get_deployment(self, app_name: str, deployment_id: str) -> AppDeployment: - """Get an app deployment. - - Retrieves information for the app deployment with the supplied name and deployment id. - - :param app_name: str - The name of the app. - :param deployment_id: str - The unique id of the deployment. - - :returns: :class:`AppDeployment` - """ - - headers = {'Accept': 'application/json', } - - res = self._api.do('GET', - f'/api/2.0/preview/apps/{app_name}/deployments/{deployment_id}', - headers=headers) - return AppDeployment.from_dict(res) - - def get_environment(self, name: str) -> AppEnvironment: - """Get app environment. - - Retrieves app environment. - - :param name: str - The name of the app. - - :returns: :class:`AppEnvironment` - """ - - headers = {'Accept': 'application/json', } - - res = self._api.do('GET', f'/api/2.0/preview/apps/{name}/environment', headers=headers) - return AppEnvironment.from_dict(res) - - def list(self, *, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[App]: - """List apps. - - Lists all apps in the workspace. - - :param page_size: int (optional) - Upper bound for items returned. - :param page_token: str (optional) - Pagination token to go to the next page of apps. Requests first page if absent. 
- - :returns: Iterator over :class:`App` - """ - - query = {} - if page_size is not None: query['page_size'] = page_size - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json', } - - while True: - json = self._api.do('GET', '/api/2.0/preview/apps', query=query, headers=headers) - if 'apps' in json: - for v in json['apps']: - yield App.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - def list_deployments(self, - app_name: str, - *, - page_size: Optional[int] = None, - page_token: Optional[str] = None) -> Iterator[AppDeployment]: - """List app deployments. - - Lists all app deployments for the app with the supplied name. - - :param app_name: str - The name of the app. - :param page_size: int (optional) - Upper bound for items returned. - :param page_token: str (optional) - Pagination token to go to the next page of apps. Requests first page if absent. - - :returns: Iterator over :class:`AppDeployment` - """ - - query = {} - if page_size is not None: query['page_size'] = page_size - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json', } - - while True: - json = self._api.do('GET', - f'/api/2.0/preview/apps/{app_name}/deployments', - query=query, - headers=headers) - if 'app_deployments' in json: - for v in json['app_deployments']: - yield AppDeployment.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - def start(self, name: str) -> AppDeployment: - """Start an app. - - Start the last active deployment of the app in the workspace. - - :param name: str - The name of the app. 
- - :returns: :class:`AppDeployment` - """ - - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - - res = self._api.do('POST', f'/api/2.0/preview/apps/{name}/start', headers=headers) - return AppDeployment.from_dict(res) - - def stop(self, name: str): - """Stop an app. - - Stops the active deployment of the app in the workspace. - - :param name: str - The name of the app. - - - """ - - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - - self._api.do('POST', f'/api/2.0/preview/apps/{name}/stop', headers=headers) - - def update(self, name: str, *, description: Optional[str] = None) -> App: - """Update an app. - - Updates the app with the supplied name. - - :param name: str - The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It - must be unique within the workspace. - :param description: str (optional) - The description of the app. - - :returns: :class:`App` - """ - body = {} - if description is not None: body['description'] = description - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - - res = self._api.do('PATCH', f'/api/2.0/preview/apps/{name}', body=body, headers=headers) - return App.from_dict(res) - - class ServingEndpointsAPI: """The Serving Endpoints API allows you to create, update, and delete model serving endpoints. diff --git a/databricks/sdk/service/sharing.py b/databricks/sdk/service/sharing.py index fc411ff8..23a31e77 100755 --- a/databricks/sdk/service/sharing.py +++ b/databricks/sdk/service/sharing.py @@ -483,6 +483,9 @@ class CreateRecipient: when the __authentication_type__ is **DATABRICKS**. 
The identifier is of format __cloud__:__region__:__metastore-uuid__.""" + expiration_time: Optional[int] = None + """Expiration timestamp of the token, in epoch milliseconds.""" + ip_access_list: Optional[IpAccessList] = None """IP Access List""" @@ -503,6 +506,7 @@ def as_dict(self) -> dict: if self.comment is not None: body['comment'] = self.comment if self.data_recipient_global_metastore_id is not None: body['data_recipient_global_metastore_id'] = self.data_recipient_global_metastore_id + if self.expiration_time is not None: body['expiration_time'] = self.expiration_time if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict() if self.name is not None: body['name'] = self.name if self.owner is not None: body['owner'] = self.owner @@ -516,6 +520,7 @@ def from_dict(cls, d: Dict[str, any]) -> CreateRecipient: return cls(authentication_type=_enum(d, 'authentication_type', AuthenticationType), comment=d.get('comment', None), data_recipient_global_metastore_id=d.get('data_recipient_global_metastore_id', None), + expiration_time=d.get('expiration_time', None), ip_access_list=_from_dict(d, 'ip_access_list', IpAccessList), name=d.get('name', None), owner=d.get('owner', None), @@ -580,19 +585,25 @@ def from_dict(cls, d: Dict[str, any]) -> GetActivationUrlInfoResponse: @dataclass class GetRecipientSharePermissionsResponse: + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. 
+ __page_token__ should be set to this value for the next request (for the next page of results).""" + permissions_out: Optional[List[ShareToPrivilegeAssignment]] = None """An array of data share permissions for a recipient.""" def as_dict(self) -> dict: """Serializes the GetRecipientSharePermissionsResponse into a dictionary suitable for use as a JSON request body.""" body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token if self.permissions_out: body['permissions_out'] = [v.as_dict() for v in self.permissions_out] return body @classmethod def from_dict(cls, d: Dict[str, any]) -> GetRecipientSharePermissionsResponse: """Deserializes the GetRecipientSharePermissionsResponse from a dictionary.""" - return cls(permissions_out=_repeated_dict(d, 'permissions_out', ShareToPrivilegeAssignment)) + return cls(next_page_token=d.get('next_page_token', None), + permissions_out=_repeated_dict(d, 'permissions_out', ShareToPrivilegeAssignment)) @dataclass @@ -637,70 +648,94 @@ def from_dict(cls, d: Dict[str, any]) -> ListCleanRoomsResponse: @dataclass class ListProviderSharesResponse: + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. 
+ __page_token__ should be set to this value for the next request (for the next page of results).""" + shares: Optional[List[ProviderShare]] = None """An array of provider shares.""" def as_dict(self) -> dict: """Serializes the ListProviderSharesResponse into a dictionary suitable for use as a JSON request body.""" body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token if self.shares: body['shares'] = [v.as_dict() for v in self.shares] return body @classmethod def from_dict(cls, d: Dict[str, any]) -> ListProviderSharesResponse: """Deserializes the ListProviderSharesResponse from a dictionary.""" - return cls(shares=_repeated_dict(d, 'shares', ProviderShare)) + return cls(next_page_token=d.get('next_page_token', None), + shares=_repeated_dict(d, 'shares', ProviderShare)) @dataclass class ListProvidersResponse: + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. + __page_token__ should be set to this value for the next request (for the next page of results).""" + providers: Optional[List[ProviderInfo]] = None """An array of provider information objects.""" def as_dict(self) -> dict: """Serializes the ListProvidersResponse into a dictionary suitable for use as a JSON request body.""" body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token if self.providers: body['providers'] = [v.as_dict() for v in self.providers] return body @classmethod def from_dict(cls, d: Dict[str, any]) -> ListProvidersResponse: """Deserializes the ListProvidersResponse from a dictionary.""" - return cls(providers=_repeated_dict(d, 'providers', ProviderInfo)) + return cls(next_page_token=d.get('next_page_token', None), + providers=_repeated_dict(d, 'providers', ProviderInfo)) @dataclass class ListRecipientsResponse: + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. 
Absent if there are no more pages. + __page_token__ should be set to this value for the next request (for the next page of results).""" + recipients: Optional[List[RecipientInfo]] = None """An array of recipient information objects.""" def as_dict(self) -> dict: """Serializes the ListRecipientsResponse into a dictionary suitable for use as a JSON request body.""" body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token if self.recipients: body['recipients'] = [v.as_dict() for v in self.recipients] return body @classmethod def from_dict(cls, d: Dict[str, any]) -> ListRecipientsResponse: """Deserializes the ListRecipientsResponse from a dictionary.""" - return cls(recipients=_repeated_dict(d, 'recipients', RecipientInfo)) + return cls(next_page_token=d.get('next_page_token', None), + recipients=_repeated_dict(d, 'recipients', RecipientInfo)) @dataclass class ListSharesResponse: + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. 
+ __page_token__ should be set to this value for the next request (for the next page of results).""" + shares: Optional[List[ShareInfo]] = None """An array of data share information objects.""" def as_dict(self) -> dict: """Serializes the ListSharesResponse into a dictionary suitable for use as a JSON request body.""" body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token if self.shares: body['shares'] = [v.as_dict() for v in self.shares] return body @classmethod def from_dict(cls, d: Dict[str, any]) -> ListSharesResponse: """Deserializes the ListSharesResponse from a dictionary.""" - return cls(shares=_repeated_dict(d, 'shares', ShareInfo)) + return cls(next_page_token=d.get('next_page_token', None), + shares=_repeated_dict(d, 'shares', ShareInfo)) @dataclass @@ -1526,6 +1561,9 @@ class UpdateRecipient: comment: Optional[str] = None """Description about the recipient.""" + expiration_time: Optional[int] = None + """Expiration timestamp of the token, in epoch milliseconds.""" + ip_access_list: Optional[IpAccessList] = None """IP Access List""" @@ -1547,6 +1585,7 @@ def as_dict(self) -> dict: """Serializes the UpdateRecipient into a dictionary suitable for use as a JSON request body.""" body = {} if self.comment is not None: body['comment'] = self.comment + if self.expiration_time is not None: body['expiration_time'] = self.expiration_time if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict() if self.name is not None: body['name'] = self.name if self.new_name is not None: body['new_name'] = self.new_name @@ -1558,6 +1597,7 @@ def as_dict(self) -> dict: def from_dict(cls, d: Dict[str, any]) -> UpdateRecipient: """Deserializes the UpdateRecipient from a dictionary.""" return cls(comment=d.get('comment', None), + expiration_time=d.get('expiration_time', None), ip_access_list=_from_dict(d, 'ip_access_list', IpAccessList), name=d.get('name', None), new_name=d.get('new_name', None), @@ -1626,20 +1666,37 @@ 
class UpdateSharePermissions: changes: Optional[List[catalog.PermissionsChange]] = None """Array of permission changes.""" + max_results: Optional[int] = None + """Maximum number of permissions to return. - when set to 0, the page length is set to a server + configured value (recommended); - when set to a value greater than 0, the page length is the + minimum of this value and a server configured value; - when set to a value less than 0, an + invalid parameter error is returned; - If not set, all valid permissions are returned (not + recommended). - Note: The number of returned permissions might be less than the specified + max_results size, even zero. The only definitive indication that no further permissions can be + fetched is when the next_page_token is unset from the response.""" + name: Optional[str] = None """The name of the share.""" + page_token: Optional[str] = None + """Opaque pagination token to go to next page based on previous query.""" + def as_dict(self) -> dict: """Serializes the UpdateSharePermissions into a dictionary suitable for use as a JSON request body.""" body = {} if self.changes: body['changes'] = [v.as_dict() for v in self.changes] + if self.max_results is not None: body['max_results'] = self.max_results if self.name is not None: body['name'] = self.name + if self.page_token is not None: body['page_token'] = self.page_token return body @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateSharePermissions: """Deserializes the UpdateSharePermissions from a dictionary.""" - return cls(changes=_repeated_dict(d, 'changes', catalog.PermissionsChange), name=d.get('name', None)) + return cls(changes=_repeated_dict(d, 'changes', catalog.PermissionsChange), + max_results=d.get('max_results', None), + name=d.get('name', None), + page_token=d.get('page_token', None)) class CleanRoomsAPI: @@ -1865,7 +1922,11 @@ def get(self, name: str) -> ProviderInfo: res = self._api.do('GET', f'/api/2.1/unity-catalog/providers/{name}', headers=headers) 
return ProviderInfo.from_dict(res) - def list(self, *, data_provider_global_metastore_id: Optional[str] = None) -> Iterator[ProviderInfo]: + def list(self, + *, + data_provider_global_metastore_id: Optional[str] = None, + max_results: Optional[int] = None, + page_token: Optional[str] = None) -> Iterator[ProviderInfo]: """List providers. Gets an array of available authentication providers. The caller must either be a metastore admin or @@ -1875,6 +1936,16 @@ def list(self, *, data_provider_global_metastore_id: Optional[str] = None) -> It :param data_provider_global_metastore_id: str (optional) If not provided, all providers will be returned. If no providers exist with this ID, no results will be returned. + :param max_results: int (optional) + Maximum number of providers to return. - when set to 0, the page length is set to a server + configured value (recommended); - when set to a value greater than 0, the page length is the minimum + of this value and a server configured value; - when set to a value less than 0, an invalid parameter + error is returned; - If not set, all valid providers are returned (not recommended). - Note: The + number of returned providers might be less than the specified max_results size, even zero. The only + definitive indication that no further providers can be fetched is when the next_page_token is unset + from the response. + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. 
:returns: Iterator over :class:`ProviderInfo` """ @@ -1882,13 +1953,24 @@ def list(self, *, data_provider_global_metastore_id: Optional[str] = None) -> It query = {} if data_provider_global_metastore_id is not None: query['data_provider_global_metastore_id'] = data_provider_global_metastore_id + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token headers = {'Accept': 'application/json', } - json = self._api.do('GET', '/api/2.1/unity-catalog/providers', query=query, headers=headers) - parsed = ListProvidersResponse.from_dict(json).providers - return parsed if parsed is not None else [] + while True: + json = self._api.do('GET', '/api/2.1/unity-catalog/providers', query=query, headers=headers) + if 'providers' in json: + for v in json['providers']: + yield ProviderInfo.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] - def list_shares(self, name: str) -> Iterator[ProviderShare]: + def list_shares(self, + name: str, + *, + max_results: Optional[int] = None, + page_token: Optional[str] = None) -> Iterator[ProviderShare]: """List shares by Provider. Gets an array of a specified provider's shares within the metastore where: @@ -1897,13 +1979,29 @@ def list_shares(self, name: str) -> Iterator[ProviderShare]: :param name: str Name of the provider in which to list shares. + :param max_results: int (optional) + Maximum number of shares to return. - when set to 0, the page length is set to a server configured + value (recommended); - when set to a value greater than 0, the page length is the minimum of this + value and a server configured value; - when set to a value less than 0, an invalid parameter error + is returned; - If not set, all valid shares are returned (not recommended). - Note: The number of + returned shares might be less than the specified max_results size, even zero. 
The only definitive + indication that no further shares can be fetched is when the next_page_token is unset from the + response. + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. :returns: Iterator over :class:`ProviderShare` """ + query = {} + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token headers = {'Accept': 'application/json', } - json = self._api.do('GET', f'/api/2.1/unity-catalog/providers/{name}/shares', headers=headers) + json = self._api.do('GET', + f'/api/2.1/unity-catalog/providers/{name}/shares', + query=query, + headers=headers) parsed = ListProviderSharesResponse.from_dict(json).shares return parsed if parsed is not None else [] @@ -2016,6 +2114,7 @@ def create(self, *, comment: Optional[str] = None, data_recipient_global_metastore_id: Optional[str] = None, + expiration_time: Optional[int] = None, ip_access_list: Optional[IpAccessList] = None, owner: Optional[str] = None, properties_kvpairs: Optional[SecurablePropertiesKvPairs] = None, @@ -2035,6 +2134,8 @@ def create(self, The global Unity Catalog metastore id provided by the data recipient. This field is required when the __authentication_type__ is **DATABRICKS**. The identifier is of format __cloud__:__region__:__metastore-uuid__. + :param expiration_time: int (optional) + Expiration timestamp of the token, in epoch milliseconds. 
:param ip_access_list: :class:`IpAccessList` (optional) IP Access List :param owner: str (optional) @@ -2052,6 +2153,7 @@ def create(self, if comment is not None: body['comment'] = comment if data_recipient_global_metastore_id is not None: body['data_recipient_global_metastore_id'] = data_recipient_global_metastore_id + if expiration_time is not None: body['expiration_time'] = expiration_time if ip_access_list is not None: body['ip_access_list'] = ip_access_list.as_dict() if name is not None: body['name'] = name if owner is not None: body['owner'] = owner @@ -2095,7 +2197,11 @@ def get(self, name: str) -> RecipientInfo: res = self._api.do('GET', f'/api/2.1/unity-catalog/recipients/{name}', headers=headers) return RecipientInfo.from_dict(res) - def list(self, *, data_recipient_global_metastore_id: Optional[str] = None) -> Iterator[RecipientInfo]: + def list(self, + *, + data_recipient_global_metastore_id: Optional[str] = None, + max_results: Optional[int] = None, + page_token: Optional[str] = None) -> Iterator[RecipientInfo]: """List share recipients. Gets an array of all share recipients within the current metastore where: @@ -2106,6 +2212,16 @@ def list(self, *, data_recipient_global_metastore_id: Optional[str] = None) -> I :param data_recipient_global_metastore_id: str (optional) If not provided, all recipients will be returned. If no recipients exist with this ID, no results will be returned. + :param max_results: int (optional) + Maximum number of recipients to return. - when set to 0, the page length is set to a server + configured value (recommended); - when set to a value greater than 0, the page length is the minimum + of this value and a server configured value; - when set to a value less than 0, an invalid parameter + error is returned; - If not set, all valid recipients are returned (not recommended). - Note: The + number of returned recipients might be less than the specified max_results size, even zero. 
The only + definitive indication that no further recipients can be fetched is when the next_page_token is unset + from the response. + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. :returns: Iterator over :class:`RecipientInfo` """ @@ -2113,11 +2229,18 @@ def list(self, *, data_recipient_global_metastore_id: Optional[str] = None) -> I query = {} if data_recipient_global_metastore_id is not None: query['data_recipient_global_metastore_id'] = data_recipient_global_metastore_id + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token headers = {'Accept': 'application/json', } - json = self._api.do('GET', '/api/2.1/unity-catalog/recipients', query=query, headers=headers) - parsed = ListRecipientsResponse.from_dict(json).recipients - return parsed if parsed is not None else [] + while True: + json = self._api.do('GET', '/api/2.1/unity-catalog/recipients', query=query, headers=headers) + if 'recipients' in json: + for v in json['recipients']: + yield RecipientInfo.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] def rotate_token(self, name: str, existing_token_expire_in_seconds: int) -> RecipientInfo: """Rotate a token. @@ -2145,7 +2268,11 @@ def rotate_token(self, name: str, existing_token_expire_in_seconds: int) -> Reci headers=headers) return RecipientInfo.from_dict(res) - def share_permissions(self, name: str) -> GetRecipientSharePermissionsResponse: + def share_permissions(self, + name: str, + *, + max_results: Optional[int] = None, + page_token: Optional[str] = None) -> GetRecipientSharePermissionsResponse: """Get recipient share permissions. Gets the share permissions for the specified Recipient. 
The caller must be a metastore admin or the @@ -2153,14 +2280,28 @@ def share_permissions(self, name: str) -> GetRecipientSharePermissionsResponse: :param name: str The name of the Recipient. + :param max_results: int (optional) + Maximum number of permissions to return. - when set to 0, the page length is set to a server + configured value (recommended); - when set to a value greater than 0, the page length is the minimum + of this value and a server configured value; - when set to a value less than 0, an invalid parameter + error is returned; - If not set, all valid permissions are returned (not recommended). - Note: The + number of returned permissions might be less than the specified max_results size, even zero. The + only definitive indication that no further permissions can be fetched is when the next_page_token is + unset from the response. + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. :returns: :class:`GetRecipientSharePermissionsResponse` """ + query = {} + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token headers = {'Accept': 'application/json', } res = self._api.do('GET', f'/api/2.1/unity-catalog/recipients/{name}/share-permissions', + query=query, headers=headers) return GetRecipientSharePermissionsResponse.from_dict(res) @@ -2168,6 +2309,7 @@ def update(self, name: str, *, comment: Optional[str] = None, + expiration_time: Optional[int] = None, ip_access_list: Optional[IpAccessList] = None, new_name: Optional[str] = None, owner: Optional[str] = None, @@ -2182,6 +2324,8 @@ def update(self, Name of the recipient. :param comment: str (optional) Description about the recipient. + :param expiration_time: int (optional) + Expiration timestamp of the token, in epoch milliseconds. 
:param ip_access_list: :class:`IpAccessList` (optional) IP Access List :param new_name: str (optional) @@ -2197,6 +2341,7 @@ def update(self, """ body = {} if comment is not None: body['comment'] = comment + if expiration_time is not None: body['expiration_time'] = expiration_time if ip_access_list is not None: body['ip_access_list'] = ip_access_list.as_dict() if new_name is not None: body['new_name'] = new_name if owner is not None: body['owner'] = owner @@ -2279,22 +2424,48 @@ def get(self, name: str, *, include_shared_data: Optional[bool] = None) -> Share res = self._api.do('GET', f'/api/2.1/unity-catalog/shares/{name}', query=query, headers=headers) return ShareInfo.from_dict(res) - def list(self) -> Iterator[ShareInfo]: + def list(self, + *, + max_results: Optional[int] = None, + page_token: Optional[str] = None) -> Iterator[ShareInfo]: """List shares. Gets an array of data object shares from the metastore. The caller must be a metastore admin or the owner of the share. There is no guarantee of a specific ordering of the elements in the array. + :param max_results: int (optional) + Maximum number of shares to return. - when set to 0, the page length is set to a server configured + value (recommended); - when set to a value greater than 0, the page length is the minimum of this + value and a server configured value; - when set to a value less than 0, an invalid parameter error + is returned; - If not set, all valid shares are returned (not recommended). - Note: The number of + returned shares might be less than the specified max_results size, even zero. The only definitive + indication that no further shares can be fetched is when the next_page_token is unset from the + response. + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. 
+ :returns: Iterator over :class:`ShareInfo` """ + query = {} + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token headers = {'Accept': 'application/json', } - json = self._api.do('GET', '/api/2.1/unity-catalog/shares', headers=headers) - parsed = ListSharesResponse.from_dict(json).shares - return parsed if parsed is not None else [] + while True: + json = self._api.do('GET', '/api/2.1/unity-catalog/shares', query=query, headers=headers) + if 'shares' in json: + for v in json['shares']: + yield ShareInfo.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] - def share_permissions(self, name: str) -> catalog.PermissionsList: + def share_permissions(self, + name: str, + *, + max_results: Optional[int] = None, + page_token: Optional[str] = None) -> catalog.PermissionsList: """Get permissions. Gets the permissions for a data share from the metastore. The caller must be a metastore admin or the @@ -2302,13 +2473,29 @@ def share_permissions(self, name: str) -> catalog.PermissionsList: :param name: str The name of the share. + :param max_results: int (optional) + Maximum number of permissions to return. - when set to 0, the page length is set to a server + configured value (recommended); - when set to a value greater than 0, the page length is the minimum + of this value and a server configured value; - when set to a value less than 0, an invalid parameter + error is returned; - If not set, all valid permissions are returned (not recommended). - Note: The + number of returned permissions might be less than the specified max_results size, even zero. The + only definitive indication that no further permissions can be fetched is when the next_page_token is + unset from the response. + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. 
:returns: :class:`PermissionsList` """ + query = {} + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token headers = {'Accept': 'application/json', } - res = self._api.do('GET', f'/api/2.1/unity-catalog/shares/{name}/permissions', headers=headers) + res = self._api.do('GET', + f'/api/2.1/unity-catalog/shares/{name}/permissions', + query=query, + headers=headers) return PermissionsList.from_dict(res) def update(self, @@ -2363,7 +2550,12 @@ def update(self, res = self._api.do('PATCH', f'/api/2.1/unity-catalog/shares/{name}', body=body, headers=headers) return ShareInfo.from_dict(res) - def update_permissions(self, name: str, *, changes: Optional[List[catalog.PermissionsChange]] = None): + def update_permissions(self, + name: str, + *, + changes: Optional[List[catalog.PermissionsChange]] = None, + max_results: Optional[int] = None, + page_token: Optional[str] = None): """Update permissions. Updates the permissions for a data share in the metastore. The caller must be a metastore admin or an @@ -2376,11 +2568,28 @@ def update_permissions(self, name: str, *, changes: Optional[List[catalog.Permis The name of the share. :param changes: List[:class:`PermissionsChange`] (optional) Array of permission changes. + :param max_results: int (optional) + Maximum number of permissions to return. - when set to 0, the page length is set to a server + configured value (recommended); - when set to a value greater than 0, the page length is the minimum + of this value and a server configured value; - when set to a value less than 0, an invalid parameter + error is returned; - If not set, all valid permissions are returned (not recommended). - Note: The + number of returned permissions might be less than the specified max_results size, even zero. The + only definitive indication that no further permissions can be fetched is when the next_page_token is + unset from the response. 
+ :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. """ body = {} + query = {} if changes is not None: body['changes'] = [v.as_dict() for v in changes] + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - self._api.do('PATCH', f'/api/2.1/unity-catalog/shares/{name}/permissions', body=body, headers=headers) + self._api.do('PATCH', + f'/api/2.1/unity-catalog/shares/{name}/permissions', + query=query, + body=body, + headers=headers) diff --git a/databricks/sdk/service/sql.py b/databricks/sdk/service/sql.py index bcb46bb5..f2526909 100755 --- a/databricks/sdk/service/sql.py +++ b/databricks/sdk/service/sql.py @@ -1483,26 +1483,6 @@ def from_dict(cls, d: Dict[str, any]) -> DeleteWarehouseResponse: class Disposition(Enum): - """The fetch disposition provides two modes of fetching results: `INLINE` and `EXTERNAL_LINKS`. - - Statements executed with `INLINE` disposition will return result data inline, in `JSON_ARRAY` - format, in a series of chunks. If a given statement produces a result set with a size larger - than 25 MiB, that statement execution is aborted, and no result set will be available. - - **NOTE** Byte limits are computed based upon internal representations of the result set data, - and might not match the sizes visible in JSON responses. - - Statements executed with `EXTERNAL_LINKS` disposition will return result data as external links: - URLs that point to cloud storage internal to the workspace. Using `EXTERNAL_LINKS` disposition - allows statements to generate arbitrarily sized result sets for fetching up to 100 GiB. The - resulting links have two important properties: - - 1. 
They point to resources _external_ to the Databricks compute; therefore any associated - authentication information (typically a personal access token, OAuth token, or similar) _must be - removed_ when fetching from these links. - - 2. These are presigned URLs with a specific expiration, indicated in the response. The behavior - when attempting to use an expired link is cloud specific.""" EXTERNAL_LINKS = 'EXTERNAL_LINKS' INLINE = 'INLINE' @@ -2019,26 +1999,6 @@ class ExecuteStatementRequest: [`USE CATALOG`]: https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-catalog.html""" disposition: Optional[Disposition] = None - """The fetch disposition provides two modes of fetching results: `INLINE` and `EXTERNAL_LINKS`. - - Statements executed with `INLINE` disposition will return result data inline, in `JSON_ARRAY` - format, in a series of chunks. If a given statement produces a result set with a size larger - than 25 MiB, that statement execution is aborted, and no result set will be available. - - **NOTE** Byte limits are computed based upon internal representations of the result set data, - and might not match the sizes visible in JSON responses. - - Statements executed with `EXTERNAL_LINKS` disposition will return result data as external links: - URLs that point to cloud storage internal to the workspace. Using `EXTERNAL_LINKS` disposition - allows statements to generate arbitrarily sized result sets for fetching up to 100 GiB. The - resulting links have two important properties: - - 1. They point to resources _external_ to the Databricks compute; therefore any associated - authentication information (typically a personal access token, OAuth token, or similar) _must be - removed_ when fetching from these links. - - 2. These are presigned URLs with a specific expiration, indicated in the response. 
The behavior - when attempting to use an expired link is cloud specific.""" format: Optional[Format] = None """Statement execution supports three result formats: `JSON_ARRAY` (default), `ARROW_STREAM`, and @@ -2191,9 +2151,6 @@ class ExternalLink: which point a new `external_link` must be requested.""" external_link: Optional[str] = None - """A presigned URL pointing to a chunk of result data, hosted by an external service, with a short - expiration time (<= 15 minutes). As this URL contains a temporary credential, it should be - considered sensitive and the client should not expose this URL in a log.""" http_headers: Optional[Dict[str, str]] = None """HTTP headers that must be included with a GET request to the `external_link`. Each header is @@ -4203,12 +4160,6 @@ def from_dict(cls, d: Dict[str, any]) -> RestoreResponse: @dataclass class ResultData: - """Contains the result data of a single chunk when using `INLINE` disposition. When using - `EXTERNAL_LINKS` disposition, the array `external_links` is used instead to provide presigned - URLs to the result data in cloud storage. Exactly one of these alternatives is used. (While the - `external_links` array prepares the API to return multiple links in a single response. Currently - only a single link is returned.)""" - byte_count: Optional[int] = None """The number of bytes in the result chunk. This field is not available when using `INLINE` disposition.""" @@ -4590,11 +4541,6 @@ class StatementResponse: """The result manifest provides schema and metadata for the result set.""" result: Optional[ResultData] = None - """Contains the result data of a single chunk when using `INLINE` disposition. When using - `EXTERNAL_LINKS` disposition, the array `external_links` is used instead to provide presigned - URLs to the result data in cloud storage. Exactly one of these alternatives is used. (While the - `external_links` array prepares the API to return multiple links in a single response. 
Currently - only a single link is returned.)""" statement_id: Optional[str] = None """The statement ID is returned upon successfully submitting a SQL statement, and is a required @@ -5304,6 +5250,7 @@ class WarehousePermissionLevel(Enum): """Permission level""" CAN_MANAGE = 'CAN_MANAGE' + CAN_MONITOR = 'CAN_MONITOR' CAN_USE = 'CAN_USE' IS_OWNER = 'IS_OWNER' @@ -5646,7 +5593,10 @@ class AlertsLegacyAPI: notification destinations if the condition was met. Alerts can be scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create. - **Note**: A new version of the Databricks SQL API is now available. Please see the latest version.""" + **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn + more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html""" def __init__(self, api_client): self._api = api_client @@ -5664,7 +5614,9 @@ def create(self, condition of its result, and notifies users or notification destinations if the condition was met. **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/create - instead. + instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html :param name: str Name of the alert. @@ -5698,7 +5650,9 @@ def delete(self, alert_id: str): queries and dashboards, alerts cannot be moved to the trash. **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/delete - instead. + instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html :param alert_id: str @@ -5715,7 +5669,9 @@ def get(self, alert_id: str) -> LegacyAlert: Gets an alert. **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/get - instead. + instead. 
[Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html :param alert_id: str @@ -5733,7 +5689,9 @@ def list(self) -> Iterator[LegacyAlert]: Gets a list of alerts. **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/list - instead. + instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html :returns: Iterator over :class:`LegacyAlert` """ @@ -5755,7 +5713,9 @@ def update(self, Updates an alert. **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/update - instead. + instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html :param alert_id: str :param name: str @@ -6055,7 +6015,9 @@ class DataSourcesAPI: advise you to use any text editor, REST client, or `grep` to search the response from this API for the name of your SQL warehouse as it appears in Databricks SQL. - **Note**: A new version of the Databricks SQL API is now available. Please see the latest version.""" + **Note**: A new version of the Databricks SQL API is now available. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html""" def __init__(self, api_client): self._api = api_client @@ -6068,7 +6030,9 @@ def list(self) -> Iterator[DataSource]: queries against it. **Note**: A new version of the Databricks SQL API is now available. Please use :method:warehouses/list - instead. + instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html :returns: Iterator over :class:`DataSource` """ @@ -6092,7 +6056,9 @@ class DbsqlPermissionsAPI: - `CAN_MANAGE`: Allows all actions: read, run, edit, delete, modify permissions (superset of `CAN_RUN`) - **Note**: A new version of the Databricks SQL API is now available. Please see the latest version.""" + **Note**: A new version of the Databricks SQL API is now available. 
[Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html""" def __init__(self, api_client): self._api = api_client @@ -6102,6 +6068,11 @@ def get(self, object_type: ObjectTypePlural, object_id: str) -> GetResponse: Gets a JSON representation of the access control list (ACL) for a specified object. + **Note**: A new version of the Databricks SQL API is now available. Please use + :method:workspace/getpermissions instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html + :param object_type: :class:`ObjectTypePlural` The type of object permissions to check. :param object_id: str @@ -6127,6 +6098,11 @@ def set(self, Sets the access control list (ACL) for a specified object. This operation will complete rewrite the ACL. + **Note**: A new version of the Databricks SQL API is now available. Please use + :method:workspace/setpermissions instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html + :param object_type: :class:`ObjectTypePlural` The type of object permission to set. :param object_id: str @@ -6156,7 +6132,9 @@ def transfer_ownership(self, Transfers ownership of a dashboard, query, or alert to an active user. Requires an admin API key. **Note**: A new version of the Databricks SQL API is now available. For queries and alerts, please use - :method:queries/update and :method:alerts/update respectively instead. + :method:queries/update and :method:alerts/update respectively instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html :param object_type: :class:`OwnableObjectType` The type of object on which to change ownership. @@ -6323,7 +6301,10 @@ class QueriesLegacyAPI: SQL warehouse, query text, name, description, tags, parameters, and visualizations. Queries can be scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create. - **Note**: A new version of the Databricks SQL API is now available. 
Please see the latest version.""" + **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn + more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html""" def __init__(self, api_client): self._api = api_client @@ -6350,7 +6331,9 @@ def create(self, **Note**: You cannot add a visualization until you create the query. **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/create - instead. + instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html :param data_source_id: str (optional) Data source ID maps to the ID of the data source used by the resource and is distinct from the @@ -6397,7 +6380,9 @@ def delete(self, query_id: str): they cannot be used for alerts. The trash is deleted after 30 days. **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/delete - instead. + instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html :param query_id: str @@ -6415,7 +6400,9 @@ def get(self, query_id: str) -> LegacyQuery: authenticated user. **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/get - instead. + instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html :param query_id: str @@ -6441,7 +6428,9 @@ def list(self, degradation, or a temporary ban. **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/list - instead. + instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html :param order: str (optional) Name of query attribute to order by. Default sort order is ascending. Append a dash (`-`) to order @@ -6497,6 +6486,9 @@ def restore(self, query_id: str): You can use restored queries for alerts. **Note**: A new version of the Databricks SQL API is now available. 
Please see the latest version. + [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html :param query_id: str @@ -6524,7 +6516,9 @@ def update(self, **Note**: You cannot undo this operation. **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/update - instead. + instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html :param query_id: str :param data_source_id: str (optional) @@ -6675,7 +6669,10 @@ class QueryVisualizationsLegacyAPI: """This is an evolving API that facilitates the addition and removal of vizualisations from existing queries within the Databricks Workspace. Data structures may change over time. - **Note**: A new version of the Databricks SQL API is now available. Please see the latest version.""" + **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn + more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html""" def __init__(self, api_client): self._api = api_client @@ -6692,7 +6689,9 @@ def create(self, Creates visualization in the query. **Note**: A new version of the Databricks SQL API is now available. Please use - :method:queryvisualizations/create instead. + :method:queryvisualizations/create instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html :param query_id: str The identifier returned by :method:queries/create @@ -6725,7 +6724,9 @@ def delete(self, id: str): Removes a visualization from the query. **Note**: A new version of the Databricks SQL API is now available. Please use - :method:queryvisualizations/delete instead. + :method:queryvisualizations/delete instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html :param id: str Widget ID returned by :method:queryvizualisations/create @@ -6752,7 +6753,9 @@ def update(self, Updates visualization in the query. 
**Note**: A new version of the Databricks SQL API is now available. Please use - :method:queryvisualizations/update instead. + :method:queryvisualizations/update instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html :param id: str The UUID for this visualization. @@ -6921,26 +6924,6 @@ def execute_statement(self, [`USE CATALOG`]: https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-catalog.html :param disposition: :class:`Disposition` (optional) - The fetch disposition provides two modes of fetching results: `INLINE` and `EXTERNAL_LINKS`. - - Statements executed with `INLINE` disposition will return result data inline, in `JSON_ARRAY` - format, in a series of chunks. If a given statement produces a result set with a size larger than 25 - MiB, that statement execution is aborted, and no result set will be available. - - **NOTE** Byte limits are computed based upon internal representations of the result set data, and - might not match the sizes visible in JSON responses. - - Statements executed with `EXTERNAL_LINKS` disposition will return result data as external links: - URLs that point to cloud storage internal to the workspace. Using `EXTERNAL_LINKS` disposition - allows statements to generate arbitrarily sized result sets for fetching up to 100 GiB. The - resulting links have two important properties: - - 1. They point to resources _external_ to the Databricks compute; therefore any associated - authentication information (typically a personal access token, OAuth token, or similar) _must be - removed_ when fetching from these links. - - 2. These are presigned URLs with a specific expiration, indicated in the response. The behavior when - attempting to use an expired link is cloud specific. :param format: :class:`Format` (optional) Statement execution supports three result formats: `JSON_ARRAY` (default), `ARROW_STREAM`, and `CSV`. 
diff --git a/databricks/sdk/version.py b/databricks/sdk/version.py index 9093e4e4..e187e0aa 100644 --- a/databricks/sdk/version.py +++ b/databricks/sdk/version.py @@ -1 +1 @@ -__version__ = '0.29.0' +__version__ = '0.30.0' diff --git a/docs/account/billing/budgets.rst b/docs/account/billing/budgets.rst index 85f7ee13..bb625b49 100644 --- a/docs/account/billing/budgets.rst +++ b/docs/account/billing/budgets.rst @@ -4,10 +4,11 @@ .. py:class:: BudgetsAPI - These APIs manage budget configuration including notifications for exceeding a budget for a period. They - can also retrieve the status of each budget. + These APIs manage budget configurations for this account. Budgets enable you to monitor usage across your + account. You can set up budgets to either track account-wide spending, or apply filters to track the + spending of specific teams, projects, or workspaces. - .. py:method:: create(budget: Budget) -> WrappedBudgetWithStatus + .. py:method:: create(budget: CreateBudgetConfigurationBudget) -> CreateBudgetConfigurationResponse Usage: @@ -21,40 +22,55 @@ a = AccountClient() - created = a.budgets.create(budget=billing.Budget( - name=f'sdk-{time.time_ns()}', - filter="tag.tagName = 'all'", - period="1 month", - start_date="2022-01-01", - target_amount="100", - alerts=[billing.BudgetAlert(email_notifications=["admin@example.com"], min_percentage=50)])) + created = a.budgets.create(budget=billing.CreateBudgetConfigurationBudget( + display_name=f'sdk-{time.time_ns()}', + filter=billing.BudgetConfigurationFilter(tags=[ + billing.BudgetConfigurationFilterTagClause(key="tagName", + value=billing.BudgetConfigurationFilterClause( + operator=billing.BudgetConfigurationFilterOperator.IN, + values=["all"])) + ]), + alert_configurations=[ + billing.CreateBudgetConfigurationBudgetAlertConfigurations( + time_period=billing.AlertConfigurationTimePeriod.MONTH, + quantity_type=billing.AlertConfigurationQuantityType.LIST_PRICE_DOLLARS_USD, + 
trigger_type=billing.AlertConfigurationTriggerType.CUMULATIVE_SPENDING_EXCEEDED, + quantity_threshold="100", + action_configurations=[ + billing.CreateBudgetConfigurationBudgetActionConfigurations( + action_type=billing.ActionConfigurationType.EMAIL_NOTIFICATION, + target="admin@example.com") + ]) + ])) # cleanup - a.budgets.delete(budget_id=created.budget.budget_id) + a.budgets.delete(budget_id=created.budget.budget_configuration_id) - Create a new budget. + Create new budget. - Creates a new budget in the specified account. + Create a new budget configuration for an account. For full details, see + https://docs.databricks.com/en/admin/account-settings/budgets.html. - :param budget: :class:`Budget` - Budget configuration to be created. + :param budget: :class:`CreateBudgetConfigurationBudget` + Properties of the new budget configuration. - :returns: :class:`WrappedBudgetWithStatus` + :returns: :class:`CreateBudgetConfigurationResponse` .. py:method:: delete(budget_id: str) Delete budget. - Deletes the budget specified by its UUID. + Deletes a budget configuration for an account. Both account and budget configuration are specified by + ID. This cannot be undone. :param budget_id: str - Budget ID + The Databricks budget configuration ID. - .. py:method:: get(budget_id: str) -> WrappedBudgetWithStatus + .. 
py:method:: get(budget_id: str) -> GetBudgetConfigurationResponse Usage: @@ -68,31 +84,43 @@ a = AccountClient() - created = a.budgets.create(budget=billing.Budget( - name=f'sdk-{time.time_ns()}', - filter="tag.tagName = 'all'", - period="1 month", - start_date="2022-01-01", - target_amount="100", - alerts=[billing.BudgetAlert(email_notifications=["admin@example.com"], min_percentage=50)])) - - by_id = a.budgets.get(budget_id=created.budget.budget_id) + created = a.budgets.create(budget=billing.CreateBudgetConfigurationBudget( + display_name=f'sdk-{time.time_ns()}', + filter=billing.BudgetConfigurationFilter(tags=[ + billing.BudgetConfigurationFilterTagClause(key="tagName", + value=billing.BudgetConfigurationFilterClause( + operator=billing.BudgetConfigurationFilterOperator.IN, + values=["all"])) + ]), + alert_configurations=[ + billing.CreateBudgetConfigurationBudgetAlertConfigurations( + time_period=billing.AlertConfigurationTimePeriod.MONTH, + quantity_type=billing.AlertConfigurationQuantityType.LIST_PRICE_DOLLARS_USD, + trigger_type=billing.AlertConfigurationTriggerType.CUMULATIVE_SPENDING_EXCEEDED, + quantity_threshold="100", + action_configurations=[ + billing.CreateBudgetConfigurationBudgetActionConfigurations( + action_type=billing.ActionConfigurationType.EMAIL_NOTIFICATION, + target="admin@example.com") + ]) + ])) + + by_id = a.budgets.get(budget_id=created.budget.budget_configuration_id) # cleanup - a.budgets.delete(budget_id=created.budget.budget_id) + a.budgets.delete(budget_id=created.budget.budget_configuration_id) - Get budget and its status. + Get budget. - Gets the budget specified by its UUID, including noncumulative status for each day that the budget is - configured to include. + Gets a budget configuration for an account. Both account and budget configuration are specified by ID. :param budget_id: str - Budget ID + The Databricks budget configuration ID. 
- :returns: :class:`WrappedBudgetWithStatus` + :returns: :class:`GetBudgetConfigurationResponse` - .. py:method:: list() -> Iterator[BudgetWithStatus] + .. py:method:: list( [, page_token: Optional[str]]) -> Iterator[BudgetConfiguration] Usage: @@ -100,20 +128,24 @@ .. code-block:: from databricks.sdk import AccountClient + from databricks.sdk.service import billing a = AccountClient() - all = a.budgets.list() + all = a.budgets.list(billing.ListBudgetConfigurationsRequest()) Get all budgets. - Gets all budgets associated with this account, including noncumulative status for each day that the - budget is configured to include. + Gets all budgets associated with this account. - :returns: Iterator over :class:`BudgetWithStatus` + :param page_token: str (optional) + A page token received from a previous get all budget configurations call. This token can be used to + retrieve the subsequent page. Requests first page if absent. + + :returns: Iterator over :class:`BudgetConfiguration` - .. py:method:: update(budget_id: str, budget: Budget) + .. 
py:method:: update(budget_id: str, budget: UpdateBudgetConfigurationBudget) -> UpdateBudgetConfigurationResponse Usage: @@ -127,36 +159,62 @@ a = AccountClient() - created = a.budgets.create(budget=billing.Budget( - name=f'sdk-{time.time_ns()}', - filter="tag.tagName = 'all'", - period="1 month", - start_date="2022-01-01", - target_amount="100", - alerts=[billing.BudgetAlert(email_notifications=["admin@example.com"], min_percentage=50)])) - - a.budgets.update(budget_id=created.budget.budget_id, - budget=billing.Budget(name=f'sdk-{time.time_ns()}', - filter="tag.tagName = 'all'", - period="1 month", - start_date="2022-01-01", - target_amount="100", - alerts=[ - billing.BudgetAlert(email_notifications=["admin@example.com"], - min_percentage=70) - ])) + created = a.budgets.create(budget=billing.CreateBudgetConfigurationBudget( + display_name=f'sdk-{time.time_ns()}', + filter=billing.BudgetConfigurationFilter(tags=[ + billing.BudgetConfigurationFilterTagClause(key="tagName", + value=billing.BudgetConfigurationFilterClause( + operator=billing.BudgetConfigurationFilterOperator.IN, + values=["all"])) + ]), + alert_configurations=[ + billing.CreateBudgetConfigurationBudgetAlertConfigurations( + time_period=billing.AlertConfigurationTimePeriod.MONTH, + quantity_type=billing.AlertConfigurationQuantityType.LIST_PRICE_DOLLARS_USD, + trigger_type=billing.AlertConfigurationTriggerType.CUMULATIVE_SPENDING_EXCEEDED, + quantity_threshold="100", + action_configurations=[ + billing.CreateBudgetConfigurationBudgetActionConfigurations( + action_type=billing.ActionConfigurationType.EMAIL_NOTIFICATION, + target="admin@example.com") + ]) + ])) + + _ = a.budgets.update( + budget_id=created.budget.budget_configuration_id, + budget=billing.UpdateBudgetConfigurationBudget( + display_name=f'sdk-{time.time_ns()}', + filter=billing.BudgetConfigurationFilter(tags=[ + billing.BudgetConfigurationFilterTagClause( + key="tagName", + value=billing.BudgetConfigurationFilterClause( + 
operator=billing.BudgetConfigurationFilterOperator.IN, values=["all"])) + ]), + alert_configurations=[ + billing.AlertConfiguration( + time_period=billing.AlertConfigurationTimePeriod.MONTH, + quantity_type=billing.AlertConfigurationQuantityType.LIST_PRICE_DOLLARS_USD, + trigger_type=billing.AlertConfigurationTriggerType.CUMULATIVE_SPENDING_EXCEEDED, + quantity_threshold="50", + action_configurations=[ + billing.ActionConfiguration( + action_type=billing.ActionConfigurationType.EMAIL_NOTIFICATION, + target="admin@example.com") + ]) + ])) # cleanup - a.budgets.delete(budget_id=created.budget.budget_id) + a.budgets.delete(budget_id=created.budget.budget_configuration_id) Modify budget. - Modifies a budget in this account. Budget properties are completely overwritten. + Updates a budget configuration for an account. Both account and budget configuration are specified by + ID. :param budget_id: str - Budget ID - :param budget: :class:`Budget` - Budget configuration to be created. - + The Databricks budget configuration ID. + :param budget: :class:`UpdateBudgetConfigurationBudget` + The updated budget. This will overwrite the budget specified by the budget ID. + :returns: :class:`UpdateBudgetConfigurationResponse` \ No newline at end of file diff --git a/docs/account/billing/index.rst b/docs/account/billing/index.rst index 522f6f5f..0e07da59 100644 --- a/docs/account/billing/index.rst +++ b/docs/account/billing/index.rst @@ -9,4 +9,5 @@ Configure different aspects of Databricks billing and usage. billable_usage budgets - log_delivery \ No newline at end of file + log_delivery + usage_dashboards \ No newline at end of file diff --git a/docs/account/billing/usage_dashboards.rst b/docs/account/billing/usage_dashboards.rst new file mode 100644 index 00000000..350ef1f0 --- /dev/null +++ b/docs/account/billing/usage_dashboards.rst @@ -0,0 +1,39 @@ +``a.usage_dashboards``: Usage Dashboards +======================================== +.. 
currentmodule:: databricks.sdk.service.billing + +.. py:class:: UsageDashboardsAPI + + These APIs manage usage dashboards for this account. Usage dashboards enable you to gain insights into + your usage with pre-built dashboards: visualize breakdowns, analyze tag attributions, and identify cost + drivers. + + .. py:method:: create( [, dashboard_type: Optional[UsageDashboardType], workspace_id: Optional[int]]) -> CreateBillingUsageDashboardResponse + + Create new usage dashboard. + + Create a usage dashboard specified by workspaceId, accountId, and dashboard type. + + :param dashboard_type: :class:`UsageDashboardType` (optional) + Workspace level usage dashboard shows usage data for the specified workspace ID. Global level usage + dashboard shows usage data for all workspaces in the account. + :param workspace_id: int (optional) + The workspace ID of the workspace in which the usage dashboard is created. + + :returns: :class:`CreateBillingUsageDashboardResponse` + + + .. py:method:: get( [, dashboard_type: Optional[UsageDashboardType], workspace_id: Optional[int]]) -> GetBillingUsageDashboardResponse + + Get usage dashboard. + + Get a usage dashboard specified by workspaceId, accountId, and dashboard type. + + :param dashboard_type: :class:`UsageDashboardType` (optional) + Workspace level usage dashboard shows usage data for the specified workspace ID. Global level usage + dashboard shows usage data for all workspaces in the account. + :param workspace_id: int (optional) + The workspace ID of the workspace in which the usage dashboard is created. + + :returns: :class:`GetBillingUsageDashboardResponse` + \ No newline at end of file diff --git a/docs/account/iam/workspace_assignment.rst b/docs/account/iam/workspace_assignment.rst index 1ce06996..6230b819 100644 --- a/docs/account/iam/workspace_assignment.rst +++ b/docs/account/iam/workspace_assignment.rst @@ -15,7 +15,7 @@ principal. :param workspace_id: int - The workspace ID. + The workspace ID for the account. 
:param principal_id: int The ID of the user, service principal, or group. @@ -61,7 +61,7 @@ :returns: Iterator over :class:`PermissionAssignment` - .. py:method:: update(workspace_id: int, principal_id: int, permissions: List[WorkspacePermission]) -> PermissionAssignment + .. py:method:: update(workspace_id: int, principal_id: int [, permissions: Optional[List[WorkspacePermission]]]) -> PermissionAssignment Usage: @@ -92,13 +92,15 @@ specified principal. :param workspace_id: int - The workspace ID. + The workspace ID for the account. :param principal_id: int The ID of the user, service principal, or group. - :param permissions: List[:class:`WorkspacePermission`] - Array of permissions assignments to update on the workspace. Note that excluding this field will - have the same effect as providing an empty list which will result in the deletion of all permissions - for the principal. + :param permissions: List[:class:`WorkspacePermission`] (optional) + Array of permissions assignments to update on the workspace. Valid values are "USER" and "ADMIN" + (case-sensitive). If both "USER" and "ADMIN" are provided, "ADMIN" takes precedence. Other values + will be ignored. Note that excluding this field, or providing unsupported values, will have the same + effect as providing an empty list, which will result in the deletion of all permissions for the + principal. :returns: :class:`PermissionAssignment` \ No newline at end of file diff --git a/docs/account/oauth2/custom_app_integration.rst b/docs/account/oauth2/custom_app_integration.rst index 382ce0bd..0dcc3d8e 100644 --- a/docs/account/oauth2/custom_app_integration.rst +++ b/docs/account/oauth2/custom_app_integration.rst @@ -4,23 +4,23 @@ .. 
py:class:: CustomAppIntegrationAPI - These APIs enable administrators to manage custom oauth app integrations, which is required for + These APIs enable administrators to manage custom OAuth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud. - .. py:method:: create(name: str, redirect_urls: List[str] [, confidential: Optional[bool], scopes: Optional[List[str]], token_access_policy: Optional[TokenAccessPolicy]]) -> CreateCustomAppIntegrationOutput + .. py:method:: create( [, confidential: Optional[bool], name: Optional[str], redirect_urls: Optional[List[str]], scopes: Optional[List[str]], token_access_policy: Optional[TokenAccessPolicy]]) -> CreateCustomAppIntegrationOutput Create Custom OAuth App Integration. Create Custom OAuth App Integration. - You can retrieve the custom oauth app integration via :method:CustomAppIntegration/get. + You can retrieve the custom OAuth app integration via :method:CustomAppIntegration/get. - :param name: str - name of the custom oauth app - :param redirect_urls: List[str] - List of oauth redirect urls :param confidential: bool (optional) - indicates if an oauth client-secret should be generated + This field indicates whether an OAuth client secret is required to authenticate this client. + :param name: str (optional) + Name of the custom OAuth app + :param redirect_urls: List[str] (optional) + List of OAuth redirect urls :param scopes: List[str] (optional) OAuth scopes granted to the application. Supported scopes: all-apis, sql, offline_access, openid, profile, email. @@ -34,11 +34,10 @@ Delete Custom OAuth App Integration. - Delete an existing Custom OAuth App Integration. You can retrieve the custom oauth app integration via + Delete an existing Custom OAuth App Integration. You can retrieve the custom OAuth app integration via :method:CustomAppIntegration/get. :param integration_id: str - The oauth app integration ID. 
@@ -50,16 +49,19 @@ Gets the Custom OAuth App Integration for the given integration id. :param integration_id: str - The oauth app integration ID. :returns: :class:`GetCustomAppIntegrationOutput` - .. py:method:: list() -> Iterator[GetCustomAppIntegrationOutput] + .. py:method:: list( [, include_creator_username: Optional[bool], page_size: Optional[int], page_token: Optional[str]]) -> Iterator[GetCustomAppIntegrationOutput] Get custom oauth app integrations. - Get the list of custom oauth app integrations for the specified Databricks account + Get the list of custom OAuth app integrations for the specified Databricks account + + :param include_creator_username: bool (optional) + :param page_size: int (optional) + :param page_token: str (optional) :returns: Iterator over :class:`GetCustomAppIntegrationOutput` @@ -68,15 +70,14 @@ Updates Custom OAuth App Integration. - Updates an existing custom OAuth App Integration. You can retrieve the custom oauth app integration + Updates an existing custom OAuth App Integration. You can retrieve the custom OAuth app integration via :method:CustomAppIntegration/get. :param integration_id: str - The oauth app integration ID. :param redirect_urls: List[str] (optional) - List of oauth redirect urls to be updated in the custom oauth app integration + List of OAuth redirect urls to be updated in the custom OAuth app integration :param token_access_policy: :class:`TokenAccessPolicy` (optional) - Token access policy to be updated in the custom oauth app integration + Token access policy to be updated in the custom OAuth app integration \ No newline at end of file diff --git a/docs/account/oauth2/o_auth_published_apps.rst b/docs/account/oauth2/o_auth_published_apps.rst index 69aecb8a..18c07c32 100644 --- a/docs/account/oauth2/o_auth_published_apps.rst +++ b/docs/account/oauth2/o_auth_published_apps.rst @@ -15,7 +15,7 @@ Get all the available published OAuth apps in Databricks. 
:param page_size: int (optional) - The max number of OAuth published apps to return. + The max number of OAuth published apps to return in one page. :param page_token: str (optional) A token that can be used to get the next page of results. diff --git a/docs/account/oauth2/published_app_integration.rst b/docs/account/oauth2/published_app_integration.rst index 0488415c..f59f2c4a 100644 --- a/docs/account/oauth2/published_app_integration.rst +++ b/docs/account/oauth2/published_app_integration.rst @@ -4,7 +4,7 @@ .. py:class:: PublishedAppIntegrationAPI - These APIs enable administrators to manage published oauth app integrations, which is required for + These APIs enable administrators to manage published OAuth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud. .. py:method:: create( [, app_id: Optional[str], token_access_policy: Optional[TokenAccessPolicy]]) -> CreatePublishedAppIntegrationOutput @@ -13,10 +13,10 @@ Create Published OAuth App Integration. - You can retrieve the published oauth app integration via :method:PublishedAppIntegration/get. + You can retrieve the published OAuth app integration via :method:PublishedAppIntegration/get. :param app_id: str (optional) - app_id of the oauth published app integration. For example power-bi, tableau-deskop + App id of the OAuth published app integration. For example power-bi, tableau-desktop :param token_access_policy: :class:`TokenAccessPolicy` (optional) Token access policy @@ -27,11 +27,10 @@ Delete Published OAuth App Integration. - Delete an existing Published OAuth App Integration. You can retrieve the published oauth app + Delete an existing Published OAuth App Integration. You can retrieve the published OAuth app integration via :method:PublishedAppIntegration/get. :param integration_id: str - The oauth app integration ID. @@ -43,16 +42,18 @@ Gets the Published OAuth App Integration for the given integration id. 
:param integration_id: str - The oauth app integration ID. :returns: :class:`GetPublishedAppIntegrationOutput` - .. py:method:: list() -> Iterator[GetPublishedAppIntegrationOutput] + .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[GetPublishedAppIntegrationOutput] Get published oauth app integrations. - Get the list of published oauth app integrations for the specified Databricks account + Get the list of published OAuth app integrations for the specified Databricks account + + :param page_size: int (optional) + :param page_token: str (optional) :returns: Iterator over :class:`GetPublishedAppIntegrationOutput` @@ -61,13 +62,12 @@ Updates Published OAuth App Integration. - Updates an existing published OAuth App Integration. You can retrieve the published oauth app + Updates an existing published OAuth App Integration. You can retrieve the published OAuth app integration via :method:PublishedAppIntegration/get. :param integration_id: str - The oauth app integration ID. :param token_access_policy: :class:`TokenAccessPolicy` (optional) - Token access policy to be updated in the published oauth app integration + Token access policy to be updated in the published OAuth app integration \ No newline at end of file diff --git a/docs/dbdataclasses/apps.rst b/docs/dbdataclasses/apps.rst new file mode 100644 index 00000000..827a563b --- /dev/null +++ b/docs/dbdataclasses/apps.rst @@ -0,0 +1,144 @@ +Apps +==== + +These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.apps`` module. + +.. py:currentmodule:: databricks.sdk.service.apps +.. autoclass:: App + :members: + :undoc-members: + +.. autoclass:: AppAccessControlRequest + :members: + :undoc-members: + +.. autoclass:: AppAccessControlResponse + :members: + :undoc-members: + +.. autoclass:: AppDeployment + :members: + :undoc-members: + +.. autoclass:: AppDeploymentArtifacts + :members: + :undoc-members: + +.. 
py:class:: AppDeploymentMode + + .. py:attribute:: AUTO_SYNC + :value: "AUTO_SYNC" + + .. py:attribute:: SNAPSHOT + :value: "SNAPSHOT" + +.. py:class:: AppDeploymentState + + .. py:attribute:: FAILED + :value: "FAILED" + + .. py:attribute:: IN_PROGRESS + :value: "IN_PROGRESS" + + .. py:attribute:: STOPPED + :value: "STOPPED" + + .. py:attribute:: SUCCEEDED + :value: "SUCCEEDED" + +.. autoclass:: AppDeploymentStatus + :members: + :undoc-members: + +.. autoclass:: AppPermission + :members: + :undoc-members: + +.. py:class:: AppPermissionLevel + + Permission level + + .. py:attribute:: CAN_MANAGE + :value: "CAN_MANAGE" + + .. py:attribute:: CAN_USE + :value: "CAN_USE" + +.. autoclass:: AppPermissions + :members: + :undoc-members: + +.. autoclass:: AppPermissionsDescription + :members: + :undoc-members: + +.. autoclass:: AppPermissionsRequest + :members: + :undoc-members: + +.. py:class:: AppState + + .. py:attribute:: CREATING + :value: "CREATING" + + .. py:attribute:: DELETED + :value: "DELETED" + + .. py:attribute:: DELETING + :value: "DELETING" + + .. py:attribute:: ERROR + :value: "ERROR" + + .. py:attribute:: IDLE + :value: "IDLE" + + .. py:attribute:: RUNNING + :value: "RUNNING" + + .. py:attribute:: STARTING + :value: "STARTING" + +.. autoclass:: AppStatus + :members: + :undoc-members: + +.. autoclass:: CreateAppDeploymentRequest + :members: + :undoc-members: + +.. autoclass:: CreateAppRequest + :members: + :undoc-members: + +.. autoclass:: DeleteResponse + :members: + :undoc-members: + +.. autoclass:: GetAppPermissionLevelsResponse + :members: + :undoc-members: + +.. autoclass:: ListAppDeploymentsResponse + :members: + :undoc-members: + +.. autoclass:: ListAppsResponse + :members: + :undoc-members: + +.. autoclass:: StartAppRequest + :members: + :undoc-members: + +.. autoclass:: StopAppRequest + :members: + :undoc-members: + +.. autoclass:: StopAppResponse + :members: + :undoc-members: + +.. 
autoclass:: UpdateAppRequest + :members: + :undoc-members: diff --git a/docs/dbdataclasses/billing.rst b/docs/dbdataclasses/billing.rst index 27abdd35..25deb0a1 100644 --- a/docs/dbdataclasses/billing.rst +++ b/docs/dbdataclasses/billing.rst @@ -4,23 +4,84 @@ Billing These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.billing`` module. .. py:currentmodule:: databricks.sdk.service.billing -.. autoclass:: Budget +.. autoclass:: ActionConfiguration :members: :undoc-members: -.. autoclass:: BudgetAlert +.. py:class:: ActionConfigurationType + + .. py:attribute:: EMAIL_NOTIFICATION + :value: "EMAIL_NOTIFICATION" + +.. autoclass:: AlertConfiguration + :members: + :undoc-members: + +.. py:class:: AlertConfigurationQuantityType + + .. py:attribute:: LIST_PRICE_DOLLARS_USD + :value: "LIST_PRICE_DOLLARS_USD" + +.. py:class:: AlertConfigurationTimePeriod + + .. py:attribute:: MONTH + :value: "MONTH" + +.. py:class:: AlertConfigurationTriggerType + + .. py:attribute:: CUMULATIVE_SPENDING_EXCEEDED + :value: "CUMULATIVE_SPENDING_EXCEEDED" + +.. autoclass:: BudgetConfiguration + :members: + :undoc-members: + +.. autoclass:: BudgetConfigurationFilter :members: :undoc-members: -.. autoclass:: BudgetList +.. autoclass:: BudgetConfigurationFilterClause :members: :undoc-members: -.. autoclass:: BudgetWithStatus +.. py:class:: BudgetConfigurationFilterOperator + + .. py:attribute:: IN + :value: "IN" + +.. autoclass:: BudgetConfigurationFilterTagClause :members: :undoc-members: -.. autoclass:: BudgetWithStatusStatusDailyItem +.. autoclass:: BudgetConfigurationFilterWorkspaceIdClause + :members: + :undoc-members: + +.. autoclass:: CreateBillingUsageDashboardRequest + :members: + :undoc-members: + +.. autoclass:: CreateBillingUsageDashboardResponse + :members: + :undoc-members: + +.. autoclass:: CreateBudgetConfigurationBudget + :members: + :undoc-members: + +.. 
autoclass:: CreateBudgetConfigurationBudgetActionConfigurations + :members: + :undoc-members: + +.. autoclass:: CreateBudgetConfigurationBudgetAlertConfigurations + :members: + :undoc-members: + +.. autoclass:: CreateBudgetConfigurationRequest + :members: + :undoc-members: + +.. autoclass:: CreateBudgetConfigurationResponse :members: :undoc-members: @@ -28,7 +89,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: DeleteResponse +.. autoclass:: DeleteBudgetConfigurationResponse :members: :undoc-members: @@ -55,6 +116,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: GetBillingUsageDashboardResponse + :members: + :undoc-members: + +.. autoclass:: GetBudgetConfigurationResponse + :members: + :undoc-members: + +.. autoclass:: ListBudgetConfigurationsResponse + :members: + :undoc-members: + .. py:class:: LogDeliveryConfigStatus Status of log delivery configuration. Set to `ENABLED` (enabled) or `DISABLED` (disabled). Defaults to `ENABLED`. You can [enable or disable the configuration](#operation/patch-log-delivery-config-status) later. Deletion of a configuration is not supported, so disable a log delivery configuration that is no longer needed. @@ -102,22 +175,30 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: UpdateLogDeliveryConfigurationStatusRequest +.. autoclass:: UpdateBudgetConfigurationBudget :members: :undoc-members: -.. autoclass:: UpdateResponse +.. autoclass:: UpdateBudgetConfigurationRequest :members: :undoc-members: -.. autoclass:: WrappedBudget +.. autoclass:: UpdateBudgetConfigurationResponse :members: :undoc-members: -.. autoclass:: WrappedBudgetWithStatus +.. autoclass:: UpdateLogDeliveryConfigurationStatusRequest :members: :undoc-members: +.. py:class:: UsageDashboardType + + .. 
py:attribute:: USAGE_DASHBOARD_TYPE_GLOBAL + :value: "USAGE_DASHBOARD_TYPE_GLOBAL" + + .. py:attribute:: USAGE_DASHBOARD_TYPE_WORKSPACE + :value: "USAGE_DASHBOARD_TYPE_WORKSPACE" + .. autoclass:: WrappedCreateLogDeliveryConfiguration :members: :undoc-members: diff --git a/docs/dbdataclasses/catalog.rst b/docs/dbdataclasses/catalog.rst index e2c120bc..d1195dd4 100644 --- a/docs/dbdataclasses/catalog.rst +++ b/docs/dbdataclasses/catalog.rst @@ -647,6 +647,17 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: PARAM :value: "PARAM" +.. py:class:: GetBindingsSecurableType + + .. py:attribute:: CATALOG + :value: "CATALOG" + + .. py:attribute:: EXTERNAL_LOCATION + :value: "EXTERNAL_LOCATION" + + .. py:attribute:: STORAGE_CREDENTIAL + :value: "STORAGE_CREDENTIAL" + .. autoclass:: GetMetastoreSummaryResponse :members: :undoc-members: @@ -940,9 +951,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: ONLINE_PIPELINE_FAILED :value: "ONLINE_PIPELINE_FAILED" - .. py:attribute:: ONLINE_TABLE_STATE_UNSPECIFIED - :value: "ONLINE_TABLE_STATE_UNSPECIFIED" - .. py:attribute:: ONLINE_TRIGGERED_UPDATE :value: "ONLINE_TRIGGERED_UPDATE" @@ -1052,6 +1060,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: EXECUTE :value: "EXECUTE" + .. py:attribute:: MANAGE + :value: "MANAGE" + .. py:attribute:: MANAGE_ALLOWLIST :value: "MANAGE_ALLOWLIST" @@ -1304,6 +1315,17 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: UpdateBindingsSecurableType + + .. py:attribute:: CATALOG + :value: "CATALOG" + + .. py:attribute:: EXTERNAL_LOCATION + :value: "EXTERNAL_LOCATION" + + .. py:attribute:: STORAGE_CREDENTIAL + :value: "STORAGE_CREDENTIAL" + .. 
autoclass:: UpdateCatalog :members: :undoc-members: diff --git a/docs/dbdataclasses/compute.rst b/docs/dbdataclasses/compute.rst index 64ab4268..7b280c51 100644 --- a/docs/dbdataclasses/compute.rst +++ b/docs/dbdataclasses/compute.rst @@ -817,10 +817,38 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: ListClustersFilterBy + :members: + :undoc-members: + .. autoclass:: ListClustersResponse :members: :undoc-members: +.. autoclass:: ListClustersSortBy + :members: + :undoc-members: + +.. py:class:: ListClustersSortByDirection + + The direction to sort by. + + .. py:attribute:: ASC + :value: "ASC" + + .. py:attribute:: DESC + :value: "DESC" + +.. py:class:: ListClustersSortByField + + The sorting criteria. By default, clusters are sorted by 3 columns from highest to lowest precedence: cluster state, pinned or unpinned, then cluster name. + + .. py:attribute:: CLUSTER_NAME + :value: "CLUSTER_NAME" + + .. py:attribute:: DEFAULT + :value: "DEFAULT" + .. autoclass:: ListGlobalInitScriptsResponse :members: :undoc-members: @@ -855,6 +883,8 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: ListSortOrder + A generic ordering enum for list-based queries. + .. py:attribute:: ASC :value: "ASC" @@ -1308,6 +1338,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: UpdateCluster + :members: + :undoc-members: + +.. autoclass:: UpdateClusterResource + :members: + :undoc-members: + +.. autoclass:: UpdateClusterResponse + :members: + :undoc-members: + .. 
autoclass:: UpdateResponse :members: :undoc-members: diff --git a/docs/dbdataclasses/dashboards.rst b/docs/dbdataclasses/dashboards.rst index dca31d64..8765ee69 100644 --- a/docs/dbdataclasses/dashboards.rst +++ b/docs/dbdataclasses/dashboards.rst @@ -29,9 +29,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: DASHBOARD_VIEW_BASIC :value: "DASHBOARD_VIEW_BASIC" - .. py:attribute:: DASHBOARD_VIEW_FULL - :value: "DASHBOARD_VIEW_FULL" - .. autoclass:: DeleteScheduleResponse :members: :undoc-members: @@ -40,6 +37,34 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: GenieAttachment + :members: + :undoc-members: + +.. autoclass:: GenieConversation + :members: + :undoc-members: + +.. autoclass:: GenieCreateConversationMessageRequest + :members: + :undoc-members: + +.. autoclass:: GenieGetMessageQueryResultResponse + :members: + :undoc-members: + +.. autoclass:: GenieMessage + :members: + :undoc-members: + +.. autoclass:: GenieStartConversationMessageRequest + :members: + :undoc-members: + +.. autoclass:: GenieStartConversationResponse + :members: + :undoc-members: + .. py:class:: LifecycleState .. py:attribute:: ACTIVE @@ -60,6 +85,154 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: MessageError + :members: + :undoc-members: + +.. py:class:: MessageErrorType + + .. py:attribute:: BLOCK_MULTIPLE_EXECUTIONS_EXCEPTION + :value: "BLOCK_MULTIPLE_EXECUTIONS_EXCEPTION" + + .. py:attribute:: CHAT_COMPLETION_CLIENT_EXCEPTION + :value: "CHAT_COMPLETION_CLIENT_EXCEPTION" + + .. py:attribute:: CHAT_COMPLETION_CLIENT_TIMEOUT_EXCEPTION + :value: "CHAT_COMPLETION_CLIENT_TIMEOUT_EXCEPTION" + + .. py:attribute:: CHAT_COMPLETION_NETWORK_EXCEPTION + :value: "CHAT_COMPLETION_NETWORK_EXCEPTION" + + .. py:attribute:: CONTENT_FILTER_EXCEPTION + :value: "CONTENT_FILTER_EXCEPTION" + + .. 
py:attribute:: CONTEXT_EXCEEDED_EXCEPTION + :value: "CONTEXT_EXCEEDED_EXCEPTION" + + .. py:attribute:: COULD_NOT_GET_UC_SCHEMA_EXCEPTION + :value: "COULD_NOT_GET_UC_SCHEMA_EXCEPTION" + + .. py:attribute:: DEPLOYMENT_NOT_FOUND_EXCEPTION + :value: "DEPLOYMENT_NOT_FOUND_EXCEPTION" + + .. py:attribute:: FUNCTIONS_NOT_AVAILABLE_EXCEPTION + :value: "FUNCTIONS_NOT_AVAILABLE_EXCEPTION" + + .. py:attribute:: FUNCTION_ARGUMENTS_INVALID_EXCEPTION + :value: "FUNCTION_ARGUMENTS_INVALID_EXCEPTION" + + .. py:attribute:: FUNCTION_ARGUMENTS_INVALID_JSON_EXCEPTION + :value: "FUNCTION_ARGUMENTS_INVALID_JSON_EXCEPTION" + + .. py:attribute:: FUNCTION_CALL_MISSING_PARAMETER_EXCEPTION + :value: "FUNCTION_CALL_MISSING_PARAMETER_EXCEPTION" + + .. py:attribute:: GENERIC_CHAT_COMPLETION_EXCEPTION + :value: "GENERIC_CHAT_COMPLETION_EXCEPTION" + + .. py:attribute:: GENERIC_CHAT_COMPLETION_SERVICE_EXCEPTION + :value: "GENERIC_CHAT_COMPLETION_SERVICE_EXCEPTION" + + .. py:attribute:: GENERIC_SQL_EXEC_API_CALL_EXCEPTION + :value: "GENERIC_SQL_EXEC_API_CALL_EXCEPTION" + + .. py:attribute:: ILLEGAL_PARAMETER_DEFINITION_EXCEPTION + :value: "ILLEGAL_PARAMETER_DEFINITION_EXCEPTION" + + .. py:attribute:: INVALID_CERTIFIED_ANSWER_FUNCTION_EXCEPTION + :value: "INVALID_CERTIFIED_ANSWER_FUNCTION_EXCEPTION" + + .. py:attribute:: INVALID_CERTIFIED_ANSWER_IDENTIFIER_EXCEPTION + :value: "INVALID_CERTIFIED_ANSWER_IDENTIFIER_EXCEPTION" + + .. py:attribute:: INVALID_CHAT_COMPLETION_JSON_EXCEPTION + :value: "INVALID_CHAT_COMPLETION_JSON_EXCEPTION" + + .. py:attribute:: INVALID_COMPLETION_REQUEST_EXCEPTION + :value: "INVALID_COMPLETION_REQUEST_EXCEPTION" + + .. py:attribute:: INVALID_FUNCTION_CALL_EXCEPTION + :value: "INVALID_FUNCTION_CALL_EXCEPTION" + + .. py:attribute:: INVALID_TABLE_IDENTIFIER_EXCEPTION + :value: "INVALID_TABLE_IDENTIFIER_EXCEPTION" + + .. py:attribute:: LOCAL_CONTEXT_EXCEEDED_EXCEPTION + :value: "LOCAL_CONTEXT_EXCEEDED_EXCEPTION" + + .. 
 py:attribute:: MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION + :value: "MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION" + + .. py:attribute:: MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION + :value: "MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION" + + .. py:attribute:: NO_TABLES_TO_QUERY_EXCEPTION + :value: "NO_TABLES_TO_QUERY_EXCEPTION" + + .. py:attribute:: RATE_LIMIT_EXCEEDED_GENERIC_EXCEPTION + :value: "RATE_LIMIT_EXCEEDED_GENERIC_EXCEPTION" + + .. py:attribute:: RATE_LIMIT_EXCEEDED_SPECIFIED_WAIT_EXCEPTION + :value: "RATE_LIMIT_EXCEEDED_SPECIFIED_WAIT_EXCEPTION" + + .. py:attribute:: REPLY_PROCESS_TIMEOUT_EXCEPTION + :value: "REPLY_PROCESS_TIMEOUT_EXCEPTION" + + .. py:attribute:: RETRYABLE_PROCESSING_EXCEPTION + :value: "RETRYABLE_PROCESSING_EXCEPTION" + + .. py:attribute:: SQL_EXECUTION_EXCEPTION + :value: "SQL_EXECUTION_EXCEPTION" + + .. py:attribute:: TABLES_MISSING_EXCEPTION + :value: "TABLES_MISSING_EXCEPTION" + + .. py:attribute:: TOO_MANY_CERTIFIED_ANSWERS_EXCEPTION + :value: "TOO_MANY_CERTIFIED_ANSWERS_EXCEPTION" + + .. py:attribute:: TOO_MANY_TABLES_EXCEPTION + :value: "TOO_MANY_TABLES_EXCEPTION" + + .. py:attribute:: UNEXPECTED_REPLY_PROCESS_EXCEPTION + :value: "UNEXPECTED_REPLY_PROCESS_EXCEPTION" + + .. py:attribute:: UNKNOWN_AI_MODEL + :value: "UNKNOWN_AI_MODEL" + + .. py:attribute:: WAREHOUSE_ACCESS_MISSING_EXCEPTION + :value: "WAREHOUSE_ACCESS_MISSING_EXCEPTION" + + .. py:attribute:: WAREHOUSE_NOT_FOUND_EXCEPTION + :value: "WAREHOUSE_NOT_FOUND_EXCEPTION" + +.. py:class:: MessageStatus + + MessageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data sources. * `ASKING_AI`: Waiting for the LLM to respond to the user's question. * `EXECUTING_QUERY`: Executing AI provided SQL query. Get the SQL query result by calling [getMessageQueryResult](:method:genie/getMessageQueryResult) API. **Important: The message status will stay in the `EXECUTING_QUERY` until a client calls [getMessageQueryResult](:method:genie/getMessageQueryResult)**. 
* `FAILED`: Generating a response or executing the query failed. Please see `error` field. * `COMPLETED`: Message processing is completed. Results are in the `attachments` field. Get the SQL query result by calling [getMessageQueryResult](:method:genie/getMessageQueryResult) API. * `SUBMITTED`: Message has been submitted. * `QUERY_RESULT_EXPIRED`: SQL result is not available anymore. The user needs to execute the query again. * `CANCELLED`: Message has been cancelled. + + .. py:attribute:: ASKING_AI + :value: "ASKING_AI" + + .. py:attribute:: CANCELLED + :value: "CANCELLED" + + .. py:attribute:: COMPLETED + :value: "COMPLETED" + + .. py:attribute:: EXECUTING_QUERY + :value: "EXECUTING_QUERY" + + .. py:attribute:: FAILED + :value: "FAILED" + + .. py:attribute:: FETCHING_METADATA + :value: "FETCHING_METADATA" + + .. py:attribute:: QUERY_RESULT_EXPIRED + :value: "QUERY_RESULT_EXPIRED" + + .. py:attribute:: SUBMITTED + :value: "SUBMITTED" + .. autoclass:: MigrateDashboardRequest :members: :undoc-members: @@ -72,6 +245,14 @@ :members: :undoc-members: +.. autoclass:: QueryAttachment + :members: + :undoc-members: + +.. autoclass:: Result + :members: + :undoc-members: + .. autoclass:: Schedule :members: :undoc-members: @@ -100,6 +281,10 @@ :members: :undoc-members: +.. autoclass:: TextAttachment + :members: + :undoc-members: + .. autoclass:: TrashDashboardResponse :members: :undoc-members: diff --git a/docs/dbdataclasses/iam.rst b/docs/dbdataclasses/iam.rst index 9cafb78d..643da3d4 100644 --- a/docs/dbdataclasses/iam.rst +++ b/docs/dbdataclasses/iam.rst @@ -20,7 +20,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: DeleteWorkspaceAssignments +.. 
autoclass:: DeleteWorkspacePermissionAssignmentResponse :members: :undoc-members: @@ -82,6 +82,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: MigratePermissionsRequest + :members: + :undoc-members: + +.. autoclass:: MigratePermissionsResponse + :members: + :undoc-members: + .. autoclass:: Name :members: :undoc-members: @@ -191,6 +199,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: CAN_MANAGE_STAGING_VERSIONS :value: "CAN_MANAGE_STAGING_VERSIONS" + .. py:attribute:: CAN_MONITOR + :value: "CAN_MONITOR" + .. py:attribute:: CAN_QUERY :value: "CAN_QUERY" @@ -215,14 +226,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: IS_OWNER :value: "IS_OWNER" -.. autoclass:: PermissionMigrationRequest - :members: - :undoc-members: - -.. autoclass:: PermissionMigrationResponse - :members: - :undoc-members: - .. autoclass:: PermissionOutput :members: :undoc-members: diff --git a/docs/dbdataclasses/index.rst b/docs/dbdataclasses/index.rst index 893e488d..987bee7f 100644 --- a/docs/dbdataclasses/index.rst +++ b/docs/dbdataclasses/index.rst @@ -5,6 +5,7 @@ Dataclasses .. toctree:: :maxdepth: 1 + apps billing catalog compute diff --git a/docs/dbdataclasses/jobs.rst b/docs/dbdataclasses/jobs.rst index 81d81020..0f501f77 100644 --- a/docs/dbdataclasses/jobs.rst +++ b/docs/dbdataclasses/jobs.rst @@ -365,9 +365,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: HOURS :value: "HOURS" - .. py:attribute:: TIME_UNIT_UNSPECIFIED - :value: "TIME_UNIT_UNSPECIFIED" - .. 
py:attribute:: WEEKS :value: "WEEKS" diff --git a/docs/dbdataclasses/marketplace.rst b/docs/dbdataclasses/marketplace.rst index 5204dd1e..bb48967d 100644 --- a/docs/dbdataclasses/marketplace.rst +++ b/docs/dbdataclasses/marketplace.rst @@ -29,9 +29,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: ASSET_TYPE_NOTEBOOK :value: "ASSET_TYPE_NOTEBOOK" - .. py:attribute:: ASSET_TYPE_UNSPECIFIED - :value: "ASSET_TYPE_UNSPECIFIED" - .. autoclass:: BatchGetListingsResponse :members: :undoc-members: @@ -288,11 +285,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: FILE_STATUS_STAGING :value: "FILE_STATUS_STAGING" -.. py:class:: FilterType - - .. py:attribute:: METASTORE - :value: "METASTORE" - .. py:class:: FulfillmentType .. py:attribute:: INSTALL @@ -453,9 +445,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: LISTING_TAG_TYPE_TASK :value: "LISTING_TAG_TYPE_TASK" - .. py:attribute:: LISTING_TAG_TYPE_UNSPECIFIED - :value: "LISTING_TAG_TYPE_UNSPECIFIED" - .. py:class:: ListingType .. py:attribute:: PERSONALIZED @@ -494,29 +483,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: ProviderIconFile - :members: - :undoc-members: - -.. py:class:: ProviderIconType - - .. py:attribute:: DARK - :value: "DARK" - - .. py:attribute:: PRIMARY - :value: "PRIMARY" - - .. py:attribute:: PROVIDER_ICON_TYPE_UNSPECIFIED - :value: "PROVIDER_ICON_TYPE_UNSPECIFIED" - .. autoclass:: ProviderInfo :members: :undoc-members: -.. autoclass:: ProviderListingSummaryInfo - :members: - :undoc-members: - .. autoclass:: RegionInfo :members: :undoc-members: @@ -545,20 +515,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. py:class:: SortBy - - .. py:attribute:: SORT_BY_DATE - :value: "SORT_BY_DATE" - - .. 
py:attribute:: SORT_BY_RELEVANCE - :value: "SORT_BY_RELEVANCE" - - .. py:attribute:: SORT_BY_TITLE - :value: "SORT_BY_TITLE" - - .. py:attribute:: SORT_BY_UNSPECIFIED - :value: "SORT_BY_UNSPECIFIED" - .. autoclass:: TokenDetail :members: :undoc-members: @@ -630,7 +586,3 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: PUBLIC :value: "PUBLIC" - -.. autoclass:: VisibilityFilter - :members: - :undoc-members: diff --git a/docs/dbdataclasses/pipelines.rst b/docs/dbdataclasses/pipelines.rst index 385bf202..9d3d9c8a 100644 --- a/docs/dbdataclasses/pipelines.rst +++ b/docs/dbdataclasses/pipelines.rst @@ -97,19 +97,19 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: ListPipelineEventsResponse +.. autoclass:: IngestionPipelineDefinition :members: :undoc-members: -.. autoclass:: ListPipelinesResponse +.. autoclass:: ListPipelineEventsResponse :members: :undoc-members: -.. autoclass:: ListUpdatesResponse +.. autoclass:: ListPipelinesResponse :members: :undoc-members: -.. autoclass:: ManagedIngestionPipelineDefinition +.. autoclass:: ListUpdatesResponse :members: :undoc-members: @@ -251,6 +251,16 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: PipelineStateInfoHealth + + The health of a pipeline. + + .. py:attribute:: HEALTHY + :value: "HEALTHY" + + .. py:attribute:: UNHEALTHY + :value: "UNHEALTHY" + .. autoclass:: PipelineTrigger :members: :undoc-members: diff --git a/docs/dbdataclasses/serving.rst b/docs/dbdataclasses/serving.rst index 46cfe6a3..23ef3c25 100644 --- a/docs/dbdataclasses/serving.rst +++ b/docs/dbdataclasses/serving.rst @@ -32,84 +32,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: App - :members: - :undoc-members: - -.. autoclass:: AppDeployment - :members: - :undoc-members: - -.. 
autoclass:: AppDeploymentArtifacts - :members: - :undoc-members: - -.. py:class:: AppDeploymentMode - - .. py:attribute:: AUTO_SYNC - :value: "AUTO_SYNC" - - .. py:attribute:: MODE_UNSPECIFIED - :value: "MODE_UNSPECIFIED" - - .. py:attribute:: SNAPSHOT - :value: "SNAPSHOT" - -.. py:class:: AppDeploymentState - - .. py:attribute:: FAILED - :value: "FAILED" - - .. py:attribute:: IN_PROGRESS - :value: "IN_PROGRESS" - - .. py:attribute:: STATE_UNSPECIFIED - :value: "STATE_UNSPECIFIED" - - .. py:attribute:: STOPPED - :value: "STOPPED" - - .. py:attribute:: SUCCEEDED - :value: "SUCCEEDED" - -.. autoclass:: AppDeploymentStatus - :members: - :undoc-members: - -.. autoclass:: AppEnvironment - :members: - :undoc-members: - -.. py:class:: AppState - - .. py:attribute:: CREATING - :value: "CREATING" - - .. py:attribute:: DELETED - :value: "DELETED" - - .. py:attribute:: DELETING - :value: "DELETING" - - .. py:attribute:: ERROR - :value: "ERROR" - - .. py:attribute:: IDLE - :value: "IDLE" - - .. py:attribute:: RUNNING - :value: "RUNNING" - - .. py:attribute:: STARTING - :value: "STARTING" - - .. py:attribute:: STATE_UNSPECIFIED - :value: "STATE_UNSPECIFIED" - -.. autoclass:: AppStatus - :members: - :undoc-members: - .. autoclass:: AutoCaptureConfigInput :members: :undoc-members: @@ -147,14 +69,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CreateAppDeploymentRequest - :members: - :undoc-members: - -.. autoclass:: CreateAppRequest - :members: - :undoc-members: - .. autoclass:: CreateServingEndpoint :members: :undoc-members: @@ -212,6 +126,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: NOT_UPDATING :value: "NOT_UPDATING" + .. py:attribute:: UPDATE_CANCELED + :value: "UPDATE_CANCELED" + .. 
py:attribute:: UPDATE_FAILED :value: "UPDATE_FAILED" @@ -229,10 +146,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: EnvVariable - :members: - :undoc-members: - .. autoclass:: ExportMetricsResponse :members: :undoc-members: @@ -243,7 +156,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: ExternalModelProvider - The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving', 'openai', and 'palm'.", + The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', and 'palm'.", .. py:attribute:: AI21LABS :value: "AI21LABS" @@ -260,6 +173,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: DATABRICKS_MODEL_SERVING :value: "DATABRICKS_MODEL_SERVING" + .. py:attribute:: GOOGLE_CLOUD_VERTEX_AI + :value: "GOOGLE_CLOUD_VERTEX_AI" + .. py:attribute:: OPENAI :value: "OPENAI" @@ -282,11 +198,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: ListAppDeploymentsResponse - :members: - :undoc-members: - -.. autoclass:: ListAppsResponse +.. autoclass:: GoogleCloudVertexAiConfig :members: :undoc-members: @@ -506,26 +418,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: StartAppRequest - :members: - :undoc-members: - -.. autoclass:: StopAppRequest - :members: - :undoc-members: - -.. autoclass:: StopAppResponse - :members: - :undoc-members: - .. autoclass:: TrafficConfig :members: :undoc-members: -.. autoclass:: UpdateAppRequest - :members: - :undoc-members: - .. 
autoclass:: V1ResponseChoiceElement :members: :undoc-members: diff --git a/docs/dbdataclasses/settings.rst b/docs/dbdataclasses/settings.rst index cc142abf..0031512e 100644 --- a/docs/dbdataclasses/settings.rst +++ b/docs/dbdataclasses/settings.rst @@ -22,9 +22,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: ClusterAutoRestartMessageMaintenanceWindowDayOfWeek - .. py:attribute:: DAY_OF_WEEK_UNSPECIFIED - :value: "DAY_OF_WEEK_UNSPECIFIED" - .. py:attribute:: FRIDAY :value: "FRIDAY" @@ -73,9 +70,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: THIRD_OF_MONTH :value: "THIRD_OF_MONTH" - .. py:attribute:: WEEK_DAY_FREQUENCY_UNSPECIFIED - :value: "WEEK_DAY_FREQUENCY_UNSPECIFIED" - .. autoclass:: ClusterAutoRestartMessageMaintenanceWindowWindowStartTime :members: :undoc-members: @@ -92,8 +86,8 @@ These dataclasses are used in the SDK to represent API requests and responses fo Compliance standard for SHIELD customers - .. py:attribute:: COMPLIANCE_STANDARD_UNSPECIFIED - :value: "COMPLIANCE_STANDARD_UNSPECIFIED" + .. py:attribute:: CANADA_PROTECTED_B + :value: "CANADA_PROTECTED_B" .. py:attribute:: CYBER_ESSENTIAL_PLUS :value: "CYBER_ESSENTIAL_PLUS" @@ -122,6 +116,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: PCI_DSS :value: "PCI_DSS" +.. autoclass:: Config + :members: + :undoc-members: + .. autoclass:: CreateIpAccessList :members: :undoc-members: @@ -134,6 +132,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: CreateNotificationDestinationRequest + :members: + :undoc-members: + .. autoclass:: CreateOboTokenRequest :members: :undoc-members: @@ -202,6 +204,31 @@ These dataclasses are used in the SDK to represent API requests and responses fo +.. py:class:: DestinationType + + .. 
py:attribute:: EMAIL + :value: "EMAIL" + + .. py:attribute:: MICROSOFT_TEAMS + :value: "MICROSOFT_TEAMS" + + .. py:attribute:: PAGERDUTY + :value: "PAGERDUTY" + + .. py:attribute:: SLACK + :value: "SLACK" + + .. py:attribute:: WEBHOOK + :value: "WEBHOOK" + +.. autoclass:: EmailConfig + :members: + :undoc-members: + +.. autoclass:: Empty + :members: + :undoc-members: + .. autoclass:: EnhancedSecurityMonitoring :members: :undoc-members: @@ -234,6 +261,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: GenericWebhookConfig + :members: + :undoc-members: + .. autoclass:: GetIpAccessListResponse :members: :undoc-members: @@ -266,6 +297,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: ListNotificationDestinationsResponse + :members: + :undoc-members: + +.. autoclass:: ListNotificationDestinationsResult + :members: + :undoc-members: + .. autoclass:: ListPublicTokensResponse :members: :undoc-members: @@ -285,6 +324,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: BLOCK :value: "BLOCK" +.. autoclass:: MicrosoftTeamsConfig + :members: + :undoc-members: + .. autoclass:: NccAwsStableIpRule :members: :undoc-members: @@ -349,6 +392,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: NotificationDestination + :members: + :undoc-members: + +.. autoclass:: PagerdutyConfig + :members: + :undoc-members: + .. autoclass:: PartitionId :members: :undoc-members: @@ -395,9 +446,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: RESTRICT_TOKENS_AND_JOB_RUN_AS :value: "RESTRICT_TOKENS_AND_JOB_RUN_AS" - .. py:attribute:: STATUS_UNSPECIFIED - :value: "STATUS_UNSPECIFIED" - .. 
autoclass:: RestrictWorkspaceAdminsSetting :members: :undoc-members: @@ -414,6 +462,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: SlackConfig + :members: + :undoc-members: + .. autoclass:: StringMessage :members: :undoc-members: @@ -488,6 +540,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: UpdateNotificationDestinationRequest + :members: + :undoc-members: + .. autoclass:: UpdatePersonalComputeSettingRequest :members: :undoc-members: diff --git a/docs/dbdataclasses/sharing.rst b/docs/dbdataclasses/sharing.rst index f25f3f57..ded587fe 100644 --- a/docs/dbdataclasses/sharing.rst +++ b/docs/dbdataclasses/sharing.rst @@ -265,6 +265,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: EXECUTE :value: "EXECUTE" + .. py:attribute:: MANAGE + :value: "MANAGE" + .. py:attribute:: MANAGE_ALLOWLIST :value: "MANAGE_ALLOWLIST" diff --git a/docs/dbdataclasses/sql.rst b/docs/dbdataclasses/sql.rst index fe1469a3..b39ea9ed 100644 --- a/docs/dbdataclasses/sql.rst +++ b/docs/dbdataclasses/sql.rst @@ -12,6 +12,49 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: AlertCondition + :members: + :undoc-members: + +.. autoclass:: AlertConditionOperand + :members: + :undoc-members: + +.. autoclass:: AlertConditionThreshold + :members: + :undoc-members: + +.. autoclass:: AlertOperandColumn + :members: + :undoc-members: + +.. autoclass:: AlertOperandValue + :members: + :undoc-members: + +.. py:class:: AlertOperator + + .. py:attribute:: EQUAL + :value: "EQUAL" + + .. py:attribute:: GREATER_THAN + :value: "GREATER_THAN" + + .. py:attribute:: GREATER_THAN_OR_EQUAL + :value: "GREATER_THAN_OR_EQUAL" + + .. py:attribute:: IS_NULL + :value: "IS_NULL" + + .. py:attribute:: LESS_THAN + :value: "LESS_THAN" + + .. 
py:attribute:: LESS_THAN_OR_EQUAL + :value: "LESS_THAN_OR_EQUAL" + + .. py:attribute:: NOT_EQUAL + :value: "NOT_EQUAL" + .. autoclass:: AlertOptions :members: :undoc-members: @@ -35,8 +78,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: AlertState - State of the alert. Possible values are: `unknown` (yet to be evaluated), `triggered` (evaluated and fulfilled trigger conditions), or `ok` (evaluated and did not fulfill trigger conditions). - .. py:attribute:: OK :value: "OK" @@ -64,8 +105,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: ChannelName - Name of the channel - .. py:attribute:: CHANNEL_NAME_CURRENT :value: "CHANNEL_NAME_CURRENT" @@ -81,6 +120,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: CHANNEL_NAME_UNSPECIFIED :value: "CHANNEL_NAME_UNSPECIFIED" +.. autoclass:: ClientCallContext + :members: + :undoc-members: + .. autoclass:: ColumnInfo :members: :undoc-members: @@ -146,10 +189,38 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: USER_DEFINED_TYPE :value: "USER_DEFINED_TYPE" +.. autoclass:: ContextFilter + :members: + :undoc-members: + .. autoclass:: CreateAlert :members: :undoc-members: +.. autoclass:: CreateAlertRequest + :members: + :undoc-members: + +.. autoclass:: CreateAlertRequestAlert + :members: + :undoc-members: + +.. autoclass:: CreateQueryRequest + :members: + :undoc-members: + +.. autoclass:: CreateQueryRequestQuery + :members: + :undoc-members: + +.. autoclass:: CreateVisualizationRequest + :members: + :undoc-members: + +.. autoclass:: CreateVisualizationRequestVisualization + :members: + :undoc-members: + .. autoclass:: CreateWarehouseRequest :members: :undoc-members: @@ -195,6 +266,90 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: DatePrecision + + .. 
py:attribute:: DAY_PRECISION + :value: "DAY_PRECISION" + + .. py:attribute:: MINUTE_PRECISION + :value: "MINUTE_PRECISION" + + .. py:attribute:: SECOND_PRECISION + :value: "SECOND_PRECISION" + +.. autoclass:: DateRange + :members: + :undoc-members: + +.. autoclass:: DateRangeValue + :members: + :undoc-members: + +.. py:class:: DateRangeValueDynamicDateRange + + .. py:attribute:: LAST_12_MONTHS + :value: "LAST_12_MONTHS" + + .. py:attribute:: LAST_14_DAYS + :value: "LAST_14_DAYS" + + .. py:attribute:: LAST_24_HOURS + :value: "LAST_24_HOURS" + + .. py:attribute:: LAST_30_DAYS + :value: "LAST_30_DAYS" + + .. py:attribute:: LAST_60_DAYS + :value: "LAST_60_DAYS" + + .. py:attribute:: LAST_7_DAYS + :value: "LAST_7_DAYS" + + .. py:attribute:: LAST_8_HOURS + :value: "LAST_8_HOURS" + + .. py:attribute:: LAST_90_DAYS + :value: "LAST_90_DAYS" + + .. py:attribute:: LAST_HOUR + :value: "LAST_HOUR" + + .. py:attribute:: LAST_MONTH + :value: "LAST_MONTH" + + .. py:attribute:: LAST_WEEK + :value: "LAST_WEEK" + + .. py:attribute:: LAST_YEAR + :value: "LAST_YEAR" + + .. py:attribute:: THIS_MONTH + :value: "THIS_MONTH" + + .. py:attribute:: THIS_WEEK + :value: "THIS_WEEK" + + .. py:attribute:: THIS_YEAR + :value: "THIS_YEAR" + + .. py:attribute:: TODAY + :value: "TODAY" + + .. py:attribute:: YESTERDAY + :value: "YESTERDAY" + +.. autoclass:: DateValue + :members: + :undoc-members: + +.. py:class:: DateValueDynamicDate + + .. py:attribute:: NOW + :value: "NOW" + + .. py:attribute:: YESTERDAY + :value: "YESTERDAY" + .. autoclass:: DeleteResponse :members: :undoc-members: @@ -205,13 +360,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: Disposition - The fetch disposition provides two modes of fetching results: `INLINE` and `EXTERNAL_LINKS`. - Statements executed with `INLINE` disposition will return result data inline, in `JSON_ARRAY` format, in a series of chunks. 
If a given statement produces a result set with a size larger than 25 MiB, that statement execution is aborted, and no result set will be available. - **NOTE** Byte limits are computed based upon internal representations of the result set data, and might not match the sizes visible in JSON responses. - Statements executed with `EXTERNAL_LINKS` disposition will return result data as external links: URLs that point to cloud storage internal to the workspace. Using `EXTERNAL_LINKS` disposition allows statements to generate arbitrarily sized result sets for fetching up to 100 GiB. The resulting links have two important properties: - 1. They point to resources _external_ to the Databricks compute; therefore any associated authentication information (typically a personal access token, OAuth token, or similar) _must be removed_ when fetching from these links. - 2. These are presigned URLs with a specific expiration, indicated in the response. The behavior when attempting to use an expired link is cloud specific. - .. py:attribute:: EXTERNAL_LINKS :value: "EXTERNAL_LINKS" @@ -243,6 +391,24 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: Empty + :members: + :undoc-members: + +.. autoclass:: EncodedText + :members: + :undoc-members: + +.. py:class:: EncodedTextEncoding + + Carry text data in different form. + + .. py:attribute:: BASE64 + :value: "BASE64" + + .. py:attribute:: PLAIN + :value: "PLAIN" + .. autoclass:: EndpointConfPair :members: :undoc-members: @@ -276,6 +442,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: EnumValue + :members: + :undoc-members: + .. autoclass:: ExecuteStatementRequest :members: :undoc-members: @@ -290,10 +460,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: CONTINUE :value: "CONTINUE" -.. 
autoclass:: ExecuteStatementResponse - :members: - :undoc-members: - .. autoclass:: ExternalLink :members: :undoc-members: @@ -313,10 +479,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: GetStatementResponse - :members: - :undoc-members: - .. autoclass:: GetWarehousePermissionLevelsResponse :members: :undoc-members: @@ -355,6 +517,47 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: PASSTHROUGH :value: "PASSTHROUGH" +.. autoclass:: LegacyAlert + :members: + :undoc-members: + +.. py:class:: LegacyAlertState + + State of the alert. Possible values are: `unknown` (yet to be evaluated), `triggered` (evaluated and fulfilled trigger conditions), or `ok` (evaluated and did not fulfill trigger conditions). + + .. py:attribute:: OK + :value: "OK" + + .. py:attribute:: TRIGGERED + :value: "TRIGGERED" + + .. py:attribute:: UNKNOWN + :value: "UNKNOWN" + +.. autoclass:: LegacyQuery + :members: + :undoc-members: + +.. autoclass:: LegacyVisualization + :members: + :undoc-members: + +.. py:class:: LifecycleState + + .. py:attribute:: ACTIVE + :value: "ACTIVE" + + .. py:attribute:: TRASHED + :value: "TRASHED" + +.. autoclass:: ListAlertsResponse + :members: + :undoc-members: + +.. autoclass:: ListAlertsResponseAlert + :members: + :undoc-members: + .. py:class:: ListOrder .. py:attribute:: CREATED_AT @@ -367,10 +570,22 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: ListQueryObjectsResponse + :members: + :undoc-members: + +.. autoclass:: ListQueryObjectsResponseQuery + :members: + :undoc-members: + .. autoclass:: ListResponse :members: :undoc-members: +.. autoclass:: ListVisualizationsForQueryResponse + :members: + :undoc-members: + .. 
autoclass:: ListWarehousesResponse :members: :undoc-members: @@ -379,6 +594,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: NumericValue + :members: + :undoc-members: + .. py:class:: ObjectType A singular noun object type. @@ -469,7 +688,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: PlansState - Whether plans exist for the execution, or the reason why they are missing + Possible Reasons for which we have not saved plans in the database .. py:attribute:: EMPTY :value: "EMPTY" @@ -493,6 +712,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: QueryBackedValue + :members: + :undoc-members: + .. autoclass:: QueryEditContent :members: :undoc-members: @@ -517,13 +740,87 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: QueryParameter + :members: + :undoc-members: + .. autoclass:: QueryPostContent :members: :undoc-members: -.. py:class:: QueryStatementType +.. autoclass:: QuerySource + :members: + :undoc-members: + +.. autoclass:: QuerySourceDriverInfo + :members: + :undoc-members: + +.. py:class:: QuerySourceEntryPoint + + Spark service that received and processed the query + + .. py:attribute:: DLT + :value: "DLT" + + .. py:attribute:: SPARK_CONNECT + :value: "SPARK_CONNECT" + + .. py:attribute:: THRIFT_SERVER + :value: "THRIFT_SERVER" + +.. py:class:: QuerySourceJobManager + + Copied from elastic-spark-common/api/messages/manager.proto with enum values changed by 1 to accommodate JOB_MANAGER_UNSPECIFIED - Type of statement for this query + .. py:attribute:: APP_SYSTEM_TABLE + :value: "APP_SYSTEM_TABLE" + + .. py:attribute:: AUTOML + :value: "AUTOML" + + .. py:attribute:: AUTO_MAINTENANCE + :value: "AUTO_MAINTENANCE" + + .. py:attribute:: CLEAN_ROOMS + :value: "CLEAN_ROOMS" + + .. 
py:attribute:: DATA_MONITORING + :value: "DATA_MONITORING" + + .. py:attribute:: DATA_SHARING + :value: "DATA_SHARING" + + .. py:attribute:: ENCRYPTION + :value: "ENCRYPTION" + + .. py:attribute:: FABRIC_CRAWLER + :value: "FABRIC_CRAWLER" + + .. py:attribute:: JOBS + :value: "JOBS" + + .. py:attribute:: LAKEVIEW + :value: "LAKEVIEW" + + .. py:attribute:: MANAGED_RAG + :value: "MANAGED_RAG" + + .. py:attribute:: SCHEDULED_MV_REFRESH + :value: "SCHEDULED_MV_REFRESH" + + .. py:attribute:: TESTING + :value: "TESTING" + +.. py:class:: QuerySourceTrigger + + .. py:attribute:: MANUAL + :value: "MANUAL" + + .. py:attribute:: SCHEDULED + :value: "SCHEDULED" + +.. py:class:: QueryStatementType .. py:attribute:: ALTER :value: "ALTER" @@ -593,11 +890,17 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: QueryStatus - Query status with one the following values: * `QUEUED`: Query has been received and queued. * `RUNNING`: Query has started. * `CANCELED`: Query has been cancelled by the user. * `FAILED`: Query has failed. * `FINISHED`: Query has completed. + Statuses which are also used by OperationStatus in runtime .. py:attribute:: CANCELED :value: "CANCELED" + .. py:attribute:: COMPILED + :value: "COMPILED" + + .. py:attribute:: COMPILING + :value: "COMPILING" + .. py:attribute:: FAILED :value: "FAILED" @@ -610,6 +913,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: RUNNING :value: "RUNNING" + .. py:attribute:: STARTED + :value: "STARTED" + .. autoclass:: RepeatedEndpointConfPairs :members: :undoc-members: @@ -630,6 +936,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: RunAsMode + + .. py:attribute:: OWNER + :value: "OWNER" + + .. py:attribute:: VIEWER + :value: "VIEWER" + .. py:class:: RunAsRole Sets the **Run as** role for the object. 
Must be set to one of `"viewer"` (signifying "run as viewer" behavior) or `"owner"` (signifying "run as owner" behavior) @@ -640,6 +954,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: VIEWER :value: "VIEWER" +.. autoclass:: ServerlessChannelInfo + :members: + :undoc-members: + .. autoclass:: ServiceError :members: :undoc-members: @@ -756,6 +1074,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: StatementResponse + :members: + :undoc-members: + .. py:class:: StatementState Statement execution state: - `PENDING`: waiting for warehouse - `RUNNING`: running - `SUCCEEDED`: execution was successful, result data available for fetch - `FAILED`: execution failed; reason for failure described in accomanying error message - `CANCELED`: user canceled; can come from explicit cancel call, or timeout with `on_wait_timeout=CANCEL` - `CLOSED`: execution successful, and statement closed; result no longer available for fetch @@ -1072,6 +1394,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: SUCCESS :value: "SUCCESS" +.. autoclass:: TextValue + :members: + :undoc-members: + .. autoclass:: TimeRange :members: :undoc-members: @@ -1080,10 +1406,34 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: UpdateAlertRequest + :members: + :undoc-members: + +.. autoclass:: UpdateAlertRequestAlert + :members: + :undoc-members: + +.. autoclass:: UpdateQueryRequest + :members: + :undoc-members: + +.. autoclass:: UpdateQueryRequestQuery + :members: + :undoc-members: + .. autoclass:: UpdateResponse :members: :undoc-members: +.. autoclass:: UpdateVisualizationRequest + :members: + :undoc-members: + +.. autoclass:: UpdateVisualizationRequestVisualization + :members: + :undoc-members: + .. 
autoclass:: User :members: :undoc-members: @@ -1111,6 +1461,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: CAN_MANAGE :value: "CAN_MANAGE" + .. py:attribute:: CAN_MONITOR + :value: "CAN_MONITOR" + .. py:attribute:: CAN_USE :value: "CAN_USE" diff --git a/docs/workspace/apps/apps.rst b/docs/workspace/apps/apps.rst new file mode 100644 index 00000000..455bb81c --- /dev/null +++ b/docs/workspace/apps/apps.rst @@ -0,0 +1,220 @@ +``w.apps``: Apps +================ +.. currentmodule:: databricks.sdk.service.apps + +.. py:class:: AppsAPI + + Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend + Databricks services, and enable users to interact through single sign-on. + + .. py:method:: create(name: str [, description: Optional[str]]) -> Wait[App] + + Create an app. + + Creates a new app. + + :param name: str + The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It + must be unique within the workspace. + :param description: str (optional) + The description of the app. + + :returns: + Long-running operation waiter for :class:`App`. + See :method:wait_get_app_idle for more details. + + + .. py:method:: create_and_wait(name: str [, description: Optional[str], timeout: datetime.timedelta = 0:20:00]) -> App + + + .. py:method:: delete(name: str) + + Delete an app. + + Deletes an app. + + :param name: str + The name of the app. + + + + + .. py:method:: deploy(app_name: str, source_code_path: str [, mode: Optional[AppDeploymentMode]]) -> Wait[AppDeployment] + + Create an app deployment. + + Creates an app deployment for the app with the supplied name. + + :param app_name: str + The name of the app. + :param source_code_path: str + The workspace file system path of the source code used to create the app deployment. This is + different from `deployment_artifacts.source_code_path`, which is the path used by the deployed app. 
+ The former refers to the original source code location of the app in the workspace during deployment + creation, whereas the latter provides a system generated stable snapshotted source code path used by + the deployment. + :param mode: :class:`AppDeploymentMode` (optional) + The mode in which the deployment will manage the source code. + + :returns: + Long-running operation waiter for :class:`AppDeployment`. + See :method:wait_get_deployment_app_succeeded for more details. + + + .. py:method:: deploy_and_wait(app_name: str, source_code_path: str [, mode: Optional[AppDeploymentMode], timeout: datetime.timedelta = 0:20:00]) -> AppDeployment + + + .. py:method:: get(name: str) -> App + + Get an app. + + Retrieves information for the app with the supplied name. + + :param name: str + The name of the app. + + :returns: :class:`App` + + + .. py:method:: get_deployment(app_name: str, deployment_id: str) -> AppDeployment + + Get an app deployment. + + Retrieves information for the app deployment with the supplied name and deployment id. + + :param app_name: str + The name of the app. + :param deployment_id: str + The unique id of the deployment. + + :returns: :class:`AppDeployment` + + + .. py:method:: get_permission_levels(app_name: str) -> GetAppPermissionLevelsResponse + + Get app permission levels. + + Gets the permission levels that a user can have on an object. + + :param app_name: str + The app for which to get or manage permissions. + + :returns: :class:`GetAppPermissionLevelsResponse` + + + .. py:method:: get_permissions(app_name: str) -> AppPermissions + + Get app permissions. + + Gets the permissions of an app. Apps can inherit permissions from their root object. + + :param app_name: str + The app for which to get or manage permissions. + + :returns: :class:`AppPermissions` + + + .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[App] + + List apps. + + Lists all apps in the workspace. 
+ + :param page_size: int (optional) + Upper bound for items returned. + :param page_token: str (optional) + Pagination token to go to the next page of apps. Requests first page if absent. + + :returns: Iterator over :class:`App` + + + .. py:method:: list_deployments(app_name: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[AppDeployment] + + List app deployments. + + Lists all app deployments for the app with the supplied name. + + :param app_name: str + The name of the app. + :param page_size: int (optional) + Upper bound for items returned. + :param page_token: str (optional) + Pagination token to go to the next page of apps. Requests first page if absent. + + :returns: Iterator over :class:`AppDeployment` + + + .. py:method:: set_permissions(app_name: str [, access_control_list: Optional[List[AppAccessControlRequest]]]) -> AppPermissions + + Set app permissions. + + Sets permissions on an app. Apps can inherit permissions from their root object. + + :param app_name: str + The app for which to get or manage permissions. + :param access_control_list: List[:class:`AppAccessControlRequest`] (optional) + + :returns: :class:`AppPermissions` + + + .. py:method:: start(name: str) -> Wait[AppDeployment] + + Start an app. + + Start the last active deployment of the app in the workspace. + + :param name: str + The name of the app. + + :returns: + Long-running operation waiter for :class:`AppDeployment`. + See :method:wait_get_deployment_app_succeeded for more details. + + + .. py:method:: start_and_wait(name: str, timeout: datetime.timedelta = 0:20:00) -> AppDeployment + + + .. py:method:: stop(name: str) + + Stop an app. + + Stops the active deployment of the app in the workspace. + + :param name: str + The name of the app. + + + + + .. py:method:: update(name: str [, description: Optional[str]]) -> App + + Update an app. + + Updates the app with the supplied name. + + :param name: str + The name of the app. 
The name must contain only lowercase alphanumeric characters and hyphens. It + must be unique within the workspace. + :param description: str (optional) + The description of the app. + + :returns: :class:`App` + + + .. py:method:: update_permissions(app_name: str [, access_control_list: Optional[List[AppAccessControlRequest]]]) -> AppPermissions + + Update app permissions. + + Updates the permissions on an app. Apps can inherit permissions from their root object. + + :param app_name: str + The app for which to get or manage permissions. + :param access_control_list: List[:class:`AppAccessControlRequest`] (optional) + + :returns: :class:`AppPermissions` + + + .. py:method:: wait_get_app_idle(name: str, timeout: datetime.timedelta = 0:20:00, callback: Optional[Callable[[App], None]]) -> App + + + .. py:method:: wait_get_deployment_app_succeeded(app_name: str, deployment_id: str, timeout: datetime.timedelta = 0:20:00, callback: Optional[Callable[[AppDeployment], None]]) -> AppDeployment diff --git a/docs/workspace/apps/index.rst b/docs/workspace/apps/index.rst new file mode 100644 index 00000000..bd21c93a --- /dev/null +++ b/docs/workspace/apps/index.rst @@ -0,0 +1,10 @@ + +Apps +==== + +Build custom applications on Databricks + +.. toctree:: + :maxdepth: 1 + + apps \ No newline at end of file diff --git a/docs/workspace/catalog/model_versions.rst b/docs/workspace/catalog/model_versions.rst index 017a6aa1..bae6f25f 100644 --- a/docs/workspace/catalog/model_versions.rst +++ b/docs/workspace/catalog/model_versions.rst @@ -30,7 +30,7 @@ - .. py:method:: get(full_name: str, version: int [, include_browse: Optional[bool]]) -> RegisteredModelInfo + .. py:method:: get(full_name: str, version: int [, include_aliases: Optional[bool], include_browse: Optional[bool]]) -> ModelVersionInfo Get a Model Version. 
@@ -44,14 +44,16 @@ The three-level (fully qualified) name of the model version :param version: int The integer version number of the model version + :param include_aliases: bool (optional) + Whether to include aliases associated with the model version in the response :param include_browse: bool (optional) Whether to include model versions in the response for which the principal can only access selective metadata for - :returns: :class:`RegisteredModelInfo` + :returns: :class:`ModelVersionInfo` - .. py:method:: get_by_alias(full_name: str, alias: str) -> ModelVersionInfo + .. py:method:: get_by_alias(full_name: str, alias: str [, include_aliases: Optional[bool]]) -> ModelVersionInfo Get Model Version By Alias. @@ -65,6 +67,8 @@ The three-level (fully qualified) name of the registered model :param alias: str The name of the alias + :param include_aliases: bool (optional) + Whether to include aliases associated with the model version in the response :returns: :class:`ModelVersionInfo` diff --git a/docs/workspace/catalog/registered_models.rst b/docs/workspace/catalog/registered_models.rst index 6a60c4f6..b05a702b 100644 --- a/docs/workspace/catalog/registered_models.rst +++ b/docs/workspace/catalog/registered_models.rst @@ -91,7 +91,7 @@ - .. py:method:: get(full_name: str [, include_browse: Optional[bool]]) -> RegisteredModelInfo + .. py:method:: get(full_name: str [, include_aliases: Optional[bool], include_browse: Optional[bool]]) -> RegisteredModelInfo Get a Registered Model. 
@@ -103,6 +103,8 @@ :param full_name: str The three-level (fully qualified) name of the registered model + :param include_aliases: bool (optional) + Whether to include registered model aliases in the response :param include_browse: bool (optional) Whether to include registered models in the response for which the principal can only access selective metadata for diff --git a/docs/workspace/catalog/schemas.rst b/docs/workspace/catalog/schemas.rst index 1c9fcbbd..feaf7c7a 100644 --- a/docs/workspace/catalog/schemas.rst +++ b/docs/workspace/catalog/schemas.rst @@ -49,7 +49,7 @@ :returns: :class:`SchemaInfo` - .. py:method:: delete(full_name: str) + .. py:method:: delete(full_name: str [, force: Optional[bool]]) Delete a schema. @@ -58,6 +58,8 @@ :param full_name: str Full name of the schema. + :param force: bool (optional) + Force deletion even if the schema is not empty. diff --git a/docs/workspace/catalog/system_schemas.rst b/docs/workspace/catalog/system_schemas.rst index b9ab3b0f..2028a362 100644 --- a/docs/workspace/catalog/system_schemas.rst +++ b/docs/workspace/catalog/system_schemas.rst @@ -37,7 +37,7 @@ - .. py:method:: list(metastore_id: str) -> Iterator[SystemSchemaInfo] + .. py:method:: list(metastore_id: str [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[SystemSchemaInfo] List system schemas. @@ -46,6 +46,13 @@ :param metastore_id: str The ID for the metastore in which the system schema resides. + :param max_results: int (optional) + Maximum number of schemas to return. - When set to 0, the page length is set to a server configured + value (recommended); - When set to a value greater than 0, the page length is the minimum of this + value and a server configured value; - When set to a value less than 0, an invalid parameter error + is returned; - If not set, all the schemas are returned (not recommended). + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. 
:returns: Iterator over :class:`SystemSchemaInfo` \ No newline at end of file diff --git a/docs/workspace/catalog/workspace_bindings.rst b/docs/workspace/catalog/workspace_bindings.rst index e1ec753d..08a74b29 100644 --- a/docs/workspace/catalog/workspace_bindings.rst +++ b/docs/workspace/catalog/workspace_bindings.rst @@ -17,7 +17,7 @@ the new path (/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}) which introduces the ability to bind a securable in READ_ONLY mode (catalogs only). - Securables that support binding: - catalog + Securable types that support binding: - catalog - storage_credential - external_location .. py:method:: get(name: str) -> CurrentWorkspaceBindings @@ -50,19 +50,26 @@ :returns: :class:`CurrentWorkspaceBindings` - .. py:method:: get_bindings(securable_type: str, securable_name: str) -> WorkspaceBindingsResponse + .. py:method:: get_bindings(securable_type: GetBindingsSecurableType, securable_name: str [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[WorkspaceBinding] Get securable workspace bindings. Gets workspace bindings of the securable. The caller must be a metastore admin or an owner of the securable. - :param securable_type: str - The type of the securable. + :param securable_type: :class:`GetBindingsSecurableType` + The type of the securable to bind to a workspace. :param securable_name: str The name of the securable. + :param max_results: int (optional) + Maximum number of workspace bindings to return. - When set to 0, the page length is set to a server + configured value (recommended); - When set to a value greater than 0, the page length is the minimum + of this value and a server configured value; - When set to a value less than 0, an invalid parameter + error is returned; - If not set, all the workspace bindings are returned (not recommended). + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. 
- :returns: :class:`WorkspaceBindingsResponse` + :returns: Iterator over :class:`WorkspaceBinding` .. py:method:: update(name: str [, assign_workspaces: Optional[List[int]], unassign_workspaces: Optional[List[int]]]) -> CurrentWorkspaceBindings @@ -103,15 +110,15 @@ :returns: :class:`CurrentWorkspaceBindings` - .. py:method:: update_bindings(securable_type: str, securable_name: str [, add: Optional[List[WorkspaceBinding]], remove: Optional[List[WorkspaceBinding]]]) -> WorkspaceBindingsResponse + .. py:method:: update_bindings(securable_type: UpdateBindingsSecurableType, securable_name: str [, add: Optional[List[WorkspaceBinding]], remove: Optional[List[WorkspaceBinding]]]) -> WorkspaceBindingsResponse Update securable workspace bindings. Updates workspace bindings of the securable. The caller must be a metastore admin or an owner of the securable. - :param securable_type: str - The type of the securable. + :param securable_type: :class:`UpdateBindingsSecurableType` + The type of the securable to bind to a workspace. :param securable_name: str The name of the securable. :param add: List[:class:`WorkspaceBinding`] (optional) diff --git a/docs/workspace/compute/cluster_policies.rst b/docs/workspace/compute/cluster_policies.rst index b6e67acf..1cefc8ca 100644 --- a/docs/workspace/compute/cluster_policies.rst +++ b/docs/workspace/compute/cluster_policies.rst @@ -22,7 +22,7 @@ If no policies exist in the workspace, the Policy drop-down doesn't appear. Only admin users can create, edit, and delete policies. Admin users also have access to all policies. - .. py:method:: create(name: str [, definition: Optional[str], description: Optional[str], libraries: Optional[List[Library]], max_clusters_per_user: Optional[int], policy_family_definition_overrides: Optional[str], policy_family_id: Optional[str]]) -> CreatePolicyResponse + .. 
py:method:: create( [, definition: Optional[str], description: Optional[str], libraries: Optional[List[Library]], max_clusters_per_user: Optional[int], name: Optional[str], policy_family_definition_overrides: Optional[str], policy_family_id: Optional[str]]) -> CreatePolicyResponse Usage: @@ -51,9 +51,6 @@ Creates a new policy with prescribed settings. - :param name: str - Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and 100 - characters. :param definition: str (optional) Policy definition document expressed in [Databricks Cluster Policy Definition Language]. @@ -66,6 +63,9 @@ :param max_clusters_per_user: int (optional) Max number of clusters per user that can be active using this policy. If not present, there is no max limit. + :param name: str (optional) + Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and 100 + characters. :param policy_family_definition_overrides: str (optional) Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON document must be passed as a string and cannot be embedded in the requests. @@ -96,7 +96,7 @@ - .. py:method:: edit(policy_id: str, name: str [, definition: Optional[str], description: Optional[str], libraries: Optional[List[Library]], max_clusters_per_user: Optional[int], policy_family_definition_overrides: Optional[str], policy_family_id: Optional[str]]) + .. py:method:: edit(policy_id: str [, definition: Optional[str], description: Optional[str], libraries: Optional[List[Library]], max_clusters_per_user: Optional[int], name: Optional[str], policy_family_definition_overrides: Optional[str], policy_family_id: Optional[str]]) Usage: @@ -140,9 +140,6 @@ :param policy_id: str The ID of the policy to update. - :param name: str - Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and 100 - characters. 
:param definition: str (optional) Policy definition document expressed in [Databricks Cluster Policy Definition Language]. @@ -155,6 +152,9 @@ :param max_clusters_per_user: int (optional) Max number of clusters per user that can be active using this policy. If not present, there is no max limit. + :param name: str (optional) + Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and 100 + characters. :param policy_family_definition_overrides: str (optional) Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON document must be passed as a string and cannot be embedded in the requests. @@ -205,7 +205,7 @@ Get a cluster policy entity. Creation and editing is available to admins only. :param policy_id: str - Canonical unique identifier for the cluster policy. + Canonical unique identifier for the Cluster Policy. :returns: :class:`Policy` diff --git a/docs/workspace/compute/clusters.rst b/docs/workspace/compute/clusters.rst index 58362d05..601b5581 100644 --- a/docs/workspace/compute/clusters.rst +++ b/docs/workspace/compute/clusters.rst @@ -21,9 +21,8 @@ restart an all-purpose cluster. Multiple users can share such clusters to do collaborative interactive analysis. - IMPORTANT: Databricks retains cluster configuration information for up to 200 all-purpose clusters - terminated in the last 30 days and up to 30 job clusters recently terminated by the job scheduler. To keep - an all-purpose cluster configuration even after it has been terminated for more than 30 days, an + IMPORTANT: Databricks retains cluster configuration information for terminated clusters for 30 days. To + keep an all-purpose cluster configuration even after it has been terminated for more than 30 days, an administrator can pin a cluster to the cluster list. .. py:method:: change_owner(cluster_id: str, owner_username: str) @@ -604,7 +603,7 @@ :returns: :class:`ClusterPermissions` - .. 
py:method:: list( [, can_use_client: Optional[str]]) -> Iterator[ClusterDetails] + .. py:method:: list( [, filter_by: Optional[ListClustersFilterBy], page_size: Optional[int], page_token: Optional[str], sort_by: Optional[ListClustersSortBy]]) -> Iterator[ClusterDetails] Usage: @@ -618,21 +617,21 @@ all = w.clusters.list(compute.ListClustersRequest()) - List all clusters. + List clusters. - Return information about all pinned clusters, active clusters, up to 200 of the most recently - terminated all-purpose clusters in the past 30 days, and up to 30 of the most recently terminated job - clusters in the past 30 days. + Return information about all pinned and active clusters, and all clusters terminated within the last + 30 days. Clusters terminated prior to this period are not included. - For example, if there is 1 pinned cluster, 4 active clusters, 45 terminated all-purpose clusters in - the past 30 days, and 50 terminated job clusters in the past 30 days, then this API returns the 1 - pinned cluster, 4 active clusters, all 45 terminated all-purpose clusters, and the 30 most recently - terminated job clusters. - - :param can_use_client: str (optional) - Filter clusters based on what type of client it can be used for. Could be either NOTEBOOKS or JOBS. - No input for this field will get all clusters in the workspace without filtering on its supported - client + :param filter_by: :class:`ListClustersFilterBy` (optional) + Filters to apply to the list of clusters. + :param page_size: int (optional) + Use this field to specify the maximum number of results to be returned by the server. The server may + further constrain the maximum number of results returned in a single page. + :param page_token: str (optional) + Use next_page_token or prev_page_token returned from the previous request to list the next or + previous page of clusters respectively. + :param sort_by: :class:`ListClustersSortBy` (optional) + Sort the list of clusters by a specific criteria. 
:returns: Iterator over :class:`ClusterDetails` @@ -1000,6 +999,37 @@ + .. py:method:: update(cluster_id: str, update_mask: str [, cluster: Optional[UpdateClusterResource]]) -> Wait[ClusterDetails] + + Update cluster configuration (partial). + + Updates the configuration of a cluster to match the partial set of attributes and size. Denote which + fields to update using the `update_mask` field in the request body. A cluster can be updated if it is + in a `RUNNING` or `TERMINATED` state. If a cluster is updated while in a `RUNNING` state, it will be + restarted so that the new attributes can take effect. If a cluster is updated while in a `TERMINATED` + state, it will remain `TERMINATED`. The updated attributes will take effect the next time the cluster + is started using the `clusters/start` API. Attempts to update a cluster in any other state will be + rejected with an `INVALID_STATE` error code. Clusters created by the Databricks Jobs service cannot be + updated. + + :param cluster_id: str + ID of the cluster. + :param update_mask: str + Specifies which fields of the cluster will be updated. This is required in the POST request. The + update mask should be supplied as a single string. To specify multiple fields, separate them with + commas (no spaces). To delete a field from a cluster configuration, add it to the `update_mask` + string but omit it from the `cluster` object. + :param cluster: :class:`UpdateClusterResource` (optional) + The cluster to be updated. + + :returns: + Long-running operation waiter for :class:`ClusterDetails`. + See :method:wait_get_cluster_running for more details. + + + .. py:method:: update_and_wait(cluster_id: str, update_mask: str [, cluster: Optional[UpdateClusterResource], timeout: datetime.timedelta = 0:20:00]) -> ClusterDetails + + .. py:method:: update_permissions(cluster_id: str [, access_control_list: Optional[List[ClusterAccessControlRequest]]]) -> ClusterPermissions Update cluster permissions. 
diff --git a/docs/workspace/compute/command_execution.rst b/docs/workspace/compute/command_execution.rst index a5b94b5a..916a48ba 100644 --- a/docs/workspace/compute/command_execution.rst +++ b/docs/workspace/compute/command_execution.rst @@ -4,7 +4,8 @@ .. py:class:: CommandExecutionAPI - This API allows execution of Python, Scala, SQL, or R commands on running Databricks Clusters. + This API allows execution of Python, Scala, SQL, or R commands on running Databricks Clusters. This API + only supports (classic) all-purpose clusters. Serverless compute is not supported. .. py:method:: cancel( [, cluster_id: Optional[str], command_id: Optional[str], context_id: Optional[str]]) -> Wait[CommandStatusResponse] diff --git a/docs/workspace/compute/policy_families.rst b/docs/workspace/compute/policy_families.rst index 43194ef0..56e4f427 100644 --- a/docs/workspace/compute/policy_families.rst +++ b/docs/workspace/compute/policy_families.rst @@ -14,7 +14,7 @@ policy family. Cluster policies created using a policy family inherit the policy family's policy definition. - .. py:method:: get(policy_family_id: str) -> PolicyFamily + .. py:method:: get(policy_family_id: str [, version: Optional[int]]) -> PolicyFamily Usage: @@ -32,9 +32,12 @@ Get policy family information. - Retrieve the information for an policy family based on its identifier. + Retrieve the information for a policy family based on its identifier and version. :param policy_family_id: str + The family ID about which to retrieve information. + :param version: int (optional) + The version number for the family to fetch. Defaults to the latest version. :returns: :class:`PolicyFamily` @@ -55,10 +58,11 @@ List policy families. - Retrieve a list of policy families. This API is paginated. + Returns the list of policy definition types available to use at their latest version. This API is + paginated. :param max_results: int (optional) - The max number of policy families to return. 
+ Maximum number of policy families to return. :param page_token: str (optional) A token that can be used to get the next page of results. diff --git a/docs/workspace/dashboards/genie.rst b/docs/workspace/dashboards/genie.rst new file mode 100644 index 00000000..5581870b --- /dev/null +++ b/docs/workspace/dashboards/genie.rst @@ -0,0 +1,102 @@ +``w.genie``: Genie +================== +.. currentmodule:: databricks.sdk.service.dashboards + +.. py:class:: GenieAPI + + Genie provides a no-code experience for business users, powered by AI/BI. Analysts set up spaces that + business users can use to ask questions using natural language. Genie uses data registered to Unity + Catalog and requires at least CAN USE permission on a Pro or Serverless SQL warehouse. Also, Databricks + Assistant must be enabled. + + .. py:method:: create_message(space_id: str, conversation_id: str, content: str) -> Wait[GenieMessage] + + Create conversation message. + + Create new message in [conversation](:method:genie/startconversation). The AI response uses all + previously created messages in the conversation to respond. + + :param space_id: str + The ID associated with the Genie space where the conversation is started. + :param conversation_id: str + The ID associated with the conversation. + :param content: str + User message content. + + :returns: + Long-running operation waiter for :class:`GenieMessage`. + See :method:wait_get_message_genie_completed for more details. + + + .. py:method:: create_message_and_wait(space_id: str, conversation_id: str, content: str, timeout: datetime.timedelta = 0:20:00) -> GenieMessage + + + .. py:method:: execute_message_query(space_id: str, conversation_id: str, message_id: str) -> GenieGetMessageQueryResultResponse + + Execute SQL query in a conversation message. + + Execute the SQL query in the message. 
+ + :param space_id: str + Genie space ID + :param conversation_id: str + Conversation ID + :param message_id: str + Message ID + + :returns: :class:`GenieGetMessageQueryResultResponse` + + + .. py:method:: get_message(space_id: str, conversation_id: str, message_id: str) -> GenieMessage + + Get conversation message. + + Get message from conversation. + + :param space_id: str + The ID associated with the Genie space where the target conversation is located. + :param conversation_id: str + The ID associated with the target conversation. + :param message_id: str + The ID associated with the target message from the identified conversation. + + :returns: :class:`GenieMessage` + + + .. py:method:: get_message_query_result(space_id: str, conversation_id: str, message_id: str) -> GenieGetMessageQueryResultResponse + + Get conversation message SQL query result. + + Get the result of SQL query if the message has a query attachment. This is only available if a message + has a query attachment and the message status is `EXECUTING_QUERY`. + + :param space_id: str + Genie space ID + :param conversation_id: str + Conversation ID + :param message_id: str + Message ID + + :returns: :class:`GenieGetMessageQueryResultResponse` + + + .. py:method:: start_conversation(space_id: str, content: str) -> Wait[GenieMessage] + + Start conversation. + + Start a new conversation. + + :param space_id: str + The ID associated with the Genie space where you want to start a conversation. + :param content: str + The text of the message that starts the conversation. + + :returns: + Long-running operation waiter for :class:`GenieMessage`. + See :method:wait_get_message_genie_completed for more details. + + + .. py:method:: start_conversation_and_wait(space_id: str, content: str, timeout: datetime.timedelta = 0:20:00) -> GenieMessage + + + .. 
py:method:: wait_get_message_genie_completed(conversation_id: str, message_id: str, space_id: str, timeout: datetime.timedelta = 0:20:00, callback: Optional[Callable[[GenieMessage], None]]) -> GenieMessage diff --git a/docs/workspace/dashboards/index.rst b/docs/workspace/dashboards/index.rst index 756c9b54..6d1565bb 100644 --- a/docs/workspace/dashboards/index.rst +++ b/docs/workspace/dashboards/index.rst @@ -7,4 +7,5 @@ Manage Lakeview dashboards .. toctree:: :maxdepth: 1 + genie lakeview \ No newline at end of file diff --git a/docs/workspace/dashboards/lakeview.rst b/docs/workspace/dashboards/lakeview.rst index 17f82960..d3257b79 100644 --- a/docs/workspace/dashboards/lakeview.rst +++ b/docs/workspace/dashboards/lakeview.rst @@ -151,8 +151,7 @@ The flag to include dashboards located in the trash. If unspecified, only active dashboards will be returned. :param view: :class:`DashboardView` (optional) - Indicates whether to include all metadata from the dashboard in the response. If unset, the response - defaults to `DASHBOARD_VIEW_BASIC` which only includes summary metadata from the dashboard. + `DASHBOARD_VIEW_BASIC` only includes summary metadata from the dashboard. :returns: Iterator over :class:`Dashboard` diff --git a/docs/workspace/iam/permission_migration.rst b/docs/workspace/iam/permission_migration.rst index 16d15f73..8eef6e0e 100644 --- a/docs/workspace/iam/permission_migration.rst +++ b/docs/workspace/iam/permission_migration.rst @@ -1,20 +1,17 @@ -``w.permission_migration``: Permission Migration -================================================ +``w.permission_migration``: PermissionMigration +=============================================== .. currentmodule:: databricks.sdk.service.iam .. py:class:: PermissionMigrationAPI - This spec contains undocumented permission migration APIs used in https://github.com/databrickslabs/ucx. + APIs for migrating acl permissions, used only by the ucx tool: https://github.com/databrickslabs/ucx - .. 
py:method:: migrate_permissions(workspace_id: int, from_workspace_group_name: str, to_account_group_name: str [, size: Optional[int]]) -> PermissionMigrationResponse + .. py:method:: migrate_permissions(workspace_id: int, from_workspace_group_name: str, to_account_group_name: str [, size: Optional[int]]) -> MigratePermissionsResponse Migrate Permissions. - Migrate a batch of permissions from a workspace local group to an account group. - :param workspace_id: int - WorkspaceId of the associated workspace where the permission migration will occur. Both workspace - group and account group must be in this workspace. + WorkspaceId of the associated workspace where the permission migration will occur. :param from_workspace_group_name: str The name of the workspace group that permissions will be migrated from. :param to_account_group_name: str @@ -22,5 +19,5 @@ :param size: int (optional) The maximum number of permissions that will be migrated. - :returns: :class:`PermissionMigrationResponse` + :returns: :class:`MigratePermissionsResponse` \ No newline at end of file diff --git a/docs/workspace/iam/permissions.rst b/docs/workspace/iam/permissions.rst index 47ff4f37..7deb9eaf 100644 --- a/docs/workspace/iam/permissions.rst +++ b/docs/workspace/iam/permissions.rst @@ -7,6 +7,8 @@ Permissions API are used to create read, write, edit, update and manage access for various users on different objects and endpoints. + * **[Apps permissions](:service:apps)** — Manage which users can manage or use apps. + * **[Cluster permissions](:service:clusters)** — Manage which users can manage, restart, or attach to clusters. @@ -42,7 +44,7 @@ * **[Token permissions](:service:tokenmanagement)** — Manage which users can create or use tokens. * **[Workspace object permissions](:service:workspace)** — Manage which users can read, run, edit, or - manage directories, files, and notebooks. + manage alerts, dbsql-dashboards, directories, files, notebooks and queries. 
For the mapping of the required permissions for specific actions or abilities and other important information, see [Access Control]. @@ -78,9 +80,9 @@ object. :param request_object_type: str - The type of the request object. Can be one of the following: authorization, clusters, - cluster-policies, directories, experiments, files, instance-pools, jobs, notebooks, pipelines, - registered-models, repos, serving-endpoints, or warehouses. + The type of the request object. Can be one of the following: alerts, authorization, clusters, + cluster-policies, dbsql-dashboards, directories, experiments, files, instance-pools, jobs, + notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses. :param request_object_id: str The id of the request object. @@ -155,9 +157,9 @@ object. :param request_object_type: str - The type of the request object. Can be one of the following: authorization, clusters, - cluster-policies, directories, experiments, files, instance-pools, jobs, notebooks, pipelines, - registered-models, repos, serving-endpoints, or warehouses. + The type of the request object. Can be one of the following: alerts, authorization, clusters, + cluster-policies, dbsql-dashboards, directories, experiments, files, instance-pools, jobs, + notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses. :param request_object_id: str The id of the request object. :param access_control_list: List[:class:`AccessControlRequest`] (optional) @@ -173,9 +175,9 @@ root object. :param request_object_type: str - The type of the request object. Can be one of the following: authorization, clusters, - cluster-policies, directories, experiments, files, instance-pools, jobs, notebooks, pipelines, - registered-models, repos, serving-endpoints, or warehouses. + The type of the request object. 
Can be one of the following: alerts, authorization, clusters, + cluster-policies, dbsql-dashboards, directories, experiments, files, instance-pools, jobs, + notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses. :param request_object_id: str The id of the request object. :param access_control_list: List[:class:`AccessControlRequest`] (optional) diff --git a/docs/workspace/index.rst b/docs/workspace/index.rst index 4d7eabff..1b6c5708 100644 --- a/docs/workspace/index.rst +++ b/docs/workspace/index.rst @@ -7,6 +7,7 @@ These APIs are available from WorkspaceClient .. toctree:: :maxdepth: 1 + apps/index catalog/index compute/index dashboards/index diff --git a/docs/workspace/jobs/jobs.rst b/docs/workspace/jobs/jobs.rst index 773f6fb8..c07c8e28 100644 --- a/docs/workspace/jobs/jobs.rst +++ b/docs/workspace/jobs/jobs.rst @@ -120,7 +120,7 @@ .. py:method:: cancel_run_and_wait(run_id: int, timeout: datetime.timedelta = 0:20:00) -> Run - .. py:method:: create( [, access_control_list: Optional[List[iam.AccessControlRequest]], continuous: Optional[Continuous], deployment: Optional[JobDeployment], description: Optional[str], edit_mode: Optional[JobEditMode], email_notifications: Optional[JobEmailNotifications], environments: Optional[List[JobEnvironment]], format: Optional[Format], git_source: Optional[GitSource], health: Optional[JobsHealthRules], job_clusters: Optional[List[JobCluster]], max_concurrent_runs: Optional[int], name: Optional[str], notification_settings: Optional[JobNotificationSettings], parameters: Optional[List[JobParameterDefinition]], queue: Optional[QueueSettings], run_as: Optional[JobRunAs], schedule: Optional[CronSchedule], tags: Optional[Dict[str, str]], tasks: Optional[List[Task]], timeout_seconds: Optional[int], trigger: Optional[TriggerSettings], webhook_notifications: Optional[WebhookNotifications]]) -> CreateResponse + .. 
py:method:: create( [, access_control_list: Optional[List[JobAccessControlRequest]], continuous: Optional[Continuous], deployment: Optional[JobDeployment], description: Optional[str], edit_mode: Optional[JobEditMode], email_notifications: Optional[JobEmailNotifications], environments: Optional[List[JobEnvironment]], format: Optional[Format], git_source: Optional[GitSource], health: Optional[JobsHealthRules], job_clusters: Optional[List[JobCluster]], max_concurrent_runs: Optional[int], name: Optional[str], notification_settings: Optional[JobNotificationSettings], parameters: Optional[List[JobParameterDefinition]], queue: Optional[QueueSettings], run_as: Optional[JobRunAs], schedule: Optional[CronSchedule], tags: Optional[Dict[str, str]], tasks: Optional[List[Task]], timeout_seconds: Optional[int], trigger: Optional[TriggerSettings], webhook_notifications: Optional[WebhookNotifications]]) -> CreateResponse Usage: @@ -156,7 +156,7 @@ Create a new job. - :param access_control_list: List[:class:`AccessControlRequest`] (optional) + :param access_control_list: List[:class:`JobAccessControlRequest`] (optional) List of permissions to set on the job. :param continuous: :class:`Continuous` (optional) An optional continuous property for this job. The continuous property will ensure that there is @@ -164,7 +164,7 @@ :param deployment: :class:`JobDeployment` (optional) Deployment information for jobs managed by external sources. :param description: str (optional) - An optional description for the job. The maximum length is 1024 characters in UTF-8 encoding. + An optional description for the job. The maximum length is 27700 characters in UTF-8 encoding. :param edit_mode: :class:`JobEditMode` (optional) Edit mode of the job. @@ -376,7 +376,7 @@ :returns: :class:`JobPermissions` - .. py:method:: get_run(run_id: int [, include_history: Optional[bool], include_resolved_values: Optional[bool]]) -> Run + .. 
py:method:: get_run(run_id: int [, include_history: Optional[bool], include_resolved_values: Optional[bool], page_token: Optional[str]]) -> Run Usage: @@ -418,6 +418,9 @@ Whether to include the repair history in the response. :param include_resolved_values: bool (optional) Whether to include resolved parameter values in the response. + :param page_token: str (optional) + To list the next page or the previous page of job tasks, set this field to the value of the + `next_page_token` or `prev_page_token` returned in the GetJob response. :returns: :class:`Run` @@ -924,7 +927,7 @@ :returns: :class:`JobPermissions` - .. py:method:: submit( [, access_control_list: Optional[List[iam.AccessControlRequest]], email_notifications: Optional[JobEmailNotifications], environments: Optional[List[JobEnvironment]], git_source: Optional[GitSource], health: Optional[JobsHealthRules], idempotency_token: Optional[str], notification_settings: Optional[JobNotificationSettings], queue: Optional[QueueSettings], run_as: Optional[JobRunAs], run_name: Optional[str], tasks: Optional[List[SubmitTask]], timeout_seconds: Optional[int], webhook_notifications: Optional[WebhookNotifications]]) -> Wait[Run] + .. py:method:: submit( [, access_control_list: Optional[List[JobAccessControlRequest]], email_notifications: Optional[JobEmailNotifications], environments: Optional[List[JobEnvironment]], git_source: Optional[GitSource], health: Optional[JobsHealthRules], idempotency_token: Optional[str], notification_settings: Optional[JobNotificationSettings], queue: Optional[QueueSettings], run_as: Optional[JobRunAs], run_name: Optional[str], tasks: Optional[List[SubmitTask]], timeout_seconds: Optional[int], webhook_notifications: Optional[WebhookNotifications]]) -> Wait[Run] Usage: @@ -960,7 +963,7 @@ Runs submitted using this endpoint don’t display in the UI. Use the `jobs/runs/get` API to check the run state after the job is submitted. 
- :param access_control_list: List[:class:`AccessControlRequest`] (optional) + :param access_control_list: List[:class:`JobAccessControlRequest`] (optional) List of permissions to set on the job. :param email_notifications: :class:`JobEmailNotifications` (optional) An optional set of email addresses notified when the run begins or completes. @@ -1011,7 +1014,7 @@ See :method:wait_get_run_job_terminated_or_skipped for more details. - .. py:method:: submit_and_wait( [, access_control_list: Optional[List[iam.AccessControlRequest]], email_notifications: Optional[JobEmailNotifications], environments: Optional[List[JobEnvironment]], git_source: Optional[GitSource], health: Optional[JobsHealthRules], idempotency_token: Optional[str], notification_settings: Optional[JobNotificationSettings], queue: Optional[QueueSettings], run_as: Optional[JobRunAs], run_name: Optional[str], tasks: Optional[List[SubmitTask]], timeout_seconds: Optional[int], webhook_notifications: Optional[WebhookNotifications], timeout: datetime.timedelta = 0:20:00]) -> Run + .. py:method:: submit_and_wait( [, access_control_list: Optional[List[JobAccessControlRequest]], email_notifications: Optional[JobEmailNotifications], environments: Optional[List[JobEnvironment]], git_source: Optional[GitSource], health: Optional[JobsHealthRules], idempotency_token: Optional[str], notification_settings: Optional[JobNotificationSettings], queue: Optional[QueueSettings], run_as: Optional[JobRunAs], run_name: Optional[str], tasks: Optional[List[SubmitTask]], timeout_seconds: Optional[int], webhook_notifications: Optional[WebhookNotifications], timeout: datetime.timedelta = 0:20:00]) -> Run .. 
py:method:: update(job_id: int [, fields_to_remove: Optional[List[str]], new_settings: Optional[JobSettings]]) diff --git a/docs/workspace/marketplace/consumer_listings.rst b/docs/workspace/marketplace/consumer_listings.rst index 654fe82d..242a8fce 100644 --- a/docs/workspace/marketplace/consumer_listings.rst +++ b/docs/workspace/marketplace/consumer_listings.rst @@ -29,7 +29,7 @@ :returns: :class:`GetListingResponse` - .. py:method:: list( [, assets: Optional[List[AssetType]], categories: Optional[List[Category]], is_ascending: Optional[bool], is_free: Optional[bool], is_private_exchange: Optional[bool], is_staff_pick: Optional[bool], page_size: Optional[int], page_token: Optional[str], provider_ids: Optional[List[str]], sort_by: Optional[SortBy], tags: Optional[List[ListingTag]]]) -> Iterator[Listing] + .. py:method:: list( [, assets: Optional[List[AssetType]], categories: Optional[List[Category]], is_free: Optional[bool], is_private_exchange: Optional[bool], is_staff_pick: Optional[bool], page_size: Optional[int], page_token: Optional[str], provider_ids: Optional[List[str]], tags: Optional[List[ListingTag]]]) -> Iterator[Listing] List listings. @@ -39,7 +39,6 @@ Matches any of the following asset types :param categories: List[:class:`Category`] (optional) Matches any of the following categories - :param is_ascending: bool (optional) :param is_free: bool (optional) Filters each listing based on if it is free. :param is_private_exchange: bool (optional) @@ -50,15 +49,13 @@ :param page_token: str (optional) :param provider_ids: List[str] (optional) Matches any of the following provider ids - :param sort_by: :class:`SortBy` (optional) - Criteria for sorting the resulting set of listings. :param tags: List[:class:`ListingTag`] (optional) Matches any of the following tags :returns: Iterator over :class:`Listing` - .. 
py:method:: search(query: str [, assets: Optional[List[AssetType]], categories: Optional[List[Category]], is_ascending: Optional[bool], is_free: Optional[bool], is_private_exchange: Optional[bool], page_size: Optional[int], page_token: Optional[str], provider_ids: Optional[List[str]], sort_by: Optional[SortBy]]) -> Iterator[Listing] + .. py:method:: search(query: str [, assets: Optional[List[AssetType]], categories: Optional[List[Category]], is_free: Optional[bool], is_private_exchange: Optional[bool], page_size: Optional[int], page_token: Optional[str], provider_ids: Optional[List[str]]]) -> Iterator[Listing] Search listings. @@ -71,14 +68,12 @@ Matches any of the following asset types :param categories: List[:class:`Category`] (optional) Matches any of the following categories - :param is_ascending: bool (optional) :param is_free: bool (optional) :param is_private_exchange: bool (optional) :param page_size: int (optional) :param page_token: str (optional) :param provider_ids: List[str] (optional) Matches any of the following provider ids - :param sort_by: :class:`SortBy` (optional) :returns: Iterator over :class:`Listing` \ No newline at end of file diff --git a/docs/workspace/pipelines/pipelines.rst b/docs/workspace/pipelines/pipelines.rst index a80e7c79..ce98ac5d 100644 --- a/docs/workspace/pipelines/pipelines.rst +++ b/docs/workspace/pipelines/pipelines.rst @@ -15,7 +15,7 @@ also enforce data quality with Delta Live Tables expectations. Expectations allow you to define expected data quality and specify how to handle records that fail those expectations. - .. 
py:method:: create( [, allow_duplicate_names: Optional[bool], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], dry_run: Optional[bool], edition: Optional[str], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[ManagedIngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]]) -> CreatePipelineResponse + .. py:method:: create( [, allow_duplicate_names: Optional[bool], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], dry_run: Optional[bool], edition: Optional[str], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]]) -> CreatePipelineResponse Usage: @@ -80,7 +80,7 @@ The definition of a gateway pipeline to support CDC. :param id: str (optional) Unique identifier for this pipeline. - :param ingestion_definition: :class:`ManagedIngestionPipelineDefinition` (optional) + :param ingestion_definition: :class:`IngestionPipelineDefinition` (optional) The configuration for a managed ingestion pipeline. 
These settings cannot be used with the 'libraries', 'target' or 'catalog' settings. :param libraries: List[:class:`PipelineLibrary`] (optional) @@ -371,7 +371,7 @@ .. py:method:: stop_and_wait(pipeline_id: str, timeout: datetime.timedelta = 0:20:00) -> GetPipelineResponse - .. py:method:: update(pipeline_id: str [, allow_duplicate_names: Optional[bool], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], edition: Optional[str], expected_last_modified: Optional[int], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[ManagedIngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]]) + .. py:method:: update(pipeline_id: str [, allow_duplicate_names: Optional[bool], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], edition: Optional[str], expected_last_modified: Optional[int], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]]) Usage: @@ -452,7 +452,7 @@ The definition of a gateway pipeline to support CDC. 
:param id: str (optional) Unique identifier for this pipeline. - :param ingestion_definition: :class:`ManagedIngestionPipelineDefinition` (optional) + :param ingestion_definition: :class:`IngestionPipelineDefinition` (optional) The configuration for a managed ingestion pipeline. These settings cannot be used with the 'libraries', 'target' or 'catalog' settings. :param libraries: List[:class:`PipelineLibrary`] (optional) diff --git a/docs/workspace/serving/index.rst b/docs/workspace/serving/index.rst index 1d0bdf7f..7a39a404 100644 --- a/docs/workspace/serving/index.rst +++ b/docs/workspace/serving/index.rst @@ -7,6 +7,5 @@ Use real-time inference for machine learning .. toctree:: :maxdepth: 1 - apps serving_endpoints serving_endpoints_data_plane \ No newline at end of file diff --git a/docs/workspace/settings/index.rst b/docs/workspace/settings/index.rst index 5b56652e..d513ea9f 100644 --- a/docs/workspace/settings/index.rst +++ b/docs/workspace/settings/index.rst @@ -9,6 +9,7 @@ Manage security settings for Accounts and Workspaces credentials_manager ip_access_lists + notification_destinations settings automatic_cluster_update compliance_security_profile diff --git a/docs/workspace/settings/notification_destinations.rst b/docs/workspace/settings/notification_destinations.rst new file mode 100644 index 00000000..29d947f5 --- /dev/null +++ b/docs/workspace/settings/notification_destinations.rst @@ -0,0 +1,74 @@ +``w.notification_destinations``: Notification Destinations +========================================================== +.. currentmodule:: databricks.sdk.service.settings + +.. py:class:: NotificationDestinationsAPI + + The notification destinations API lets you programmatically manage a workspace's notification + destinations. Notification destinations are used to send notifications for query alerts and jobs to + destinations outside of Databricks. Only workspace admins can create, update, and delete notification + destinations. + + .. 
py:method:: create( [, config: Optional[Config], display_name: Optional[str]]) -> NotificationDestination + + Create a notification destination. + + Creates a notification destination. Requires workspace admin permissions. + + :param config: :class:`Config` (optional) + The configuration for the notification destination. Must wrap EXACTLY one of the nested configs. + :param display_name: str (optional) + The display name for the notification destination. + + :returns: :class:`NotificationDestination` + + + .. py:method:: delete(id: str) + + Delete a notification destination. + + Deletes a notification destination. Requires workspace admin permissions. + + :param id: str + + + + + .. py:method:: get(id: str) -> NotificationDestination + + Get a notification destination. + + Gets a notification destination. + + :param id: str + + :returns: :class:`NotificationDestination` + + + .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ListNotificationDestinationsResult] + + List notification destinations. + + Lists notification destinations. + + :param page_size: int (optional) + :param page_token: str (optional) + + :returns: Iterator over :class:`ListNotificationDestinationsResult` + + + .. py:method:: update(id: str [, config: Optional[Config], display_name: Optional[str]]) -> NotificationDestination + + Update a notification destination. + + Updates a notification destination. Requires workspace admin permissions. At least one field is + required in the request body. + + :param id: str + :param config: :class:`Config` (optional) + The configuration for the notification destination. Must wrap EXACTLY one of the nested configs. + :param display_name: str (optional) + The display name for the notification destination. 
+ + :returns: :class:`NotificationDestination` + \ No newline at end of file diff --git a/docs/workspace/sharing/providers.rst b/docs/workspace/sharing/providers.rst index 1382b5a9..7cf398ac 100644 --- a/docs/workspace/sharing/providers.rst +++ b/docs/workspace/sharing/providers.rst @@ -100,7 +100,7 @@ :returns: :class:`ProviderInfo` - .. py:method:: list( [, data_provider_global_metastore_id: Optional[str]]) -> Iterator[ProviderInfo] + .. py:method:: list( [, data_provider_global_metastore_id: Optional[str], max_results: Optional[int], page_token: Optional[str]]) -> Iterator[ProviderInfo] Usage: @@ -123,11 +123,21 @@ :param data_provider_global_metastore_id: str (optional) If not provided, all providers will be returned. If no providers exist with this ID, no results will be returned. + :param max_results: int (optional) + Maximum number of providers to return. - when set to 0, the page length is set to a server + configured value (recommended); - when set to a value greater than 0, the page length is the minimum + of this value and a server configured value; - when set to a value less than 0, an invalid parameter + error is returned; - If not set, all valid providers are returned (not recommended). - Note: The + number of returned providers might be less than the specified max_results size, even zero. The only + definitive indication that no further providers can be fetched is when the next_page_token is unset + from the response. + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. :returns: Iterator over :class:`ProviderInfo` - .. py:method:: list_shares(name: str) -> Iterator[ProviderShare] + .. py:method:: list_shares(name: str [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[ProviderShare] Usage: @@ -162,6 +172,16 @@ :param name: str Name of the provider in which to list shares. + :param max_results: int (optional) + Maximum number of shares to return. 
- when set to 0, the page length is set to a server configured + value (recommended); - when set to a value greater than 0, the page length is the minimum of this + value and a server configured value; - when set to a value less than 0, an invalid parameter error + is returned; - If not set, all valid shares are returned (not recommended). - Note: The number of + returned shares might be less than the specified max_results size, even zero. The only definitive + indication that no further shares can be fetched is when the next_page_token is unset from the + response. + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. :returns: Iterator over :class:`ProviderShare` diff --git a/docs/workspace/sharing/recipients.rst b/docs/workspace/sharing/recipients.rst index 86a004d3..44f2042b 100644 --- a/docs/workspace/sharing/recipients.rst +++ b/docs/workspace/sharing/recipients.rst @@ -18,7 +18,7 @@ recipient follows the activation link to download the credential file, and then uses the credential file to establish a secure connection to receive the shared data. This sharing mode is called **open sharing**. - .. py:method:: create(name: str, authentication_type: AuthenticationType [, comment: Optional[str], data_recipient_global_metastore_id: Optional[str], ip_access_list: Optional[IpAccessList], owner: Optional[str], properties_kvpairs: Optional[SecurablePropertiesKvPairs], sharing_code: Optional[str]]) -> RecipientInfo + .. py:method:: create(name: str, authentication_type: AuthenticationType [, comment: Optional[str], data_recipient_global_metastore_id: Optional[str], expiration_time: Optional[int], ip_access_list: Optional[IpAccessList], owner: Optional[str], properties_kvpairs: Optional[SecurablePropertiesKvPairs], sharing_code: Optional[str]]) -> RecipientInfo Usage: @@ -51,6 +51,8 @@ The global Unity Catalog metastore id provided by the data recipient. 
This field is required when the __authentication_type__ is **DATABRICKS**. The identifier is of format __cloud__:__region__:__metastore-uuid__. + :param expiration_time: int (optional) + Expiration timestamp of the token, in epoch milliseconds. :param ip_access_list: :class:`IpAccessList` (optional) IP Access List :param owner: str (optional) @@ -108,7 +110,7 @@ :returns: :class:`RecipientInfo` - .. py:method:: list( [, data_recipient_global_metastore_id: Optional[str]]) -> Iterator[RecipientInfo] + .. py:method:: list( [, data_recipient_global_metastore_id: Optional[str], max_results: Optional[int], page_token: Optional[str]]) -> Iterator[RecipientInfo] Usage: @@ -132,6 +134,16 @@ :param data_recipient_global_metastore_id: str (optional) If not provided, all recipients will be returned. If no recipients exist with this ID, no results will be returned. + :param max_results: int (optional) + Maximum number of recipients to return. - when set to 0, the page length is set to a server + configured value (recommended); - when set to a value greater than 0, the page length is the minimum + of this value and a server configured value; - when set to a value less than 0, an invalid parameter + error is returned; - If not set, all valid recipients are returned (not recommended). - Note: The + number of returned recipients might be less than the specified max_results size, even zero. The only + definitive indication that no further recipients can be fetched is when the next_page_token is unset + from the response. + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. :returns: Iterator over :class:`RecipientInfo` @@ -171,7 +183,7 @@ :returns: :class:`RecipientInfo` - .. py:method:: share_permissions(name: str) -> GetRecipientSharePermissionsResponse + .. 
py:method:: share_permissions(name: str [, max_results: Optional[int], page_token: Optional[str]]) -> GetRecipientSharePermissionsResponse Usage: @@ -198,11 +210,21 @@ :param name: str The name of the Recipient. + :param max_results: int (optional) + Maximum number of permissions to return. - when set to 0, the page length is set to a server + configured value (recommended); - when set to a value greater than 0, the page length is the minimum + of this value and a server configured value; - when set to a value less than 0, an invalid parameter + error is returned; - If not set, all valid permissions are returned (not recommended). - Note: The + number of returned permissions might be less than the specified max_results size, even zero. The + only definitive indication that no further permissions can be fetched is when the next_page_token is + unset from the response. + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. :returns: :class:`GetRecipientSharePermissionsResponse` - .. py:method:: update(name: str [, comment: Optional[str], ip_access_list: Optional[IpAccessList], new_name: Optional[str], owner: Optional[str], properties_kvpairs: Optional[SecurablePropertiesKvPairs]]) + .. py:method:: update(name: str [, comment: Optional[str], expiration_time: Optional[int], ip_access_list: Optional[IpAccessList], new_name: Optional[str], owner: Optional[str], properties_kvpairs: Optional[SecurablePropertiesKvPairs]]) Usage: @@ -232,6 +254,8 @@ Name of the recipient. :param comment: str (optional) Description about the recipient. + :param expiration_time: int (optional) + Expiration timestamp of the token, in epoch milliseconds. 
:param ip_access_list: :class:`IpAccessList` (optional) IP Access List :param new_name: str (optional) diff --git a/docs/workspace/sharing/shares.rst b/docs/workspace/sharing/shares.rst index 82cdd4e6..4d14b811 100644 --- a/docs/workspace/sharing/shares.rst +++ b/docs/workspace/sharing/shares.rst @@ -87,7 +87,7 @@ :returns: :class:`ShareInfo` - .. py:method:: list() -> Iterator[ShareInfo] + .. py:method:: list( [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[ShareInfo] Usage: @@ -95,20 +95,32 @@ .. code-block:: from databricks.sdk import WorkspaceClient + from databricks.sdk.service import sharing w = WorkspaceClient() - all = w.shares.list() + all = w.shares.list(sharing.ListSharesRequest()) List shares. Gets an array of data object shares from the metastore. The caller must be a metastore admin or the owner of the share. There is no guarantee of a specific ordering of the elements in the array. + :param max_results: int (optional) + Maximum number of shares to return. - when set to 0, the page length is set to a server configured + value (recommended); - when set to a value greater than 0, the page length is the minimum of this + value and a server configured value; - when set to a value less than 0, an invalid parameter error + is returned; - If not set, all valid shares are returned (not recommended). - Note: The number of + returned shares might be less than the specified max_results size, even zero. The only definitive + indication that no further shares can be fetched is when the next_page_token is unset from the + response. + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. + :returns: Iterator over :class:`ShareInfo` - .. py:method:: share_permissions(name: str) -> catalog.PermissionsList + .. py:method:: share_permissions(name: str [, max_results: Optional[int], page_token: Optional[str]]) -> catalog.PermissionsList Get permissions. 
@@ -117,6 +129,16 @@ :param name: str The name of the share. + :param max_results: int (optional) + Maximum number of permissions to return. - when set to 0, the page length is set to a server + configured value (recommended); - when set to a value greater than 0, the page length is the minimum + of this value and a server configured value; - when set to a value less than 0, an invalid parameter + error is returned; - If not set, all valid permissions are returned (not recommended). - Note: The + number of returned permissions might be less than the specified max_results size, even zero. The + only definitive indication that no further permissions can be fetched is when the next_page_token is + unset from the response. + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. :returns: :class:`PermissionsList` @@ -200,7 +222,7 @@ :returns: :class:`ShareInfo` - .. py:method:: update_permissions(name: str [, changes: Optional[List[catalog.PermissionsChange]]]) + .. py:method:: update_permissions(name: str [, changes: Optional[List[catalog.PermissionsChange]], max_results: Optional[int], page_token: Optional[str]]) Update permissions. @@ -214,6 +236,16 @@ The name of the share. :param changes: List[:class:`PermissionsChange`] (optional) Array of permission changes. + :param max_results: int (optional) + Maximum number of permissions to return. - when set to 0, the page length is set to a server + configured value (recommended); - when set to a value greater than 0, the page length is the minimum + of this value and a server configured value; - when set to a value less than 0, an invalid parameter + error is returned; - If not set, all valid permissions are returned (not recommended). - Note: The + number of returned permissions might be less than the specified max_results size, even zero. The + only definitive indication that no further permissions can be fetched is when the next_page_token is + unset from the response. 
+ :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. \ No newline at end of file diff --git a/docs/workspace/sql/alerts.rst b/docs/workspace/sql/alerts.rst index 26ae453a..c552d5f8 100644 --- a/docs/workspace/sql/alerts.rst +++ b/docs/workspace/sql/alerts.rst @@ -8,12 +8,8 @@ periodically runs a query, evaluates a condition of its result, and notifies one or more users and/or notification destinations if the condition was met. Alerts can be scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create. - - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources - .. py:method:: create(name: str, options: AlertOptions, query_id: str [, parent: Optional[str], rearm: Optional[int]]) -> Alert + .. py:method:: create( [, alert: Optional[CreateAlertRequestAlert]]) -> Alert Usage: @@ -29,60 +25,48 @@ srcs = w.data_sources.list() - query = w.queries.create(name=f'sdk-{time.time_ns()}', - data_source_id=srcs[0].id, - description="test query from Go SDK", - query="SELECT 1") - - alert = w.alerts.create(options=sql.AlertOptions(column="1", op="==", value="1"), - name=f'sdk-{time.time_ns()}', - query_id=query.id) + query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}', + warehouse_id=srcs[0].warehouse_id, + description="test query from Go SDK", + query_text="SELECT 1")) + + alert = w.alerts.create( + alert=sql.CreateAlertRequestAlert(condition=sql.AlertCondition(operand=sql.AlertConditionOperand( + column=sql.AlertOperandColumn(name="1")), + op=sql.AlertOperator.EQUAL, + threshold=sql.AlertConditionThreshold( + value=sql.AlertOperandValue( + double_value=1))), + display_name=f'sdk-{time.time_ns()}', + query_id=query.id)) # cleanup - w.queries.delete(query_id=query.id) - 
w.alerts.delete(alert_id=alert.id) + w.queries.delete(id=query.id) + w.alerts.delete(id=alert.id) Create an alert. - Creates an alert. An alert is a Databricks SQL object that periodically runs a query, evaluates a - condition of its result, and notifies users or notification destinations if the condition was met. - - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] + Creates an alert. - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources - - :param name: str - Name of the alert. - :param options: :class:`AlertOptions` - Alert configuration options. - :param query_id: str - Query ID. - :param parent: str (optional) - The identifier of the workspace folder containing the object. - :param rearm: int (optional) - Number of seconds after being triggered before the alert rearms itself and can be triggered again. - If `null`, alert will never be triggered again. + :param alert: :class:`CreateAlertRequestAlert` (optional) :returns: :class:`Alert` - .. py:method:: delete(alert_id: str) + .. py:method:: delete(id: str) Delete an alert. - Deletes an alert. Deleted alerts are no longer accessible and cannot be restored. **Note**: Unlike - queries and dashboards, alerts cannot be moved to the trash. - - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] + Moves an alert to the trash. Trashed alerts immediately disappear from searches and list views, and + can no longer trigger. You can restore a trashed alert through the UI. A trashed alert is permanently + deleted after 30 days. - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources - - :param alert_id: str + :param id: str - .. py:method:: get(alert_id: str) -> Alert + .. 
py:method:: get(id: str) -> Alert Usage: @@ -98,35 +82,37 @@ srcs = w.data_sources.list() - query = w.queries.create(name=f'sdk-{time.time_ns()}', - data_source_id=srcs[0].id, - description="test query from Go SDK", - query="SELECT 1") + query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}', + warehouse_id=srcs[0].warehouse_id, + description="test query from Go SDK", + query_text="SELECT 1")) - alert = w.alerts.create(options=sql.AlertOptions(column="1", op="==", value="1"), - name=f'sdk-{time.time_ns()}', - query_id=query.id) + alert = w.alerts.create( + alert=sql.CreateAlertRequestAlert(condition=sql.AlertCondition(operand=sql.AlertConditionOperand( + column=sql.AlertOperandColumn(name="1")), + op=sql.AlertOperator.EQUAL, + threshold=sql.AlertConditionThreshold( + value=sql.AlertOperandValue( + double_value=1))), + display_name=f'sdk-{time.time_ns()}', + query_id=query.id)) - by_id = w.alerts.get(alert_id=alert.id) + by_id = w.alerts.get(id=alert.id) # cleanup - w.queries.delete(query_id=query.id) - w.alerts.delete(alert_id=alert.id) + w.queries.delete(id=query.id) + w.alerts.delete(id=alert.id) Get an alert. Gets an alert. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources - - :param alert_id: str + :param id: str :returns: :class:`Alert` - .. py:method:: list() -> Iterator[Alert] + .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ListAlertsResponseAlert] Usage: @@ -134,23 +120,24 @@ .. code-block:: from databricks.sdk import WorkspaceClient + from databricks.sdk.service import sql w = WorkspaceClient() - all = w.alerts.list() + all = w.alerts.list(sql.ListAlertsRequest()) - Get alerts. + List alerts. - Gets a list of alerts. + Gets a list of alerts accessible to the user, ordered by creation time. 
**Warning:** Calling this API + concurrently 10 or more times could result in throttling, service degradation, or a temporary ban. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] + :param page_size: int (optional) + :param page_token: str (optional) - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources - - :returns: Iterator over :class:`Alert` + :returns: Iterator over :class:`ListAlertsResponseAlert` - .. py:method:: update(alert_id: str, name: str, options: AlertOptions, query_id: str [, rearm: Optional[int]]) + .. py:method:: update(id: str, update_mask: str [, alert: Optional[UpdateAlertRequestAlert]]) -> Alert Usage: @@ -166,42 +153,39 @@ srcs = w.data_sources.list() - query = w.queries.create(name=f'sdk-{time.time_ns()}', - data_source_id=srcs[0].id, - description="test query from Go SDK", - query="SELECT 1") - - alert = w.alerts.create(options=sql.AlertOptions(column="1", op="==", value="1"), - name=f'sdk-{time.time_ns()}', - query_id=query.id) - - w.alerts.update(options=sql.AlertOptions(column="1", op="==", value="1"), - alert_id=alert.id, - name=f'sdk-{time.time_ns()}', - query_id=query.id) + query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}', + warehouse_id=srcs[0].warehouse_id, + description="test query from Go SDK", + query_text="SELECT 1")) + + alert = w.alerts.create( + alert=sql.CreateAlertRequestAlert(condition=sql.AlertCondition(operand=sql.AlertConditionOperand( + column=sql.AlertOperandColumn(name="1")), + op=sql.AlertOperator.EQUAL, + threshold=sql.AlertConditionThreshold( + value=sql.AlertOperandValue( + double_value=1))), + display_name=f'sdk-{time.time_ns()}', + query_id=query.id)) + + _ = w.alerts.update(id=alert.id, + alert=sql.UpdateAlertRequestAlert(display_name=f'sdk-{time.time_ns()}'), + update_mask="display_name") # cleanup - w.queries.delete(query_id=query.id) 
- w.alerts.delete(alert_id=alert.id) + w.queries.delete(id=query.id) + w.alerts.delete(id=alert.id) Update an alert. Updates an alert. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources - - :param alert_id: str - :param name: str - Name of the alert. - :param options: :class:`AlertOptions` - Alert configuration options. - :param query_id: str - Query ID. - :param rearm: int (optional) - Number of seconds after being triggered before the alert rearms itself and can be triggered again. - If `null`, alert will never be triggered again. - + :param id: str + :param update_mask: str + Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the + setting payload will be updated. The field mask needs to be supplied as single string. To specify + multiple fields in the field mask, use comma as the separator (no space). + :param alert: :class:`UpdateAlertRequestAlert` (optional) + :returns: :class:`Alert` \ No newline at end of file diff --git a/docs/workspace/sql/alerts_legacy.rst b/docs/workspace/sql/alerts_legacy.rst new file mode 100644 index 00000000..6dfd9612 --- /dev/null +++ b/docs/workspace/sql/alerts_legacy.rst @@ -0,0 +1,114 @@ +``w.alerts_legacy``: Alerts (legacy) +==================================== +.. currentmodule:: databricks.sdk.service.sql + +.. py:class:: AlertsLegacyAPI + + The alerts API can be used to perform CRUD operations on alerts. An alert is a Databricks SQL object that + periodically runs a query, evaluates a condition of its result, and notifies one or more users and/or + notification destinations if the condition was met. Alerts can be scheduled using the `sql_task` type of + the Jobs API, e.g. :method:jobs/create. + + **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. 
[Learn + more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html + + .. py:method:: create(name: str, options: AlertOptions, query_id: str [, parent: Optional[str], rearm: Optional[int]]) -> LegacyAlert + + Create an alert. + + Creates an alert. An alert is a Databricks SQL object that periodically runs a query, evaluates a + condition of its result, and notifies users or notification destinations if the condition was met. + + **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/create + instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html + + :param name: str + Name of the alert. + :param options: :class:`AlertOptions` + Alert configuration options. + :param query_id: str + Query ID. + :param parent: str (optional) + The identifier of the workspace folder containing the object. + :param rearm: int (optional) + Number of seconds after being triggered before the alert rearms itself and can be triggered again. + If `null`, alert will never be triggered again. + + :returns: :class:`LegacyAlert` + + + .. py:method:: delete(alert_id: str) + + Delete an alert. + + Deletes an alert. Deleted alerts are no longer accessible and cannot be restored. **Note**: Unlike + queries and dashboards, alerts cannot be moved to the trash. + + **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/delete + instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html + + :param alert_id: str + + + + + .. py:method:: get(alert_id: str) -> LegacyAlert + + Get an alert. + + Gets an alert. + + **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/get + instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html + + :param alert_id: str + + :returns: :class:`LegacyAlert` + + + .. 
py:method:: list() -> Iterator[LegacyAlert] + + Get alerts. + + Gets a list of alerts. + + **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/list + instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html + + :returns: Iterator over :class:`LegacyAlert` + + + .. py:method:: update(alert_id: str, name: str, options: AlertOptions, query_id: str [, rearm: Optional[int]]) + + Update an alert. + + Updates an alert. + + **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/update + instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html + + :param alert_id: str + :param name: str + Name of the alert. + :param options: :class:`AlertOptions` + Alert configuration options. + :param query_id: str + Query ID. + :param rearm: int (optional) + Number of seconds after being triggered before the alert rearms itself and can be triggered again. + If `null`, alert will never be triggered again. + + + \ No newline at end of file diff --git a/docs/workspace/sql/data_sources.rst b/docs/workspace/sql/data_sources.rst index dcab7506..8f7321fa 100644 --- a/docs/workspace/sql/data_sources.rst +++ b/docs/workspace/sql/data_sources.rst @@ -1,5 +1,5 @@ -``w.data_sources``: Data Sources -================================ +``w.data_sources``: Data Sources (legacy) +========================================= .. currentmodule:: databricks.sdk.service.sql .. py:class:: DataSourcesAPI @@ -12,9 +12,9 @@ advise you to use any text editor, REST client, or `grep` to search the response from this API for the name of your SQL warehouse as it appears in Databricks SQL. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] + **Note**: A new version of the Databricks SQL API is now available. 
[Learn more] - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html .. py:method:: list() -> Iterator[DataSource] @@ -35,9 +35,10 @@ API response are enumerated for clarity. However, you need only a SQL warehouse's `id` to create new queries against it. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] + **Note**: A new version of the Databricks SQL API is now available. Please use :method:warehouses/list + instead. [Learn more] - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html :returns: Iterator over :class:`DataSource` \ No newline at end of file diff --git a/docs/workspace/sql/dbsql_permissions.rst b/docs/workspace/sql/dbsql_permissions.rst index fbf1aac2..7f9e5d19 100644 --- a/docs/workspace/sql/dbsql_permissions.rst +++ b/docs/workspace/sql/dbsql_permissions.rst @@ -16,9 +16,9 @@ - `CAN_MANAGE`: Allows all actions: read, run, edit, delete, modify permissions (superset of `CAN_RUN`) - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] + **Note**: A new version of the Databricks SQL API is now available. [Learn more] - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html .. py:method:: get(object_type: ObjectTypePlural, object_id: str) -> GetResponse @@ -26,9 +26,10 @@ Gets a JSON representation of the access control list (ACL) for a specified object. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] + **Note**: A new version of the Databricks SQL API is now available. 
Please use + :method:workspace/getpermissions instead. [Learn more] - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html :param object_type: :class:`ObjectTypePlural` The type of object permissions to check. @@ -45,9 +46,10 @@ Sets the access control list (ACL) for a specified object. This operation will complete rewrite the ACL. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] + **Note**: A new version of the Databricks SQL API is now available. Please use + :method:workspace/setpermissions instead. [Learn more] - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html :param object_type: :class:`ObjectTypePlural` The type of object permission to set. @@ -64,9 +66,10 @@ Transfers ownership of a dashboard, query, or alert to an active user. Requires an admin API key. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] + **Note**: A new version of the Databricks SQL API is now available. For queries and alerts, please use + :method:queries/update and :method:alerts/update respectively instead. [Learn more] - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html :param object_type: :class:`OwnableObjectType` The type of object on which to change ownership. 
diff --git a/docs/workspace/sql/index.rst b/docs/workspace/sql/index.rst index 397de5c7..72873020 100644 --- a/docs/workspace/sql/index.rst +++ b/docs/workspace/sql/index.rst @@ -8,12 +8,15 @@ Manage Databricks SQL assets, including warehouses, dashboards, queries and quer :maxdepth: 1 alerts + alerts_legacy dashboard_widgets dashboards data_sources dbsql_permissions queries + queries_legacy query_history query_visualizations + query_visualizations_legacy statement_execution warehouses \ No newline at end of file diff --git a/docs/workspace/sql/queries.rst b/docs/workspace/sql/queries.rst index d26ff2ba..1f01c2f1 100644 --- a/docs/workspace/sql/queries.rst +++ b/docs/workspace/sql/queries.rst @@ -1,18 +1,14 @@ -``w.queries``: Queries / Results -================================ +``w.queries``: Queries +====================== .. currentmodule:: databricks.sdk.service.sql .. py:class:: QueriesAPI - These endpoints are used for CRUD operations on query definitions. Query definitions include the target - SQL warehouse, query text, name, description, tags, parameters, and visualizations. Queries can be + The queries API can be used to perform CRUD operations on queries. A query is a Databricks SQL object that + includes the target SQL warehouse, query text, name, description, tags, and parameters. Queries can be scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create. - - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources - .. py:method:: create( [, data_source_id: Optional[str], description: Optional[str], name: Optional[str], options: Optional[Any], parent: Optional[str], query: Optional[str], run_as_role: Optional[RunAsRole], tags: Optional[List[str]]]) -> Query + .. 
py:method:: create( [, query: Optional[CreateQueryRequestQuery]]) -> Query Usage: @@ -22,76 +18,43 @@ import time from databricks.sdk import WorkspaceClient + from databricks.sdk.service import sql w = WorkspaceClient() srcs = w.data_sources.list() - query = w.queries.create(name=f'sdk-{time.time_ns()}', - data_source_id=srcs[0].id, - description="test query from Go SDK", - query="SHOW TABLES") + query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}', + warehouse_id=srcs[0].warehouse_id, + description="test query from Go SDK", + query_text="SHOW TABLES")) # cleanup - w.queries.delete(query_id=query.id) - - Create a new query definition. - - Creates a new query definition. Queries created with this endpoint belong to the authenticated user - making the request. - - The `data_source_id` field specifies the ID of the SQL warehouse to run this query against. You can - use the Data Sources API to see a complete list of available SQL warehouses. Or you can copy the - `data_source_id` from an existing query. - - **Note**: You cannot add a visualization until you create the query. - - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources - - :param data_source_id: str (optional) - Data source ID maps to the ID of the data source used by the resource and is distinct from the - warehouse ID. [Learn more] - - [Learn more]: https://docs.databricks.com/api/workspace/datasources/list - :param description: str (optional) - General description that conveys additional information about this query such as usage notes. - :param name: str (optional) - The title of this query that appears in list views, widget headings, and on the query page. - :param options: Any (optional) - Exclusively used for storing a list parameter definitions. 
A parameter is an object with `title`, - `name`, `type`, and `value` properties. The `value` field here is the default value. It can be - overridden at runtime. - :param parent: str (optional) - The identifier of the workspace folder containing the object. - :param query: str (optional) - The text of the query to be run. - :param run_as_role: :class:`RunAsRole` (optional) - Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as - viewer" behavior) or `"owner"` (signifying "run as owner" behavior) - :param tags: List[str] (optional) + w.queries.delete(id=query.id) + + Create a query. + + Creates a query. + + :param query: :class:`CreateQueryRequestQuery` (optional) :returns: :class:`Query` - .. py:method:: delete(query_id: str) + .. py:method:: delete(id: str) Delete a query. Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and - they cannot be used for alerts. The trash is deleted after 30 days. - - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources + cannot be used for alerts. You can restore a trashed query through the UI. A trashed query is + permanently deleted after 30 days. - :param query_id: str + :param id: str - .. py:method:: get(query_id: str) -> Query + .. 
py:method:: get(id: str) -> Query Usage: @@ -101,89 +64,58 @@ import time from databricks.sdk import WorkspaceClient + from databricks.sdk.service import sql w = WorkspaceClient() srcs = w.data_sources.list() - query = w.queries.create(name=f'sdk-{time.time_ns()}', - data_source_id=srcs[0].id, - description="test query from Go SDK", - query="SHOW TABLES") + query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}', + warehouse_id=srcs[0].warehouse_id, + description="test query from Go SDK", + query_text="SHOW TABLES")) - by_id = w.queries.get(query_id=query.id) + by_id = w.queries.get(id=query.id) # cleanup - w.queries.delete(query_id=query.id) + w.queries.delete(id=query.id) - Get a query definition. + Get a query. - Retrieve a query object definition along with contextual permissions information about the currently - authenticated user. + Gets a query. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources - - :param query_id: str + :param id: str :returns: :class:`Query` - .. py:method:: list( [, order: Optional[str], page: Optional[int], page_size: Optional[int], q: Optional[str]]) -> Iterator[Query] + .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ListQueryObjectsResponseQuery] - Get a list of queries. - - Gets a list of queries. Optionally, this list can be filtered by a search term. - - **Warning**: Calling this API concurrently 10 or more times could result in throttling, service - degradation, or a temporary ban. + List queries. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] + Gets a list of queries accessible to the user, ordered by creation time. 
**Warning:** Calling this API + concurrently 10 or more times could result in throttling, service degradation, or a temporary ban. - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources - - :param order: str (optional) - Name of query attribute to order by. Default sort order is ascending. Append a dash (`-`) to order - descending instead. - - - `name`: The name of the query. - - - `created_at`: The timestamp the query was created. - - - `runtime`: The time it took to run this query. This is blank for parameterized queries. A blank - value is treated as the highest value for sorting. - - - `executed_at`: The timestamp when the query was last run. - - - `created_by`: The user name of the user that created the query. - :param page: int (optional) - Page number to retrieve. :param page_size: int (optional) - Number of queries to return per page. - :param q: str (optional) - Full text search term + :param page_token: str (optional) - :returns: Iterator over :class:`Query` + :returns: Iterator over :class:`ListQueryObjectsResponseQuery` - .. py:method:: restore(query_id: str) + .. py:method:: list_visualizations(id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[Visualization] - Restore a query. - - Restore a query that has been moved to the trash. A restored query appears in list views and searches. - You can use restored queries for alerts. - - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources + List visualizations on a query. - :param query_id: str + Gets a list of visualizations on a query. + :param id: str + :param page_size: int (optional) + :param page_token: str (optional) + :returns: Iterator over :class:`Visualization` - .. 
py:method:: update(query_id: str [, data_source_id: Optional[str], description: Optional[str], name: Optional[str], options: Optional[Any], query: Optional[str], run_as_role: Optional[RunAsRole], tags: Optional[List[str]]]) -> Query + .. py:method:: update(id: str, update_mask: str [, query: Optional[UpdateQueryRequestQuery]]) -> Query Usage: @@ -193,55 +125,36 @@ import time from databricks.sdk import WorkspaceClient + from databricks.sdk.service import sql w = WorkspaceClient() srcs = w.data_sources.list() - query = w.queries.create(name=f'sdk-{time.time_ns()}', - data_source_id=srcs[0].id, - description="test query from Go SDK", - query="SHOW TABLES") + query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}', + warehouse_id=srcs[0].warehouse_id, + description="test query from Go SDK", + query_text="SHOW TABLES")) - updated = w.queries.update(query_id=query.id, - name=f'sdk-{time.time_ns()}', - data_source_id=srcs[0].id, - description="UPDATED: test query from Go SDK", - query="SELECT 2+2") + updated = w.queries.update(id=query.id, + query=sql.UpdateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}', + description="UPDATED: test query from Go SDK", + query_text="SELECT 2+2"), + update_mask="display_name,description,query_text") # cleanup - w.queries.delete(query_id=query.id) - - Change a query definition. - - Modify this query definition. - - **Note**: You cannot undo this operation. - - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources - - :param query_id: str - :param data_source_id: str (optional) - Data source ID maps to the ID of the data source used by the resource and is distinct from the - warehouse ID. 
[Learn more] - - [Learn more]: https://docs.databricks.com/api/workspace/datasources/list - :param description: str (optional) - General description that conveys additional information about this query such as usage notes. - :param name: str (optional) - The title of this query that appears in list views, widget headings, and on the query page. - :param options: Any (optional) - Exclusively used for storing a list parameter definitions. A parameter is an object with `title`, - `name`, `type`, and `value` properties. The `value` field here is the default value. It can be - overridden at runtime. - :param query: str (optional) - The text of the query to be run. - :param run_as_role: :class:`RunAsRole` (optional) - Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as - viewer" behavior) or `"owner"` (signifying "run as owner" behavior) - :param tags: List[str] (optional) + w.queries.delete(id=query.id) + + Update a query. + + Updates a query. + + :param id: str + :param update_mask: str + Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the + setting payload will be updated. The field mask needs to be supplied as single string. To specify + multiple fields in the field mask, use comma as the separator (no space). + :param query: :class:`UpdateQueryRequestQuery` (optional) :returns: :class:`Query` \ No newline at end of file diff --git a/docs/workspace/sql/queries_legacy.rst b/docs/workspace/sql/queries_legacy.rst new file mode 100644 index 00000000..a7ab5683 --- /dev/null +++ b/docs/workspace/sql/queries_legacy.rst @@ -0,0 +1,183 @@ +``w.queries_legacy``: Queries (legacy) +====================================== +.. currentmodule:: databricks.sdk.service.sql + +.. py:class:: QueriesLegacyAPI + + These endpoints are used for CRUD operations on query definitions. Query definitions include the target + SQL warehouse, query text, name, description, tags, parameters, and visualizations. 
Queries can be + scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create. + + **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn + more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html + + .. py:method:: create( [, data_source_id: Optional[str], description: Optional[str], name: Optional[str], options: Optional[Any], parent: Optional[str], query: Optional[str], run_as_role: Optional[RunAsRole], tags: Optional[List[str]]]) -> LegacyQuery + + Create a new query definition. + + Creates a new query definition. Queries created with this endpoint belong to the authenticated user + making the request. + + The `data_source_id` field specifies the ID of the SQL warehouse to run this query against. You can + use the Data Sources API to see a complete list of available SQL warehouses. Or you can copy the + `data_source_id` from an existing query. + + **Note**: You cannot add a visualization until you create the query. + + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/create + instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html + + :param data_source_id: str (optional) + Data source ID maps to the ID of the data source used by the resource and is distinct from the + warehouse ID. [Learn more] + + [Learn more]: https://docs.databricks.com/api/workspace/datasources/list + :param description: str (optional) + General description that conveys additional information about this query such as usage notes. + :param name: str (optional) + The title of this query that appears in list views, widget headings, and on the query page. + :param options: Any (optional) + Exclusively used for storing a list parameter definitions. A parameter is an object with `title`, + `name`, `type`, and `value` properties. The `value` field here is the default value. It can be + overridden at runtime. 
+ :param parent: str (optional) + The identifier of the workspace folder containing the object. + :param query: str (optional) + The text of the query to be run. + :param run_as_role: :class:`RunAsRole` (optional) + Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as + viewer" behavior) or `"owner"` (signifying "run as owner" behavior) + :param tags: List[str] (optional) + + :returns: :class:`LegacyQuery` + + + .. py:method:: delete(query_id: str) + + Delete a query. + + Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and + they cannot be used for alerts. The trash is deleted after 30 days. + + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/delete + instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html + + :param query_id: str + + + + + .. py:method:: get(query_id: str) -> LegacyQuery + + Get a query definition. + + Retrieve a query object definition along with contextual permissions information about the currently + authenticated user. + + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/get + instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html + + :param query_id: str + + :returns: :class:`LegacyQuery` + + + .. py:method:: list( [, order: Optional[str], page: Optional[int], page_size: Optional[int], q: Optional[str]]) -> Iterator[LegacyQuery] + + Get a list of queries. + + Gets a list of queries. Optionally, this list can be filtered by a search term. + + **Warning**: Calling this API concurrently 10 or more times could result in throttling, service + degradation, or a temporary ban. + + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/list + instead. 
[Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html + + :param order: str (optional) + Name of query attribute to order by. Default sort order is ascending. Append a dash (`-`) to order + descending instead. + + - `name`: The name of the query. + + - `created_at`: The timestamp the query was created. + + - `runtime`: The time it took to run this query. This is blank for parameterized queries. A blank + value is treated as the highest value for sorting. + + - `executed_at`: The timestamp when the query was last run. + + - `created_by`: The user name of the user that created the query. + :param page: int (optional) + Page number to retrieve. + :param page_size: int (optional) + Number of queries to return per page. + :param q: str (optional) + Full text search term + + :returns: Iterator over :class:`LegacyQuery` + + + .. py:method:: restore(query_id: str) + + Restore a query. + + Restore a query that has been moved to the trash. A restored query appears in list views and searches. + You can use restored queries for alerts. + + **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. + [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html + + :param query_id: str + + + + + .. py:method:: update(query_id: str [, data_source_id: Optional[str], description: Optional[str], name: Optional[str], options: Optional[Any], query: Optional[str], run_as_role: Optional[RunAsRole], tags: Optional[List[str]]]) -> LegacyQuery + + Change a query definition. + + Modify this query definition. + + **Note**: You cannot undo this operation. + + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/update + instead. 
[Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html + + :param query_id: str + :param data_source_id: str (optional) + Data source ID maps to the ID of the data source used by the resource and is distinct from the + warehouse ID. [Learn more] + + [Learn more]: https://docs.databricks.com/api/workspace/datasources/list + :param description: str (optional) + General description that conveys additional information about this query such as usage notes. + :param name: str (optional) + The title of this query that appears in list views, widget headings, and on the query page. + :param options: Any (optional) + Exclusively used for storing a list parameter definitions. A parameter is an object with `title`, + `name`, `type`, and `value` properties. The `value` field here is the default value. It can be + overridden at runtime. + :param query: str (optional) + The text of the query to be run. + :param run_as_role: :class:`RunAsRole` (optional) + Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as + viewer" behavior) or `"owner"` (signifying "run as owner" behavior) + :param tags: List[str] (optional) + + :returns: :class:`LegacyQuery` + \ No newline at end of file diff --git a/docs/workspace/sql/query_history.rst b/docs/workspace/sql/query_history.rst index 6aacd3c7..5fa003c0 100644 --- a/docs/workspace/sql/query_history.rst +++ b/docs/workspace/sql/query_history.rst @@ -4,9 +4,10 @@ .. py:class:: QueryHistoryAPI - Access the history of queries through SQL warehouses. + A service responsible for storing and retrieving the list of queries run against SQL endpoints, serverless + compute, and DLT. - .. py:method:: list( [, filter_by: Optional[QueryFilter], include_metrics: Optional[bool], max_results: Optional[int], page_token: Optional[str]]) -> Iterator[QueryInfo] + .. 
py:method:: list( [, filter_by: Optional[QueryFilter], max_results: Optional[int], page_token: Optional[str]]) -> ListQueriesResponse Usage: @@ -23,20 +24,20 @@ List Queries. - List the history of queries through SQL warehouses. + List the history of queries through SQL warehouses, serverless compute, and DLT. - You can filter by user ID, warehouse ID, status, and time range. + You can filter by user ID, warehouse ID, status, and time range. Most recently started queries are + returned first (up to max_results in request). The pagination token returned in response can be used + to list subsequent query statuses. :param filter_by: :class:`QueryFilter` (optional) A filter to limit query history results. This field is optional. - :param include_metrics: bool (optional) - Whether to include metrics about query. :param max_results: int (optional) - Limit the number of results returned in one page. The default is 100. + Limit the number of results returned in one page. Must be less than 1000 and the default is 100. :param page_token: str (optional) A token that can be used to get the next page of results. The token can contains characters that need to be encoded before using it in a URL. For example, the character '+' needs to be replaced by - %2B. + %2B. This field is optional. - :returns: Iterator over :class:`QueryInfo` + :returns: :class:`ListQueriesResponse` \ No newline at end of file diff --git a/docs/workspace/sql/query_visualizations.rst b/docs/workspace/sql/query_visualizations.rst index 53888cee..95095fb2 100644 --- a/docs/workspace/sql/query_visualizations.rst +++ b/docs/workspace/sql/query_visualizations.rst @@ -4,56 +4,43 @@ .. py:class:: QueryVisualizationsAPI - This is an evolving API that facilitates the addition and removal of vizualisations from existing queries - within the Databricks Workspace. Data structures may change over time. 
+ This is an evolving API that facilitates the addition and removal of visualizations from existing queries + in the Databricks Workspace. Data structures can change over time. - .. py:method:: create(query_id: str, type: str, options: Any [, description: Optional[str], name: Optional[str]]) -> Visualization + .. py:method:: create( [, visualization: Optional[CreateVisualizationRequestVisualization]]) -> Visualization - Add visualization to a query. + Add a visualization to a query. - :param query_id: str - The identifier returned by :method:queries/create - :param type: str - The type of visualization: chart, table, pivot table, and so on. - :param options: Any - The options object varies widely from one visualization type to the next and is unsupported. - Databricks does not recommend modifying visualization settings in JSON. - :param description: str (optional) - A short description of this visualization. This is not displayed in the UI. - :param name: str (optional) - The name of the visualization that appears on dashboards and the query screen. + Adds a visualization to a query. + + :param visualization: :class:`CreateVisualizationRequestVisualization` (optional) :returns: :class:`Visualization` .. py:method:: delete(id: str) - Remove visualization. + Remove a visualization. + + Removes a visualization. :param id: str - Widget ID returned by :method:queryvizualisations/create - .. py:method:: update(id: str [, created_at: Optional[str], description: Optional[str], name: Optional[str], options: Optional[Any], query: Optional[Query], type: Optional[str], updated_at: Optional[str]]) -> Visualization + .. py:method:: update(id: str, update_mask: str [, visualization: Optional[UpdateVisualizationRequestVisualization]]) -> Visualization - Edit existing visualization. + Update a visualization. + + Updates a visualization. :param id: str - The UUID for this visualization. 
- :param created_at: str (optional) - :param description: str (optional) - A short description of this visualization. This is not displayed in the UI. - :param name: str (optional) - The name of the visualization that appears on dashboards and the query screen. - :param options: Any (optional) - The options object varies widely from one visualization type to the next and is unsupported. - Databricks does not recommend modifying visualization settings in JSON. - :param query: :class:`Query` (optional) - :param type: str (optional) - The type of visualization: chart, table, pivot table, and so on. - :param updated_at: str (optional) + :param update_mask: str + Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the + setting payload will be updated. The field mask needs to be supplied as single string. To specify + multiple fields in the field mask, use comma as the separator (no space). + :param visualization: :class:`UpdateVisualizationRequestVisualization` (optional) :returns: :class:`Visualization` \ No newline at end of file diff --git a/docs/workspace/sql/query_visualizations_legacy.rst b/docs/workspace/sql/query_visualizations_legacy.rst new file mode 100644 index 00000000..f56f78a5 --- /dev/null +++ b/docs/workspace/sql/query_visualizations_legacy.rst @@ -0,0 +1,85 @@ +``w.query_visualizations_legacy``: Query Visualizations (legacy) +================================================================ +.. currentmodule:: databricks.sdk.service.sql + +.. py:class:: QueryVisualizationsLegacyAPI + + This is an evolving API that facilitates the addition and removal of vizualisations from existing queries + within the Databricks Workspace. Data structures may change over time. + + **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn + more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html + + .. 
py:method:: create(query_id: str, type: str, options: Any [, description: Optional[str], name: Optional[str]]) -> LegacyVisualization + + Add visualization to a query. + + Creates visualization in the query. + + **Note**: A new version of the Databricks SQL API is now available. Please use + :method:queryvisualizations/create instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html + + :param query_id: str + The identifier returned by :method:queries/create + :param type: str + The type of visualization: chart, table, pivot table, and so on. + :param options: Any + The options object varies widely from one visualization type to the next and is unsupported. + Databricks does not recommend modifying visualization settings in JSON. + :param description: str (optional) + A short description of this visualization. This is not displayed in the UI. + :param name: str (optional) + The name of the visualization that appears on dashboards and the query screen. + + :returns: :class:`LegacyVisualization` + + + .. py:method:: delete(id: str) + + Remove visualization. + + Removes a visualization from the query. + + **Note**: A new version of the Databricks SQL API is now available. Please use + :method:queryvisualizations/delete instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html + + :param id: str + Widget ID returned by :method:queryvisualizations/create + + + + + .. py:method:: update(id: str [, created_at: Optional[str], description: Optional[str], name: Optional[str], options: Optional[Any], query: Optional[LegacyQuery], type: Optional[str], updated_at: Optional[str]]) -> LegacyVisualization + + Edit existing visualization. + + Updates visualization in the query. + + **Note**: A new version of the Databricks SQL API is now available. Please use + :method:queryvisualizations/update instead. 
[Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html + + :param id: str + The UUID for this visualization. + :param created_at: str (optional) + :param description: str (optional) + A short description of this visualization. This is not displayed in the UI. + :param name: str (optional) + The name of the visualization that appears on dashboards and the query screen. + :param options: Any (optional) + The options object varies widely from one visualization type to the next and is unsupported. + Databricks does not recommend modifying visualization settings in JSON. + :param query: :class:`LegacyQuery` (optional) + :param type: str (optional) + The type of visualization: chart, table, pivot table, and so on. + :param updated_at: str (optional) + + :returns: :class:`LegacyVisualization` + \ No newline at end of file diff --git a/docs/workspace/sql/statement_execution.rst b/docs/workspace/sql/statement_execution.rst index 7914977c..4d133762 100644 --- a/docs/workspace/sql/statement_execution.rst +++ b/docs/workspace/sql/statement_execution.rst @@ -101,7 +101,7 @@ - .. py:method:: execute_statement(statement: str, warehouse_id: str [, byte_limit: Optional[int], catalog: Optional[str], disposition: Optional[Disposition], format: Optional[Format], on_wait_timeout: Optional[ExecuteStatementRequestOnWaitTimeout], parameters: Optional[List[StatementParameterListItem]], row_limit: Optional[int], schema: Optional[str], wait_timeout: Optional[str]]) -> ExecuteStatementResponse + .. py:method:: execute_statement(statement: str, warehouse_id: str [, byte_limit: Optional[int], catalog: Optional[str], disposition: Optional[Disposition], format: Optional[Format], on_wait_timeout: Optional[ExecuteStatementRequestOnWaitTimeout], parameters: Optional[List[StatementParameterListItem]], row_limit: Optional[int], schema: Optional[str], wait_timeout: Optional[str]]) -> StatementResponse Execute a SQL statement. 
@@ -122,26 +122,6 @@ [`USE CATALOG`]: https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-catalog.html :param disposition: :class:`Disposition` (optional) - The fetch disposition provides two modes of fetching results: `INLINE` and `EXTERNAL_LINKS`. - - Statements executed with `INLINE` disposition will return result data inline, in `JSON_ARRAY` - format, in a series of chunks. If a given statement produces a result set with a size larger than 25 - MiB, that statement execution is aborted, and no result set will be available. - - **NOTE** Byte limits are computed based upon internal representations of the result set data, and - might not match the sizes visible in JSON responses. - - Statements executed with `EXTERNAL_LINKS` disposition will return result data as external links: - URLs that point to cloud storage internal to the workspace. Using `EXTERNAL_LINKS` disposition - allows statements to generate arbitrarily sized result sets for fetching up to 100 GiB. The - resulting links have two important properties: - - 1. They point to resources _external_ to the Databricks compute; therefore any associated - authentication information (typically a personal access token, OAuth token, or similar) _must be - removed_ when fetching from these links. - - 2. These are presigned URLs with a specific expiration, indicated in the response. The behavior when - attempting to use an expired link is cloud specific. :param format: :class:`Format` (optional) Statement execution supports three result formats: `JSON_ARRAY` (default), `ARROW_STREAM`, and `CSV`. @@ -229,10 +209,10 @@ the statement takes longer to execute, `on_wait_timeout` determines what should happen after the timeout is reached. - :returns: :class:`ExecuteStatementResponse` + :returns: :class:`StatementResponse` - .. py:method:: get_statement(statement_id: str) -> GetStatementResponse + .. 
py:method:: get_statement(statement_id: str) -> StatementResponse Get status, manifest, and result first chunk. @@ -248,7 +228,7 @@ The statement ID is returned upon successfully submitting a SQL statement, and is a required reference for all subsequent calls. - :returns: :class:`GetStatementResponse` + :returns: :class:`StatementResponse` .. py:method:: get_statement_result_chunk_n(statement_id: str, chunk_index: int) -> ResultData diff --git a/docs/workspace/sql/warehouses.rst b/docs/workspace/sql/warehouses.rst index 79385268..8a5da430 100644 --- a/docs/workspace/sql/warehouses.rst +++ b/docs/workspace/sql/warehouses.rst @@ -17,13 +17,18 @@ import time from databricks.sdk import WorkspaceClient + from databricks.sdk.service import sql w = WorkspaceClient() - created = w.warehouses.create(name=f'sdk-{time.time_ns()}', - cluster_size="2X-Small", - max_num_clusters=1, - auto_stop_mins=10).result() + created = w.warehouses.create( + name=f'sdk-{time.time_ns()}', + cluster_size="2X-Small", + max_num_clusters=1, + auto_stop_mins=10, + tags=sql.EndpointTags( + custom_tags=[sql.EndpointTagPair(key="Owner", value="eng-dev-ecosystem-team_at_databricks.com") + ])).result() # cleanup w.warehouses.delete(id=created.id) @@ -117,13 +122,18 @@ import time from databricks.sdk import WorkspaceClient + from databricks.sdk.service import sql w = WorkspaceClient() - created = w.warehouses.create(name=f'sdk-{time.time_ns()}', - cluster_size="2X-Small", - max_num_clusters=1, - auto_stop_mins=10).result() + created = w.warehouses.create( + name=f'sdk-{time.time_ns()}', + cluster_size="2X-Small", + max_num_clusters=1, + auto_stop_mins=10, + tags=sql.EndpointTags( + custom_tags=[sql.EndpointTagPair(key="Owner", value="eng-dev-ecosystem-team_at_databricks.com") + ])).result() _ = w.warehouses.edit(id=created.id, name=f'sdk-{time.time_ns()}', @@ -213,13 +223,18 @@ import time from databricks.sdk import WorkspaceClient + from databricks.sdk.service import sql w = WorkspaceClient() - created 
= w.warehouses.create(name=f'sdk-{time.time_ns()}', - cluster_size="2X-Small", - max_num_clusters=1, - auto_stop_mins=10).result() + created = w.warehouses.create( + name=f'sdk-{time.time_ns()}', + cluster_size="2X-Small", + max_num_clusters=1, + auto_stop_mins=10, + tags=sql.EndpointTags( + custom_tags=[sql.EndpointTagPair(key="Owner", value="eng-dev-ecosystem-team_at_databricks.com") + ])).result() wh = w.warehouses.get(id=created.id) diff --git a/examples/account/budgets/create_budgets.py b/examples/account/budgets/create_budgets.py index 12f20786..030cc8a5 100755 --- a/examples/account/budgets/create_budgets.py +++ b/examples/account/budgets/create_budgets.py @@ -5,13 +5,26 @@ a = AccountClient() -created = a.budgets.create(budget=billing.Budget( - name=f'sdk-{time.time_ns()}', - filter="tag.tagName = 'all'", - period="1 month", - start_date="2022-01-01", - target_amount="100", - alerts=[billing.BudgetAlert(email_notifications=["admin@example.com"], min_percentage=50)])) +created = a.budgets.create(budget=billing.CreateBudgetConfigurationBudget( + display_name=f'sdk-{time.time_ns()}', + filter=billing.BudgetConfigurationFilter(tags=[ + billing.BudgetConfigurationFilterTagClause(key="tagName", + value=billing.BudgetConfigurationFilterClause( + operator=billing.BudgetConfigurationFilterOperator.IN, + values=["all"])) + ]), + alert_configurations=[ + billing.CreateBudgetConfigurationBudgetAlertConfigurations( + time_period=billing.AlertConfigurationTimePeriod.MONTH, + quantity_type=billing.AlertConfigurationQuantityType.LIST_PRICE_DOLLARS_USD, + trigger_type=billing.AlertConfigurationTriggerType.CUMULATIVE_SPENDING_EXCEEDED, + quantity_threshold="100", + action_configurations=[ + billing.CreateBudgetConfigurationBudgetActionConfigurations( + action_type=billing.ActionConfigurationType.EMAIL_NOTIFICATION, + target="admin@example.com") + ]) + ])) # cleanup -a.budgets.delete(budget_id=created.budget.budget_id) 
+a.budgets.delete(budget_id=created.budget.budget_configuration_id) diff --git a/examples/account/budgets/get_budgets.py b/examples/account/budgets/get_budgets.py index 8640fc97..9c297311 100755 --- a/examples/account/budgets/get_budgets.py +++ b/examples/account/budgets/get_budgets.py @@ -5,15 +5,28 @@ a = AccountClient() -created = a.budgets.create(budget=billing.Budget( - name=f'sdk-{time.time_ns()}', - filter="tag.tagName = 'all'", - period="1 month", - start_date="2022-01-01", - target_amount="100", - alerts=[billing.BudgetAlert(email_notifications=["admin@example.com"], min_percentage=50)])) +created = a.budgets.create(budget=billing.CreateBudgetConfigurationBudget( + display_name=f'sdk-{time.time_ns()}', + filter=billing.BudgetConfigurationFilter(tags=[ + billing.BudgetConfigurationFilterTagClause(key="tagName", + value=billing.BudgetConfigurationFilterClause( + operator=billing.BudgetConfigurationFilterOperator.IN, + values=["all"])) + ]), + alert_configurations=[ + billing.CreateBudgetConfigurationBudgetAlertConfigurations( + time_period=billing.AlertConfigurationTimePeriod.MONTH, + quantity_type=billing.AlertConfigurationQuantityType.LIST_PRICE_DOLLARS_USD, + trigger_type=billing.AlertConfigurationTriggerType.CUMULATIVE_SPENDING_EXCEEDED, + quantity_threshold="100", + action_configurations=[ + billing.CreateBudgetConfigurationBudgetActionConfigurations( + action_type=billing.ActionConfigurationType.EMAIL_NOTIFICATION, + target="admin@example.com") + ]) + ])) -by_id = a.budgets.get(budget_id=created.budget.budget_id) +by_id = a.budgets.get(budget_id=created.budget.budget_configuration_id) # cleanup -a.budgets.delete(budget_id=created.budget.budget_id) +a.budgets.delete(budget_id=created.budget.budget_configuration_id) diff --git a/examples/account/budgets/list_budgets.py b/examples/account/budgets/list_budgets.py index 303690ab..dd425dba 100755 --- a/examples/account/budgets/list_budgets.py +++ b/examples/account/budgets/list_budgets.py @@ -1,5 +1,6 @@ 
from databricks.sdk import AccountClient +from databricks.sdk.service import billing a = AccountClient() -all = a.budgets.list() +all = a.budgets.list(billing.ListBudgetConfigurationsRequest()) diff --git a/examples/account/budgets/update_budgets.py b/examples/account/budgets/update_budgets.py index 1a0193b1..e1963011 100755 --- a/examples/account/budgets/update_budgets.py +++ b/examples/account/budgets/update_budgets.py @@ -5,24 +5,49 @@ a = AccountClient() -created = a.budgets.create(budget=billing.Budget( - name=f'sdk-{time.time_ns()}', - filter="tag.tagName = 'all'", - period="1 month", - start_date="2022-01-01", - target_amount="100", - alerts=[billing.BudgetAlert(email_notifications=["admin@example.com"], min_percentage=50)])) +created = a.budgets.create(budget=billing.CreateBudgetConfigurationBudget( + display_name=f'sdk-{time.time_ns()}', + filter=billing.BudgetConfigurationFilter(tags=[ + billing.BudgetConfigurationFilterTagClause(key="tagName", + value=billing.BudgetConfigurationFilterClause( + operator=billing.BudgetConfigurationFilterOperator.IN, + values=["all"])) + ]), + alert_configurations=[ + billing.CreateBudgetConfigurationBudgetAlertConfigurations( + time_period=billing.AlertConfigurationTimePeriod.MONTH, + quantity_type=billing.AlertConfigurationQuantityType.LIST_PRICE_DOLLARS_USD, + trigger_type=billing.AlertConfigurationTriggerType.CUMULATIVE_SPENDING_EXCEEDED, + quantity_threshold="100", + action_configurations=[ + billing.CreateBudgetConfigurationBudgetActionConfigurations( + action_type=billing.ActionConfigurationType.EMAIL_NOTIFICATION, + target="admin@example.com") + ]) + ])) -a.budgets.update(budget_id=created.budget.budget_id, - budget=billing.Budget(name=f'sdk-{time.time_ns()}', - filter="tag.tagName = 'all'", - period="1 month", - start_date="2022-01-01", - target_amount="100", - alerts=[ - billing.BudgetAlert(email_notifications=["admin@example.com"], - min_percentage=70) - ])) +_ = a.budgets.update( + 
budget_id=created.budget.budget_configuration_id, + budget=billing.UpdateBudgetConfigurationBudget( + display_name=f'sdk-{time.time_ns()}', + filter=billing.BudgetConfigurationFilter(tags=[ + billing.BudgetConfigurationFilterTagClause( + key="tagName", + value=billing.BudgetConfigurationFilterClause( + operator=billing.BudgetConfigurationFilterOperator.IN, values=["all"])) + ]), + alert_configurations=[ + billing.AlertConfiguration( + time_period=billing.AlertConfigurationTimePeriod.MONTH, + quantity_type=billing.AlertConfigurationQuantityType.LIST_PRICE_DOLLARS_USD, + trigger_type=billing.AlertConfigurationTriggerType.CUMULATIVE_SPENDING_EXCEEDED, + quantity_threshold="50", + action_configurations=[ + billing.ActionConfiguration( + action_type=billing.ActionConfigurationType.EMAIL_NOTIFICATION, + target="admin@example.com") + ]) + ])) # cleanup -a.budgets.delete(budget_id=created.budget.budget_id) +a.budgets.delete(budget_id=created.budget.budget_configuration_id) diff --git a/examples/workspace/alerts/create_alerts.py b/examples/workspace/alerts/create_alerts.py index 72367ca8..bae1ecf4 100755 --- a/examples/workspace/alerts/create_alerts.py +++ b/examples/workspace/alerts/create_alerts.py @@ -7,15 +7,21 @@ srcs = w.data_sources.list() -query = w.queries.create(name=f'sdk-{time.time_ns()}', - data_source_id=srcs[0].id, - description="test query from Go SDK", - query="SELECT 1") +query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}', + warehouse_id=srcs[0].warehouse_id, + description="test query from Go SDK", + query_text="SELECT 1")) -alert = w.alerts.create(options=sql.AlertOptions(column="1", op="==", value="1"), - name=f'sdk-{time.time_ns()}', - query_id=query.id) +alert = w.alerts.create( + alert=sql.CreateAlertRequestAlert(condition=sql.AlertCondition(operand=sql.AlertConditionOperand( + column=sql.AlertOperandColumn(name="1")), + op=sql.AlertOperator.EQUAL, + threshold=sql.AlertConditionThreshold( + 
value=sql.AlertOperandValue( + double_value=1))), + display_name=f'sdk-{time.time_ns()}', + query_id=query.id)) # cleanup -w.queries.delete(query_id=query.id) -w.alerts.delete(alert_id=alert.id) +w.queries.delete(id=query.id) +w.alerts.delete(id=alert.id) diff --git a/examples/workspace/alerts/get_alerts.py b/examples/workspace/alerts/get_alerts.py index 3c24e856..a1a861b1 100755 --- a/examples/workspace/alerts/get_alerts.py +++ b/examples/workspace/alerts/get_alerts.py @@ -7,17 +7,23 @@ srcs = w.data_sources.list() -query = w.queries.create(name=f'sdk-{time.time_ns()}', - data_source_id=srcs[0].id, - description="test query from Go SDK", - query="SELECT 1") +query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}', + warehouse_id=srcs[0].warehouse_id, + description="test query from Go SDK", + query_text="SELECT 1")) -alert = w.alerts.create(options=sql.AlertOptions(column="1", op="==", value="1"), - name=f'sdk-{time.time_ns()}', - query_id=query.id) +alert = w.alerts.create( + alert=sql.CreateAlertRequestAlert(condition=sql.AlertCondition(operand=sql.AlertConditionOperand( + column=sql.AlertOperandColumn(name="1")), + op=sql.AlertOperator.EQUAL, + threshold=sql.AlertConditionThreshold( + value=sql.AlertOperandValue( + double_value=1))), + display_name=f'sdk-{time.time_ns()}', + query_id=query.id)) -by_id = w.alerts.get(alert_id=alert.id) +by_id = w.alerts.get(id=alert.id) # cleanup -w.queries.delete(query_id=query.id) -w.alerts.delete(alert_id=alert.id) +w.queries.delete(id=query.id) +w.alerts.delete(id=alert.id) diff --git a/examples/workspace/alerts/list_alerts.py b/examples/workspace/alerts/list_alerts.py index 2009772c..35e4ce0a 100755 --- a/examples/workspace/alerts/list_alerts.py +++ b/examples/workspace/alerts/list_alerts.py @@ -1,5 +1,6 @@ from databricks.sdk import WorkspaceClient +from databricks.sdk.service import sql w = WorkspaceClient() -all = w.alerts.list() +all = w.alerts.list(sql.ListAlertsRequest()) diff 
--git a/examples/workspace/alerts/update_alerts.py b/examples/workspace/alerts/update_alerts.py index 130f7191..5d1827f9 100755 --- a/examples/workspace/alerts/update_alerts.py +++ b/examples/workspace/alerts/update_alerts.py @@ -7,20 +7,25 @@ srcs = w.data_sources.list() -query = w.queries.create(name=f'sdk-{time.time_ns()}', - data_source_id=srcs[0].id, - description="test query from Go SDK", - query="SELECT 1") +query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}', + warehouse_id=srcs[0].warehouse_id, + description="test query from Go SDK", + query_text="SELECT 1")) -alert = w.alerts.create(options=sql.AlertOptions(column="1", op="==", value="1"), - name=f'sdk-{time.time_ns()}', - query_id=query.id) +alert = w.alerts.create( + alert=sql.CreateAlertRequestAlert(condition=sql.AlertCondition(operand=sql.AlertConditionOperand( + column=sql.AlertOperandColumn(name="1")), + op=sql.AlertOperator.EQUAL, + threshold=sql.AlertConditionThreshold( + value=sql.AlertOperandValue( + double_value=1))), + display_name=f'sdk-{time.time_ns()}', + query_id=query.id)) -w.alerts.update(options=sql.AlertOptions(column="1", op="==", value="1"), - alert_id=alert.id, - name=f'sdk-{time.time_ns()}', - query_id=query.id) +_ = w.alerts.update(id=alert.id, + alert=sql.UpdateAlertRequestAlert(display_name=f'sdk-{time.time_ns()}'), + update_mask="display_name") # cleanup -w.queries.delete(query_id=query.id) -w.alerts.delete(alert_id=alert.id) +w.queries.delete(id=query.id) +w.alerts.delete(id=alert.id) diff --git a/examples/workspace/queries/create_alerts.py b/examples/workspace/queries/create_alerts.py index 37d71ac6..f0213aea 100755 --- a/examples/workspace/queries/create_alerts.py +++ b/examples/workspace/queries/create_alerts.py @@ -1,15 +1,16 @@ import time from databricks.sdk import WorkspaceClient +from databricks.sdk.service import sql w = WorkspaceClient() srcs = w.data_sources.list() -query = w.queries.create(name=f'sdk-{time.time_ns()}', - 
data_source_id=srcs[0].id, - description="test query from Go SDK", - query="SELECT 1") +query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}', + warehouse_id=srcs[0].warehouse_id, + description="test query from Go SDK", + query_text="SELECT 1")) # cleanup -w.queries.delete(query_id=query.id) +w.queries.delete(id=query.id) diff --git a/examples/workspace/queries/create_queries.py b/examples/workspace/queries/create_queries.py index c8d5ac93..ce293d41 100755 --- a/examples/workspace/queries/create_queries.py +++ b/examples/workspace/queries/create_queries.py @@ -1,15 +1,16 @@ import time from databricks.sdk import WorkspaceClient +from databricks.sdk.service import sql w = WorkspaceClient() srcs = w.data_sources.list() -query = w.queries.create(name=f'sdk-{time.time_ns()}', - data_source_id=srcs[0].id, - description="test query from Go SDK", - query="SHOW TABLES") +query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}', + warehouse_id=srcs[0].warehouse_id, + description="test query from Go SDK", + query_text="SHOW TABLES")) # cleanup -w.queries.delete(query_id=query.id) +w.queries.delete(id=query.id) diff --git a/examples/workspace/queries/get_queries.py b/examples/workspace/queries/get_queries.py index d29b7598..f1854d30 100755 --- a/examples/workspace/queries/get_queries.py +++ b/examples/workspace/queries/get_queries.py @@ -1,17 +1,18 @@ import time from databricks.sdk import WorkspaceClient +from databricks.sdk.service import sql w = WorkspaceClient() srcs = w.data_sources.list() -query = w.queries.create(name=f'sdk-{time.time_ns()}', - data_source_id=srcs[0].id, - description="test query from Go SDK", - query="SHOW TABLES") +query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}', + warehouse_id=srcs[0].warehouse_id, + description="test query from Go SDK", + query_text="SHOW TABLES")) -by_id = w.queries.get(query_id=query.id) +by_id = 
w.queries.get(id=query.id) # cleanup -w.queries.delete(query_id=query.id) +w.queries.delete(id=query.id) diff --git a/examples/workspace/queries/update_queries.py b/examples/workspace/queries/update_queries.py index 85a9609a..948d9a91 100755 --- a/examples/workspace/queries/update_queries.py +++ b/examples/workspace/queries/update_queries.py @@ -1,21 +1,22 @@ import time from databricks.sdk import WorkspaceClient +from databricks.sdk.service import sql w = WorkspaceClient() srcs = w.data_sources.list() -query = w.queries.create(name=f'sdk-{time.time_ns()}', - data_source_id=srcs[0].id, - description="test query from Go SDK", - query="SHOW TABLES") +query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}', + warehouse_id=srcs[0].warehouse_id, + description="test query from Go SDK", + query_text="SHOW TABLES")) -updated = w.queries.update(query_id=query.id, - name=f'sdk-{time.time_ns()}', - data_source_id=srcs[0].id, - description="UPDATED: test query from Go SDK", - query="SELECT 2+2") +updated = w.queries.update(id=query.id, + query=sql.UpdateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}', + description="UPDATED: test query from Go SDK", + query_text="SELECT 2+2"), + update_mask="display_name,description,query_text") # cleanup -w.queries.delete(query_id=query.id) +w.queries.delete(id=query.id) diff --git a/examples/workspace/shares/list_shares.py b/examples/workspace/shares/list_shares.py index b8668e7f..d432854c 100755 --- a/examples/workspace/shares/list_shares.py +++ b/examples/workspace/shares/list_shares.py @@ -1,5 +1,6 @@ from databricks.sdk import WorkspaceClient +from databricks.sdk.service import sharing w = WorkspaceClient() -all = w.shares.list() +all = w.shares.list(sharing.ListSharesRequest()) diff --git a/examples/workspace/warehouses/create_sql_warehouses.py b/examples/workspace/warehouses/create_sql_warehouses.py index 15e8f474..f01b9d5f 100755 --- 
a/examples/workspace/warehouses/create_sql_warehouses.py +++ b/examples/workspace/warehouses/create_sql_warehouses.py @@ -1,13 +1,18 @@ import time from databricks.sdk import WorkspaceClient +from databricks.sdk.service import sql w = WorkspaceClient() -created = w.warehouses.create(name=f'sdk-{time.time_ns()}', - cluster_size="2X-Small", - max_num_clusters=1, - auto_stop_mins=10).result() +created = w.warehouses.create( + name=f'sdk-{time.time_ns()}', + cluster_size="2X-Small", + max_num_clusters=1, + auto_stop_mins=10, + tags=sql.EndpointTags( + custom_tags=[sql.EndpointTagPair(key="Owner", value="eng-dev-ecosystem-team_at_databricks.com") + ])).result() # cleanup w.warehouses.delete(id=created.id) diff --git a/examples/workspace/warehouses/edit_sql_warehouses.py b/examples/workspace/warehouses/edit_sql_warehouses.py index 0e3c8e8f..acf06035 100755 --- a/examples/workspace/warehouses/edit_sql_warehouses.py +++ b/examples/workspace/warehouses/edit_sql_warehouses.py @@ -1,13 +1,18 @@ import time from databricks.sdk import WorkspaceClient +from databricks.sdk.service import sql w = WorkspaceClient() -created = w.warehouses.create(name=f'sdk-{time.time_ns()}', - cluster_size="2X-Small", - max_num_clusters=1, - auto_stop_mins=10).result() +created = w.warehouses.create( + name=f'sdk-{time.time_ns()}', + cluster_size="2X-Small", + max_num_clusters=1, + auto_stop_mins=10, + tags=sql.EndpointTags( + custom_tags=[sql.EndpointTagPair(key="Owner", value="eng-dev-ecosystem-team_at_databricks.com") + ])).result() _ = w.warehouses.edit(id=created.id, name=f'sdk-{time.time_ns()}', diff --git a/examples/workspace/warehouses/get_sql_warehouses.py b/examples/workspace/warehouses/get_sql_warehouses.py index 7b59844c..9f8184ab 100755 --- a/examples/workspace/warehouses/get_sql_warehouses.py +++ b/examples/workspace/warehouses/get_sql_warehouses.py @@ -1,13 +1,18 @@ import time from databricks.sdk import WorkspaceClient +from databricks.sdk.service import sql w = WorkspaceClient() 
-created = w.warehouses.create(name=f'sdk-{time.time_ns()}', - cluster_size="2X-Small", - max_num_clusters=1, - auto_stop_mins=10).result() +created = w.warehouses.create( + name=f'sdk-{time.time_ns()}', + cluster_size="2X-Small", + max_num_clusters=1, + auto_stop_mins=10, + tags=sql.EndpointTags( + custom_tags=[sql.EndpointTagPair(key="Owner", value="eng-dev-ecosystem-team_at_databricks.com") + ])).result() wh = w.warehouses.get(id=created.id)