From 272bd6e7d42115452a8ca2cfbfa4fdafc0502089 Mon Sep 17 00:00:00 2001 From: Miles Yucht Date: Thu, 22 Feb 2024 11:22:06 +0100 Subject: [PATCH 1/2] add back enums to docs --- docs/dbdataclasses/billing.rst | 77 +++ docs/dbdataclasses/catalog.rst | 844 +++++++++++++++++++++++++++ docs/dbdataclasses/compute.rst | 746 ++++++++++++++++++++++++ docs/dbdataclasses/iam.rst | 124 ++++ docs/dbdataclasses/jobs.rst | 433 ++++++++++++++ docs/dbdataclasses/ml.rst | 311 ++++++++++ docs/dbdataclasses/pipelines.rst | 213 +++++++ docs/dbdataclasses/provisioning.rst | 149 +++++ docs/dbdataclasses/serving.rst | 235 ++++++++ docs/dbdataclasses/settings.rst | 114 ++++ docs/dbdataclasses/sharing.rst | 246 ++++++++ docs/dbdataclasses/sql.rst | 865 ++++++++++++++++++++++++++++ docs/dbdataclasses/vectorsearch.rst | 77 +++ docs/dbdataclasses/workspace.rst | 145 +++++ docs/gen-client-docs.py | 4 +- 15 files changed, 4581 insertions(+), 2 deletions(-) diff --git a/docs/dbdataclasses/billing.rst b/docs/dbdataclasses/billing.rst index 9ce8ef3b..f575cae6 100644 --- a/docs/dbdataclasses/billing.rst +++ b/docs/dbdataclasses/billing.rst @@ -28,10 +28,48 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: DeliveryStatus + + The status string for log delivery. Possible values are: * `CREATED`: There were no log delivery + attempts since the config was created. * `SUCCEEDED`: The latest attempt of log delivery has + succeeded completely. * `USER_FAILURE`: The latest attempt of log delivery failed because of + misconfiguration of customer provided permissions on role or storage. * `SYSTEM_FAILURE`: The + latest attempt of log delivery failed because of an Databricks internal error. Contact support + if it doesn't go away soon. * `NOT_FOUND`: The log delivery status as the configuration has been + disabled since the release of this feature or there are no workspaces in the account. + + .. 
py:attribute:: CREATED + :value: "CREATED" + + .. py:attribute:: NOT_FOUND + :value: "NOT_FOUND" + + .. py:attribute:: SUCCEEDED + :value: "SUCCEEDED" + + .. py:attribute:: SYSTEM_FAILURE + :value: "SYSTEM_FAILURE" + + .. py:attribute:: USER_FAILURE + :value: "USER_FAILURE" + .. autoclass:: DownloadResponse :members: :undoc-members: +.. py:class:: LogDeliveryConfigStatus + + Status of log delivery configuration. Set to `ENABLED` (enabled) or `DISABLED` (disabled). + Defaults to `ENABLED`. You can [enable or disable the + configuration](#operation/patch-log-delivery-config-status) later. Deletion of a configuration + is not supported, so disable a log delivery configuration that is no longer needed. + + .. py:attribute:: DISABLED + :value: "DISABLED" + + .. py:attribute:: ENABLED + :value: "ENABLED" + .. autoclass:: LogDeliveryConfiguration :members: :undoc-members: @@ -40,6 +78,45 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: LogType + + Log delivery type. Supported values are: + + * `BILLABLE_USAGE` — Configure [billable usage log delivery]. For the CSV schema, see the + [View billable usage]. + + * `AUDIT_LOGS` — Configure [audit log delivery]. For the JSON schema, see [Configure audit + logging] + + [Configure audit logging]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html + [View billable usage]: https://docs.databricks.com/administration-guide/account-settings/usage.html + [audit log delivery]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html + [billable usage log delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html + + .. py:attribute:: AUDIT_LOGS + :value: "AUDIT_LOGS" + + .. py:attribute:: BILLABLE_USAGE + :value: "BILLABLE_USAGE" + +.. py:class:: OutputFormat + + The file type of log delivery. + + * If `log_type` is `BILLABLE_USAGE`, this value must be `CSV`. 
Only the CSV (comma-separated + values) format is supported. For the schema, see the [View billable usage] * If `log_type` is + `AUDIT_LOGS`, this value must be `JSON`. Only the JSON (JavaScript Object Notation) format is + supported. For the schema, see the [Configuring audit logs]. + + [Configuring audit logs]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html + [View billable usage]: https://docs.databricks.com/administration-guide/account-settings/usage.html + + .. py:attribute:: CSV + :value: "CSV" + + .. py:attribute:: JSON + :value: "JSON" + .. autoclass:: UpdateLogDeliveryConfigurationStatusRequest :members: :undoc-members: diff --git a/docs/dbdataclasses/catalog.rst b/docs/dbdataclasses/catalog.rst index 3401589f..d2b809b3 100644 --- a/docs/dbdataclasses/catalog.rst +++ b/docs/dbdataclasses/catalog.rst @@ -48,6 +48,19 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: ArtifactType + + The artifact type + + .. py:attribute:: INIT_SCRIPT + :value: "INIT_SCRIPT" + + .. py:attribute:: LIBRARY_JAR + :value: "LIBRARY_JAR" + + .. py:attribute:: LIBRARY_MAVEN + :value: "LIBRARY_MAVEN" + .. autoclass:: AwsIamRole :members: :undoc-members: @@ -64,6 +77,68 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: CatalogInfoSecurableKind + + Kind of catalog securable. + + .. py:attribute:: CATALOG_DELTASHARING + :value: "CATALOG_DELTASHARING" + + .. py:attribute:: CATALOG_FOREIGN_BIGQUERY + :value: "CATALOG_FOREIGN_BIGQUERY" + + .. py:attribute:: CATALOG_FOREIGN_DATABRICKS + :value: "CATALOG_FOREIGN_DATABRICKS" + + .. py:attribute:: CATALOG_FOREIGN_MYSQL + :value: "CATALOG_FOREIGN_MYSQL" + + .. py:attribute:: CATALOG_FOREIGN_POSTGRESQL + :value: "CATALOG_FOREIGN_POSTGRESQL" + + .. py:attribute:: CATALOG_FOREIGN_REDSHIFT + :value: "CATALOG_FOREIGN_REDSHIFT" + + .. 
py:attribute:: CATALOG_FOREIGN_SNOWFLAKE + :value: "CATALOG_FOREIGN_SNOWFLAKE" + + .. py:attribute:: CATALOG_FOREIGN_SQLDW + :value: "CATALOG_FOREIGN_SQLDW" + + .. py:attribute:: CATALOG_FOREIGN_SQLSERVER + :value: "CATALOG_FOREIGN_SQLSERVER" + + .. py:attribute:: CATALOG_INTERNAL + :value: "CATALOG_INTERNAL" + + .. py:attribute:: CATALOG_ONLINE + :value: "CATALOG_ONLINE" + + .. py:attribute:: CATALOG_ONLINE_INDEX + :value: "CATALOG_ONLINE_INDEX" + + .. py:attribute:: CATALOG_STANDARD + :value: "CATALOG_STANDARD" + + .. py:attribute:: CATALOG_SYSTEM + :value: "CATALOG_SYSTEM" + + .. py:attribute:: CATALOG_SYSTEM_DELTASHARING + :value: "CATALOG_SYSTEM_DELTASHARING" + +.. py:class:: CatalogType + + The type of the catalog. + + .. py:attribute:: DELTASHARING_CATALOG + :value: "DELTASHARING_CATALOG" + + .. py:attribute:: MANAGED_CATALOG + :value: "MANAGED_CATALOG" + + .. py:attribute:: SYSTEM_CATALOG + :value: "SYSTEM_CATALOG" + .. autoclass:: CloudflareApiToken :members: :undoc-members: @@ -76,10 +151,136 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: ColumnTypeName + + Name of type (INT, STRUCT, MAP, etc.). + + .. py:attribute:: ARRAY + :value: "ARRAY" + + .. py:attribute:: BINARY + :value: "BINARY" + + .. py:attribute:: BOOLEAN + :value: "BOOLEAN" + + .. py:attribute:: BYTE + :value: "BYTE" + + .. py:attribute:: CHAR + :value: "CHAR" + + .. py:attribute:: DATE + :value: "DATE" + + .. py:attribute:: DECIMAL + :value: "DECIMAL" + + .. py:attribute:: DOUBLE + :value: "DOUBLE" + + .. py:attribute:: FLOAT + :value: "FLOAT" + + .. py:attribute:: INT + :value: "INT" + + .. py:attribute:: INTERVAL + :value: "INTERVAL" + + .. py:attribute:: LONG + :value: "LONG" + + .. py:attribute:: MAP + :value: "MAP" + + .. py:attribute:: NULL + :value: "NULL" + + .. py:attribute:: SHORT + :value: "SHORT" + + .. py:attribute:: STRING + :value: "STRING" + + .. py:attribute:: STRUCT + :value: "STRUCT" + + .. 
py:attribute:: TABLE_TYPE + :value: "TABLE_TYPE" + + .. py:attribute:: TIMESTAMP + :value: "TIMESTAMP" + + .. py:attribute:: TIMESTAMP_NTZ + :value: "TIMESTAMP_NTZ" + + .. py:attribute:: USER_DEFINED_TYPE + :value: "USER_DEFINED_TYPE" + .. autoclass:: ConnectionInfo :members: :undoc-members: +.. py:class:: ConnectionInfoSecurableKind + + Kind of connection securable. + + .. py:attribute:: CONNECTION_BIGQUERY + :value: "CONNECTION_BIGQUERY" + + .. py:attribute:: CONNECTION_DATABRICKS + :value: "CONNECTION_DATABRICKS" + + .. py:attribute:: CONNECTION_MYSQL + :value: "CONNECTION_MYSQL" + + .. py:attribute:: CONNECTION_ONLINE_CATALOG + :value: "CONNECTION_ONLINE_CATALOG" + + .. py:attribute:: CONNECTION_POSTGRESQL + :value: "CONNECTION_POSTGRESQL" + + .. py:attribute:: CONNECTION_REDSHIFT + :value: "CONNECTION_REDSHIFT" + + .. py:attribute:: CONNECTION_SNOWFLAKE + :value: "CONNECTION_SNOWFLAKE" + + .. py:attribute:: CONNECTION_SQLDW + :value: "CONNECTION_SQLDW" + + .. py:attribute:: CONNECTION_SQLSERVER + :value: "CONNECTION_SQLSERVER" + +.. py:class:: ConnectionType + + The type of connection. + + .. py:attribute:: BIGQUERY + :value: "BIGQUERY" + + .. py:attribute:: DATABRICKS + :value: "DATABRICKS" + + .. py:attribute:: MYSQL + :value: "MYSQL" + + .. py:attribute:: POSTGRESQL + :value: "POSTGRESQL" + + .. py:attribute:: REDSHIFT + :value: "REDSHIFT" + + .. py:attribute:: SNOWFLAKE + :value: "SNOWFLAKE" + + .. py:attribute:: SQLDW + :value: "SQLDW" + + .. py:attribute:: SQLSERVER + :value: "SQLSERVER" + .. autoclass:: ContinuousUpdateStatus :members: :undoc-members: @@ -100,10 +301,50 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: CreateFunctionParameterStyle + + Function parameter style. **S** is the value for SQL. + + .. py:attribute:: S + :value: "S" + .. autoclass:: CreateFunctionRequest :members: :undoc-members: +.. py:class:: CreateFunctionRoutineBody + + Function language. 
When **EXTERNAL** is used, the language of the routine function should be + specified in the __external_language__ field, and the __return_params__ of the function cannot + be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be + **NO_SQL**. + + .. py:attribute:: EXTERNAL + :value: "EXTERNAL" + + .. py:attribute:: SQL + :value: "SQL" + +.. py:class:: CreateFunctionSecurityType + + Function security type. + + .. py:attribute:: DEFINER + :value: "DEFINER" + +.. py:class:: CreateFunctionSqlDataAccess + + Function SQL data access. + + .. py:attribute:: CONTAINS_SQL + :value: "CONTAINS_SQL" + + .. py:attribute:: NO_SQL + :value: "NO_SQL" + + .. py:attribute:: READS_SQL_DATA + :value: "READS_SQL_DATA" + .. autoclass:: CreateMetastore :members: :undoc-members: @@ -136,10 +377,48 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: CredentialType + + The type of credential. + + .. py:attribute:: USERNAME_PASSWORD + :value: "USERNAME_PASSWORD" + .. autoclass:: CurrentWorkspaceBindings :members: :undoc-members: +.. py:class:: DataSourceFormat + + Data source format + + .. py:attribute:: AVRO + :value: "AVRO" + + .. py:attribute:: CSV + :value: "CSV" + + .. py:attribute:: DELTA + :value: "DELTA" + + .. py:attribute:: DELTASHARING + :value: "DELTASHARING" + + .. py:attribute:: JSON + :value: "JSON" + + .. py:attribute:: ORC + :value: "ORC" + + .. py:attribute:: PARQUET + :value: "PARQUET" + + .. py:attribute:: TEXT + :value: "TEXT" + + .. py:attribute:: UNITY_CATALOG + :value: "UNITY_CATALOG" + .. autoclass:: DatabricksGcpServiceAccountResponse :members: :undoc-members: @@ -156,6 +435,20 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: DisableSchemaName + + .. py:attribute:: ACCESS + :value: "ACCESS" + + .. py:attribute:: BILLING + :value: "BILLING" + + .. 
py:attribute:: LINEAGE + :value: "LINEAGE" + + .. py:attribute:: OPERATIONAL_DATA + :value: "OPERATIONAL_DATA" + .. autoclass:: EffectivePermissionsList :members: :undoc-members: @@ -164,6 +457,17 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: EffectivePredictiveOptimizationFlagInheritedFromType + + The type of the object from which the flag was inherited. If there was no inheritance, this + field is left blank. + + .. py:attribute:: CATALOG + :value: "CATALOG" + + .. py:attribute:: SCHEMA + :value: "SCHEMA" + .. autoclass:: EffectivePrivilege :members: :undoc-members: @@ -172,6 +476,33 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: EnablePredictiveOptimization + + Whether predictive optimization should be enabled for this object and objects under it. + + .. py:attribute:: DISABLE + :value: "DISABLE" + + .. py:attribute:: ENABLE + :value: "ENABLE" + + .. py:attribute:: INHERIT + :value: "INHERIT" + +.. py:class:: EnableSchemaName + + .. py:attribute:: ACCESS + :value: "ACCESS" + + .. py:attribute:: BILLING + :value: "BILLING" + + .. py:attribute:: LINEAGE + :value: "LINEAGE" + + .. py:attribute:: OPERATIONAL_DATA + :value: "OPERATIONAL_DATA" + .. autoclass:: EncryptionDetails :members: :undoc-members: @@ -196,6 +527,46 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: FunctionInfoParameterStyle + + Function parameter style. **S** is the value for SQL. + + .. py:attribute:: S + :value: "S" + +.. py:class:: FunctionInfoRoutineBody + + Function language. When **EXTERNAL** is used, the language of the routine function should be + specified in the __external_language__ field, and the __return_params__ of the function cannot + be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be + **NO_SQL**. + + .. 
py:attribute:: EXTERNAL + :value: "EXTERNAL" + + .. py:attribute:: SQL + :value: "SQL" + +.. py:class:: FunctionInfoSecurityType + + Function security type. + + .. py:attribute:: DEFINER + :value: "DEFINER" + +.. py:class:: FunctionInfoSqlDataAccess + + Function SQL data access. + + .. py:attribute:: CONTAINS_SQL + :value: "CONTAINS_SQL" + + .. py:attribute:: NO_SQL + :value: "NO_SQL" + + .. py:attribute:: READS_SQL_DATA + :value: "READS_SQL_DATA" + .. autoclass:: FunctionParameterInfo :members: :undoc-members: @@ -204,10 +575,47 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: FunctionParameterMode + + The mode of the function parameter. + + .. py:attribute:: IN + :value: "IN" + +.. py:class:: FunctionParameterType + + The type of function parameter. + + .. py:attribute:: COLUMN + :value: "COLUMN" + + .. py:attribute:: PARAM + :value: "PARAM" + .. autoclass:: GetMetastoreSummaryResponse :members: :undoc-members: +.. py:class:: GetMetastoreSummaryResponseDeltaSharingScope + + The scope of Delta Sharing enabled for the metastore. + + .. py:attribute:: INTERNAL + :value: "INTERNAL" + + .. py:attribute:: INTERNAL_AND_EXTERNAL + :value: "INTERNAL_AND_EXTERNAL" + +.. py:class:: IsolationMode + + Whether the current securable is accessible from all workspaces or a specific set of workspaces. + + .. py:attribute:: ISOLATED + :value: "ISOLATED" + + .. py:attribute:: OPEN + :value: "OPEN" + .. autoclass:: ListAccountMetastoreAssignmentsResponse :members: :undoc-members: @@ -264,6 +672,13 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: MatchType + + The artifact pattern matching type + + .. py:attribute:: PREFIX_MATCH + :value: "PREFIX_MATCH" + .. 
autoclass:: MetastoreAssignment :members: :undoc-members: @@ -272,18 +687,72 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: MetastoreInfoDeltaSharingScope + + The scope of Delta Sharing enabled for the metastore. + + .. py:attribute:: INTERNAL + :value: "INTERNAL" + + .. py:attribute:: INTERNAL_AND_EXTERNAL + :value: "INTERNAL_AND_EXTERNAL" + .. autoclass:: ModelVersionInfo :members: :undoc-members: +.. py:class:: ModelVersionInfoStatus + + Current status of the model version. Newly created model versions start in PENDING_REGISTRATION + status, then move to READY status once the model version files are uploaded and the model + version is finalized. Only model versions in READY status can be loaded for inference or served. + + .. py:attribute:: FAILED_REGISTRATION + :value: "FAILED_REGISTRATION" + + .. py:attribute:: PENDING_REGISTRATION + :value: "PENDING_REGISTRATION" + + .. py:attribute:: READY + :value: "READY" + .. autoclass:: MonitorCronSchedule :members: :undoc-members: +.. py:class:: MonitorCronSchedulePauseStatus + + Whether the schedule is paused or not + + .. py:attribute:: PAUSED + :value: "PAUSED" + + .. py:attribute:: UNPAUSED + :value: "UNPAUSED" + .. autoclass:: MonitorCustomMetric :members: :undoc-members: +.. py:class:: MonitorCustomMetricType + + The type of the custom metric. + + .. py:attribute:: CUSTOM_METRIC_TYPE_AGGREGATE + :value: "CUSTOM_METRIC_TYPE_AGGREGATE" + + .. py:attribute:: CUSTOM_METRIC_TYPE_DERIVED + :value: "CUSTOM_METRIC_TYPE_DERIVED" + + .. py:attribute:: CUSTOM_METRIC_TYPE_DRIFT + :value: "CUSTOM_METRIC_TYPE_DRIFT" + + .. py:attribute:: MONITOR_STATUS_ERROR + :value: "MONITOR_STATUS_ERROR" + + .. py:attribute:: MONITOR_STATUS_FAILED + :value: "MONITOR_STATUS_FAILED" + .. 
autoclass:: MonitorDataClassificationConfig :members: :undoc-members: @@ -296,10 +765,39 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: MonitorInferenceLogProfileTypeProblemType + + Problem type the model aims to solve. + + .. py:attribute:: PROBLEM_TYPE_CLASSIFICATION + :value: "PROBLEM_TYPE_CLASSIFICATION" + + .. py:attribute:: PROBLEM_TYPE_REGRESSION + :value: "PROBLEM_TYPE_REGRESSION" + .. autoclass:: MonitorInfo :members: :undoc-members: +.. py:class:: MonitorInfoStatus + + The status of the monitor. + + .. py:attribute:: MONITOR_STATUS_ACTIVE + :value: "MONITOR_STATUS_ACTIVE" + + .. py:attribute:: MONITOR_STATUS_DELETE_PENDING + :value: "MONITOR_STATUS_DELETE_PENDING" + + .. py:attribute:: MONITOR_STATUS_ERROR + :value: "MONITOR_STATUS_ERROR" + + .. py:attribute:: MONITOR_STATUS_FAILED + :value: "MONITOR_STATUS_FAILED" + + .. py:attribute:: MONITOR_STATUS_PENDING + :value: "MONITOR_STATUS_PENDING" + .. autoclass:: MonitorNotificationsConfig :members: :undoc-members: @@ -308,6 +806,25 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: MonitorRefreshInfoState + + The current state of the refresh. + + .. py:attribute:: CANCELED + :value: "CANCELED" + + .. py:attribute:: FAILED + :value: "FAILED" + + .. py:attribute:: PENDING + :value: "PENDING" + + .. py:attribute:: RUNNING + :value: "RUNNING" + + .. py:attribute:: SUCCESS + :value: "SUCCESS" + .. autoclass:: MonitorTimeSeriesProfileType :members: :undoc-members: @@ -324,6 +841,46 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: OnlineTableState + + The state of an online table. + + .. py:attribute:: OFFLINE + :value: "OFFLINE" + + .. py:attribute:: OFFLINE_FAILED + :value: "OFFLINE_FAILED" + + .. py:attribute:: ONLINE + :value: "ONLINE" + + .. 
py:attribute:: ONLINE_CONTINUOUS_UPDATE + :value: "ONLINE_CONTINUOUS_UPDATE" + + .. py:attribute:: ONLINE_NO_PENDING_UPDATE + :value: "ONLINE_NO_PENDING_UPDATE" + + .. py:attribute:: ONLINE_PIPELINE_FAILED + :value: "ONLINE_PIPELINE_FAILED" + + .. py:attribute:: ONLINE_TABLE_STATE_UNSPECIFIED + :value: "ONLINE_TABLE_STATE_UNSPECIFIED" + + .. py:attribute:: ONLINE_TRIGGERED_UPDATE + :value: "ONLINE_TRIGGERED_UPDATE" + + .. py:attribute:: ONLINE_UPDATING_PIPELINE_RESOURCES + :value: "ONLINE_UPDATING_PIPELINE_RESOURCES" + + .. py:attribute:: PROVISIONING + :value: "PROVISIONING" + + .. py:attribute:: PROVISIONING_INITIAL_SNAPSHOT + :value: "PROVISIONING_INITIAL_SNAPSHOT" + + .. py:attribute:: PROVISIONING_PIPELINE_RESOURCES + :value: "PROVISIONING_PIPELINE_RESOURCES" + .. autoclass:: OnlineTableStatus :members: :undoc-members: @@ -344,6 +901,131 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: Privilege + + .. py:attribute:: ALL_PRIVILEGES + :value: "ALL_PRIVILEGES" + + .. py:attribute:: APPLY_TAG + :value: "APPLY_TAG" + + .. py:attribute:: CREATE + :value: "CREATE" + + .. py:attribute:: CREATE_CATALOG + :value: "CREATE_CATALOG" + + .. py:attribute:: CREATE_CONNECTION + :value: "CREATE_CONNECTION" + + .. py:attribute:: CREATE_EXTERNAL_LOCATION + :value: "CREATE_EXTERNAL_LOCATION" + + .. py:attribute:: CREATE_EXTERNAL_TABLE + :value: "CREATE_EXTERNAL_TABLE" + + .. py:attribute:: CREATE_EXTERNAL_VOLUME + :value: "CREATE_EXTERNAL_VOLUME" + + .. py:attribute:: CREATE_FOREIGN_CATALOG + :value: "CREATE_FOREIGN_CATALOG" + + .. py:attribute:: CREATE_FUNCTION + :value: "CREATE_FUNCTION" + + .. py:attribute:: CREATE_MANAGED_STORAGE + :value: "CREATE_MANAGED_STORAGE" + + .. py:attribute:: CREATE_MATERIALIZED_VIEW + :value: "CREATE_MATERIALIZED_VIEW" + + .. py:attribute:: CREATE_MODEL + :value: "CREATE_MODEL" + + .. py:attribute:: CREATE_PROVIDER + :value: "CREATE_PROVIDER" + + .. 
py:attribute:: CREATE_RECIPIENT + :value: "CREATE_RECIPIENT" + + .. py:attribute:: CREATE_SCHEMA + :value: "CREATE_SCHEMA" + + .. py:attribute:: CREATE_SHARE + :value: "CREATE_SHARE" + + .. py:attribute:: CREATE_STORAGE_CREDENTIAL + :value: "CREATE_STORAGE_CREDENTIAL" + + .. py:attribute:: CREATE_TABLE + :value: "CREATE_TABLE" + + .. py:attribute:: CREATE_VIEW + :value: "CREATE_VIEW" + + .. py:attribute:: CREATE_VOLUME + :value: "CREATE_VOLUME" + + .. py:attribute:: EXECUTE + :value: "EXECUTE" + + .. py:attribute:: MANAGE_ALLOWLIST + :value: "MANAGE_ALLOWLIST" + + .. py:attribute:: MODIFY + :value: "MODIFY" + + .. py:attribute:: READ_FILES + :value: "READ_FILES" + + .. py:attribute:: READ_PRIVATE_FILES + :value: "READ_PRIVATE_FILES" + + .. py:attribute:: READ_VOLUME + :value: "READ_VOLUME" + + .. py:attribute:: REFRESH + :value: "REFRESH" + + .. py:attribute:: SELECT + :value: "SELECT" + + .. py:attribute:: SET_SHARE_PERMISSION + :value: "SET_SHARE_PERMISSION" + + .. py:attribute:: USAGE + :value: "USAGE" + + .. py:attribute:: USE_CATALOG + :value: "USE_CATALOG" + + .. py:attribute:: USE_CONNECTION + :value: "USE_CONNECTION" + + .. py:attribute:: USE_MARKETPLACE_ASSETS + :value: "USE_MARKETPLACE_ASSETS" + + .. py:attribute:: USE_PROVIDER + :value: "USE_PROVIDER" + + .. py:attribute:: USE_RECIPIENT + :value: "USE_RECIPIENT" + + .. py:attribute:: USE_SCHEMA + :value: "USE_SCHEMA" + + .. py:attribute:: USE_SHARE + :value: "USE_SHARE" + + .. py:attribute:: WRITE_FILES + :value: "WRITE_FILES" + + .. py:attribute:: WRITE_PRIVATE_FILES + :value: "WRITE_PRIVATE_FILES" + + .. py:attribute:: WRITE_VOLUME + :value: "WRITE_VOLUME" + .. autoclass:: PrivilegeAssignment :members: :undoc-members: @@ -352,6 +1034,23 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: ProvisioningInfoState + + .. py:attribute:: ACTIVE + :value: "ACTIVE" + + .. py:attribute:: DELETING + :value: "DELETING" + + .. 
py:attribute:: FAILED + :value: "FAILED" + + .. py:attribute:: PROVISIONING + :value: "PROVISIONING" + + .. py:attribute:: STATE_UNSPECIFIED + :value: "STATE_UNSPECIFIED" + .. autoclass:: ProvisioningStatus :members: :undoc-members: @@ -368,6 +1067,49 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: SecurableType + + The type of Unity Catalog securable + + .. py:attribute:: CATALOG + :value: "CATALOG" + + .. py:attribute:: CONNECTION + :value: "CONNECTION" + + .. py:attribute:: EXTERNAL_LOCATION + :value: "EXTERNAL_LOCATION" + + .. py:attribute:: FUNCTION + :value: "FUNCTION" + + .. py:attribute:: METASTORE + :value: "METASTORE" + + .. py:attribute:: PIPELINE + :value: "PIPELINE" + + .. py:attribute:: PROVIDER + :value: "PROVIDER" + + .. py:attribute:: RECIPIENT + :value: "RECIPIENT" + + .. py:attribute:: SCHEMA + :value: "SCHEMA" + + .. py:attribute:: SHARE + :value: "SHARE" + + .. py:attribute:: STORAGE_CREDENTIAL + :value: "STORAGE_CREDENTIAL" + + .. py:attribute:: TABLE + :value: "TABLE" + + .. py:attribute:: VOLUME + :value: "VOLUME" + .. autoclass:: SetArtifactAllowlist :members: :undoc-members: @@ -380,6 +1122,16 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: SseEncryptionDetailsAlgorithm + + The type of key encryption to use (affects headers from s3 client). + + .. py:attribute:: AWS_SSE_KMS + :value: "AWS_SSE_KMS" + + .. py:attribute:: AWS_SSE_S3 + :value: "AWS_SSE_S3" + .. autoclass:: StorageCredentialInfo :members: :undoc-members: @@ -388,6 +1140,26 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: SystemSchemaInfoState + + The current state of enablement for the system schema. An empty string means the system schema + is available and ready for opt-in. + + .. py:attribute:: AVAILABLE + :value: "AVAILABLE" + + .. 
py:attribute:: DISABLE_INITIALIZED + :value: "DISABLE_INITIALIZED" + + .. py:attribute:: ENABLE_COMPLETED + :value: "ENABLE_COMPLETED" + + .. py:attribute:: ENABLE_INITIALIZED + :value: "ENABLE_INITIALIZED" + + .. py:attribute:: UNAVAILABLE + :value: "UNAVAILABLE" + .. autoclass:: TableConstraint :members: :undoc-members: @@ -412,6 +1184,23 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: TableType + + .. py:attribute:: EXTERNAL + :value: "EXTERNAL" + + .. py:attribute:: MANAGED + :value: "MANAGED" + + .. py:attribute:: MATERIALIZED_VIEW + :value: "MATERIALIZED_VIEW" + + .. py:attribute:: STREAMING_TABLE + :value: "STREAMING_TABLE" + + .. py:attribute:: VIEW + :value: "VIEW" + .. autoclass:: TriggeredUpdateStatus :members: :undoc-members: @@ -440,6 +1229,16 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: UpdateMetastoreDeltaSharingScope + + The scope of Delta Sharing enabled for the metastore. + + .. py:attribute:: INTERNAL + :value: "INTERNAL" + + .. py:attribute:: INTERNAL_AND_EXTERNAL + :value: "INTERNAL_AND_EXTERNAL" + .. autoclass:: UpdateModelVersionRequest :members: :undoc-members: @@ -488,6 +1287,35 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: ValidationResultOperation + + The operation tested. + + .. py:attribute:: DELETE + :value: "DELETE" + + .. py:attribute:: LIST + :value: "LIST" + + .. py:attribute:: READ + :value: "READ" + + .. py:attribute:: WRITE + :value: "WRITE" + +.. py:class:: ValidationResultResult + + The results of the tested operation. + + .. py:attribute:: FAIL + :value: "FAIL" + + .. py:attribute:: PASS + :value: "PASS" + + .. py:attribute:: SKIP + :value: "SKIP" + .. 
autoclass:: ViewData :members: :undoc-members: @@ -496,10 +1324,26 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: VolumeType + + .. py:attribute:: EXTERNAL + :value: "EXTERNAL" + + .. py:attribute:: MANAGED + :value: "MANAGED" + .. autoclass:: WorkspaceBinding :members: :undoc-members: +.. py:class:: WorkspaceBindingBindingType + + .. py:attribute:: BINDING_TYPE_READ_ONLY + :value: "BINDING_TYPE_READ_ONLY" + + .. py:attribute:: BINDING_TYPE_READ_WRITE + :value: "BINDING_TYPE_READ_WRITE" + .. autoclass:: WorkspaceBindingsResponse :members: :undoc-members: diff --git a/docs/dbdataclasses/compute.rst b/docs/dbdataclasses/compute.rst index f7ff6c85..17ea7cae 100644 --- a/docs/dbdataclasses/compute.rst +++ b/docs/dbdataclasses/compute.rst @@ -20,10 +20,40 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: AwsAvailability + + Availability type used for all subsequent nodes past the `first_on_demand` ones. + + Note: If `first_on_demand` is zero, this availability type will be used for the entire cluster. + + .. py:attribute:: ON_DEMAND + :value: "ON_DEMAND" + + .. py:attribute:: SPOT + :value: "SPOT" + + .. py:attribute:: SPOT_WITH_FALLBACK + :value: "SPOT_WITH_FALLBACK" + .. autoclass:: AzureAttributes :members: :undoc-members: +.. py:class:: AzureAvailability + + Availability type used for all subsequent nodes past the `first_on_demand` ones. Note: If + `first_on_demand` is zero (which only happens on pool clusters), this availability type will be + used for the entire cluster. + + .. py:attribute:: ON_DEMAND_AZURE + :value: "ON_DEMAND_AZURE" + + .. py:attribute:: SPOT_AZURE + :value: "SPOT_AZURE" + + .. py:attribute:: SPOT_WITH_FALLBACK_AZURE + :value: "SPOT_WITH_FALLBACK_AZURE" + .. 
autoclass:: CancelCommand :members: :undoc-members: @@ -40,6 +70,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: CloudProviderNodeStatus + + .. py:attribute:: NOT_AVAILABLE_IN_REGION + :value: "NOT_AVAILABLE_IN_REGION" + + .. py:attribute:: NOT_ENABLED_ON_SUBSCRIPTION + :value: "NOT_ENABLED_ON_SUBSCRIPTION" + .. autoclass:: ClusterAccessControlRequest :members: :undoc-members: @@ -72,6 +110,19 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: ClusterPermissionLevel + + Permission level + + .. py:attribute:: CAN_ATTACH_TO + :value: "CAN_ATTACH_TO" + + .. py:attribute:: CAN_MANAGE + :value: "CAN_MANAGE" + + .. py:attribute:: CAN_RESTART + :value: "CAN_RESTART" + .. autoclass:: ClusterPermissions :members: :undoc-members: @@ -96,6 +147,13 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: ClusterPolicyPermissionLevel + + Permission level + + .. py:attribute:: CAN_USE + :value: "CAN_USE" + .. autoclass:: ClusterPolicyPermissions :members: :undoc-members: @@ -112,6 +170,32 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: ClusterSource + + Determines whether the cluster was created by a user through the UI, created by the Databricks + Jobs Scheduler, or through an API request. This is the same as cluster_creator, but read only. + + .. py:attribute:: API + :value: "API" + + .. py:attribute:: JOB + :value: "JOB" + + .. py:attribute:: MODELS + :value: "MODELS" + + .. py:attribute:: PIPELINE + :value: "PIPELINE" + + .. py:attribute:: PIPELINE_MAINTENANCE + :value: "PIPELINE_MAINTENANCE" + + .. py:attribute:: SQL + :value: "SQL" + + .. py:attribute:: UI + :value: "UI" + .. 
autoclass:: ClusterSpec :members: :undoc-members: @@ -120,6 +204,26 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: CommandStatus + + .. py:attribute:: CANCELLED + :value: "CANCELLED" + + .. py:attribute:: CANCELLING + :value: "CANCELLING" + + .. py:attribute:: ERROR + :value: "ERROR" + + .. py:attribute:: FINISHED + :value: "FINISHED" + + .. py:attribute:: QUEUED + :value: "QUEUED" + + .. py:attribute:: RUNNING + :value: "RUNNING" + .. autoclass:: CommandStatusResponse :members: :undoc-members: @@ -128,6 +232,24 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: ComputeSpecKind + + The kind of compute described by this compute specification. + + .. py:attribute:: SERVERLESS_PREVIEW + :value: "SERVERLESS_PREVIEW" + +.. py:class:: ContextStatus + + .. py:attribute:: ERROR + :value: "ERROR" + + .. py:attribute:: PENDING + :value: "PENDING" + + .. py:attribute:: RUNNING + :value: "RUNNING" + .. autoclass:: ContextStatusResponse :members: :undoc-members: @@ -172,6 +294,50 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: DataPlaneEventDetailsEventType + + + + .. py:attribute:: NODE_BLACKLISTED + :value: "NODE_BLACKLISTED" + + .. py:attribute:: NODE_EXCLUDED_DECOMMISSIONED + :value: "NODE_EXCLUDED_DECOMMISSIONED" + +.. py:class:: DataSecurityMode + + Data security mode decides what data governance model to use when accessing data from a cluster. + + * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features + are not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively + used by a single user specified in `single_user_name`. Most programming languages, cluster + features and data governance features are available in this mode. 
* `USER_ISOLATION`: A secure + cluster that can be shared by multiple users. Cluster users are fully isolated so that they + cannot see each other's data and credentials. Most data governance features are supported in + this mode. But programming languages and cluster features might be limited. * + `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. * + `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high + concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy + Passthrough on standard clusters. + + .. py:attribute:: LEGACY_PASSTHROUGH + :value: "LEGACY_PASSTHROUGH" + + .. py:attribute:: LEGACY_SINGLE_USER + :value: "LEGACY_SINGLE_USER" + + .. py:attribute:: LEGACY_TABLE_ACL + :value: "LEGACY_TABLE_ACL" + + .. py:attribute:: NONE + :value: "NONE" + + .. py:attribute:: SINGLE_USER + :value: "SINGLE_USER" + + .. py:attribute:: USER_ISOLATION + :value: "USER_ISOLATION" + .. autoclass:: DbfsStorageInfo :members: :undoc-members: @@ -200,6 +366,22 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: DiskTypeAzureDiskVolumeType + + .. py:attribute:: PREMIUM_LRS + :value: "PREMIUM_LRS" + + .. py:attribute:: STANDARD_LRS + :value: "STANDARD_LRS" + +.. py:class:: DiskTypeEbsVolumeType + + .. py:attribute:: GENERAL_PURPOSE_SSD + :value: "GENERAL_PURPOSE_SSD" + + .. py:attribute:: THROUGHPUT_OPTIMIZED_HDD + :value: "THROUGHPUT_OPTIMIZED_HDD" + .. autoclass:: DockerBasicAuth :members: :undoc-members: @@ -208,6 +390,16 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: EbsVolumeType + + The type of EBS volumes that will be launched with this cluster. + + .. py:attribute:: GENERAL_PURPOSE_SSD + :value: "GENERAL_PURPOSE_SSD" + + .. py:attribute:: THROUGHPUT_OPTIMIZED_HDD + :value: "THROUGHPUT_OPTIMIZED_HDD" + .. 
autoclass:: EditCluster :members: :undoc-members: @@ -224,10 +416,117 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: EventDetailsCause + + The cause of a change in target size. + + .. py:attribute:: AUTORECOVERY + :value: "AUTORECOVERY" + + .. py:attribute:: AUTOSCALE + :value: "AUTOSCALE" + + .. py:attribute:: REPLACE_BAD_NODES + :value: "REPLACE_BAD_NODES" + + .. py:attribute:: USER_REQUEST + :value: "USER_REQUEST" + +.. py:class:: EventType + + .. py:attribute:: AUTOSCALING_STATS_REPORT + :value: "AUTOSCALING_STATS_REPORT" + + .. py:attribute:: CREATING + :value: "CREATING" + + .. py:attribute:: DBFS_DOWN + :value: "DBFS_DOWN" + + .. py:attribute:: DID_NOT_EXPAND_DISK + :value: "DID_NOT_EXPAND_DISK" + + .. py:attribute:: DRIVER_HEALTHY + :value: "DRIVER_HEALTHY" + + .. py:attribute:: DRIVER_NOT_RESPONDING + :value: "DRIVER_NOT_RESPONDING" + + .. py:attribute:: DRIVER_UNAVAILABLE + :value: "DRIVER_UNAVAILABLE" + + .. py:attribute:: EDITED + :value: "EDITED" + + .. py:attribute:: EXPANDED_DISK + :value: "EXPANDED_DISK" + + .. py:attribute:: FAILED_TO_EXPAND_DISK + :value: "FAILED_TO_EXPAND_DISK" + + .. py:attribute:: INIT_SCRIPTS_FINISHED + :value: "INIT_SCRIPTS_FINISHED" + + .. py:attribute:: INIT_SCRIPTS_STARTED + :value: "INIT_SCRIPTS_STARTED" + + .. py:attribute:: METASTORE_DOWN + :value: "METASTORE_DOWN" + + .. py:attribute:: NODES_LOST + :value: "NODES_LOST" + + .. py:attribute:: NODE_BLACKLISTED + :value: "NODE_BLACKLISTED" + + .. py:attribute:: NODE_EXCLUDED_DECOMMISSIONED + :value: "NODE_EXCLUDED_DECOMMISSIONED" + + .. py:attribute:: PINNED + :value: "PINNED" + + .. py:attribute:: RESIZING + :value: "RESIZING" + + .. py:attribute:: RESTARTING + :value: "RESTARTING" + + .. py:attribute:: RUNNING + :value: "RUNNING" + + .. py:attribute:: SPARK_EXCEPTION + :value: "SPARK_EXCEPTION" + + .. py:attribute:: STARTING + :value: "STARTING" + + .. 
py:attribute:: TERMINATING + :value: "TERMINATING" + + .. py:attribute:: UNPINNED + :value: "UNPINNED" + + .. py:attribute:: UPSIZE_COMPLETED + :value: "UPSIZE_COMPLETED" + .. autoclass:: GcpAttributes :members: :undoc-members: +.. py:class:: GcpAvailability + + This field determines whether the instance pool will contain preemptible VMs, on-demand VMs, or + preemptible VMs with a fallback to on-demand VMs if the former is unavailable. + + .. py:attribute:: ON_DEMAND_GCP + :value: "ON_DEMAND_GCP" + + .. py:attribute:: PREEMPTIBLE_GCP + :value: "PREEMPTIBLE_GCP" + + .. py:attribute:: PREEMPTIBLE_WITH_FALLBACK_GCP + :value: "PREEMPTIBLE_WITH_FALLBACK_GCP" + .. autoclass:: GcsStorageInfo :members: :undoc-members: @@ -244,6 +543,16 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: GetEventsOrder + + The order to list events in; either "ASC" or "DESC". Defaults to "DESC". + + .. py:attribute:: ASC + :value: "ASC" + + .. py:attribute:: DESC + :value: "DESC" + .. autoclass:: GetEventsResponse :members: :undoc-members: @@ -284,6 +593,28 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: InitScriptExecutionDetailsStatus + + The current status of the script + + .. py:attribute:: FAILED_EXECUTION + :value: "FAILED_EXECUTION" + + .. py:attribute:: FAILED_FETCH + :value: "FAILED_FETCH" + + .. py:attribute:: NOT_EXECUTED + :value: "NOT_EXECUTED" + + .. py:attribute:: SKIPPED + :value: "SKIPPED" + + .. py:attribute:: SUCCEEDED + :value: "SUCCEEDED" + + .. py:attribute:: UNKNOWN + :value: "UNKNOWN" + .. autoclass:: InitScriptInfo :members: :undoc-members: @@ -312,10 +643,34 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: InstancePoolAwsAttributesAvailability + + Availability type used for the spot nodes. 
+ + The default value is defined by InstancePoolConf.instancePoolDefaultAwsAvailability + + .. py:attribute:: ON_DEMAND + :value: "ON_DEMAND" + + .. py:attribute:: SPOT + :value: "SPOT" + .. autoclass:: InstancePoolAzureAttributes :members: :undoc-members: +.. py:class:: InstancePoolAzureAttributesAvailability + + Shows the Availability type used for the spot nodes. + + The default value is defined by InstancePoolConf.instancePoolDefaultAzureAvailability + + .. py:attribute:: ON_DEMAND_AZURE + :value: "ON_DEMAND_AZURE" + + .. py:attribute:: SPOT_AZURE + :value: "SPOT_AZURE" + .. autoclass:: InstancePoolGcpAttributes :members: :undoc-members: @@ -324,6 +679,16 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: InstancePoolPermissionLevel + + Permission level + + .. py:attribute:: CAN_ATTACH_TO + :value: "CAN_ATTACH_TO" + + .. py:attribute:: CAN_MANAGE + :value: "CAN_MANAGE" + .. autoclass:: InstancePoolPermissions :members: :undoc-members: @@ -336,6 +701,19 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: InstancePoolState + + Current state of the instance pool. + + .. py:attribute:: ACTIVE + :value: "ACTIVE" + + .. py:attribute:: DELETED + :value: "DELETED" + + .. py:attribute:: STOPPED + :value: "STOPPED" + .. autoclass:: InstancePoolStats :members: :undoc-members: @@ -348,6 +726,17 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: Language + + .. py:attribute:: PYTHON + :value: "PYTHON" + + .. py:attribute:: SCALA + :value: "SCALA" + + .. py:attribute:: SQL + :value: "SQL" + .. autoclass:: Library :members: :undoc-members: @@ -356,6 +745,31 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: LibraryFullStatusStatus + + Status of installing the library on the cluster. 
+ + .. py:attribute:: FAILED + :value: "FAILED" + + .. py:attribute:: INSTALLED + :value: "INSTALLED" + + .. py:attribute:: INSTALLING + :value: "INSTALLING" + + .. py:attribute:: PENDING + :value: "PENDING" + + .. py:attribute:: RESOLVING + :value: "RESOLVING" + + .. py:attribute:: SKIPPED + :value: "SKIPPED" + + .. py:attribute:: UNINSTALL_ON_RESTART + :value: "UNINSTALL_ON_RESTART" + .. autoclass:: ListAllClusterLibraryStatusesResponse :members: :undoc-members: @@ -392,6 +806,22 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: ListSortColumn + + .. py:attribute:: POLICY_CREATION_TIME + :value: "POLICY_CREATION_TIME" + + .. py:attribute:: POLICY_NAME + :value: "POLICY_NAME" + +.. py:class:: ListSortOrder + + .. py:attribute:: ASC + :value: "ASC" + + .. py:attribute:: DESC + :value: "DESC" + .. autoclass:: LocalFileInfo :members: :undoc-members: @@ -456,10 +886,41 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: ResultType + + .. py:attribute:: ERROR + :value: "ERROR" + + .. py:attribute:: IMAGE + :value: "IMAGE" + + .. py:attribute:: IMAGES + :value: "IMAGES" + + .. py:attribute:: TABLE + :value: "TABLE" + + .. py:attribute:: TEXT + :value: "TEXT" + .. autoclass:: Results :members: :undoc-members: +.. py:class:: RuntimeEngine + + Decides which runtime engine to use, e.g. Standard vs. Photon. If unspecified, the runtime + engine is inferred from spark_version. + + .. py:attribute:: NULL + :value: "NULL" + + .. py:attribute:: PHOTON + :value: "PHOTON" + + .. py:attribute:: STANDARD + :value: "STANDARD" + .. autoclass:: S3StorageInfo :members: :undoc-members: @@ -480,10 +941,295 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: State + + Current state of the cluster. + + .. py:attribute:: ERROR + :value: "ERROR" + + .. 
py:attribute:: PENDING + :value: "PENDING" + + .. py:attribute:: RESIZING + :value: "RESIZING" + + .. py:attribute:: RESTARTING + :value: "RESTARTING" + + .. py:attribute:: RUNNING + :value: "RUNNING" + + .. py:attribute:: TERMINATED + :value: "TERMINATED" + + .. py:attribute:: TERMINATING + :value: "TERMINATING" + + .. py:attribute:: UNKNOWN + :value: "UNKNOWN" + .. autoclass:: TerminationReason :members: :undoc-members: +.. py:class:: TerminationReasonCode + + status code indicating why the cluster was terminated + + .. py:attribute:: ABUSE_DETECTED + :value: "ABUSE_DETECTED" + + .. py:attribute:: ATTACH_PROJECT_FAILURE + :value: "ATTACH_PROJECT_FAILURE" + + .. py:attribute:: AWS_AUTHORIZATION_FAILURE + :value: "AWS_AUTHORIZATION_FAILURE" + + .. py:attribute:: AWS_INSUFFICIENT_FREE_ADDRESSES_IN_SUBNET_FAILURE + :value: "AWS_INSUFFICIENT_FREE_ADDRESSES_IN_SUBNET_FAILURE" + + .. py:attribute:: AWS_INSUFFICIENT_INSTANCE_CAPACITY_FAILURE + :value: "AWS_INSUFFICIENT_INSTANCE_CAPACITY_FAILURE" + + .. py:attribute:: AWS_MAX_SPOT_INSTANCE_COUNT_EXCEEDED_FAILURE + :value: "AWS_MAX_SPOT_INSTANCE_COUNT_EXCEEDED_FAILURE" + + .. py:attribute:: AWS_REQUEST_LIMIT_EXCEEDED + :value: "AWS_REQUEST_LIMIT_EXCEEDED" + + .. py:attribute:: AWS_UNSUPPORTED_FAILURE + :value: "AWS_UNSUPPORTED_FAILURE" + + .. py:attribute:: AZURE_BYOK_KEY_PERMISSION_FAILURE + :value: "AZURE_BYOK_KEY_PERMISSION_FAILURE" + + .. py:attribute:: AZURE_EPHEMERAL_DISK_FAILURE + :value: "AZURE_EPHEMERAL_DISK_FAILURE" + + .. py:attribute:: AZURE_INVALID_DEPLOYMENT_TEMPLATE + :value: "AZURE_INVALID_DEPLOYMENT_TEMPLATE" + + .. py:attribute:: AZURE_OPERATION_NOT_ALLOWED_EXCEPTION + :value: "AZURE_OPERATION_NOT_ALLOWED_EXCEPTION" + + .. py:attribute:: AZURE_QUOTA_EXCEEDED_EXCEPTION + :value: "AZURE_QUOTA_EXCEEDED_EXCEPTION" + + .. py:attribute:: AZURE_RESOURCE_MANAGER_THROTTLING + :value: "AZURE_RESOURCE_MANAGER_THROTTLING" + + .. 
py:attribute:: AZURE_RESOURCE_PROVIDER_THROTTLING + :value: "AZURE_RESOURCE_PROVIDER_THROTTLING" + + .. py:attribute:: AZURE_UNEXPECTED_DEPLOYMENT_TEMPLATE_FAILURE + :value: "AZURE_UNEXPECTED_DEPLOYMENT_TEMPLATE_FAILURE" + + .. py:attribute:: AZURE_VM_EXTENSION_FAILURE + :value: "AZURE_VM_EXTENSION_FAILURE" + + .. py:attribute:: AZURE_VNET_CONFIGURATION_FAILURE + :value: "AZURE_VNET_CONFIGURATION_FAILURE" + + .. py:attribute:: BOOTSTRAP_TIMEOUT + :value: "BOOTSTRAP_TIMEOUT" + + .. py:attribute:: BOOTSTRAP_TIMEOUT_CLOUD_PROVIDER_EXCEPTION + :value: "BOOTSTRAP_TIMEOUT_CLOUD_PROVIDER_EXCEPTION" + + .. py:attribute:: CLOUD_PROVIDER_DISK_SETUP_FAILURE + :value: "CLOUD_PROVIDER_DISK_SETUP_FAILURE" + + .. py:attribute:: CLOUD_PROVIDER_LAUNCH_FAILURE + :value: "CLOUD_PROVIDER_LAUNCH_FAILURE" + + .. py:attribute:: CLOUD_PROVIDER_RESOURCE_STOCKOUT + :value: "CLOUD_PROVIDER_RESOURCE_STOCKOUT" + + .. py:attribute:: CLOUD_PROVIDER_SHUTDOWN + :value: "CLOUD_PROVIDER_SHUTDOWN" + + .. py:attribute:: COMMUNICATION_LOST + :value: "COMMUNICATION_LOST" + + .. py:attribute:: CONTAINER_LAUNCH_FAILURE + :value: "CONTAINER_LAUNCH_FAILURE" + + .. py:attribute:: CONTROL_PLANE_REQUEST_FAILURE + :value: "CONTROL_PLANE_REQUEST_FAILURE" + + .. py:attribute:: DATABASE_CONNECTION_FAILURE + :value: "DATABASE_CONNECTION_FAILURE" + + .. py:attribute:: DBFS_COMPONENT_UNHEALTHY + :value: "DBFS_COMPONENT_UNHEALTHY" + + .. py:attribute:: DOCKER_IMAGE_PULL_FAILURE + :value: "DOCKER_IMAGE_PULL_FAILURE" + + .. py:attribute:: DRIVER_UNREACHABLE + :value: "DRIVER_UNREACHABLE" + + .. py:attribute:: DRIVER_UNRESPONSIVE + :value: "DRIVER_UNRESPONSIVE" + + .. py:attribute:: EXECUTION_COMPONENT_UNHEALTHY + :value: "EXECUTION_COMPONENT_UNHEALTHY" + + .. py:attribute:: GCP_QUOTA_EXCEEDED + :value: "GCP_QUOTA_EXCEEDED" + + .. py:attribute:: GCP_SERVICE_ACCOUNT_DELETED + :value: "GCP_SERVICE_ACCOUNT_DELETED" + + .. py:attribute:: GLOBAL_INIT_SCRIPT_FAILURE + :value: "GLOBAL_INIT_SCRIPT_FAILURE" + + .. 
py:attribute:: HIVE_METASTORE_PROVISIONING_FAILURE + :value: "HIVE_METASTORE_PROVISIONING_FAILURE" + + .. py:attribute:: IMAGE_PULL_PERMISSION_DENIED + :value: "IMAGE_PULL_PERMISSION_DENIED" + + .. py:attribute:: INACTIVITY + :value: "INACTIVITY" + + .. py:attribute:: INIT_SCRIPT_FAILURE + :value: "INIT_SCRIPT_FAILURE" + + .. py:attribute:: INSTANCE_POOL_CLUSTER_FAILURE + :value: "INSTANCE_POOL_CLUSTER_FAILURE" + + .. py:attribute:: INSTANCE_UNREACHABLE + :value: "INSTANCE_UNREACHABLE" + + .. py:attribute:: INTERNAL_ERROR + :value: "INTERNAL_ERROR" + + .. py:attribute:: INVALID_ARGUMENT + :value: "INVALID_ARGUMENT" + + .. py:attribute:: INVALID_SPARK_IMAGE + :value: "INVALID_SPARK_IMAGE" + + .. py:attribute:: IP_EXHAUSTION_FAILURE + :value: "IP_EXHAUSTION_FAILURE" + + .. py:attribute:: JOB_FINISHED + :value: "JOB_FINISHED" + + .. py:attribute:: K8S_AUTOSCALING_FAILURE + :value: "K8S_AUTOSCALING_FAILURE" + + .. py:attribute:: K8S_DBR_CLUSTER_LAUNCH_TIMEOUT + :value: "K8S_DBR_CLUSTER_LAUNCH_TIMEOUT" + + .. py:attribute:: METASTORE_COMPONENT_UNHEALTHY + :value: "METASTORE_COMPONENT_UNHEALTHY" + + .. py:attribute:: NEPHOS_RESOURCE_MANAGEMENT + :value: "NEPHOS_RESOURCE_MANAGEMENT" + + .. py:attribute:: NETWORK_CONFIGURATION_FAILURE + :value: "NETWORK_CONFIGURATION_FAILURE" + + .. py:attribute:: NFS_MOUNT_FAILURE + :value: "NFS_MOUNT_FAILURE" + + .. py:attribute:: NPIP_TUNNEL_SETUP_FAILURE + :value: "NPIP_TUNNEL_SETUP_FAILURE" + + .. py:attribute:: NPIP_TUNNEL_TOKEN_FAILURE + :value: "NPIP_TUNNEL_TOKEN_FAILURE" + + .. py:attribute:: REQUEST_REJECTED + :value: "REQUEST_REJECTED" + + .. py:attribute:: REQUEST_THROTTLED + :value: "REQUEST_THROTTLED" + + .. py:attribute:: SECRET_RESOLUTION_ERROR + :value: "SECRET_RESOLUTION_ERROR" + + .. py:attribute:: SECURITY_DAEMON_REGISTRATION_EXCEPTION + :value: "SECURITY_DAEMON_REGISTRATION_EXCEPTION" + + .. py:attribute:: SELF_BOOTSTRAP_FAILURE + :value: "SELF_BOOTSTRAP_FAILURE" + + .. 
py:attribute:: SKIPPED_SLOW_NODES + :value: "SKIPPED_SLOW_NODES" + + .. py:attribute:: SLOW_IMAGE_DOWNLOAD + :value: "SLOW_IMAGE_DOWNLOAD" + + .. py:attribute:: SPARK_ERROR + :value: "SPARK_ERROR" + + .. py:attribute:: SPARK_IMAGE_DOWNLOAD_FAILURE + :value: "SPARK_IMAGE_DOWNLOAD_FAILURE" + + .. py:attribute:: SPARK_STARTUP_FAILURE + :value: "SPARK_STARTUP_FAILURE" + + .. py:attribute:: SPOT_INSTANCE_TERMINATION + :value: "SPOT_INSTANCE_TERMINATION" + + .. py:attribute:: STORAGE_DOWNLOAD_FAILURE + :value: "STORAGE_DOWNLOAD_FAILURE" + + .. py:attribute:: STS_CLIENT_SETUP_FAILURE + :value: "STS_CLIENT_SETUP_FAILURE" + + .. py:attribute:: SUBNET_EXHAUSTED_FAILURE + :value: "SUBNET_EXHAUSTED_FAILURE" + + .. py:attribute:: TEMPORARILY_UNAVAILABLE + :value: "TEMPORARILY_UNAVAILABLE" + + .. py:attribute:: TRIAL_EXPIRED + :value: "TRIAL_EXPIRED" + + .. py:attribute:: UNEXPECTED_LAUNCH_FAILURE + :value: "UNEXPECTED_LAUNCH_FAILURE" + + .. py:attribute:: UNKNOWN + :value: "UNKNOWN" + + .. py:attribute:: UNSUPPORTED_INSTANCE_TYPE + :value: "UNSUPPORTED_INSTANCE_TYPE" + + .. py:attribute:: UPDATE_INSTANCE_PROFILE_FAILURE + :value: "UPDATE_INSTANCE_PROFILE_FAILURE" + + .. py:attribute:: USER_REQUEST + :value: "USER_REQUEST" + + .. py:attribute:: WORKER_SETUP_FAILURE + :value: "WORKER_SETUP_FAILURE" + + .. py:attribute:: WORKSPACE_CANCELLED_ERROR + :value: "WORKSPACE_CANCELLED_ERROR" + + .. py:attribute:: WORKSPACE_CONFIGURATION_ERROR + :value: "WORKSPACE_CONFIGURATION_ERROR" + +.. py:class:: TerminationReasonType + + type of the termination + + .. py:attribute:: CLIENT_ERROR + :value: "CLIENT_ERROR" + + .. py:attribute:: CLOUD_FAILURE + :value: "CLOUD_FAILURE" + + .. py:attribute:: SERVICE_FAULT + :value: "SERVICE_FAULT" + + .. py:attribute:: SUCCESS + :value: "SUCCESS" + .. 
autoclass:: UninstallLibraries :members: :undoc-members: diff --git a/docs/dbdataclasses/iam.rst b/docs/dbdataclasses/iam.rst index 003c6774..036947d2 100644 --- a/docs/dbdataclasses/iam.rst +++ b/docs/dbdataclasses/iam.rst @@ -28,6 +28,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: GetSortOrder + + .. py:attribute:: ASCENDING + :value: "ASCENDING" + + .. py:attribute:: DESCENDING + :value: "DESCENDING" + .. autoclass:: GrantRule :members: :undoc-members: @@ -36,14 +44,32 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: GroupSchema + + .. py:attribute:: URN_IETF_PARAMS_SCIM_SCHEMAS_CORE_2_0_GROUP + :value: "URN_IETF_PARAMS_SCIM_SCHEMAS_CORE_2_0_GROUP" + .. autoclass:: ListGroupsResponse :members: :undoc-members: +.. py:class:: ListResponseSchema + + .. py:attribute:: URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_LIST_RESPONSE + :value: "URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_LIST_RESPONSE" + .. autoclass:: ListServicePrincipalResponse :members: :undoc-members: +.. py:class:: ListSortOrder + + .. py:attribute:: ASCENDING + :value: "ASCENDING" + + .. py:attribute:: DESCENDING + :value: "DESCENDING" + .. autoclass:: ListUsersResponse :members: :undoc-members: @@ -72,6 +98,13 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: PasswordPermissionLevel + + Permission level + + .. py:attribute:: CAN_USE + :value: "CAN_USE" + .. autoclass:: PasswordPermissions :members: :undoc-members: @@ -88,6 +121,24 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: PatchOp + + Type of patch operation. + + .. py:attribute:: ADD + :value: "ADD" + + .. py:attribute:: REMOVE + :value: "REMOVE" + + .. py:attribute:: REPLACE + :value: "REPLACE" + +.. py:class:: PatchSchema + + .. 
py:attribute:: URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP + :value: "URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP" + .. autoclass:: Permission :members: :undoc-members: @@ -100,6 +151,55 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: PermissionLevel + + Permission level + + .. py:attribute:: CAN_ATTACH_TO + :value: "CAN_ATTACH_TO" + + .. py:attribute:: CAN_BIND + :value: "CAN_BIND" + + .. py:attribute:: CAN_EDIT + :value: "CAN_EDIT" + + .. py:attribute:: CAN_EDIT_METADATA + :value: "CAN_EDIT_METADATA" + + .. py:attribute:: CAN_MANAGE + :value: "CAN_MANAGE" + + .. py:attribute:: CAN_MANAGE_PRODUCTION_VERSIONS + :value: "CAN_MANAGE_PRODUCTION_VERSIONS" + + .. py:attribute:: CAN_MANAGE_RUN + :value: "CAN_MANAGE_RUN" + + .. py:attribute:: CAN_MANAGE_STAGING_VERSIONS + :value: "CAN_MANAGE_STAGING_VERSIONS" + + .. py:attribute:: CAN_READ + :value: "CAN_READ" + + .. py:attribute:: CAN_RESTART + :value: "CAN_RESTART" + + .. py:attribute:: CAN_RUN + :value: "CAN_RUN" + + .. py:attribute:: CAN_USE + :value: "CAN_USE" + + .. py:attribute:: CAN_VIEW + :value: "CAN_VIEW" + + .. py:attribute:: CAN_VIEW_METADATA + :value: "CAN_VIEW_METADATA" + + .. py:attribute:: IS_OWNER + :value: "IS_OWNER" + .. autoclass:: PermissionOutput :members: :undoc-members: @@ -136,6 +236,11 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: ServicePrincipalSchema + + .. py:attribute:: URN_IETF_PARAMS_SCIM_SCHEMAS_CORE_2_0_SERVICE_PRINCIPAL + :value: "URN_IETF_PARAMS_SCIM_SCHEMAS_CORE_2_0_SERVICE_PRINCIPAL" + .. autoclass:: UpdateRuleSetRequest :members: :undoc-members: @@ -148,6 +253,25 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: UserSchema + + .. py:attribute:: URN_IETF_PARAMS_SCIM_SCHEMAS_CORE_2_0_USER + :value: "URN_IETF_PARAMS_SCIM_SCHEMAS_CORE_2_0_USER" + + .. 
py:attribute:: URN_IETF_PARAMS_SCIM_SCHEMAS_EXTENSION_WORKSPACE_2_0_USER + :value: "URN_IETF_PARAMS_SCIM_SCHEMAS_EXTENSION_WORKSPACE_2_0_USER" + +.. py:class:: WorkspacePermission + + .. py:attribute:: ADMIN + :value: "ADMIN" + + .. py:attribute:: UNKNOWN + :value: "UNKNOWN" + + .. py:attribute:: USER + :value: "USER" + .. autoclass:: WorkspacePermissions :members: :undoc-members: diff --git a/docs/dbdataclasses/jobs.rst b/docs/dbdataclasses/jobs.rst index 0fa7f005..3d78504d 100644 --- a/docs/dbdataclasses/jobs.rst +++ b/docs/dbdataclasses/jobs.rst @@ -28,10 +28,48 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: Condition + + .. py:attribute:: ALL_UPDATED + :value: "ALL_UPDATED" + + .. py:attribute:: ANY_UPDATED + :value: "ANY_UPDATED" + .. autoclass:: ConditionTask :members: :undoc-members: +.. py:class:: ConditionTaskOp + + * `EQUAL_TO`, `NOT_EQUAL` operators perform string comparison of their operands. This means that + `“12.0” == “12”` will evaluate to `false`. * `GREATER_THAN`, `GREATER_THAN_OR_EQUAL`, + `LESS_THAN`, `LESS_THAN_OR_EQUAL` operators perform numeric comparison of their operands. + `“12.0” >= “12”` will evaluate to `true`, `“10.0” >= “12”` will evaluate to + `false`. + + The boolean comparison to task values can be implemented with operators `EQUAL_TO`, `NOT_EQUAL`. + If a task value was set to a boolean value, it will be serialized to `“true”` or + `“false”` for the comparison. + + .. py:attribute:: EQUAL_TO + :value: "EQUAL_TO" + + .. py:attribute:: GREATER_THAN + :value: "GREATER_THAN" + + .. py:attribute:: GREATER_THAN_OR_EQUAL + :value: "GREATER_THAN_OR_EQUAL" + + .. py:attribute:: LESS_THAN + :value: "LESS_THAN" + + .. py:attribute:: LESS_THAN_OR_EQUAL + :value: "LESS_THAN_OR_EQUAL" + + .. py:attribute:: NOT_EQUAL + :value: "NOT_EQUAL" + .. 
autoclass:: Continuous :members: :undoc-members: @@ -40,6 +78,19 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: CreateJobEditMode + + Edit mode of the job. + + * `UI_LOCKED`: The job is in a locked UI state and cannot be modified. * `EDITABLE`: The job is + in an editable state and can be modified. + + .. py:attribute:: EDITABLE + :value: "EDITABLE" + + .. py:attribute:: UI_LOCKED + :value: "UI_LOCKED" + .. autoclass:: CreateResponse :members: :undoc-members: @@ -88,10 +139,44 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: Format + + .. py:attribute:: MULTI_TASK + :value: "MULTI_TASK" + + .. py:attribute:: SINGLE_TASK + :value: "SINGLE_TASK" + .. autoclass:: GetJobPermissionLevelsResponse :members: :undoc-members: +.. py:class:: GitProvider + + .. py:attribute:: AWS_CODE_COMMIT + :value: "AWS_CODE_COMMIT" + + .. py:attribute:: AZURE_DEV_OPS_SERVICES + :value: "AZURE_DEV_OPS_SERVICES" + + .. py:attribute:: BITBUCKET_CLOUD + :value: "BITBUCKET_CLOUD" + + .. py:attribute:: BITBUCKET_SERVER + :value: "BITBUCKET_SERVER" + + .. py:attribute:: GIT_HUB + :value: "GIT_HUB" + + .. py:attribute:: GIT_HUB_ENTERPRISE + :value: "GIT_HUB_ENTERPRISE" + + .. py:attribute:: GIT_LAB + :value: "GIT_LAB" + + .. py:attribute:: GIT_LAB_ENTERPRISE_EDITION + :value: "GIT_LAB_ENTERPRISE_EDITION" + .. autoclass:: GitSnapshot :members: :undoc-members: @@ -124,6 +209,15 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: JobDeploymentKind + + The kind of deployment that manages the job. + + * `BUNDLE`: The job is managed by Databricks Asset Bundle. + + .. py:attribute:: BUNDLE + :value: "BUNDLE" + .. 
autoclass:: JobEmailNotifications :members: :undoc-members: @@ -144,6 +238,22 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: JobPermissionLevel + + Permission level + + .. py:attribute:: CAN_MANAGE + :value: "CAN_MANAGE" + + .. py:attribute:: CAN_MANAGE_RUN + :value: "CAN_MANAGE_RUN" + + .. py:attribute:: CAN_VIEW + :value: "CAN_VIEW" + + .. py:attribute:: IS_OWNER + :value: "IS_OWNER" + .. autoclass:: JobPermissions :members: :undoc-members: @@ -164,10 +274,54 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: JobSettingsEditMode + + Edit mode of the job. + + * `UI_LOCKED`: The job is in a locked UI state and cannot be modified. * `EDITABLE`: The job is + in an editable state and can be modified. + + .. py:attribute:: EDITABLE + :value: "EDITABLE" + + .. py:attribute:: UI_LOCKED + :value: "UI_LOCKED" + .. autoclass:: JobSource :members: :undoc-members: +.. py:class:: JobSourceDirtyState + + Dirty state indicates the job is not fully synced with the job specification in the remote + repository. + + Possible values are: * `NOT_SYNCED`: The job is not yet synced with the remote job + specification. Import the remote job specification from UI to make the job fully synced. * + `DISCONNECTED`: The job is temporarily disconnected from the remote job specification and is + allowed for live edit. Import the remote job specification again from UI to make the job fully + synced. + + .. py:attribute:: DISCONNECTED + :value: "DISCONNECTED" + + .. py:attribute:: NOT_SYNCED + :value: "NOT_SYNCED" + +.. py:class:: JobsHealthMetric + + Specifies the health metric that is being evaluated for a particular health rule. + + .. py:attribute:: RUN_DURATION_SECONDS + :value: "RUN_DURATION_SECONDS" + +.. py:class:: JobsHealthOperator + + Specifies the operator used to compare the health metric value with the specified threshold. + + .. 
py:attribute:: GREATER_THAN + :value: "GREATER_THAN" + .. autoclass:: JobsHealthRule :members: :undoc-members: @@ -184,6 +338,23 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: ListRunsRunType + + * `JOB_RUN`: Normal job run. A run created with :method:jobs/runNow. * `WORKFLOW_RUN`: Workflow + run. A run created with [dbutils.notebook.run]. * `SUBMIT_RUN`: Submit run. A run created with + :method:jobs/submit. + + [dbutils.notebook.run]: https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-workflow + + .. py:attribute:: JOB_RUN + :value: "JOB_RUN" + + .. py:attribute:: SUBMIT_RUN + :value: "SUBMIT_RUN" + + .. py:attribute:: WORKFLOW_RUN + :value: "WORKFLOW_RUN" + .. autoclass:: NotebookOutput :members: :undoc-members: @@ -192,6 +363,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: PauseStatus + + .. py:attribute:: PAUSED + :value: "PAUSED" + + .. py:attribute:: UNPAUSED + :value: "UNPAUSED" + .. autoclass:: PipelineParams :members: :undoc-members: @@ -212,6 +391,16 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: RepairHistoryItemType + + The repair history item type. Indicates whether a run is the original run or a repair run. + + .. py:attribute:: ORIGINAL + :value: "ORIGINAL" + + .. py:attribute:: REPAIR + :value: "REPAIR" + .. autoclass:: RepairRun :members: :undoc-members: @@ -264,10 +453,61 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: RunConditionTaskOp + + The condition task operator. + + .. py:attribute:: EQUAL_TO + :value: "EQUAL_TO" + + .. py:attribute:: GREATER_THAN + :value: "GREATER_THAN" + + .. py:attribute:: GREATER_THAN_OR_EQUAL + :value: "GREATER_THAN_OR_EQUAL" + + .. py:attribute:: LESS_THAN + :value: "LESS_THAN" + + .. 
py:attribute:: LESS_THAN_OR_EQUAL + :value: "LESS_THAN_OR_EQUAL" + + .. py:attribute:: NOT_EQUAL + :value: "NOT_EQUAL" + .. autoclass:: RunForEachTask :members: :undoc-members: +.. py:class:: RunIf + + An optional value indicating the condition that determines whether the task should be run once + its dependencies have been completed. When omitted, defaults to `ALL_SUCCESS`. + + Possible values are: * `ALL_SUCCESS`: All dependencies have executed and succeeded * + `AT_LEAST_ONE_SUCCESS`: At least one dependency has succeeded * `NONE_FAILED`: None of the + dependencies have failed and at least one was executed * `ALL_DONE`: All dependencies have been + completed * `AT_LEAST_ONE_FAILED`: At least one dependency failed * `ALL_FAILED`: All + dependencies have failed + + .. py:attribute:: ALL_DONE + :value: "ALL_DONE" + + .. py:attribute:: ALL_FAILED + :value: "ALL_FAILED" + + .. py:attribute:: ALL_SUCCESS + :value: "ALL_SUCCESS" + + .. py:attribute:: AT_LEAST_ONE_FAILED + :value: "AT_LEAST_ONE_FAILED" + + .. py:attribute:: AT_LEAST_ONE_SUCCESS + :value: "AT_LEAST_ONE_SUCCESS" + + .. py:attribute:: NONE_FAILED + :value: "NONE_FAILED" + .. autoclass:: RunJobOutput :members: :undoc-members: @@ -276,6 +516,47 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: RunLifeCycleState + + A value indicating the run's lifecycle state. The possible values are: * `QUEUED`: The run is + queued. * `PENDING`: The run is waiting to be executed while the cluster and execution context + are being prepared. * `RUNNING`: The task of this run is being executed. * `TERMINATING`: The + task of this run has completed, and the cluster and execution context are being cleaned up. * + `TERMINATED`: The task of this run has completed, and the cluster and execution context have + been cleaned up. This state is terminal. * `SKIPPED`: This run was aborted because a previous + run of the same job was already active. 
This state is terminal. * `INTERNAL_ERROR`: An + exceptional state that indicates a failure in the Jobs service, such as network failure over a + long period. If a run on a new cluster ends in the `INTERNAL_ERROR` state, the Jobs service + terminates the cluster as soon as possible. This state is terminal. * `BLOCKED`: The run is + blocked on an upstream dependency. * `WAITING_FOR_RETRY`: The run is waiting for a retry. + + .. py:attribute:: BLOCKED + :value: "BLOCKED" + + .. py:attribute:: INTERNAL_ERROR + :value: "INTERNAL_ERROR" + + .. py:attribute:: PENDING + :value: "PENDING" + + .. py:attribute:: QUEUED + :value: "QUEUED" + + .. py:attribute:: RUNNING + :value: "RUNNING" + + .. py:attribute:: SKIPPED + :value: "SKIPPED" + + .. py:attribute:: TERMINATED + :value: "TERMINATED" + + .. py:attribute:: TERMINATING + :value: "TERMINATING" + + .. py:attribute:: WAITING_FOR_RETRY + :value: "WAITING_FOR_RETRY" + .. autoclass:: RunNow :members: :undoc-members: @@ -292,6 +573,44 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: RunResultState + + A value indicating the run's result. The possible values are: * `SUCCESS`: The task completed + successfully. * `FAILED`: The task completed with an error. * `TIMEDOUT`: The run was stopped + after reaching the timeout. * `CANCELED`: The run was canceled at user request. * + `MAXIMUM_CONCURRENT_RUNS_REACHED`: The run was skipped because the maximum concurrent runs were + reached. * `EXCLUDED`: The run was skipped because the necessary conditions were not met. * + `SUCCESS_WITH_FAILURES`: The job run completed successfully with some failures; leaf tasks were + successful. * `UPSTREAM_FAILED`: The run was skipped because of an upstream failure. * + `UPSTREAM_CANCELED`: The run was skipped because an upstream task was canceled. + + .. py:attribute:: CANCELED + :value: "CANCELED" + + .. py:attribute:: EXCLUDED + :value: "EXCLUDED" + + .. 
py:attribute:: FAILED + :value: "FAILED" + + .. py:attribute:: MAXIMUM_CONCURRENT_RUNS_REACHED + :value: "MAXIMUM_CONCURRENT_RUNS_REACHED" + + .. py:attribute:: SUCCESS + :value: "SUCCESS" + + .. py:attribute:: SUCCESS_WITH_FAILURES + :value: "SUCCESS_WITH_FAILURES" + + .. py:attribute:: TIMEDOUT + :value: "TIMEDOUT" + + .. py:attribute:: UPSTREAM_CANCELED + :value: "UPSTREAM_CANCELED" + + .. py:attribute:: UPSTREAM_FAILED + :value: "UPSTREAM_FAILED" + .. autoclass:: RunState :members: :undoc-members: @@ -300,6 +619,31 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: RunType + + * `JOB_RUN`: Normal job run. A run created with :method:jobs/runNow. * `WORKFLOW_RUN`: Workflow + run. A run created with [dbutils.notebook.run]. * `SUBMIT_RUN`: Submit run. A run created with + :method:jobs/submit. + + [dbutils.notebook.run]: https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-workflow + + .. py:attribute:: JOB_RUN + :value: "JOB_RUN" + + .. py:attribute:: SUBMIT_RUN + :value: "SUBMIT_RUN" + + .. py:attribute:: WORKFLOW_RUN + :value: "WORKFLOW_RUN" + +.. py:class:: Source + + .. py:attribute:: GIT + :value: "GIT" + + .. py:attribute:: WORKSPACE + :value: "WORKSPACE" + .. autoclass:: SparkJarTask :members: :undoc-members: @@ -316,6 +660,22 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: SqlAlertState + + The state of the SQL alert. + + * UNKNOWN: alert yet to be evaluated * OK: alert evaluated and did not fulfill trigger + conditions * TRIGGERED: alert evaluated and fulfilled trigger conditions + + .. py:attribute:: OK + :value: "OK" + + .. py:attribute:: TRIGGERED + :value: "TRIGGERED" + + .. py:attribute:: UNKNOWN + :value: "UNKNOWN" + .. 
autoclass:: SqlDashboardOutput :members: :undoc-members: @@ -324,6 +684,25 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: SqlDashboardWidgetOutputStatus + + The execution status of the SQL widget. + + .. py:attribute:: CANCELLED + :value: "CANCELLED" + + .. py:attribute:: FAILED + :value: "FAILED" + + .. py:attribute:: PENDING + :value: "PENDING" + + .. py:attribute:: RUNNING + :value: "RUNNING" + + .. py:attribute:: SUCCESS + :value: "SUCCESS" + .. autoclass:: SqlOutput :members: :undoc-members: @@ -404,6 +783,36 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: TriggerType + + The type of trigger that fired this run. + + * `PERIODIC`: Schedules that periodically trigger runs, such as a cron scheduler. * `ONE_TIME`: + One time triggers that fire a single run. This occurs you triggered a single run on demand + through the UI or the API. * `RETRY`: Indicates a run that is triggered as a retry of a + previously failed run. This occurs when you request to re-run the job in case of failures. * + `RUN_JOB_TASK`: Indicates a run that is triggered using a Run Job task. * `FILE_ARRIVAL`: + Indicates a run that is triggered by a file arrival. * `TABLE`: Indicates a run that is + triggered by a table update. + + .. py:attribute:: FILE_ARRIVAL + :value: "FILE_ARRIVAL" + + .. py:attribute:: ONE_TIME + :value: "ONE_TIME" + + .. py:attribute:: PERIODIC + :value: "PERIODIC" + + .. py:attribute:: RETRY + :value: "RETRY" + + .. py:attribute:: RUN_JOB_TASK + :value: "RUN_JOB_TASK" + + .. py:attribute:: TABLE + :value: "TABLE" + .. autoclass:: UpdateJob :members: :undoc-members: @@ -412,6 +821,30 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: ViewType + + * `NOTEBOOK`: Notebook view item. * `DASHBOARD`: Dashboard view item. + + .. 
py:attribute:: DASHBOARD + :value: "DASHBOARD" + + .. py:attribute:: NOTEBOOK + :value: "NOTEBOOK" + +.. py:class:: ViewsToExport + + * `CODE`: Code view of the notebook. * `DASHBOARDS`: All dashboard views of the notebook. * + `ALL`: All views of the notebook. + + .. py:attribute:: ALL + :value: "ALL" + + .. py:attribute:: CODE + :value: "CODE" + + .. py:attribute:: DASHBOARDS + :value: "DASHBOARDS" + .. autoclass:: Webhook :members: :undoc-members: diff --git a/docs/dbdataclasses/ml.rst b/docs/dbdataclasses/ml.rst index f84bb04e..d2464440 100644 --- a/docs/dbdataclasses/ml.rst +++ b/docs/dbdataclasses/ml.rst @@ -8,6 +8,61 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: ActivityAction + + An action that a user (with sufficient permissions) could take on an activity. Valid values are: + * `APPROVE_TRANSITION_REQUEST`: Approve a transition request + + * `REJECT_TRANSITION_REQUEST`: Reject a transition request + + * `CANCEL_TRANSITION_REQUEST`: Cancel (delete) a transition request + + .. py:attribute:: APPROVE_TRANSITION_REQUEST + :value: "APPROVE_TRANSITION_REQUEST" + + .. py:attribute:: CANCEL_TRANSITION_REQUEST + :value: "CANCEL_TRANSITION_REQUEST" + + .. py:attribute:: REJECT_TRANSITION_REQUEST + :value: "REJECT_TRANSITION_REQUEST" + +.. py:class:: ActivityType + + Type of activity. Valid values are: * `APPLIED_TRANSITION`: User applied the corresponding stage + transition. + + * `REQUESTED_TRANSITION`: User requested the corresponding stage transition. + + * `CANCELLED_REQUEST`: User cancelled an existing transition request. + + * `APPROVED_REQUEST`: User approved the corresponding stage transition. + + * `REJECTED_REQUEST`: User rejected the coressponding stage transition. + + * `SYSTEM_TRANSITION`: For events performed as a side effect, such as archiving existing model + versions in a stage. + + .. py:attribute:: APPLIED_TRANSITION + :value: "APPLIED_TRANSITION" + + .. 
py:attribute:: APPROVED_REQUEST + :value: "APPROVED_REQUEST" + + .. py:attribute:: CANCELLED_REQUEST + :value: "CANCELLED_REQUEST" + + .. py:attribute:: NEW_COMMENT + :value: "NEW_COMMENT" + + .. py:attribute:: REJECTED_REQUEST + :value: "REJECTED_REQUEST" + + .. py:attribute:: REQUESTED_TRANSITION + :value: "REQUESTED_TRANSITION" + + .. py:attribute:: SYSTEM_TRANSITION + :value: "SYSTEM_TRANSITION" + .. autoclass:: ApproveTransitionRequest :members: :undoc-members: @@ -16,6 +71,19 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: CommentActivityAction + + An action that a user (with sufficient permissions) could take on a comment. Valid values are: * + `EDIT_COMMENT`: Edit the comment + + * `DELETE_COMMENT`: Delete the comment + + .. py:attribute:: DELETE_COMMENT + :value: "DELETE_COMMENT" + + .. py:attribute:: EDIT_COMMENT + :value: "EDIT_COMMENT" + .. autoclass:: CommentObject :members: :undoc-members: @@ -104,6 +172,20 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: DeleteTransitionRequestStage + + .. py:attribute:: ARCHIVED + :value: "ARCHIVED" + + .. py:attribute:: NONE + :value: "NONE" + + .. py:attribute:: PRODUCTION + :value: "PRODUCTION" + + .. py:attribute:: STAGING + :value: "STAGING" + .. autoclass:: Experiment :members: :undoc-members: @@ -120,6 +202,19 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: ExperimentPermissionLevel + + Permission level + + .. py:attribute:: CAN_EDIT + :value: "CAN_EDIT" + + .. py:attribute:: CAN_MANAGE + :value: "CAN_MANAGE" + + .. py:attribute:: CAN_READ + :value: "CAN_READ" + .. autoclass:: ExperimentPermissions :members: :undoc-members: @@ -264,6 +359,19 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. 
py:class:: ModelVersionStatus + + Current status of `model_version` + + .. py:attribute:: FAILED_REGISTRATION + :value: "FAILED_REGISTRATION" + + .. py:attribute:: PENDING_REGISTRATION + :value: "PENDING_REGISTRATION" + + .. py:attribute:: READY + :value: "READY" + .. autoclass:: ModelVersionTag :members: :undoc-members: @@ -272,6 +380,26 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: PermissionLevel + + Permission level of the requesting user on the object. For what is allowed at each level, see + [MLflow Model permissions](..). + + .. py:attribute:: CAN_EDIT + :value: "CAN_EDIT" + + .. py:attribute:: CAN_MANAGE + :value: "CAN_MANAGE" + + .. py:attribute:: CAN_MANAGE_PRODUCTION_VERSIONS + :value: "CAN_MANAGE_PRODUCTION_VERSIONS" + + .. py:attribute:: CAN_MANAGE_STAGING_VERSIONS + :value: "CAN_MANAGE_STAGING_VERSIONS" + + .. py:attribute:: CAN_READ + :value: "CAN_READ" + .. autoclass:: RegisteredModelAccessControlRequest :members: :undoc-members: @@ -284,6 +412,25 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: RegisteredModelPermissionLevel + + Permission level + + .. py:attribute:: CAN_EDIT + :value: "CAN_EDIT" + + .. py:attribute:: CAN_MANAGE + :value: "CAN_MANAGE" + + .. py:attribute:: CAN_MANAGE_PRODUCTION_VERSIONS + :value: "CAN_MANAGE_PRODUCTION_VERSIONS" + + .. py:attribute:: CAN_MANAGE_STAGING_VERSIONS + :value: "CAN_MANAGE_STAGING_VERSIONS" + + .. py:attribute:: CAN_READ + :value: "CAN_READ" + .. autoclass:: RegisteredModelPermissions :members: :undoc-members: @@ -300,6 +447,63 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: RegistryWebhookEvent + + .. py:attribute:: COMMENT_CREATED + :value: "COMMENT_CREATED" + + .. py:attribute:: MODEL_VERSION_CREATED + :value: "MODEL_VERSION_CREATED" + + .. 
py:attribute:: MODEL_VERSION_TAG_SET + :value: "MODEL_VERSION_TAG_SET" + + .. py:attribute:: MODEL_VERSION_TRANSITIONED_STAGE + :value: "MODEL_VERSION_TRANSITIONED_STAGE" + + .. py:attribute:: MODEL_VERSION_TRANSITIONED_TO_ARCHIVED + :value: "MODEL_VERSION_TRANSITIONED_TO_ARCHIVED" + + .. py:attribute:: MODEL_VERSION_TRANSITIONED_TO_PRODUCTION + :value: "MODEL_VERSION_TRANSITIONED_TO_PRODUCTION" + + .. py:attribute:: MODEL_VERSION_TRANSITIONED_TO_STAGING + :value: "MODEL_VERSION_TRANSITIONED_TO_STAGING" + + .. py:attribute:: REGISTERED_MODEL_CREATED + :value: "REGISTERED_MODEL_CREATED" + + .. py:attribute:: TRANSITION_REQUEST_CREATED + :value: "TRANSITION_REQUEST_CREATED" + + .. py:attribute:: TRANSITION_REQUEST_TO_ARCHIVED_CREATED + :value: "TRANSITION_REQUEST_TO_ARCHIVED_CREATED" + + .. py:attribute:: TRANSITION_REQUEST_TO_PRODUCTION_CREATED + :value: "TRANSITION_REQUEST_TO_PRODUCTION_CREATED" + + .. py:attribute:: TRANSITION_REQUEST_TO_STAGING_CREATED + :value: "TRANSITION_REQUEST_TO_STAGING_CREATED" + +.. py:class:: RegistryWebhookStatus + + Enable or disable triggering the webhook, or put the webhook into test mode. The default is + `ACTIVE`: * `ACTIVE`: Webhook is triggered when an associated event happens. + + * `DISABLED`: Webhook is not triggered. + + * `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a + real event. + + .. py:attribute:: ACTIVE + :value: "ACTIVE" + + .. py:attribute:: DISABLED + :value: "DISABLED" + + .. py:attribute:: TEST_MODE + :value: "TEST_MODE" + .. autoclass:: RejectTransitionRequest :members: :undoc-members: @@ -344,6 +548,25 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: RunInfoStatus + + Current status of the run. + + .. py:attribute:: FAILED + :value: "FAILED" + + .. py:attribute:: FINISHED + :value: "FINISHED" + + .. py:attribute:: KILLED + :value: "KILLED" + + .. 
py:attribute:: RUNNING + :value: "RUNNING" + + .. py:attribute:: SCHEDULED + :value: "SCHEDULED" + .. autoclass:: RunInputs :members: :undoc-members: @@ -360,6 +583,20 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: SearchExperimentsViewType + + Qualifier for type of experiments to be returned. If unspecified, return only active + experiments. + + .. py:attribute:: ACTIVE_ONLY + :value: "ACTIVE_ONLY" + + .. py:attribute:: ALL + :value: "ALL" + + .. py:attribute:: DELETED_ONLY + :value: "DELETED_ONLY" + .. autoclass:: SearchModelVersionsResponse :members: :undoc-members: @@ -376,6 +613,19 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: SearchRunsRunViewType + + Whether to display only active, only deleted, or all runs. Defaults to only active runs. + + .. py:attribute:: ACTIVE_ONLY + :value: "ACTIVE_ONLY" + + .. py:attribute:: ALL + :value: "ALL" + + .. py:attribute:: DELETED_ONLY + :value: "DELETED_ONLY" + .. autoclass:: SetExperimentTag :members: :undoc-members: @@ -392,6 +642,48 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: Stage + + Stage of the model version. Valid values are: + + * `None`: The initial stage of a model version. + + * `Staging`: Staging or pre-production stage. + + * `Production`: Production stage. + + * `Archived`: Archived stage. + + .. py:attribute:: ARCHIVED + :value: "ARCHIVED" + + .. py:attribute:: NONE + :value: "NONE" + + .. py:attribute:: PRODUCTION + :value: "PRODUCTION" + + .. py:attribute:: STAGING + :value: "STAGING" + +.. py:class:: Status + + The status of the model version. Valid values are: * `PENDING_REGISTRATION`: Request to register + a new model version is pending as server performs background tasks. + + * `FAILED_REGISTRATION`: Request to register a new model version has failed. 
+ + * `READY`: Model version is ready for use. + + .. py:attribute:: FAILED_REGISTRATION + :value: "FAILED_REGISTRATION" + + .. py:attribute:: PENDING_REGISTRATION + :value: "PENDING_REGISTRATION" + + .. py:attribute:: READY + :value: "READY" + .. autoclass:: TestRegistryWebhook :members: :undoc-members: @@ -447,3 +739,22 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. autoclass:: UpdateRunResponse :members: :undoc-members: + +.. py:class:: UpdateRunStatus + + Updated status of the run. + + .. py:attribute:: FAILED + :value: "FAILED" + + .. py:attribute:: FINISHED + :value: "FINISHED" + + .. py:attribute:: KILLED + :value: "KILLED" + + .. py:attribute:: RUNNING + :value: "RUNNING" + + .. py:attribute:: SCHEDULED + :value: "SCHEDULED" diff --git a/docs/dbdataclasses/pipelines.rst b/docs/dbdataclasses/pipelines.rst index 39499235..64a8c891 100644 --- a/docs/dbdataclasses/pipelines.rst +++ b/docs/dbdataclasses/pipelines.rst @@ -28,6 +28,22 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: EventLevel + + The severity level of the event. + + .. py:attribute:: ERROR + :value: "ERROR" + + .. py:attribute:: INFO + :value: "INFO" + + .. py:attribute:: METRICS + :value: "METRICS" + + .. py:attribute:: WARN + :value: "WARN" + .. autoclass:: FileLibrary :members: :undoc-members: @@ -44,6 +60,16 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: GetPipelineResponseHealth + + The health of a pipeline. + + .. py:attribute:: HEALTHY + :value: "HEALTHY" + + .. py:attribute:: UNHEALTHY + :value: "UNHEALTHY" + .. autoclass:: GetUpdateResponse :members: :undoc-members: @@ -60,6 +86,19 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: MaturityLevel + + Maturity level for EventDetails. + + .. 
py:attribute:: DEPRECATED + :value: "DEPRECATED" + + .. py:attribute:: EVOLVING + :value: "EVOLVING" + + .. py:attribute:: STABLE + :value: "STABLE" + .. autoclass:: NotebookLibrary :members: :undoc-members: @@ -88,6 +127,19 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: PipelineClusterAutoscaleMode + + Databricks Enhanced Autoscaling optimizes cluster utilization by automatically allocating + cluster resources based on workload volume, with minimal impact to the data processing latency + of your pipelines. Enhanced Autoscaling is available for `updates` clusters only. The legacy + autoscaling feature is used for `maintenance` clusters. + + .. py:attribute:: ENHANCED + :value: "ENHANCED" + + .. py:attribute:: LEGACY + :value: "LEGACY" + .. autoclass:: PipelineEvent :members: :undoc-members: @@ -100,6 +152,22 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: PipelinePermissionLevel + + Permission level + + .. py:attribute:: CAN_MANAGE + :value: "CAN_MANAGE" + + .. py:attribute:: CAN_RUN + :value: "CAN_RUN" + + .. py:attribute:: CAN_VIEW + :value: "CAN_VIEW" + + .. py:attribute:: IS_OWNER + :value: "IS_OWNER" + .. autoclass:: PipelinePermissions :members: :undoc-members: @@ -116,6 +184,37 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: PipelineState + + The pipeline state. + + .. py:attribute:: DELETED + :value: "DELETED" + + .. py:attribute:: DEPLOYING + :value: "DEPLOYING" + + .. py:attribute:: FAILED + :value: "FAILED" + + .. py:attribute:: IDLE + :value: "IDLE" + + .. py:attribute:: RECOVERING + :value: "RECOVERING" + + .. py:attribute:: RESETTING + :value: "RESETTING" + + .. py:attribute:: RUNNING + :value: "RUNNING" + + .. py:attribute:: STARTING + :value: "STARTING" + + .. py:attribute:: STOPPING + :value: "STOPPING" + .. 
autoclass:: PipelineStateInfo :members: :undoc-members: @@ -140,6 +239,26 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: StartUpdateCause + + .. py:attribute:: API_CALL + :value: "API_CALL" + + .. py:attribute:: JOB_TASK + :value: "JOB_TASK" + + .. py:attribute:: RETRY_ON_FAILURE + :value: "RETRY_ON_FAILURE" + + .. py:attribute:: SCHEMA_CHANGE + :value: "SCHEMA_CHANGE" + + .. py:attribute:: SERVICE_UPGRADE + :value: "SERVICE_UPGRADE" + + .. py:attribute:: USER_ACTION + :value: "USER_ACTION" + .. autoclass:: StartUpdateResponse :members: :undoc-members: @@ -148,6 +267,100 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: UpdateInfoCause + + What triggered this update. + + .. py:attribute:: API_CALL + :value: "API_CALL" + + .. py:attribute:: JOB_TASK + :value: "JOB_TASK" + + .. py:attribute:: RETRY_ON_FAILURE + :value: "RETRY_ON_FAILURE" + + .. py:attribute:: SCHEMA_CHANGE + :value: "SCHEMA_CHANGE" + + .. py:attribute:: SERVICE_UPGRADE + :value: "SERVICE_UPGRADE" + + .. py:attribute:: USER_ACTION + :value: "USER_ACTION" + +.. py:class:: UpdateInfoState + + The update state. + + .. py:attribute:: CANCELED + :value: "CANCELED" + + .. py:attribute:: COMPLETED + :value: "COMPLETED" + + .. py:attribute:: CREATED + :value: "CREATED" + + .. py:attribute:: FAILED + :value: "FAILED" + + .. py:attribute:: INITIALIZING + :value: "INITIALIZING" + + .. py:attribute:: QUEUED + :value: "QUEUED" + + .. py:attribute:: RESETTING + :value: "RESETTING" + + .. py:attribute:: RUNNING + :value: "RUNNING" + + .. py:attribute:: SETTING_UP_TABLES + :value: "SETTING_UP_TABLES" + + .. py:attribute:: STOPPING + :value: "STOPPING" + + .. py:attribute:: WAITING_FOR_RESOURCES + :value: "WAITING_FOR_RESOURCES" + .. autoclass:: UpdateStateInfo :members: :undoc-members: + +.. py:class:: UpdateStateInfoState + + .. 
py:attribute:: CANCELED + :value: "CANCELED" + + .. py:attribute:: COMPLETED + :value: "COMPLETED" + + .. py:attribute:: CREATED + :value: "CREATED" + + .. py:attribute:: FAILED + :value: "FAILED" + + .. py:attribute:: INITIALIZING + :value: "INITIALIZING" + + .. py:attribute:: QUEUED + :value: "QUEUED" + + .. py:attribute:: RESETTING + :value: "RESETTING" + + .. py:attribute:: RUNNING + :value: "RUNNING" + + .. py:attribute:: SETTING_UP_TABLES + :value: "SETTING_UP_TABLES" + + .. py:attribute:: STOPPING + :value: "STOPPING" + + .. py:attribute:: WAITING_FOR_RESOURCES + :value: "WAITING_FOR_RESOURCES" diff --git a/docs/dbdataclasses/provisioning.rst b/docs/dbdataclasses/provisioning.rst index 2e1637e7..85523504 100644 --- a/docs/dbdataclasses/provisioning.rst +++ b/docs/dbdataclasses/provisioning.rst @@ -72,6 +72,39 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: EndpointUseCase + + This enumeration represents the type of Databricks VPC [endpoint service] that was used when + creating this VPC endpoint. + + [endpoint service]: https://docs.aws.amazon.com/vpc/latest/privatelink/endpoint-service.html + + .. py:attribute:: DATAPLANE_RELAY_ACCESS + :value: "DATAPLANE_RELAY_ACCESS" + + .. py:attribute:: WORKSPACE_ACCESS + :value: "WORKSPACE_ACCESS" + +.. py:class:: ErrorType + + The AWS resource associated with this error: credentials, VPC, subnet, security group, or + network ACL. + + .. py:attribute:: CREDENTIALS + :value: "CREDENTIALS" + + .. py:attribute:: NETWORK_ACL + :value: "NETWORK_ACL" + + .. py:attribute:: SECURITY_GROUP + :value: "SECURITY_GROUP" + + .. py:attribute:: SUBNET + :value: "SUBNET" + + .. py:attribute:: VPC + :value: "VPC" + .. autoclass:: GcpKeyInfo :members: :undoc-members: @@ -92,6 +125,34 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. 
py:class:: GkeConfigConnectivityType + + Specifies the network connectivity types for the GKE nodes and the GKE master network. + + Set to `PRIVATE_NODE_PUBLIC_MASTER` for a private GKE cluster for the workspace. The GKE nodes + will not have public IPs. + + Set to `PUBLIC_NODE_PUBLIC_MASTER` for a public GKE cluster. The nodes of a public GKE cluster + have public IP addresses. + + .. py:attribute:: PRIVATE_NODE_PUBLIC_MASTER + :value: "PRIVATE_NODE_PUBLIC_MASTER" + + .. py:attribute:: PUBLIC_NODE_PUBLIC_MASTER + :value: "PUBLIC_NODE_PUBLIC_MASTER" + +.. py:class:: KeyUseCase + + Possible values are: * `MANAGED_SERVICES`: Encrypts notebook and secret data in the control + plane * `STORAGE`: Encrypts the workspace's root S3 bucket (root DBFS and system data) and, + optionally, cluster EBS volumes. + + .. py:attribute:: MANAGED_SERVICES + :value: "MANAGED_SERVICES" + + .. py:attribute:: STORAGE + :value: "STORAGE" + .. autoclass:: Network :members: :undoc-members: @@ -108,6 +169,44 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: PricingTier + + The pricing tier of the workspace. For pricing tier information, see [AWS Pricing]. + + [AWS Pricing]: https://databricks.com/product/aws-pricing + + .. py:attribute:: COMMUNITY_EDITION + :value: "COMMUNITY_EDITION" + + .. py:attribute:: DEDICATED + :value: "DEDICATED" + + .. py:attribute:: ENTERPRISE + :value: "ENTERPRISE" + + .. py:attribute:: PREMIUM + :value: "PREMIUM" + + .. py:attribute:: STANDARD + :value: "STANDARD" + + .. py:attribute:: UNKNOWN + :value: "UNKNOWN" + +.. py:class:: PrivateAccessLevel + + The private access level controls which VPC endpoints can connect to the UI or API of any + workspace that attaches this private access settings object. * `ACCOUNT` level access (the + default) allows only VPC endpoints that are registered in your Databricks account connect to + your workspace. 
* `ENDPOINT` level access allows only specified VPC endpoints connect to your + workspace. For details, see `allowed_vpc_endpoint_ids`. + + .. py:attribute:: ACCOUNT + :value: "ACCOUNT" + + .. py:attribute:: ENDPOINT + :value: "ENDPOINT" + .. autoclass:: PrivateAccessSettings :members: :undoc-members: @@ -136,6 +235,56 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: VpcStatus + + The status of this network configuration object in terms of its use in a workspace: * + `UNATTACHED`: Unattached. * `VALID`: Valid. * `BROKEN`: Broken. * `WARNED`: Warned. + + .. py:attribute:: BROKEN + :value: "BROKEN" + + .. py:attribute:: UNATTACHED + :value: "UNATTACHED" + + .. py:attribute:: VALID + :value: "VALID" + + .. py:attribute:: WARNED + :value: "WARNED" + +.. py:class:: WarningType + + The AWS resource associated with this warning: a subnet or a security group. + + .. py:attribute:: SECURITY_GROUP + :value: "SECURITY_GROUP" + + .. py:attribute:: SUBNET + :value: "SUBNET" + .. autoclass:: Workspace :members: :undoc-members: + +.. py:class:: WorkspaceStatus + + The status of the workspace. For workspace creation, usually it is set to `PROVISIONING` + initially. Continue to check the status until the status is `RUNNING`. + + .. py:attribute:: BANNED + :value: "BANNED" + + .. py:attribute:: CANCELLING + :value: "CANCELLING" + + .. py:attribute:: FAILED + :value: "FAILED" + + .. py:attribute:: NOT_PROVISIONED + :value: "NOT_PROVISIONED" + + .. py:attribute:: PROVISIONING + :value: "PROVISIONING" + + .. py:attribute:: RUNNING + :value: "RUNNING" diff --git a/docs/dbdataclasses/serving.rst b/docs/dbdataclasses/serving.rst index c6186e5b..d7e0634c 100644 --- a/docs/dbdataclasses/serving.rst +++ b/docs/dbdataclasses/serving.rst @@ -40,6 +40,23 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. 
py:class:: AwsBedrockConfigBedrockProvider + + The underlying provider in AWS Bedrock. Supported values (case insensitive) include: Anthropic, + Cohere, AI21Labs, Amazon. + + .. py:attribute:: AI21LABS + :value: "AI21LABS" + + .. py:attribute:: AMAZON + :value: "AMAZON" + + .. py:attribute:: ANTHROPIC + :value: "ANTHROPIC" + + .. py:attribute:: COHERE + :value: "COHERE" + .. autoclass:: BuildLogsResponse :members: :undoc-members: @@ -48,6 +65,19 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: ChatMessageRole + + The role of the message. One of [system, user, assistant]. + + .. py:attribute:: ASSISTANT + :value: "ASSISTANT" + + .. py:attribute:: SYSTEM + :value: "SYSTEM" + + .. py:attribute:: USER + :value: "USER" + .. autoclass:: CohereConfig :members: :undoc-members: @@ -76,10 +106,33 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: DeploymentStatusState + + State: one of DEPLOYING,SUCCESS, FAILURE, DEPLOYMENT_STATE_UNSPECIFIED + + .. py:attribute:: DEPLOYING + :value: "DEPLOYING" + + .. py:attribute:: DEPLOYMENT_STATE_UNSPECIFIED + :value: "DEPLOYMENT_STATE_UNSPECIFIED" + + .. py:attribute:: FAILURE + :value: "FAILURE" + + .. py:attribute:: SUCCESS + :value: "SUCCESS" + .. autoclass:: EmbeddingsV1ResponseEmbeddingElement :members: :undoc-members: +.. py:class:: EmbeddingsV1ResponseEmbeddingElementObject + + This will always be 'embedding'. + + .. py:attribute:: EMBEDDING + :value: "EMBEDDING" + .. autoclass:: EndpointCoreConfigInput :members: :undoc-members: @@ -100,6 +153,34 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: EndpointStateConfigUpdate + + The state of an endpoint's config update. This informs the user if the pending_config is in + progress, if the update failed, or if there is no update in progress. 
Note that if the + endpoint's config_update state value is IN_PROGRESS, another update can not be made until the + update completes or fails. + + .. py:attribute:: IN_PROGRESS + :value: "IN_PROGRESS" + + .. py:attribute:: NOT_UPDATING + :value: "NOT_UPDATING" + + .. py:attribute:: UPDATE_FAILED + :value: "UPDATE_FAILED" + +.. py:class:: EndpointStateReady + + The state of an endpoint, indicating whether or not the endpoint is queryable. An endpoint is + READY if all of the served entities in its active configuration are ready. If any of the + actively served entities are in a non-ready state, the endpoint state will be NOT_READY. + + .. py:attribute:: NOT_READY + :value: "NOT_READY" + + .. py:attribute:: READY + :value: "READY" + .. autoclass:: EndpointTag :members: :undoc-members: @@ -108,6 +189,33 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: ExternalModelProvider + + The name of the provider for the external model. Currently, the supported providers are + 'ai21labs', 'anthropic', 'aws-bedrock', 'cohere', 'databricks-model-serving', 'openai', and + 'palm'.", + + .. py:attribute:: AI21LABS + :value: "AI21LABS" + + .. py:attribute:: ANTHROPIC + :value: "ANTHROPIC" + + .. py:attribute:: AWS_BEDROCK + :value: "AWS_BEDROCK" + + .. py:attribute:: COHERE + :value: "COHERE" + + .. py:attribute:: DATABRICKS_MODEL_SERVING + :value: "DATABRICKS_MODEL_SERVING" + + .. py:attribute:: OPENAI + :value: "OPENAI" + + .. py:attribute:: PALM + :value: "PALM" + .. autoclass:: ExternalModelUsageElement :members: :undoc-members: @@ -164,10 +272,42 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: QueryEndpointResponseObject + + The type of object returned by the __external/foundation model__ serving endpoint, one of + [text_completion, chat.completion, list (of embeddings)]. + + .. 
py:attribute:: CHAT_COMPLETION + :value: "CHAT_COMPLETION" + + .. py:attribute:: LIST + :value: "LIST" + + .. py:attribute:: TEXT_COMPLETION + :value: "TEXT_COMPLETION" + .. autoclass:: RateLimit :members: :undoc-members: +.. py:class:: RateLimitKey + + Key field for a serving endpoint rate limit. Currently, only 'user' and 'endpoint' are + supported, with 'endpoint' being the default if not specified. + + .. py:attribute:: ENDPOINT + :value: "ENDPOINT" + + .. py:attribute:: USER + :value: "USER" + +.. py:class:: RateLimitRenewalPeriod + + Renewal period field for a serving endpoint rate limit. Currently, only 'minute' is supported. + + .. py:attribute:: MINUTE + :value: "MINUTE" + .. autoclass:: Route :members: :undoc-members: @@ -188,6 +328,48 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: ServedModelInputWorkloadSize + + The workload size of the served model. The workload size corresponds to a range of provisioned + concurrency that the compute will autoscale between. A single unit of provisioned concurrency + can process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned + concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned + concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for + each workload size will be 0. + + .. py:attribute:: LARGE + :value: "LARGE" + + .. py:attribute:: MEDIUM + :value: "MEDIUM" + + .. py:attribute:: SMALL + :value: "SMALL" + +.. py:class:: ServedModelInputWorkloadType + + The workload type of the served model. The workload type selects which type of compute to use in + the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU + acceleration is available by selecting workload types like GPU_SMALL and others. See the + available [GPU types]. 
+ + [GPU types]: https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types + + .. py:attribute:: CPU + :value: "CPU" + + .. py:attribute:: GPU_LARGE + :value: "GPU_LARGE" + + .. py:attribute:: GPU_MEDIUM + :value: "GPU_MEDIUM" + + .. py:attribute:: GPU_SMALL + :value: "GPU_SMALL" + + .. py:attribute:: MULTIGPU_MEDIUM + :value: "MULTIGPU_MEDIUM" + .. autoclass:: ServedModelOutput :members: :undoc-members: @@ -200,6 +382,33 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: ServedModelStateDeployment + + The state of the served entity deployment. DEPLOYMENT_CREATING indicates that the served entity + is not ready yet because the deployment is still being created (i.e container image is building, + model server is deploying for the first time, etc.). DEPLOYMENT_RECOVERING indicates that the + served entity was previously in a ready state but no longer is and is attempting to recover. + DEPLOYMENT_READY indicates that the served entity is ready to receive traffic. DEPLOYMENT_FAILED + indicates that there was an error trying to bring up the served entity (e.g container image + build failed, the model server failed to start due to a model loading error, etc.) + DEPLOYMENT_ABORTED indicates that the deployment was terminated likely due to a failure in + bringing up another served entity under the same endpoint and config version. + + .. py:attribute:: ABORTED + :value: "ABORTED" + + .. py:attribute:: CREATING + :value: "CREATING" + + .. py:attribute:: FAILED + :value: "FAILED" + + .. py:attribute:: READY + :value: "READY" + + .. py:attribute:: RECOVERING + :value: "RECOVERING" + .. autoclass:: ServerLogsResponse :members: :undoc-members: @@ -220,10 +429,36 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. 
py:class:: ServingEndpointDetailedPermissionLevel + + The permission level of the principal making the request. + + .. py:attribute:: CAN_MANAGE + :value: "CAN_MANAGE" + + .. py:attribute:: CAN_QUERY + :value: "CAN_QUERY" + + .. py:attribute:: CAN_VIEW + :value: "CAN_VIEW" + .. autoclass:: ServingEndpointPermission :members: :undoc-members: +.. py:class:: ServingEndpointPermissionLevel + + Permission level + + .. py:attribute:: CAN_MANAGE + :value: "CAN_MANAGE" + + .. py:attribute:: CAN_QUERY + :value: "CAN_QUERY" + + .. py:attribute:: CAN_VIEW + :value: "CAN_VIEW" + .. autoclass:: ServingEndpointPermissions :members: :undoc-members: diff --git a/docs/dbdataclasses/settings.rst b/docs/dbdataclasses/settings.rst index d4843893..3b64aced 100644 --- a/docs/dbdataclasses/settings.rst +++ b/docs/dbdataclasses/settings.rst @@ -28,6 +28,23 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: CreatePrivateEndpointRuleRequestGroupId + + The sub-resource type (group ID) of the target resource. Note that to connect to workspace root + storage (root DBFS), you need two endpoints, one for `blob` and one for `dfs`. + + .. py:attribute:: BLOB + :value: "BLOB" + + .. py:attribute:: DFS + :value: "DFS" + + .. py:attribute:: MYSQL_SERVER + :value: "MYSQL_SERVER" + + .. py:attribute:: SQL_SERVER + :value: "SQL_SERVER" + .. autoclass:: CreateTokenRequest :members: :undoc-members: @@ -108,10 +125,68 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: ListType + + Type of IP access list. Valid values are as follows and are case-sensitive: + + * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or + range. IP addresses in the block list are excluded even if they are included in an allow list. + + .. py:attribute:: ALLOW + :value: "ALLOW" + + .. py:attribute:: BLOCK + :value: "BLOCK" + .. 
autoclass:: NccAzurePrivateEndpointRule :members: :undoc-members: +.. py:class:: NccAzurePrivateEndpointRuleConnectionState + + The current status of this private endpoint. The private endpoint rules are effective only if + the connection state is `ESTABLISHED`. Remember that you must approve new endpoints on your + resources in the Azure portal before they take effect. + + The possible values are: - INIT: (deprecated) The endpoint has been created and pending + approval. - PENDING: The endpoint has been created and pending approval. - ESTABLISHED: The + endpoint has been approved and is ready to use in your serverless compute resources. - REJECTED: + Connection was rejected by the private link resource owner. - DISCONNECTED: Connection was + removed by the private link resource owner, the private endpoint becomes informative and should + be deleted for clean-up. + + .. py:attribute:: DISCONNECTED + :value: "DISCONNECTED" + + .. py:attribute:: ESTABLISHED + :value: "ESTABLISHED" + + .. py:attribute:: INIT + :value: "INIT" + + .. py:attribute:: PENDING + :value: "PENDING" + + .. py:attribute:: REJECTED + :value: "REJECTED" + +.. py:class:: NccAzurePrivateEndpointRuleGroupId + + The sub-resource type (group ID) of the target resource. Note that to connect to workspace root + storage (root DBFS), you need two endpoints, one for `blob` and one for `dfs`. + + .. py:attribute:: BLOB + :value: "BLOB" + + .. py:attribute:: DFS + :value: "DFS" + + .. py:attribute:: MYSQL_SERVER + :value: "MYSQL_SERVER" + + .. py:attribute:: SQL_SERVER + :value: "SQL_SERVER" + .. autoclass:: NccAzureServiceEndpointRule :members: :undoc-members: @@ -140,6 +215,20 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: PersonalComputeMessageEnum + + ON: Grants all users in all workspaces access to the Personal Compute default policy, allowing + all users to create single-machine compute resources. 
DELEGATE: Moves access control for the + Personal Compute default policy to individual workspaces and requires a workspace’s users or + groups to be added to the ACLs of that workspace’s Personal Compute default policy before they + will be able to create compute resources through that policy. + + .. py:attribute:: DELEGATE + :value: "DELEGATE" + + .. py:attribute:: ON + :value: "ON" + .. autoclass:: PersonalComputeSetting :members: :undoc-members: @@ -156,6 +245,17 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: RestrictWorkspaceAdminsMessageStatus + + .. py:attribute:: ALLOW_ALL + :value: "ALLOW_ALL" + + .. py:attribute:: RESTRICT_TOKENS_AND_JOB_RUN_AS + :value: "RESTRICT_TOKENS_AND_JOB_RUN_AS" + + .. py:attribute:: STATUS_UNSPECIFIED + :value: "STATUS_UNSPECIFIED" + .. autoclass:: RestrictWorkspaceAdminsSetting :members: :undoc-members: @@ -184,6 +284,13 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: TokenPermissionLevel + + Permission level + + .. py:attribute:: CAN_USE + :value: "CAN_USE" + .. autoclass:: TokenPermissions :members: :undoc-members: @@ -196,6 +303,13 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: TokenType + + The type of token request. As of now, only `AZURE_ACTIVE_DIRECTORY_TOKEN` is supported. + + .. py:attribute:: AZURE_ACTIVE_DIRECTORY_TOKEN + :value: "AZURE_ACTIVE_DIRECTORY_TOKEN" + .. autoclass:: UpdateDefaultNamespaceSettingRequest :members: :undoc-members: diff --git a/docs/dbdataclasses/sharing.rst b/docs/dbdataclasses/sharing.rst index acd78b33..ab935d69 100644 --- a/docs/dbdataclasses/sharing.rst +++ b/docs/dbdataclasses/sharing.rst @@ -4,6 +4,16 @@ Delta Sharing These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.sharing`` module. .. 
py:currentmodule:: databricks.sdk.service.sharing +.. py:class:: AuthenticationType + + The delta sharing authentication type. + + .. py:attribute:: DATABRICKS + :value: "DATABRICKS" + + .. py:attribute:: TOKEN + :value: "TOKEN" + .. autoclass:: CentralCleanRoomInfo :members: :undoc-members: @@ -44,6 +54,73 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: ColumnTypeName + + Name of type (INT, STRUCT, MAP, etc.). + + .. py:attribute:: ARRAY + :value: "ARRAY" + + .. py:attribute:: BINARY + :value: "BINARY" + + .. py:attribute:: BOOLEAN + :value: "BOOLEAN" + + .. py:attribute:: BYTE + :value: "BYTE" + + .. py:attribute:: CHAR + :value: "CHAR" + + .. py:attribute:: DATE + :value: "DATE" + + .. py:attribute:: DECIMAL + :value: "DECIMAL" + + .. py:attribute:: DOUBLE + :value: "DOUBLE" + + .. py:attribute:: FLOAT + :value: "FLOAT" + + .. py:attribute:: INT + :value: "INT" + + .. py:attribute:: INTERVAL + :value: "INTERVAL" + + .. py:attribute:: LONG + :value: "LONG" + + .. py:attribute:: MAP + :value: "MAP" + + .. py:attribute:: NULL + :value: "NULL" + + .. py:attribute:: SHORT + :value: "SHORT" + + .. py:attribute:: STRING + :value: "STRING" + + .. py:attribute:: STRUCT + :value: "STRUCT" + + .. py:attribute:: TABLE_TYPE + :value: "TABLE_TYPE" + + .. py:attribute:: TIMESTAMP + :value: "TIMESTAMP" + + .. py:attribute:: TIMESTAMP_NTZ + :value: "TIMESTAMP_NTZ" + + .. py:attribute:: USER_DEFINED_TYPE + :value: "USER_DEFINED_TYPE" + .. autoclass:: CreateCleanRoom :members: :undoc-members: @@ -96,6 +173,141 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: PartitionValueOp + + The operator to apply for the value. + + .. py:attribute:: EQUAL + :value: "EQUAL" + + .. py:attribute:: LIKE + :value: "LIKE" + +.. py:class:: Privilege + + .. py:attribute:: ALL_PRIVILEGES + :value: "ALL_PRIVILEGES" + + .. 
py:attribute:: APPLY_TAG + :value: "APPLY_TAG" + + .. py:attribute:: CREATE + :value: "CREATE" + + .. py:attribute:: CREATE_CATALOG + :value: "CREATE_CATALOG" + + .. py:attribute:: CREATE_CONNECTION + :value: "CREATE_CONNECTION" + + .. py:attribute:: CREATE_EXTERNAL_LOCATION + :value: "CREATE_EXTERNAL_LOCATION" + + .. py:attribute:: CREATE_EXTERNAL_TABLE + :value: "CREATE_EXTERNAL_TABLE" + + .. py:attribute:: CREATE_EXTERNAL_VOLUME + :value: "CREATE_EXTERNAL_VOLUME" + + .. py:attribute:: CREATE_FOREIGN_CATALOG + :value: "CREATE_FOREIGN_CATALOG" + + .. py:attribute:: CREATE_FUNCTION + :value: "CREATE_FUNCTION" + + .. py:attribute:: CREATE_MANAGED_STORAGE + :value: "CREATE_MANAGED_STORAGE" + + .. py:attribute:: CREATE_MATERIALIZED_VIEW + :value: "CREATE_MATERIALIZED_VIEW" + + .. py:attribute:: CREATE_MODEL + :value: "CREATE_MODEL" + + .. py:attribute:: CREATE_PROVIDER + :value: "CREATE_PROVIDER" + + .. py:attribute:: CREATE_RECIPIENT + :value: "CREATE_RECIPIENT" + + .. py:attribute:: CREATE_SCHEMA + :value: "CREATE_SCHEMA" + + .. py:attribute:: CREATE_SHARE + :value: "CREATE_SHARE" + + .. py:attribute:: CREATE_STORAGE_CREDENTIAL + :value: "CREATE_STORAGE_CREDENTIAL" + + .. py:attribute:: CREATE_TABLE + :value: "CREATE_TABLE" + + .. py:attribute:: CREATE_VIEW + :value: "CREATE_VIEW" + + .. py:attribute:: CREATE_VOLUME + :value: "CREATE_VOLUME" + + .. py:attribute:: EXECUTE + :value: "EXECUTE" + + .. py:attribute:: MANAGE_ALLOWLIST + :value: "MANAGE_ALLOWLIST" + + .. py:attribute:: MODIFY + :value: "MODIFY" + + .. py:attribute:: READ_FILES + :value: "READ_FILES" + + .. py:attribute:: READ_PRIVATE_FILES + :value: "READ_PRIVATE_FILES" + + .. py:attribute:: READ_VOLUME + :value: "READ_VOLUME" + + .. py:attribute:: REFRESH + :value: "REFRESH" + + .. py:attribute:: SELECT + :value: "SELECT" + + .. py:attribute:: SET_SHARE_PERMISSION + :value: "SET_SHARE_PERMISSION" + + .. py:attribute:: USAGE + :value: "USAGE" + + .. py:attribute:: USE_CATALOG + :value: "USE_CATALOG" + + .. 
py:attribute:: USE_CONNECTION + :value: "USE_CONNECTION" + + .. py:attribute:: USE_MARKETPLACE_ASSETS + :value: "USE_MARKETPLACE_ASSETS" + + .. py:attribute:: USE_PROVIDER + :value: "USE_PROVIDER" + + .. py:attribute:: USE_RECIPIENT + :value: "USE_RECIPIENT" + + .. py:attribute:: USE_SCHEMA + :value: "USE_SCHEMA" + + .. py:attribute:: USE_SHARE + :value: "USE_SHARE" + + .. py:attribute:: WRITE_FILES + :value: "WRITE_FILES" + + .. py:attribute:: WRITE_PRIVATE_FILES + :value: "WRITE_PRIVATE_FILES" + + .. py:attribute:: WRITE_VOLUME + :value: "WRITE_VOLUME" + .. autoclass:: PrivilegeAssignment :members: :undoc-members: @@ -144,10 +356,44 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: SharedDataObjectHistoryDataSharingStatus + + Whether to enable or disable sharing of data history. If not specified, the default is + **DISABLED**. + + .. py:attribute:: DISABLED + :value: "DISABLED" + + .. py:attribute:: ENABLED + :value: "ENABLED" + +.. py:class:: SharedDataObjectStatus + + One of: **ACTIVE**, **PERMISSION_DENIED**. + + .. py:attribute:: ACTIVE + :value: "ACTIVE" + + .. py:attribute:: PERMISSION_DENIED + :value: "PERMISSION_DENIED" + .. autoclass:: SharedDataObjectUpdate :members: :undoc-members: +.. py:class:: SharedDataObjectUpdateAction + + One of: **ADD**, **REMOVE**, **UPDATE**. + + .. py:attribute:: ADD + :value: "ADD" + + .. py:attribute:: REMOVE + :value: "REMOVE" + + .. py:attribute:: UPDATE + :value: "UPDATE" + .. autoclass:: UpdateCleanRoom :members: :undoc-members: diff --git a/docs/dbdataclasses/sql.rst b/docs/dbdataclasses/sql.rst index ebd5d22d..5bc58b4f 100644 --- a/docs/dbdataclasses/sql.rst +++ b/docs/dbdataclasses/sql.rst @@ -16,10 +16,37 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: AlertOptionsEmptyResultState + + State that alert evaluates to when query result is empty. + + .. 
py:attribute:: OK + :value: "OK" + + .. py:attribute:: TRIGGERED + :value: "TRIGGERED" + + .. py:attribute:: UNKNOWN + :value: "UNKNOWN" + .. autoclass:: AlertQuery :members: :undoc-members: +.. py:class:: AlertState + + State of the alert. Possible values are: `unknown` (yet to be evaluated), `triggered` (evaluated + and fulfilled trigger conditions), or `ok` (evaluated and did not fulfill trigger conditions). + + .. py:attribute:: OK + :value: "OK" + + .. py:attribute:: TRIGGERED + :value: "TRIGGERED" + + .. py:attribute:: UNKNOWN + :value: "UNKNOWN" + .. autoclass:: BaseChunkInfo :members: :undoc-members: @@ -32,10 +59,89 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: ChannelName + + .. py:attribute:: CHANNEL_NAME_CURRENT + :value: "CHANNEL_NAME_CURRENT" + + .. py:attribute:: CHANNEL_NAME_CUSTOM + :value: "CHANNEL_NAME_CUSTOM" + + .. py:attribute:: CHANNEL_NAME_PREVIEW + :value: "CHANNEL_NAME_PREVIEW" + + .. py:attribute:: CHANNEL_NAME_PREVIOUS + :value: "CHANNEL_NAME_PREVIOUS" + + .. py:attribute:: CHANNEL_NAME_UNSPECIFIED + :value: "CHANNEL_NAME_UNSPECIFIED" + .. autoclass:: ColumnInfo :members: :undoc-members: +.. py:class:: ColumnInfoTypeName + + The name of the base data type. This doesn't include details for complex types such as STRUCT, + MAP or ARRAY. + + .. py:attribute:: ARRAY + :value: "ARRAY" + + .. py:attribute:: BINARY + :value: "BINARY" + + .. py:attribute:: BOOLEAN + :value: "BOOLEAN" + + .. py:attribute:: BYTE + :value: "BYTE" + + .. py:attribute:: CHAR + :value: "CHAR" + + .. py:attribute:: DATE + :value: "DATE" + + .. py:attribute:: DECIMAL + :value: "DECIMAL" + + .. py:attribute:: DOUBLE + :value: "DOUBLE" + + .. py:attribute:: FLOAT + :value: "FLOAT" + + .. py:attribute:: INT + :value: "INT" + + .. py:attribute:: INTERVAL + :value: "INTERVAL" + + .. py:attribute:: LONG + :value: "LONG" + + .. py:attribute:: MAP + :value: "MAP" + + .. 
py:attribute:: NULL + :value: "NULL" + + .. py:attribute:: SHORT + :value: "SHORT" + + .. py:attribute:: STRING + :value: "STRING" + + .. py:attribute:: STRUCT + :value: "STRUCT" + + .. py:attribute:: TIMESTAMP + :value: "TIMESTAMP" + + .. py:attribute:: USER_DEFINED_TYPE + :value: "USER_DEFINED_TYPE" + .. autoclass:: CreateAlert :members: :undoc-members: @@ -44,6 +150,20 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: CreateWarehouseRequestWarehouseType + + Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` + and also set the field `enable_serverless_compute` to `true`. + + .. py:attribute:: CLASSIC + :value: "CLASSIC" + + .. py:attribute:: PRO + :value: "PRO" + + .. py:attribute:: TYPE_UNSPECIFIED + :value: "TYPE_UNSPECIFIED" + .. autoclass:: CreateWarehouseResponse :members: :undoc-members: @@ -72,6 +192,35 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: Disposition + + The fetch disposition provides two modes of fetching results: `INLINE` and `EXTERNAL_LINKS`. + + Statements executed with `INLINE` disposition will return result data inline, in `JSON_ARRAY` + format, in a series of chunks. If a given statement produces a result set with a size larger + than 25 MiB, that statement execution is aborted, and no result set will be available. + + **NOTE** Byte limits are computed based upon internal representations of the result set data, + and might not match the sizes visible in JSON responses. + + Statements executed with `EXTERNAL_LINKS` disposition will return result data as external links: + URLs that point to cloud storage internal to the workspace. Using `EXTERNAL_LINKS` disposition + allows statements to generate arbitrarily sized result sets for fetching up to 100 GiB. The + resulting links have two important properties: + + 1. 
They point to resources _external_ to the Databricks compute; therefore any associated + authentication information (typically a personal access token, OAuth token, or similar) _must be + removed_ when fetching from these links. + + 2. These are presigned URLs with a specific expiration, indicated in the response. The behavior + when attempting to use an expired link is cloud specific. + + .. py:attribute:: EXTERNAL_LINKS + :value: "EXTERNAL_LINKS" + + .. py:attribute:: INLINE + :value: "INLINE" + .. autoclass:: EditAlert :members: :undoc-members: @@ -80,6 +229,20 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: EditWarehouseRequestWarehouseType + + Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` + and also set the field `enable_serverless_compute` to `true`. + + .. py:attribute:: CLASSIC + :value: "CLASSIC" + + .. py:attribute:: PRO + :value: "PRO" + + .. py:attribute:: TYPE_UNSPECIFIED + :value: "TYPE_UNSPECIFIED" + .. autoclass:: EndpointConfPair :members: :undoc-members: @@ -92,6 +255,20 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: EndpointInfoWarehouseType + + Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` + and also set the field `enable_serverless_compute` to `true`. + + .. py:attribute:: CLASSIC + :value: "CLASSIC" + + .. py:attribute:: PRO + :value: "PRO" + + .. py:attribute:: TYPE_UNSPECIFIED + :value: "TYPE_UNSPECIFIED" + .. autoclass:: EndpointTagPair :members: :undoc-members: @@ -104,6 +281,21 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: ExecuteStatementRequestOnWaitTimeout + + When `wait_timeout > 0s`, the call will block up to the specified time. 
If the statement + execution doesn't finish within this time, `on_wait_timeout` determines whether the execution + should continue or be canceled. When set to `CONTINUE`, the statement execution continues + asynchronously and the call returns a statement ID which can be used for polling with + :method:statementexecution/getStatement. When set to `CANCEL`, the statement execution is + canceled and the call returns with a `CANCELED` state. + + .. py:attribute:: CANCEL + :value: "CANCEL" + + .. py:attribute:: CONTINUE + :value: "CONTINUE" + .. autoclass:: ExecuteStatementResponse :members: :undoc-members: @@ -112,6 +304,17 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: Format + + .. py:attribute:: ARROW_STREAM + :value: "ARROW_STREAM" + + .. py:attribute:: CSV + :value: "CSV" + + .. py:attribute:: JSON_ARRAY + :value: "JSON_ARRAY" + .. autoclass:: GetResponse :members: :undoc-members: @@ -128,10 +331,45 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: GetWarehouseResponseWarehouseType + + Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` + and also set the field `enable_serverless_compute` to `true`. + + .. py:attribute:: CLASSIC + :value: "CLASSIC" + + .. py:attribute:: PRO + :value: "PRO" + + .. py:attribute:: TYPE_UNSPECIFIED + :value: "TYPE_UNSPECIFIED" + .. autoclass:: GetWorkspaceWarehouseConfigResponse :members: :undoc-members: +.. py:class:: GetWorkspaceWarehouseConfigResponseSecurityPolicy + + Security policy for warehouses + + .. py:attribute:: DATA_ACCESS_CONTROL + :value: "DATA_ACCESS_CONTROL" + + .. py:attribute:: NONE + :value: "NONE" + + .. py:attribute:: PASSTHROUGH + :value: "PASSTHROUGH" + +.. py:class:: ListOrder + + .. py:attribute:: CREATED_AT + :value: "CREATED_AT" + + .. py:attribute:: NAME + :value: "NAME" + .. 
autoclass:: ListQueriesResponse :members: :undoc-members: @@ -148,14 +386,117 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: ObjectType + + A singular noun object type. + + .. py:attribute:: ALERT + :value: "ALERT" + + .. py:attribute:: DASHBOARD + :value: "DASHBOARD" + + .. py:attribute:: DATA_SOURCE + :value: "DATA_SOURCE" + + .. py:attribute:: QUERY + :value: "QUERY" + +.. py:class:: ObjectTypePlural + + Always a plural of the object type. + + .. py:attribute:: ALERTS + :value: "ALERTS" + + .. py:attribute:: DASHBOARDS + :value: "DASHBOARDS" + + .. py:attribute:: DATA_SOURCES + :value: "DATA_SOURCES" + + .. py:attribute:: QUERIES + :value: "QUERIES" + .. autoclass:: OdbcParams :members: :undoc-members: +.. py:class:: OwnableObjectType + + The singular form of the type of object which can be owned. + + .. py:attribute:: ALERT + :value: "ALERT" + + .. py:attribute:: DASHBOARD + :value: "DASHBOARD" + + .. py:attribute:: QUERY + :value: "QUERY" + .. autoclass:: Parameter :members: :undoc-members: +.. py:class:: ParameterType + + Parameters can have several different types. + + .. py:attribute:: DATETIME + :value: "DATETIME" + + .. py:attribute:: ENUM + :value: "ENUM" + + .. py:attribute:: NUMBER + :value: "NUMBER" + + .. py:attribute:: QUERY + :value: "QUERY" + + .. py:attribute:: TEXT + :value: "TEXT" + +.. py:class:: PermissionLevel + + * `CAN_VIEW`: Can view the query * `CAN_RUN`: Can run the query * `CAN_EDIT`: Can edit the query + * `CAN_MANAGE`: Can manage the query + + .. py:attribute:: CAN_EDIT + :value: "CAN_EDIT" + + .. py:attribute:: CAN_MANAGE + :value: "CAN_MANAGE" + + .. py:attribute:: CAN_RUN + :value: "CAN_RUN" + + .. py:attribute:: CAN_VIEW + :value: "CAN_VIEW" + +.. py:class:: PlansState + + Whether plans exist for the execution, or the reason why they are missing + + .. py:attribute:: EMPTY + :value: "EMPTY" + + .. py:attribute:: EXISTS + :value: "EXISTS" + + .. 
py:attribute:: IGNORED_LARGE_PLANS_SIZE + :value: "IGNORED_LARGE_PLANS_SIZE" + + .. py:attribute:: IGNORED_SMALL_DURATION + :value: "IGNORED_SMALL_DURATION" + + .. py:attribute:: IGNORED_SPARK_PLAN_TYPE + :value: "IGNORED_SPARK_PLAN_TYPE" + + .. py:attribute:: UNKNOWN + :value: "UNKNOWN" + .. autoclass:: Query :members: :undoc-members: @@ -188,6 +529,97 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: QueryStatementType + + Type of statement for this query + + .. py:attribute:: ALTER + :value: "ALTER" + + .. py:attribute:: ANALYZE + :value: "ANALYZE" + + .. py:attribute:: COPY + :value: "COPY" + + .. py:attribute:: CREATE + :value: "CREATE" + + .. py:attribute:: DELETE + :value: "DELETE" + + .. py:attribute:: DESCRIBE + :value: "DESCRIBE" + + .. py:attribute:: DROP + :value: "DROP" + + .. py:attribute:: EXPLAIN + :value: "EXPLAIN" + + .. py:attribute:: GRANT + :value: "GRANT" + + .. py:attribute:: INSERT + :value: "INSERT" + + .. py:attribute:: MERGE + :value: "MERGE" + + .. py:attribute:: OPTIMIZE + :value: "OPTIMIZE" + + .. py:attribute:: OTHER + :value: "OTHER" + + .. py:attribute:: REFRESH + :value: "REFRESH" + + .. py:attribute:: REPLACE + :value: "REPLACE" + + .. py:attribute:: REVOKE + :value: "REVOKE" + + .. py:attribute:: SELECT + :value: "SELECT" + + .. py:attribute:: SET + :value: "SET" + + .. py:attribute:: SHOW + :value: "SHOW" + + .. py:attribute:: TRUNCATE + :value: "TRUNCATE" + + .. py:attribute:: UPDATE + :value: "UPDATE" + + .. py:attribute:: USE + :value: "USE" + +.. py:class:: QueryStatus + + Query status with one of the following values: * `QUEUED`: Query has been received and queued. * + `RUNNING`: Query has started. * `CANCELED`: Query has been cancelled by the user. * `FAILED`: + Query has failed. * `FINISHED`: Query has completed. + + .. py:attribute:: CANCELED + :value: "CANCELED" + + .. py:attribute:: FAILED + :value: "FAILED" + + .. 
py:attribute:: FINISHED + :value: "FINISHED" + + .. py:attribute:: QUEUED + :value: "QUEUED" + + .. py:attribute:: RUNNING + :value: "RUNNING" + .. autoclass:: RepeatedEndpointConfPairs :members: :undoc-members: @@ -204,10 +636,65 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: RunAsRole + + Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as + viewer" behavior) or `"owner"` (signifying "run as owner" behavior) + + .. py:attribute:: OWNER + :value: "OWNER" + + .. py:attribute:: VIEWER + :value: "VIEWER" + .. autoclass:: ServiceError :members: :undoc-members: +.. py:class:: ServiceErrorCode + + .. py:attribute:: ABORTED + :value: "ABORTED" + + .. py:attribute:: ALREADY_EXISTS + :value: "ALREADY_EXISTS" + + .. py:attribute:: BAD_REQUEST + :value: "BAD_REQUEST" + + .. py:attribute:: CANCELLED + :value: "CANCELLED" + + .. py:attribute:: DEADLINE_EXCEEDED + :value: "DEADLINE_EXCEEDED" + + .. py:attribute:: INTERNAL_ERROR + :value: "INTERNAL_ERROR" + + .. py:attribute:: IO_ERROR + :value: "IO_ERROR" + + .. py:attribute:: NOT_FOUND + :value: "NOT_FOUND" + + .. py:attribute:: RESOURCE_EXHAUSTED + :value: "RESOURCE_EXHAUSTED" + + .. py:attribute:: SERVICE_UNDER_MAINTENANCE + :value: "SERVICE_UNDER_MAINTENANCE" + + .. py:attribute:: TEMPORARILY_UNAVAILABLE + :value: "TEMPORARILY_UNAVAILABLE" + + .. py:attribute:: UNAUTHENTICATED + :value: "UNAUTHENTICATED" + + .. py:attribute:: UNKNOWN + :value: "UNKNOWN" + + .. py:attribute:: WORKSPACE_TEMPORARILY_UNAVAILABLE + :value: "WORKSPACE_TEMPORARILY_UNAVAILABLE" + .. autoclass:: SetResponse :members: :undoc-members: @@ -216,22 +703,374 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: SetWorkspaceWarehouseConfigRequestSecurityPolicy + + Security policy for warehouses + + .. 
py:attribute:: DATA_ACCESS_CONTROL + :value: "DATA_ACCESS_CONTROL" + + .. py:attribute:: NONE + :value: "NONE" + + .. py:attribute:: PASSTHROUGH + :value: "PASSTHROUGH" + +.. py:class:: SpotInstancePolicy + + Configures whether the warehouse should use spot instances. + + .. py:attribute:: COST_OPTIMIZED + :value: "COST_OPTIMIZED" + + .. py:attribute:: POLICY_UNSPECIFIED + :value: "POLICY_UNSPECIFIED" + + .. py:attribute:: RELIABILITY_OPTIMIZED + :value: "RELIABILITY_OPTIMIZED" + +.. py:class:: State + + State of the warehouse + + .. py:attribute:: DELETED + :value: "DELETED" + + .. py:attribute:: DELETING + :value: "DELETING" + + .. py:attribute:: RUNNING + :value: "RUNNING" + + .. py:attribute:: STARTING + :value: "STARTING" + + .. py:attribute:: STOPPED + :value: "STOPPED" + + .. py:attribute:: STOPPING + :value: "STOPPING" + .. autoclass:: StatementParameterListItem :members: :undoc-members: +.. py:class:: StatementState + + Statement execution state: - `PENDING`: waiting for warehouse - `RUNNING`: running - + `SUCCEEDED`: execution was successful, result data available for fetch - `FAILED`: execution + failed; reason for failure described in accompanying error message - `CANCELED`: user canceled; + can come from explicit cancel call, or timeout with `on_wait_timeout=CANCEL` - `CLOSED`: + execution successful, and statement closed; result no longer available for fetch + + .. py:attribute:: CANCELED + :value: "CANCELED" + + .. py:attribute:: CLOSED + :value: "CLOSED" + + .. py:attribute:: FAILED + :value: "FAILED" + + .. py:attribute:: PENDING + :value: "PENDING" + + .. py:attribute:: RUNNING + :value: "RUNNING" + + .. py:attribute:: SUCCEEDED + :value: "SUCCEEDED" + .. autoclass:: StatementStatus :members: :undoc-members: +.. py:class:: Status + + Health status of the warehouse. + + .. py:attribute:: DEGRADED + :value: "DEGRADED" + + .. py:attribute:: FAILED + :value: "FAILED" + + .. py:attribute:: HEALTHY + :value: "HEALTHY" + + .. 
py:attribute:: STATUS_UNSPECIFIED + :value: "STATUS_UNSPECIFIED" + .. autoclass:: Success :members: :undoc-members: +.. py:class:: SuccessMessage + + .. py:attribute:: SUCCESS + :value: "SUCCESS" + .. autoclass:: TerminationReason :members: :undoc-members: +.. py:class:: TerminationReasonCode + + status code indicating why the cluster was terminated + + .. py:attribute:: ABUSE_DETECTED + :value: "ABUSE_DETECTED" + + .. py:attribute:: ATTACH_PROJECT_FAILURE + :value: "ATTACH_PROJECT_FAILURE" + + .. py:attribute:: AWS_AUTHORIZATION_FAILURE + :value: "AWS_AUTHORIZATION_FAILURE" + + .. py:attribute:: AWS_INSUFFICIENT_FREE_ADDRESSES_IN_SUBNET_FAILURE + :value: "AWS_INSUFFICIENT_FREE_ADDRESSES_IN_SUBNET_FAILURE" + + .. py:attribute:: AWS_INSUFFICIENT_INSTANCE_CAPACITY_FAILURE + :value: "AWS_INSUFFICIENT_INSTANCE_CAPACITY_FAILURE" + + .. py:attribute:: AWS_MAX_SPOT_INSTANCE_COUNT_EXCEEDED_FAILURE + :value: "AWS_MAX_SPOT_INSTANCE_COUNT_EXCEEDED_FAILURE" + + .. py:attribute:: AWS_REQUEST_LIMIT_EXCEEDED + :value: "AWS_REQUEST_LIMIT_EXCEEDED" + + .. py:attribute:: AWS_UNSUPPORTED_FAILURE + :value: "AWS_UNSUPPORTED_FAILURE" + + .. py:attribute:: AZURE_BYOK_KEY_PERMISSION_FAILURE + :value: "AZURE_BYOK_KEY_PERMISSION_FAILURE" + + .. py:attribute:: AZURE_EPHEMERAL_DISK_FAILURE + :value: "AZURE_EPHEMERAL_DISK_FAILURE" + + .. py:attribute:: AZURE_INVALID_DEPLOYMENT_TEMPLATE + :value: "AZURE_INVALID_DEPLOYMENT_TEMPLATE" + + .. py:attribute:: AZURE_OPERATION_NOT_ALLOWED_EXCEPTION + :value: "AZURE_OPERATION_NOT_ALLOWED_EXCEPTION" + + .. py:attribute:: AZURE_QUOTA_EXCEEDED_EXCEPTION + :value: "AZURE_QUOTA_EXCEEDED_EXCEPTION" + + .. py:attribute:: AZURE_RESOURCE_MANAGER_THROTTLING + :value: "AZURE_RESOURCE_MANAGER_THROTTLING" + + .. py:attribute:: AZURE_RESOURCE_PROVIDER_THROTTLING + :value: "AZURE_RESOURCE_PROVIDER_THROTTLING" + + .. py:attribute:: AZURE_UNEXPECTED_DEPLOYMENT_TEMPLATE_FAILURE + :value: "AZURE_UNEXPECTED_DEPLOYMENT_TEMPLATE_FAILURE" + + .. 
py:attribute:: AZURE_VM_EXTENSION_FAILURE + :value: "AZURE_VM_EXTENSION_FAILURE" + + .. py:attribute:: AZURE_VNET_CONFIGURATION_FAILURE + :value: "AZURE_VNET_CONFIGURATION_FAILURE" + + .. py:attribute:: BOOTSTRAP_TIMEOUT + :value: "BOOTSTRAP_TIMEOUT" + + .. py:attribute:: BOOTSTRAP_TIMEOUT_CLOUD_PROVIDER_EXCEPTION + :value: "BOOTSTRAP_TIMEOUT_CLOUD_PROVIDER_EXCEPTION" + + .. py:attribute:: CLOUD_PROVIDER_DISK_SETUP_FAILURE + :value: "CLOUD_PROVIDER_DISK_SETUP_FAILURE" + + .. py:attribute:: CLOUD_PROVIDER_LAUNCH_FAILURE + :value: "CLOUD_PROVIDER_LAUNCH_FAILURE" + + .. py:attribute:: CLOUD_PROVIDER_RESOURCE_STOCKOUT + :value: "CLOUD_PROVIDER_RESOURCE_STOCKOUT" + + .. py:attribute:: CLOUD_PROVIDER_SHUTDOWN + :value: "CLOUD_PROVIDER_SHUTDOWN" + + .. py:attribute:: COMMUNICATION_LOST + :value: "COMMUNICATION_LOST" + + .. py:attribute:: CONTAINER_LAUNCH_FAILURE + :value: "CONTAINER_LAUNCH_FAILURE" + + .. py:attribute:: CONTROL_PLANE_REQUEST_FAILURE + :value: "CONTROL_PLANE_REQUEST_FAILURE" + + .. py:attribute:: DATABASE_CONNECTION_FAILURE + :value: "DATABASE_CONNECTION_FAILURE" + + .. py:attribute:: DBFS_COMPONENT_UNHEALTHY + :value: "DBFS_COMPONENT_UNHEALTHY" + + .. py:attribute:: DOCKER_IMAGE_PULL_FAILURE + :value: "DOCKER_IMAGE_PULL_FAILURE" + + .. py:attribute:: DRIVER_UNREACHABLE + :value: "DRIVER_UNREACHABLE" + + .. py:attribute:: DRIVER_UNRESPONSIVE + :value: "DRIVER_UNRESPONSIVE" + + .. py:attribute:: EXECUTION_COMPONENT_UNHEALTHY + :value: "EXECUTION_COMPONENT_UNHEALTHY" + + .. py:attribute:: GCP_QUOTA_EXCEEDED + :value: "GCP_QUOTA_EXCEEDED" + + .. py:attribute:: GCP_SERVICE_ACCOUNT_DELETED + :value: "GCP_SERVICE_ACCOUNT_DELETED" + + .. py:attribute:: GLOBAL_INIT_SCRIPT_FAILURE + :value: "GLOBAL_INIT_SCRIPT_FAILURE" + + .. py:attribute:: HIVE_METASTORE_PROVISIONING_FAILURE + :value: "HIVE_METASTORE_PROVISIONING_FAILURE" + + .. py:attribute:: IMAGE_PULL_PERMISSION_DENIED + :value: "IMAGE_PULL_PERMISSION_DENIED" + + .. 
py:attribute:: INACTIVITY + :value: "INACTIVITY" + + .. py:attribute:: INIT_SCRIPT_FAILURE + :value: "INIT_SCRIPT_FAILURE" + + .. py:attribute:: INSTANCE_POOL_CLUSTER_FAILURE + :value: "INSTANCE_POOL_CLUSTER_FAILURE" + + .. py:attribute:: INSTANCE_UNREACHABLE + :value: "INSTANCE_UNREACHABLE" + + .. py:attribute:: INTERNAL_ERROR + :value: "INTERNAL_ERROR" + + .. py:attribute:: INVALID_ARGUMENT + :value: "INVALID_ARGUMENT" + + .. py:attribute:: INVALID_SPARK_IMAGE + :value: "INVALID_SPARK_IMAGE" + + .. py:attribute:: IP_EXHAUSTION_FAILURE + :value: "IP_EXHAUSTION_FAILURE" + + .. py:attribute:: JOB_FINISHED + :value: "JOB_FINISHED" + + .. py:attribute:: K8S_AUTOSCALING_FAILURE + :value: "K8S_AUTOSCALING_FAILURE" + + .. py:attribute:: K8S_DBR_CLUSTER_LAUNCH_TIMEOUT + :value: "K8S_DBR_CLUSTER_LAUNCH_TIMEOUT" + + .. py:attribute:: METASTORE_COMPONENT_UNHEALTHY + :value: "METASTORE_COMPONENT_UNHEALTHY" + + .. py:attribute:: NEPHOS_RESOURCE_MANAGEMENT + :value: "NEPHOS_RESOURCE_MANAGEMENT" + + .. py:attribute:: NETWORK_CONFIGURATION_FAILURE + :value: "NETWORK_CONFIGURATION_FAILURE" + + .. py:attribute:: NFS_MOUNT_FAILURE + :value: "NFS_MOUNT_FAILURE" + + .. py:attribute:: NPIP_TUNNEL_SETUP_FAILURE + :value: "NPIP_TUNNEL_SETUP_FAILURE" + + .. py:attribute:: NPIP_TUNNEL_TOKEN_FAILURE + :value: "NPIP_TUNNEL_TOKEN_FAILURE" + + .. py:attribute:: REQUEST_REJECTED + :value: "REQUEST_REJECTED" + + .. py:attribute:: REQUEST_THROTTLED + :value: "REQUEST_THROTTLED" + + .. py:attribute:: SECRET_RESOLUTION_ERROR + :value: "SECRET_RESOLUTION_ERROR" + + .. py:attribute:: SECURITY_DAEMON_REGISTRATION_EXCEPTION + :value: "SECURITY_DAEMON_REGISTRATION_EXCEPTION" + + .. py:attribute:: SELF_BOOTSTRAP_FAILURE + :value: "SELF_BOOTSTRAP_FAILURE" + + .. py:attribute:: SKIPPED_SLOW_NODES + :value: "SKIPPED_SLOW_NODES" + + .. py:attribute:: SLOW_IMAGE_DOWNLOAD + :value: "SLOW_IMAGE_DOWNLOAD" + + .. py:attribute:: SPARK_ERROR + :value: "SPARK_ERROR" + + .. 
py:attribute:: SPARK_IMAGE_DOWNLOAD_FAILURE + :value: "SPARK_IMAGE_DOWNLOAD_FAILURE" + + .. py:attribute:: SPARK_STARTUP_FAILURE + :value: "SPARK_STARTUP_FAILURE" + + .. py:attribute:: SPOT_INSTANCE_TERMINATION + :value: "SPOT_INSTANCE_TERMINATION" + + .. py:attribute:: STORAGE_DOWNLOAD_FAILURE + :value: "STORAGE_DOWNLOAD_FAILURE" + + .. py:attribute:: STS_CLIENT_SETUP_FAILURE + :value: "STS_CLIENT_SETUP_FAILURE" + + .. py:attribute:: SUBNET_EXHAUSTED_FAILURE + :value: "SUBNET_EXHAUSTED_FAILURE" + + .. py:attribute:: TEMPORARILY_UNAVAILABLE + :value: "TEMPORARILY_UNAVAILABLE" + + .. py:attribute:: TRIAL_EXPIRED + :value: "TRIAL_EXPIRED" + + .. py:attribute:: UNEXPECTED_LAUNCH_FAILURE + :value: "UNEXPECTED_LAUNCH_FAILURE" + + .. py:attribute:: UNKNOWN + :value: "UNKNOWN" + + .. py:attribute:: UNSUPPORTED_INSTANCE_TYPE + :value: "UNSUPPORTED_INSTANCE_TYPE" + + .. py:attribute:: UPDATE_INSTANCE_PROFILE_FAILURE + :value: "UPDATE_INSTANCE_PROFILE_FAILURE" + + .. py:attribute:: USER_REQUEST + :value: "USER_REQUEST" + + .. py:attribute:: WORKER_SETUP_FAILURE + :value: "WORKER_SETUP_FAILURE" + + .. py:attribute:: WORKSPACE_CANCELLED_ERROR + :value: "WORKSPACE_CANCELLED_ERROR" + + .. py:attribute:: WORKSPACE_CONFIGURATION_ERROR + :value: "WORKSPACE_CONFIGURATION_ERROR" + +.. py:class:: TerminationReasonType + + type of the termination + + .. py:attribute:: CLIENT_ERROR + :value: "CLIENT_ERROR" + + .. py:attribute:: CLOUD_FAILURE + :value: "CLOUD_FAILURE" + + .. py:attribute:: SERVICE_FAULT + :value: "SERVICE_FAULT" + + .. py:attribute:: SUCCESS + :value: "SUCCESS" + .. autoclass:: TimeRange :members: :undoc-members: @@ -260,6 +1099,19 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: WarehousePermissionLevel + + Permission level + + .. py:attribute:: CAN_MANAGE + :value: "CAN_MANAGE" + + .. py:attribute:: CAN_USE + :value: "CAN_USE" + + .. py:attribute:: IS_OWNER + :value: "IS_OWNER" + .. 
autoclass:: WarehousePermissions :members: :undoc-members: @@ -276,6 +1128,19 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: WarehouseTypePairWarehouseType + + Warehouse type: `PRO` or `CLASSIC`. + + .. py:attribute:: CLASSIC + :value: "CLASSIC" + + .. py:attribute:: PRO + :value: "PRO" + + .. py:attribute:: TYPE_UNSPECIFIED + :value: "TYPE_UNSPECIFIED" + .. autoclass:: Widget :members: :undoc-members: diff --git a/docs/dbdataclasses/vectorsearch.rst b/docs/dbdataclasses/vectorsearch.rst index 2bde5274..521eccbf 100644 --- a/docs/dbdataclasses/vectorsearch.rst +++ b/docs/dbdataclasses/vectorsearch.rst @@ -24,6 +24,19 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: DeleteDataStatus + + Status of the delete operation. + + .. py:attribute:: FAILURE + :value: "FAILURE" + + .. py:attribute:: PARTIAL_SUCCESS + :value: "PARTIAL_SUCCESS" + + .. py:attribute:: SUCCESS + :value: "SUCCESS" + .. autoclass:: DeleteDataVectorIndexRequest :members: :undoc-members: @@ -64,6 +77,26 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: EndpointStatusState + + Current state of the endpoint + + .. py:attribute:: OFFLINE + :value: "OFFLINE" + + .. py:attribute:: ONLINE + :value: "ONLINE" + + .. py:attribute:: PROVISIONING + :value: "PROVISIONING" + +.. py:class:: EndpointType + + Type of endpoint. + + .. py:attribute:: STANDARD + :value: "STANDARD" + .. autoclass:: ListEndpointResponse :members: :undoc-members: @@ -76,6 +109,22 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: PipelineType + + Pipeline execution mode. 
+ + - `TRIGGERED`: If the pipeline uses the triggered execution mode, the system stops processing + after successfully refreshing the source table in the pipeline once, ensuring the table is + updated based on the data available when the update started. - `CONTINUOUS`: If the pipeline + uses continuous execution, the pipeline processes new data as it arrives in the source table to + keep vector index fresh. + + .. py:attribute:: CONTINUOUS + :value: "CONTINUOUS" + + .. py:attribute:: TRIGGERED + :value: "TRIGGERED" + .. autoclass:: QueryVectorIndexRequest :members: :undoc-members: @@ -96,6 +145,19 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: UpsertDataStatus + + Status of the upsert operation. + + .. py:attribute:: FAILURE + :value: "FAILURE" + + .. py:attribute:: PARTIAL_SUCCESS + :value: "PARTIAL_SUCCESS" + + .. py:attribute:: SUCCESS + :value: "SUCCESS" + .. autoclass:: UpsertDataVectorIndexRequest :members: :undoc-members: @@ -111,3 +173,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. autoclass:: VectorIndexStatus :members: :undoc-members: + +.. py:class:: VectorIndexType + + There are 2 types of Vector Search indexes: + + - `DELTA_SYNC`: An index that automatically syncs with a source Delta Table, automatically and + incrementally updating the index as the underlying data in the Delta Table changes. - + `DIRECT_ACCESS`: An index that supports direct read and write of vectors and metadata through + our REST and SDK APIs. With this model, the user manages index updates. + + .. py:attribute:: DELTA_SYNC + :value: "DELTA_SYNC" + + .. 
py:attribute:: DIRECT_ACCESS + :value: "DIRECT_ACCESS" diff --git a/docs/dbdataclasses/workspace.rst b/docs/dbdataclasses/workspace.rst index d94d0d73..7821544b 100644 --- a/docs/dbdataclasses/workspace.rst +++ b/docs/dbdataclasses/workspace.rst @@ -8,6 +8,17 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: AclPermission + + .. py:attribute:: MANAGE + :value: "MANAGE" + + .. py:attribute:: READ + :value: "READ" + + .. py:attribute:: WRITE + :value: "WRITE" + .. autoclass:: AzureKeyVaultSecretScopeMetadata :members: :undoc-members: @@ -48,6 +59,26 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: ExportFormat + + .. py:attribute:: AUTO + :value: "AUTO" + + .. py:attribute:: DBC + :value: "DBC" + + .. py:attribute:: HTML + :value: "HTML" + + .. py:attribute:: JUPYTER + :value: "JUPYTER" + + .. py:attribute:: R_MARKDOWN + :value: "R_MARKDOWN" + + .. py:attribute:: SOURCE + :value: "SOURCE" + .. autoclass:: ExportResponse :members: :undoc-members: @@ -72,6 +103,54 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: ImportFormat + + This specifies the format of the file to be imported. + + The value is case sensitive. + + - `AUTO`: The item is imported depending on an analysis of the item's extension and the header + content provided in the request. If the item is imported as a notebook, then the item's + extension is automatically removed. - `SOURCE`: The notebook or directory is imported as source + code. - `HTML`: The notebook is imported as an HTML file. - `JUPYTER`: The notebook is imported + as a Jupyter/IPython Notebook file. - `DBC`: The notebook is imported in Databricks archive + format. Required for directories. - `R_MARKDOWN`: The notebook is imported from R Markdown + format. + + .. py:attribute:: AUTO + :value: "AUTO" + + .. 
py:attribute:: DBC + :value: "DBC" + + .. py:attribute:: HTML + :value: "HTML" + + .. py:attribute:: JUPYTER + :value: "JUPYTER" + + .. py:attribute:: R_MARKDOWN + :value: "R_MARKDOWN" + + .. py:attribute:: SOURCE + :value: "SOURCE" + +.. py:class:: Language + + The language of the object. This value is set only if the object type is `NOTEBOOK`. + + .. py:attribute:: PYTHON + :value: "PYTHON" + + .. py:attribute:: R + :value: "R" + + .. py:attribute:: SCALA + :value: "SCALA" + + .. py:attribute:: SQL + :value: "SQL" + .. autoclass:: ListAclsResponse :members: :undoc-members: @@ -100,6 +179,32 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: ObjectType + + The type of the object in workspace. + + - `NOTEBOOK`: document that contains runnable code, visualizations, and explanatory text. - + `DIRECTORY`: directory - `LIBRARY`: library - `FILE`: file - `REPO`: repository - `DASHBOARD`: + Lakeview dashboard + + .. py:attribute:: DASHBOARD + :value: "DASHBOARD" + + .. py:attribute:: DIRECTORY + :value: "DIRECTORY" + + .. py:attribute:: FILE + :value: "FILE" + + .. py:attribute:: LIBRARY + :value: "LIBRARY" + + .. py:attribute:: NOTEBOOK + :value: "NOTEBOOK" + + .. py:attribute:: REPO + :value: "REPO" + .. autoclass:: PutAcl :members: :undoc-members: @@ -124,6 +229,22 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: RepoPermissionLevel + + Permission level + + .. py:attribute:: CAN_EDIT + :value: "CAN_EDIT" + + .. py:attribute:: CAN_MANAGE + :value: "CAN_MANAGE" + + .. py:attribute:: CAN_READ + :value: "CAN_READ" + + .. py:attribute:: CAN_RUN + :value: "CAN_RUN" + .. autoclass:: RepoPermissions :members: :undoc-members: @@ -136,6 +257,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: ScopeBackendType + + .. 
py:attribute:: AZURE_KEYVAULT + :value: "AZURE_KEYVAULT" + + .. py:attribute:: DATABRICKS + :value: "DATABRICKS" + .. autoclass:: SecretMetadata :members: :undoc-members: @@ -172,6 +301,22 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: WorkspaceObjectPermissionLevel + + Permission level + + .. py:attribute:: CAN_EDIT + :value: "CAN_EDIT" + + .. py:attribute:: CAN_MANAGE + :value: "CAN_MANAGE" + + .. py:attribute:: CAN_READ + :value: "CAN_READ" + + .. py:attribute:: CAN_RUN + :value: "CAN_RUN" + .. autoclass:: WorkspaceObjectPermissions :members: :undoc-members: diff --git a/docs/gen-client-docs.py b/docs/gen-client-docs.py index 47449e27..1f805ce7 100644 --- a/docs/gen-client-docs.py +++ b/docs/gen-client-docs.py @@ -1,6 +1,6 @@ #!env python3 import collections -import dbdataclasses +import enum import inspect import json import os.path @@ -298,7 +298,7 @@ def service_docs(self, client_inst) -> list[ServiceDoc]: @staticmethod def _should_document(obj): - return is_dataclass(obj) or (type(obj) == type and issubclass(obj, Enum)) + return is_dataclass(obj) or (type(obj) == enum.EnumType and obj != Enum) @staticmethod def _make_folder_if_not_exists(folder): From 497fc4b7275194355547ddf500fc8b17fa0cbe6c Mon Sep 17 00:00:00 2001 From: Miles Yucht Date: Thu, 22 Feb 2024 11:35:59 +0100 Subject: [PATCH 2/2] cleanup --- docs/dbdataclasses/billing.rst | 37 +++--------- docs/dbdataclasses/catalog.rst | 20 ++----- docs/dbdataclasses/compute.rst | 35 +++-------- docs/dbdataclasses/jobs.rst | 93 ++++++----------------------- docs/dbdataclasses/ml.rst | 71 +++++++--------------- docs/dbdataclasses/pipelines.rst | 5 +- docs/dbdataclasses/provisioning.rst | 36 ++++------- docs/dbdataclasses/serving.rst | 47 ++++----------- docs/dbdataclasses/settings.rst | 28 ++------- docs/dbdataclasses/sharing.rst | 3 +- docs/dbdataclasses/sql.rst | 65 +++++--------------- docs/dbdataclasses/vectorsearch.rst | 13 +--- 
docs/dbdataclasses/workspace.rst | 17 +---- docs/gen-client-docs.py | 21 ++++++- 14 files changed, 129 insertions(+), 362 deletions(-) diff --git a/docs/dbdataclasses/billing.rst b/docs/dbdataclasses/billing.rst index f575cae6..2ec56c8e 100644 --- a/docs/dbdataclasses/billing.rst +++ b/docs/dbdataclasses/billing.rst @@ -30,13 +30,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: DeliveryStatus - The status string for log delivery. Possible values are: * `CREATED`: There were no log delivery - attempts since the config was created. * `SUCCEEDED`: The latest attempt of log delivery has - succeeded completely. * `USER_FAILURE`: The latest attempt of log delivery failed because of - misconfiguration of customer provided permissions on role or storage. * `SYSTEM_FAILURE`: The - latest attempt of log delivery failed because of an Databricks internal error. Contact support - if it doesn't go away soon. * `NOT_FOUND`: The log delivery status as the configuration has been - disabled since the release of this feature or there are no workspaces in the account. + The status string for log delivery. Possible values are: * `CREATED`: There were no log delivery attempts since the config was created. * `SUCCEEDED`: The latest attempt of log delivery has succeeded completely. * `USER_FAILURE`: The latest attempt of log delivery failed because of misconfiguration of customer provided permissions on role or storage. * `SYSTEM_FAILURE`: The latest attempt of log delivery failed because of a Databricks internal error. Contact support if it doesn't go away soon. * `NOT_FOUND`: The log delivery status as the configuration has been disabled since the release of this feature or there are no workspaces in the account. .. py:attribute:: CREATED :value: "CREATED" @@ -59,10 +53,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: LogDeliveryConfigStatus - Status of log delivery configuration. 
Set to `ENABLED` (enabled) or `DISABLED` (disabled). - Defaults to `ENABLED`. You can [enable or disable the - configuration](#operation/patch-log-delivery-config-status) later. Deletion of a configuration - is not supported, so disable a log delivery configuration that is no longer needed. + Status of log delivery configuration. Set to `ENABLED` (enabled) or `DISABLED` (disabled). Defaults to `ENABLED`. You can [enable or disable the configuration](#operation/patch-log-delivery-config-status) later. Deletion of a configuration is not supported, so disable a log delivery configuration that is no longer needed. .. py:attribute:: DISABLED :value: "DISABLED" @@ -81,17 +72,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: LogType Log delivery type. Supported values are: - - * `BILLABLE_USAGE` — Configure [billable usage log delivery]. For the CSV schema, see the - [View billable usage]. - - * `AUDIT_LOGS` — Configure [audit log delivery]. For the JSON schema, see [Configure audit - logging] - - [Configure audit logging]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html - [View billable usage]: https://docs.databricks.com/administration-guide/account-settings/usage.html - [audit log delivery]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html - [billable usage log delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html + * `BILLABLE_USAGE` — Configure [billable usage log delivery]. For the CSV schema, see the [View billable usage]. + * `AUDIT_LOGS` — Configure [audit log delivery]. 
For the JSON schema, see [Configure audit logging] + [Configure audit logging]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [View billable usage]: https://docs.databricks.com/administration-guide/account-settings/usage.html [audit log delivery]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [billable usage log delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html .. py:attribute:: AUDIT_LOGS :value: "AUDIT_LOGS" @@ -102,14 +85,8 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: OutputFormat The file type of log delivery. - - * If `log_type` is `BILLABLE_USAGE`, this value must be `CSV`. Only the CSV (comma-separated - values) format is supported. For the schema, see the [View billable usage] * If `log_type` is - `AUDIT_LOGS`, this value must be `JSON`. Only the JSON (JavaScript Object Notation) format is - supported. For the schema, see the [Configuring audit logs]. - - [Configuring audit logs]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html - [View billable usage]: https://docs.databricks.com/administration-guide/account-settings/usage.html + * If `log_type` is `BILLABLE_USAGE`, this value must be `CSV`. Only the CSV (comma-separated values) format is supported. For the schema, see the [View billable usage] * If `log_type` is `AUDIT_LOGS`, this value must be `JSON`. Only the JSON (JavaScript Object Notation) format is supported. For the schema, see the [Configuring audit logs]. + [Configuring audit logs]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [View billable usage]: https://docs.databricks.com/administration-guide/account-settings/usage.html .. 
py:attribute:: CSV :value: "CSV" diff --git a/docs/dbdataclasses/catalog.rst b/docs/dbdataclasses/catalog.rst index d2b809b3..4c386ae5 100644 --- a/docs/dbdataclasses/catalog.rst +++ b/docs/dbdataclasses/catalog.rst @@ -314,10 +314,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: CreateFunctionRoutineBody - Function language. When **EXTERNAL** is used, the language of the routine function should be - specified in the __external_language__ field, and the __return_params__ of the function cannot - be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be - **NO_SQL**. + Function language. When **EXTERNAL** is used, the language of the routine function should be specified in the __external_language__ field, and the __return_params__ of the function cannot be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be **NO_SQL**. .. py:attribute:: EXTERNAL :value: "EXTERNAL" @@ -459,8 +456,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: EffectivePredictiveOptimizationFlagInheritedFromType - The type of the object from which the flag was inherited. If there was no inheritance, this - field is left blank. + The type of the object from which the flag was inherited. If there was no inheritance, this field is left blank. .. py:attribute:: CATALOG :value: "CATALOG" @@ -536,10 +532,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: FunctionInfoRoutineBody - Function language. When **EXTERNAL** is used, the language of the routine function should be - specified in the __external_language__ field, and the __return_params__ of the function cannot - be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be - **NO_SQL**. + Function language. 
When **EXTERNAL** is used, the language of the routine function should be specified in the __external_language__ field, and the __return_params__ of the function cannot be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be **NO_SQL**. .. py:attribute:: EXTERNAL :value: "EXTERNAL" @@ -703,9 +696,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: ModelVersionInfoStatus - Current status of the model version. Newly created model versions start in PENDING_REGISTRATION - status, then move to READY status once the model version files are uploaded and the model - version is finalized. Only model versions in READY status can be loaded for inference or served. + Current status of the model version. Newly created model versions start in PENDING_REGISTRATION status, then move to READY status once the model version files are uploaded and the model version is finalized. Only model versions in READY status can be loaded for inference or served. .. py:attribute:: FAILED_REGISTRATION :value: "FAILED_REGISTRATION" @@ -1142,8 +1133,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: SystemSchemaInfoState - The current state of enablement for the system schema. An empty string means the system schema - is available and ready for opt-in. + The current state of enablement for the system schema. An empty string means the system schema is available and ready for opt-in. .. py:attribute:: AVAILABLE :value: "AVAILABLE" diff --git a/docs/dbdataclasses/compute.rst b/docs/dbdataclasses/compute.rst index 17ea7cae..bb6c88c4 100644 --- a/docs/dbdataclasses/compute.rst +++ b/docs/dbdataclasses/compute.rst @@ -23,8 +23,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: AwsAvailability Availability type used for all subsequent nodes past the `first_on_demand` ones. 
- - Note: If `first_on_demand` is zero, this availability type will be used for the entire cluster. + Note: If `first_on_demand` is zero, this availability type will be used for the entire cluster. .. py:attribute:: ON_DEMAND :value: "ON_DEMAND" @@ -41,9 +40,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: AzureAvailability - Availability type used for all subsequent nodes past the `first_on_demand` ones. Note: If - `first_on_demand` is zero (which only happens on pool clusters), this availability type will be - used for the entire cluster. + Availability type used for all subsequent nodes past the `first_on_demand` ones. Note: If `first_on_demand` is zero (which only happens on pool clusters), this availability type will be used for the entire cluster. .. py:attribute:: ON_DEMAND_AZURE :value: "ON_DEMAND_AZURE" @@ -172,8 +169,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: ClusterSource - Determines whether the cluster was created by a user through the UI, created by the Databricks - Jobs Scheduler, or through an API request. This is the same as cluster_creator, but read only. + Determines whether the cluster was created by a user through the UI, created by the Databricks Jobs Scheduler, or through an API request. This is the same as cluster_creator, but read only. .. py:attribute:: API :value: "API" @@ -307,18 +303,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: DataSecurityMode Data security mode decides what data governance model to use when accessing data from a cluster. - - * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features - are not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively - used by a single user specified in `single_user_name`. 
Most programming languages, cluster - features and data governance features are available in this mode. * `USER_ISOLATION`: A secure - cluster that can be shared by multiple users. Cluster users are fully isolated so that they - cannot see each other's data and credentials. Most data governance features are supported in - this mode. But programming languages and cluster features might be limited. * - `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. * - `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high - concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy - Passthrough on standard clusters. + * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features are not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in `single_user_name`. Most programming languages, cluster features and data governance features are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple users. Cluster users are fully isolated so that they cannot see each other's data and credentials. Most data governance features are supported in this mode. But programming languages and cluster features might be limited. * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. * `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy Passthrough on standard clusters. .. py:attribute:: LEGACY_PASSTHROUGH :value: "LEGACY_PASSTHROUGH" @@ -515,8 +500,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. 
py:class:: GcpAvailability - This field determines whether the instance pool will contain preemptible VMs, on-demand VMs, or - preemptible VMs with a fallback to on-demand VMs if the former is unavailable. + This field determines whether the instance pool will contain preemptible VMs, on-demand VMs, or preemptible VMs with a fallback to on-demand VMs if the former is unavailable. .. py:attribute:: ON_DEMAND_GCP :value: "ON_DEMAND_GCP" @@ -646,8 +630,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: InstancePoolAwsAttributesAvailability Availability type used for the spot nodes. - - The default value is defined by InstancePoolConf.instancePoolDefaultAwsAvailability + The default value is defined by InstancePoolConf.instancePoolDefaultAwsAvailability .. py:attribute:: ON_DEMAND :value: "ON_DEMAND" @@ -662,8 +645,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: InstancePoolAzureAttributesAvailability Shows the Availability type used for the spot nodes. - - The default value is defined by InstancePoolConf.instancePoolDefaultAzureAvailability + The default value is defined by InstancePoolConf.instancePoolDefaultAzureAvailability .. py:attribute:: ON_DEMAND_AZURE :value: "ON_DEMAND_AZURE" @@ -909,8 +891,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: RuntimeEngine - Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime - engine is inferred from spark_version. + Decides which runtime engine to use, e.g. Standard vs. Photon. If unspecified, the runtime engine is inferred from spark_version. .. 
py:attribute:: NULL :value: "NULL" diff --git a/docs/dbdataclasses/jobs.rst b/docs/dbdataclasses/jobs.rst index 3d78504d..4e414a77 100644 --- a/docs/dbdataclasses/jobs.rst +++ b/docs/dbdataclasses/jobs.rst @@ -42,15 +42,8 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: ConditionTaskOp - * `EQUAL_TO`, `NOT_EQUAL` operators perform string comparison of their operands. This means that - `“12.0” == “12”` will evaluate to `false`. * `GREATER_THAN`, `GREATER_THAN_OR_EQUAL`, - `LESS_THAN`, `LESS_THAN_OR_EQUAL` operators perform numeric comparison of their operands. - `“12.0” >= “12”` will evaluate to `true`, `“10.0” >= “12”` will evaluate to - `false`. - - The boolean comparison to task values can be implemented with operators `EQUAL_TO`, `NOT_EQUAL`. - If a task value was set to a boolean value, it will be serialized to `“true”` or - `“false”` for the comparison. + * `EQUAL_TO`, `NOT_EQUAL` operators perform string comparison of their operands. This means that `“12.0” == “12”` will evaluate to `false`. * `GREATER_THAN`, `GREATER_THAN_OR_EQUAL`, `LESS_THAN`, `LESS_THAN_OR_EQUAL` operators perform numeric comparison of their operands. `“12.0” >= “12”` will evaluate to `true`, `“10.0” >= “12”` will evaluate to `false`. + The boolean comparison to task values can be implemented with operators `EQUAL_TO`, `NOT_EQUAL`. If a task value was set to a boolean value, it will be serialized to `“true”` or `“false”` for the comparison. .. py:attribute:: EQUAL_TO :value: "EQUAL_TO" @@ -81,9 +74,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: CreateJobEditMode Edit mode of the job. - - * `UI_LOCKED`: The job is in a locked UI state and cannot be modified. * `EDITABLE`: The job is - in an editable state and can be modified. + * `UI_LOCKED`: The job is in a locked UI state and cannot be modified. * `EDITABLE`: The job is in an editable state and can be modified. .. 
py:attribute:: EDITABLE :value: "EDITABLE" @@ -212,8 +203,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: JobDeploymentKind The kind of deployment that manages the job. - - * `BUNDLE`: The job is managed by Databricks Asset Bundle. + * `BUNDLE`: The job is managed by Databricks Asset Bundle. .. py:attribute:: BUNDLE :value: "BUNDLE" @@ -277,9 +267,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: JobSettingsEditMode Edit mode of the job. - - * `UI_LOCKED`: The job is in a locked UI state and cannot be modified. * `EDITABLE`: The job is - in an editable state and can be modified. + * `UI_LOCKED`: The job is in a locked UI state and cannot be modified. * `EDITABLE`: The job is in an editable state and can be modified. .. py:attribute:: EDITABLE :value: "EDITABLE" @@ -293,14 +281,8 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: JobSourceDirtyState - Dirty state indicates the job is not fully synced with the job specification in the remote - repository. - - Possible values are: * `NOT_SYNCED`: The job is not yet synced with the remote job - specification. Import the remote job specification from UI to make the job fully synced. * - `DISCONNECTED`: The job is temporary disconnected from the remote job specification and is - allowed for live edit. Import the remote job specification again from UI to make the job fully - synced. + Dirty state indicates the job is not fully synced with the job specification in the remote repository. + Possible values are: * `NOT_SYNCED`: The job is not yet synced with the remote job specification. Import the remote job specification from UI to make the job fully synced. * `DISCONNECTED`: The job is temporarily disconnected from the remote job specification and is allowed for live edit. Import the remote job specification again from UI to make the job fully synced. .. 
py:attribute:: DISCONNECTED :value: "DISCONNECTED" @@ -340,11 +322,8 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: ListRunsRunType - * `JOB_RUN`: Normal job run. A run created with :method:jobs/runNow. * `WORKFLOW_RUN`: Workflow - run. A run created with [dbutils.notebook.run]. * `SUBMIT_RUN`: Submit run. A run created with - :method:jobs/submit. - - [dbutils.notebook.run]: https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-workflow + * `JOB_RUN`: Normal job run. A run created with :method:jobs/runNow. * `WORKFLOW_RUN`: Workflow run. A run created with [dbutils.notebook.run]. * `SUBMIT_RUN`: Submit run. A run created with :method:jobs/submit. + [dbutils.notebook.run]: https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-workflow .. py:attribute:: JOB_RUN :value: "JOB_RUN" @@ -481,14 +460,8 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: RunIf - An optional value indicating the condition that determines whether the task should be run once - its dependencies have been completed. When omitted, defaults to `ALL_SUCCESS`. - - Possible values are: * `ALL_SUCCESS`: All dependencies have executed and succeeded * - `AT_LEAST_ONE_SUCCESS`: At least one dependency has succeeded * `NONE_FAILED`: None of the - dependencies have failed and at least one was executed * `ALL_DONE`: All dependencies have been - completed * `AT_LEAST_ONE_FAILED`: At least one dependency failed * `ALL_FAILED`: ALl - dependencies have failed + An optional value indicating the condition that determines whether the task should be run once its dependencies have been completed. When omitted, defaults to `ALL_SUCCESS`. 
+ Possible values are: * `ALL_SUCCESS`: All dependencies have executed and succeeded * `AT_LEAST_ONE_SUCCESS`: At least one dependency has succeeded * `NONE_FAILED`: None of the dependencies have failed and at least one was executed * `ALL_DONE`: All dependencies have been completed * `AT_LEAST_ONE_FAILED`: At least one dependency failed * `ALL_FAILED`: All dependencies have failed .. py:attribute:: ALL_DONE :value: "ALL_DONE" @@ -518,17 +491,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: RunLifeCycleState - A value indicating the run's lifecycle state. The possible values are: * `QUEUED`: The run is - queued. * `PENDING`: The run is waiting to be executed while the cluster and execution context - are being prepared. * `RUNNING`: The task of this run is being executed. * `TERMINATING`: The - task of this run has completed, and the cluster and execution context are being cleaned up. * - `TERMINATED`: The task of this run has completed, and the cluster and execution context have - been cleaned up. This state is terminal. * `SKIPPED`: This run was aborted because a previous - run of the same job was already active. This state is terminal. * `INTERNAL_ERROR`: An - exceptional state that indicates a failure in the Jobs service, such as network failure over a - long period. If a run on a new cluster ends in the `INTERNAL_ERROR` state, the Jobs service - terminates the cluster as soon as possible. This state is terminal. * `BLOCKED`: The run is - blocked on an upstream dependency. * `WAITING_FOR_RETRY`: The run is waiting for a retry. + A value indicating the run's lifecycle state. The possible values are: * `QUEUED`: The run is queued. * `PENDING`: The run is waiting to be executed while the cluster and execution context are being prepared. * `RUNNING`: The task of this run is being executed. * `TERMINATING`: The task of this run has completed, and the cluster and execution context are being cleaned up. 
* `TERMINATED`: The task of this run has completed, and the cluster and execution context have been cleaned up. This state is terminal. * `SKIPPED`: This run was aborted because a previous run of the same job was already active. This state is terminal. * `INTERNAL_ERROR`: An exceptional state that indicates a failure in the Jobs service, such as network failure over a long period. If a run on a new cluster ends in the `INTERNAL_ERROR` state, the Jobs service terminates the cluster as soon as possible. This state is terminal. * `BLOCKED`: The run is blocked on an upstream dependency. * `WAITING_FOR_RETRY`: The run is waiting for a retry. .. py:attribute:: BLOCKED :value: "BLOCKED" @@ -575,14 +538,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: RunResultState - A value indicating the run's result. The possible values are: * `SUCCESS`: The task completed - successfully. * `FAILED`: The task completed with an error. * `TIMEDOUT`: The run was stopped - after reaching the timeout. * `CANCELED`: The run was canceled at user request. * - `MAXIMUM_CONCURRENT_RUNS_REACHED`: The run was skipped because the maximum concurrent runs were - reached. * `EXCLUDED`: The run was skipped because the necessary conditions were not met. * - `SUCCESS_WITH_FAILURES`: The job run completed successfully with some failures; leaf tasks were - successful. * `UPSTREAM_FAILED`: The run was skipped because of an upstream failure. * - `UPSTREAM_CANCELED`: The run was skipped because an upstream task was canceled. + A value indicating the run's result. The possible values are: * `SUCCESS`: The task completed successfully. * `FAILED`: The task completed with an error. * `TIMEDOUT`: The run was stopped after reaching the timeout. * `CANCELED`: The run was canceled at user request. * `MAXIMUM_CONCURRENT_RUNS_REACHED`: The run was skipped because the maximum concurrent runs were reached. 
* `EXCLUDED`: The run was skipped because the necessary conditions were not met. * `SUCCESS_WITH_FAILURES`: The job run completed successfully with some failures; leaf tasks were successful. * `UPSTREAM_FAILED`: The run was skipped because of an upstream failure. * `UPSTREAM_CANCELED`: The run was skipped because an upstream task was canceled. .. py:attribute:: CANCELED :value: "CANCELED" @@ -621,11 +577,8 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: RunType - * `JOB_RUN`: Normal job run. A run created with :method:jobs/runNow. * `WORKFLOW_RUN`: Workflow - run. A run created with [dbutils.notebook.run]. * `SUBMIT_RUN`: Submit run. A run created with - :method:jobs/submit. - - [dbutils.notebook.run]: https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-workflow + * `JOB_RUN`: Normal job run. A run created with :method:jobs/runNow. * `WORKFLOW_RUN`: Workflow run. A run created with [dbutils.notebook.run]. * `SUBMIT_RUN`: Submit run. A run created with :method:jobs/submit. + [dbutils.notebook.run]: https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-workflow .. py:attribute:: JOB_RUN :value: "JOB_RUN" @@ -663,9 +616,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: SqlAlertState The state of the SQL alert. - - * UNKNOWN: alert yet to be evaluated * OK: alert evaluated and did not fulfill trigger - conditions * TRIGGERED: alert evaluated and fulfilled trigger conditions + * UNKNOWN: alert yet to be evaluated * OK: alert evaluated and did not fulfill trigger conditions * TRIGGERED: alert evaluated and fulfilled trigger conditions .. py:attribute:: OK :value: "OK" @@ -786,14 +737,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: TriggerType The type of trigger that fired this run. - - * `PERIODIC`: Schedules that periodically trigger runs, such as a cron scheduler. 
* `ONE_TIME`: - One time triggers that fire a single run. This occurs you triggered a single run on demand - through the UI or the API. * `RETRY`: Indicates a run that is triggered as a retry of a - previously failed run. This occurs when you request to re-run the job in case of failures. * - `RUN_JOB_TASK`: Indicates a run that is triggered using a Run Job task. * `FILE_ARRIVAL`: - Indicates a run that is triggered by a file arrival. * `TABLE`: Indicates a run that is - triggered by a table update. + * `PERIODIC`: Schedules that periodically trigger runs, such as a cron scheduler. * `ONE_TIME`: One time triggers that fire a single run. This occurs you triggered a single run on demand through the UI or the API. * `RETRY`: Indicates a run that is triggered as a retry of a previously failed run. This occurs when you request to re-run the job in case of failures. * `RUN_JOB_TASK`: Indicates a run that is triggered using a Run Job task. * `FILE_ARRIVAL`: Indicates a run that is triggered by a file arrival. * `TABLE`: Indicates a run that is triggered by a table update. .. py:attribute:: FILE_ARRIVAL :value: "FILE_ARRIVAL" @@ -833,8 +777,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: ViewsToExport - * `CODE`: Code view of the notebook. * `DASHBOARDS`: All dashboard views of the notebook. * - `ALL`: All views of the notebook. + * `CODE`: Code view of the notebook. * `DASHBOARDS`: All dashboard views of the notebook. * `ALL`: All views of the notebook. .. py:attribute:: ALL :value: "ALL" diff --git a/docs/dbdataclasses/ml.rst b/docs/dbdataclasses/ml.rst index d2464440..20953503 100644 --- a/docs/dbdataclasses/ml.rst +++ b/docs/dbdataclasses/ml.rst @@ -10,12 +10,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: ActivityAction - An action that a user (with sufficient permissions) could take on an activity. 
Valid values are: - * `APPROVE_TRANSITION_REQUEST`: Approve a transition request - - * `REJECT_TRANSITION_REQUEST`: Reject a transition request - - * `CANCEL_TRANSITION_REQUEST`: Cancel (delete) a transition request + An action that a user (with sufficient permissions) could take on an activity. Valid values are: * `APPROVE_TRANSITION_REQUEST`: Approve a transition request + * `REJECT_TRANSITION_REQUEST`: Reject a transition request + * `CANCEL_TRANSITION_REQUEST`: Cancel (delete) a transition request .. py:attribute:: APPROVE_TRANSITION_REQUEST :value: "APPROVE_TRANSITION_REQUEST" @@ -28,19 +25,12 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: ActivityType - Type of activity. Valid values are: * `APPLIED_TRANSITION`: User applied the corresponding stage - transition. - - * `REQUESTED_TRANSITION`: User requested the corresponding stage transition. - - * `CANCELLED_REQUEST`: User cancelled an existing transition request. - - * `APPROVED_REQUEST`: User approved the corresponding stage transition. - - * `REJECTED_REQUEST`: User rejected the coressponding stage transition. - - * `SYSTEM_TRANSITION`: For events performed as a side effect, such as archiving existing model - versions in a stage. + Type of activity. Valid values are: * `APPLIED_TRANSITION`: User applied the corresponding stage transition. + * `REQUESTED_TRANSITION`: User requested the corresponding stage transition. + * `CANCELLED_REQUEST`: User cancelled an existing transition request. + * `APPROVED_REQUEST`: User approved the corresponding stage transition. + * `REJECTED_REQUEST`: User rejected the corresponding stage transition. + * `SYSTEM_TRANSITION`: For events performed as a side effect, such as archiving existing model versions in a stage. .. py:attribute:: APPLIED_TRANSITION :value: "APPLIED_TRANSITION" @@ -73,10 +63,8 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. 
py:class:: CommentActivityAction - An action that a user (with sufficient permissions) could take on a comment. Valid values are: * - `EDIT_COMMENT`: Edit the comment - - * `DELETE_COMMENT`: Delete the comment + An action that a user (with sufficient permissions) could take on a comment. Valid values are: * `EDIT_COMMENT`: Edit the comment + * `DELETE_COMMENT`: Delete the comment .. py:attribute:: DELETE_COMMENT :value: "DELETE_COMMENT" @@ -382,8 +370,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: PermissionLevel - Permission level of the requesting user on the object. For what is allowed at each level, see - [MLflow Model permissions](..). + Permission level of the requesting user on the object. For what is allowed at each level, see [MLflow Model permissions](..). .. py:attribute:: CAN_EDIT :value: "CAN_EDIT" @@ -487,13 +474,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: RegistryWebhookStatus - Enable or disable triggering the webhook, or put the webhook into test mode. The default is - `ACTIVE`: * `ACTIVE`: Webhook is triggered when an associated event happens. - - * `DISABLED`: Webhook is not triggered. - - * `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a - real event. + Enable or disable triggering the webhook, or put the webhook into test mode. The default is `ACTIVE`: * `ACTIVE`: Webhook is triggered when an associated event happens. + * `DISABLED`: Webhook is not triggered. + * `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a real event. .. py:attribute:: ACTIVE :value: "ACTIVE" @@ -585,8 +568,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: SearchExperimentsViewType - Qualifier for type of experiments to be returned. If unspecified, return only active - experiments. + Qualifier for type of experiments to be returned. 
If unspecified, return only active experiments. .. py:attribute:: ACTIVE_ONLY :value: "ACTIVE_ONLY" @@ -645,14 +627,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: Stage Stage of the model version. Valid values are: - - * `None`: The initial stage of a model version. - - * `Staging`: Staging or pre-production stage. - - * `Production`: Production stage. - - * `Archived`: Archived stage. + * `None`: The initial stage of a model version. + * `Staging`: Staging or pre-production stage. + * `Production`: Production stage. + * `Archived`: Archived stage. .. py:attribute:: ARCHIVED :value: "ARCHIVED" @@ -668,12 +646,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: Status - The status of the model version. Valid values are: * `PENDING_REGISTRATION`: Request to register - a new model version is pending as server performs background tasks. - - * `FAILED_REGISTRATION`: Request to register a new model version has failed. - - * `READY`: Model version is ready for use. + The status of the model version. Valid values are: * `PENDING_REGISTRATION`: Request to register a new model version is pending as server performs background tasks. + * `FAILED_REGISTRATION`: Request to register a new model version has failed. + * `READY`: Model version is ready for use. .. py:attribute:: FAILED_REGISTRATION :value: "FAILED_REGISTRATION" diff --git a/docs/dbdataclasses/pipelines.rst b/docs/dbdataclasses/pipelines.rst index 64a8c891..2c2f1a19 100644 --- a/docs/dbdataclasses/pipelines.rst +++ b/docs/dbdataclasses/pipelines.rst @@ -129,10 +129,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: PipelineClusterAutoscaleMode - Databricks Enhanced Autoscaling optimizes cluster utilization by automatically allocating - cluster resources based on workload volume, with minimal impact to the data processing latency - of your pipelines. 
Enhanced Autoscaling is available for `updates` clusters only. The legacy - autoscaling feature is used for `maintenance` clusters. + Databricks Enhanced Autoscaling optimizes cluster utilization by automatically allocating cluster resources based on workload volume, with minimal impact to the data processing latency of your pipelines. Enhanced Autoscaling is available for `updates` clusters only. The legacy autoscaling feature is used for `maintenance` clusters. .. py:attribute:: ENHANCED :value: "ENHANCED" diff --git a/docs/dbdataclasses/provisioning.rst b/docs/dbdataclasses/provisioning.rst index 85523504..528f3394 100644 --- a/docs/dbdataclasses/provisioning.rst +++ b/docs/dbdataclasses/provisioning.rst @@ -74,10 +74,8 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: EndpointUseCase - This enumeration represents the type of Databricks VPC [endpoint service] that was used when - creating this VPC endpoint. - - [endpoint service]: https://docs.aws.amazon.com/vpc/latest/privatelink/endpoint-service.html + This enumeration represents the type of Databricks VPC [endpoint service] that was used when creating this VPC endpoint. + [endpoint service]: https://docs.aws.amazon.com/vpc/latest/privatelink/endpoint-service.html .. py:attribute:: DATAPLANE_RELAY_ACCESS :value: "DATAPLANE_RELAY_ACCESS" @@ -87,8 +85,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: ErrorType - The AWS resource associated with this error: credentials, VPC, subnet, security group, or - network ACL. + The AWS resource associated with this error: credentials, VPC, subnet, security group, or network ACL. .. py:attribute:: CREDENTIALS :value: "CREDENTIALS" @@ -128,12 +125,8 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: GkeConfigConnectivityType Specifies the network connectivity types for the GKE nodes and the GKE master network. 
- - Set to `PRIVATE_NODE_PUBLIC_MASTER` for a private GKE cluster for the workspace. The GKE nodes - will not have public IPs. - - Set to `PUBLIC_NODE_PUBLIC_MASTER` for a public GKE cluster. The nodes of a public GKE cluster - have public IP addresses. + Set to `PRIVATE_NODE_PUBLIC_MASTER` for a private GKE cluster for the workspace. The GKE nodes will not have public IPs. + Set to `PUBLIC_NODE_PUBLIC_MASTER` for a public GKE cluster. The nodes of a public GKE cluster have public IP addresses. .. py:attribute:: PRIVATE_NODE_PUBLIC_MASTER :value: "PRIVATE_NODE_PUBLIC_MASTER" @@ -143,9 +136,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: KeyUseCase - Possible values are: * `MANAGED_SERVICES`: Encrypts notebook and secret data in the control - plane * `STORAGE`: Encrypts the workspace's root S3 bucket (root DBFS and system data) and, - optionally, cluster EBS volumes. + Possible values are: * `MANAGED_SERVICES`: Encrypts notebook and secret data in the control plane * `STORAGE`: Encrypts the workspace's root S3 bucket (root DBFS and system data) and, optionally, cluster EBS volumes. .. py:attribute:: MANAGED_SERVICES :value: "MANAGED_SERVICES" @@ -172,8 +163,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: PricingTier The pricing tier of the workspace. For pricing tier information, see [AWS Pricing]. - - [AWS Pricing]: https://databricks.com/product/aws-pricing + [AWS Pricing]: https://databricks.com/product/aws-pricing .. py:attribute:: COMMUNITY_EDITION :value: "COMMUNITY_EDITION" @@ -195,11 +185,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: PrivateAccessLevel - The private access level controls which VPC endpoints can connect to the UI or API of any - workspace that attaches this private access settings object. 
* `ACCOUNT` level access (the - default) allows only VPC endpoints that are registered in your Databricks account connect to - your workspace. * `ENDPOINT` level access allows only specified VPC endpoints connect to your - workspace. For details, see `allowed_vpc_endpoint_ids`. + The private access level controls which VPC endpoints can connect to the UI or API of any workspace that attaches this private access settings object. * `ACCOUNT` level access (the default) allows only VPC endpoints that are registered in your Databricks account to connect to your workspace. * `ENDPOINT` level access allows only specified VPC endpoints to connect to your workspace. For details, see `allowed_vpc_endpoint_ids`. .. py:attribute:: ACCOUNT :value: "ACCOUNT" @@ -237,8 +223,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: VpcStatus - The status of this network configuration object in terms of its use in a workspace: * - `UNATTACHED`: Unattached. * `VALID`: Valid. * `BROKEN`: Broken. * `WARNED`: Warned. + The status of this network configuration object in terms of its use in a workspace: * `UNATTACHED`: Unattached. * `VALID`: Valid. * `BROKEN`: Broken. * `WARNED`: Warned. .. py:attribute:: BROKEN :value: "BROKEN" @@ -268,8 +253,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: WorkspaceStatus - The status of the workspace. For workspace creation, usually it is set to `PROVISIONING` - initially. Continue to check the status until the status is `RUNNING`. + The status of the workspace. For workspace creation, usually it is set to `PROVISIONING` initially. Continue to check the status until the status is `RUNNING`. .. 
py:attribute:: BANNED :value: "BANNED" diff --git a/docs/dbdataclasses/serving.rst b/docs/dbdataclasses/serving.rst index d7e0634c..c4ab1d81 100644 --- a/docs/dbdataclasses/serving.rst +++ b/docs/dbdataclasses/serving.rst @@ -42,8 +42,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: AwsBedrockConfigBedrockProvider - The underlying provider in AWS Bedrock. Supported values (case insensitive) include: Anthropic, - Cohere, AI21Labs, Amazon. + The underlying provider in AWS Bedrock. Supported values (case insensitive) include: Anthropic, Cohere, AI21Labs, Amazon. .. py:attribute:: AI21LABS :value: "AI21LABS" @@ -155,10 +154,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: EndpointStateConfigUpdate - The state of an endpoint's config update. This informs the user if the pending_config is in - progress, if the update failed, or if there is no update in progress. Note that if the - endpoint's config_update state value is IN_PROGRESS, another update can not be made until the - update completes or fails. + The state of an endpoint's config update. This informs the user if the pending_config is in progress, if the update failed, or if there is no update in progress. Note that if the endpoint's config_update state value is IN_PROGRESS, another update can not be made until the update completes or fails. .. py:attribute:: IN_PROGRESS :value: "IN_PROGRESS" @@ -171,9 +167,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: EndpointStateReady - The state of an endpoint, indicating whether or not the endpoint is queryable. An endpoint is - READY if all of the served entities in its active configuration are ready. If any of the - actively served entities are in a non-ready state, the endpoint state will be NOT_READY. + The state of an endpoint, indicating whether or not the endpoint is queryable. 
An endpoint is READY if all of the served entities in its active configuration are ready. If any of the actively served entities are in a non-ready state, the endpoint state will be NOT_READY. .. py:attribute:: NOT_READY :value: "NOT_READY" @@ -191,9 +185,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: ExternalModelProvider - The name of the provider for the external model. Currently, the supported providers are - 'ai21labs', 'anthropic', 'aws-bedrock', 'cohere', 'databricks-model-serving', 'openai', and - 'palm'.", + The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic', 'aws-bedrock', 'cohere', 'databricks-model-serving', 'openai', and 'palm'.", .. py:attribute:: AI21LABS :value: "AI21LABS" @@ -274,8 +266,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: QueryEndpointResponseObject - The type of object returned by the __external/foundation model__ serving endpoint, one of - [text_completion, chat.completion, list (of embeddings)]. + The type of object returned by the __external/foundation model__ serving endpoint, one of [text_completion, chat.completion, list (of embeddings)]. .. py:attribute:: CHAT_COMPLETION :value: "CHAT_COMPLETION" @@ -292,8 +283,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: RateLimitKey - Key field for a serving endpoint rate limit. Currently, only 'user' and 'endpoint' are - supported, with 'endpoint' being the default if not specified. + Key field for a serving endpoint rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified. .. py:attribute:: ENDPOINT :value: "ENDPOINT" @@ -330,12 +320,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: ServedModelInputWorkloadSize - The workload size of the served model. 
The workload size corresponds to a range of provisioned - concurrency that the compute will autoscale between. A single unit of provisioned concurrency - can process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned - concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned - concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for - each workload size will be 0. + The workload size of the served model. The workload size corresponds to a range of provisioned concurrency that the compute will autoscale between. A single unit of provisioned concurrency can process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size will be 0. .. py:attribute:: LARGE :value: "LARGE" @@ -348,12 +333,8 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: ServedModelInputWorkloadType - The workload type of the served model. The workload type selects which type of compute to use in - the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU - acceleration is available by selecting workload types like GPU_SMALL and others. See the - available [GPU types]. - - [GPU types]: https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types + The workload type of the served model. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others. See the available [GPU types]. 
+ [GPU types]: https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types .. py:attribute:: CPU :value: "CPU" @@ -384,15 +365,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: ServedModelStateDeployment - The state of the served entity deployment. DEPLOYMENT_CREATING indicates that the served entity - is not ready yet because the deployment is still being created (i.e container image is building, - model server is deploying for the first time, etc.). DEPLOYMENT_RECOVERING indicates that the - served entity was previously in a ready state but no longer is and is attempting to recover. - DEPLOYMENT_READY indicates that the served entity is ready to receive traffic. DEPLOYMENT_FAILED - indicates that there was an error trying to bring up the served entity (e.g container image - build failed, the model server failed to start due to a model loading error, etc.) - DEPLOYMENT_ABORTED indicates that the deployment was terminated likely due to a failure in - bringing up another served entity under the same endpoint and config version. + The state of the served entity deployment. DEPLOYMENT_CREATING indicates that the served entity is not ready yet because the deployment is still being created (i.e container image is building, model server is deploying for the first time, etc.). DEPLOYMENT_RECOVERING indicates that the served entity was previously in a ready state but no longer is and is attempting to recover. DEPLOYMENT_READY indicates that the served entity is ready to receive traffic. DEPLOYMENT_FAILED indicates that there was an error trying to bring up the served entity (e.g container image build failed, the model server failed to start due to a model loading error, etc.) DEPLOYMENT_ABORTED indicates that the deployment was terminated likely due to a failure in bringing up another served entity under the same endpoint and config version. .. 
py:attribute:: ABORTED :value: "ABORTED" diff --git a/docs/dbdataclasses/settings.rst b/docs/dbdataclasses/settings.rst index 3b64aced..b0d989ad 100644 --- a/docs/dbdataclasses/settings.rst +++ b/docs/dbdataclasses/settings.rst @@ -30,8 +30,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: CreatePrivateEndpointRuleRequestGroupId - The sub-resource type (group ID) of the target resource. Note that to connect to workspace root - storage (root DBFS), you need two endpoints, one for `blob` and one for `dfs`. + The sub-resource type (group ID) of the target resource. Note that to connect to workspace root storage (root DBFS), you need two endpoints, one for `blob` and one for `dfs`. .. py:attribute:: BLOB :value: "BLOB" @@ -128,9 +127,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: ListType Type of IP access list. Valid values are as follows and are case-sensitive: - - * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or - range. IP addresses in the block list are excluded even if they are included in an allow list. + * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or range. IP addresses in the block list are excluded even if they are included in an allow list. .. py:attribute:: ALLOW :value: "ALLOW" @@ -144,16 +141,8 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: NccAzurePrivateEndpointRuleConnectionState - The current status of this private endpoint. The private endpoint rules are effective only if - the connection state is `ESTABLISHED`. Remember that you must approve new endpoints on your - resources in the Azure portal before they take effect. - - The possible values are: - INIT: (deprecated) The endpoint has been created and pending - approval. - PENDING: The endpoint has been created and pending approval. 
- ESTABLISHED: The - endpoint has been approved and is ready to use in your serverless compute resources. - REJECTED: - Connection was rejected by the private link resource owner. - DISCONNECTED: Connection was - removed by the private link resource owner, the private endpoint becomes informative and should - be deleted for clean-up. + The current status of this private endpoint. The private endpoint rules are effective only if the connection state is `ESTABLISHED`. Remember that you must approve new endpoints on your resources in the Azure portal before they take effect. + The possible values are: - INIT: (deprecated) The endpoint has been created and pending approval. - PENDING: The endpoint has been created and pending approval. - ESTABLISHED: The endpoint has been approved and is ready to use in your serverless compute resources. - REJECTED: Connection was rejected by the private link resource owner. - DISCONNECTED: Connection was removed by the private link resource owner, the private endpoint becomes informative and should be deleted for clean-up. .. py:attribute:: DISCONNECTED :value: "DISCONNECTED" @@ -172,8 +161,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: NccAzurePrivateEndpointRuleGroupId - The sub-resource type (group ID) of the target resource. Note that to connect to workspace root - storage (root DBFS), you need two endpoints, one for `blob` and one for `dfs`. + The sub-resource type (group ID) of the target resource. Note that to connect to workspace root storage (root DBFS), you need two endpoints, one for `blob` and one for `dfs`. .. py:attribute:: BLOB :value: "BLOB" @@ -217,11 +205,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: PersonalComputeMessageEnum - ON: Grants all users in all workspaces access to the Personal Compute default policy, allowing - all users to create single-machine compute resources. 
DELEGATE: Moves access control for the - Personal Compute default policy to individual workspaces and requires a workspace’s users or - groups to be added to the ACLs of that workspace’s Personal Compute default policy before they - will be able to create compute resources through that policy. + ON: Grants all users in all workspaces access to the Personal Compute default policy, allowing all users to create single-machine compute resources. DELEGATE: Moves access control for the Personal Compute default policy to individual workspaces and requires a workspace’s users or groups to be added to the ACLs of that workspace’s Personal Compute default policy before they will be able to create compute resources through that policy. .. py:attribute:: DELEGATE :value: "DELEGATE" diff --git a/docs/dbdataclasses/sharing.rst b/docs/dbdataclasses/sharing.rst index ab935d69..a4e78954 100644 --- a/docs/dbdataclasses/sharing.rst +++ b/docs/dbdataclasses/sharing.rst @@ -358,8 +358,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: SharedDataObjectHistoryDataSharingStatus - Whether to enable or disable sharing of data history. If not specified, the default is - **DISABLED**. + Whether to enable or disable sharing of data history. If not specified, the default is **DISABLED**. .. py:attribute:: DISABLED :value: "DISABLED" diff --git a/docs/dbdataclasses/sql.rst b/docs/dbdataclasses/sql.rst index 5bc58b4f..e6ec6205 100644 --- a/docs/dbdataclasses/sql.rst +++ b/docs/dbdataclasses/sql.rst @@ -35,8 +35,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: AlertState - State of the alert. Possible values are: `unknown` (yet to be evaluated), `triggered` (evaluated - and fulfilled trigger conditions), or `ok` (evaluated and did not fulfill trigger conditions). + State of the alert. 
Possible values are: `unknown` (yet to be evaluated), `triggered` (evaluated and fulfilled trigger conditions), or `ok` (evaluated and did not fulfill trigger conditions). .. py:attribute:: OK :value: "OK" @@ -82,8 +81,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: ColumnInfoTypeName - The name of the base data type. This doesn't include details for complex types such as STRUCT, - MAP or ARRAY. + The name of the base data type. This doesn't include details for complex types such as STRUCT, MAP or ARRAY. .. py:attribute:: ARRAY :value: "ARRAY" @@ -152,8 +150,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: CreateWarehouseRequestWarehouseType - Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` - and also set the field `enable_serverless_compute` to `true`. + Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` and also set the field `enable_serverless_compute` to `true`. .. py:attribute:: CLASSIC :value: "CLASSIC" @@ -195,25 +192,11 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: Disposition The fetch disposition provides two modes of fetching results: `INLINE` and `EXTERNAL_LINKS`. - - Statements executed with `INLINE` disposition will return result data inline, in `JSON_ARRAY` - format, in a series of chunks. If a given statement produces a result set with a size larger - than 25 MiB, that statement execution is aborted, and no result set will be available. - - **NOTE** Byte limits are computed based upon internal representations of the result set data, - and might not match the sizes visible in JSON responses. - - Statements executed with `EXTERNAL_LINKS` disposition will return result data as external links: - URLs that point to cloud storage internal to the workspace. 
Using `EXTERNAL_LINKS` disposition - allows statements to generate arbitrarily sized result sets for fetching up to 100 GiB. The - resulting links have two important properties: - - 1. They point to resources _external_ to the Databricks compute; therefore any associated - authentication information (typically a personal access token, OAuth token, or similar) _must be - removed_ when fetching from these links. - - 2. These are presigned URLs with a specific expiration, indicated in the response. The behavior - when attempting to use an expired link is cloud specific. + Statements executed with `INLINE` disposition will return result data inline, in `JSON_ARRAY` format, in a series of chunks. If a given statement produces a result set with a size larger than 25 MiB, that statement execution is aborted, and no result set will be available. + **NOTE** Byte limits are computed based upon internal representations of the result set data, and might not match the sizes visible in JSON responses. + Statements executed with `EXTERNAL_LINKS` disposition will return result data as external links: URLs that point to cloud storage internal to the workspace. Using `EXTERNAL_LINKS` disposition allows statements to generate arbitrarily sized result sets for fetching up to 100 GiB. The resulting links have two important properties: + 1. They point to resources _external_ to the Databricks compute; therefore any associated authentication information (typically a personal access token, OAuth token, or similar) _must be removed_ when fetching from these links. + 2. These are presigned URLs with a specific expiration, indicated in the response. The behavior when attempting to use an expired link is cloud specific. .. py:attribute:: EXTERNAL_LINKS :value: "EXTERNAL_LINKS" @@ -231,8 +214,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: EditWarehouseRequestWarehouseType - Warehouse type: `PRO` or `CLASSIC`. 
If you want to use serverless compute, you must set to `PRO` - and also set the field `enable_serverless_compute` to `true`. + Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` and also set the field `enable_serverless_compute` to `true`. .. py:attribute:: CLASSIC :value: "CLASSIC" @@ -257,8 +239,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: EndpointInfoWarehouseType - Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` - and also set the field `enable_serverless_compute` to `true`. + Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` and also set the field `enable_serverless_compute` to `true`. .. py:attribute:: CLASSIC :value: "CLASSIC" @@ -283,12 +264,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: ExecuteStatementRequestOnWaitTimeout - When `wait_timeout > 0s`, the call will block up to the specified time. If the statement - execution doesn't finish within this time, `on_wait_timeout` determines whether the execution - should continue or be canceled. When set to `CONTINUE`, the statement execution continues - asynchronously and the call returns a statement ID which can be used for polling with - :method:statementexecution/getStatement. When set to `CANCEL`, the statement execution is - canceled and the call returns with a `CANCELED` state. + When `wait_timeout > 0s`, the call will block up to the specified time. If the statement execution doesn't finish within this time, `on_wait_timeout` determines whether the execution should continue or be canceled. When set to `CONTINUE`, the statement execution continues asynchronously and the call returns a statement ID which can be used for polling with :method:statementexecution/getStatement. 
When set to `CANCEL`, the statement execution is canceled and the call returns with a `CANCELED` state. .. py:attribute:: CANCEL :value: "CANCEL" @@ -333,8 +309,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: GetWarehouseResponseWarehouseType - Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` - and also set the field `enable_serverless_compute` to `true`. + Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` and also set the field `enable_serverless_compute` to `true`. .. py:attribute:: CLASSIC :value: "CLASSIC" @@ -460,8 +435,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: PermissionLevel - * `CAN_VIEW`: Can view the query * `CAN_RUN`: Can run the query * `CAN_EDIT`: Can edit the query - * `CAN_MANAGE`: Can manage the query + * `CAN_VIEW`: Can view the query * `CAN_RUN`: Can run the query * `CAN_EDIT`: Can edit the query * `CAN_MANAGE`: Can manage the query .. py:attribute:: CAN_EDIT :value: "CAN_EDIT" @@ -601,9 +575,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: QueryStatus - Query status with one the following values: * `QUEUED`: Query has been received and queued. * - `RUNNING`: Query has started. * `CANCELED`: Query has been cancelled by the user. * `FAILED`: - Query has failed. * `FINISHED`: Query has completed. + Query status with one the following values: * `QUEUED`: Query has been received and queued. * `RUNNING`: Query has started. * `CANCELED`: Query has been cancelled by the user. * `FAILED`: Query has failed. * `FINISHED`: Query has completed. .. py:attribute:: CANCELED :value: "CANCELED" @@ -638,8 +610,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: RunAsRole - Sets the **Run as** role for the object. 
Must be set to one of `"viewer"` (signifying "run as - viewer" behavior) or `"owner"` (signifying "run as owner" behavior) + Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as viewer" behavior) or `"owner"` (signifying "run as owner" behavior) .. py:attribute:: OWNER :value: "OWNER" @@ -757,11 +728,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: StatementState - Statement execution state: - `PENDING`: waiting for warehouse - `RUNNING`: running - - `SUCCEEDED`: execution was successful, result data available for fetch - `FAILED`: execution - failed; reason for failure described in accomanying error message - `CANCELED`: user canceled; - can come from explicit cancel call, or timeout with `on_wait_timeout=CANCEL` - `CLOSED`: - execution successful, and statement closed; result no longer available for fetch + Statement execution state: - `PENDING`: waiting for warehouse - `RUNNING`: running - `SUCCEEDED`: execution was successful, result data available for fetch - `FAILED`: execution failed; reason for failure described in accomanying error message - `CANCELED`: user canceled; can come from explicit cancel call, or timeout with `on_wait_timeout=CANCEL` - `CLOSED`: execution successful, and statement closed; result no longer available for fetch .. py:attribute:: CANCELED :value: "CANCELED" diff --git a/docs/dbdataclasses/vectorsearch.rst b/docs/dbdataclasses/vectorsearch.rst index 521eccbf..d8c28117 100644 --- a/docs/dbdataclasses/vectorsearch.rst +++ b/docs/dbdataclasses/vectorsearch.rst @@ -112,12 +112,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: PipelineType Pipeline execution mode. 
- - - `TRIGGERED`: If the pipeline uses the triggered execution mode, the system stops processing - after successfully refreshing the source table in the pipeline once, ensuring the table is - updated based on the data available when the update started. - `CONTINUOUS`: If the pipeline - uses continuous execution, the pipeline processes new data as it arrives in the source table to - keep vector index fresh. + - `TRIGGERED`: If the pipeline uses the triggered execution mode, the system stops processing after successfully refreshing the source table in the pipeline once, ensuring the table is updated based on the data available when the update started. - `CONTINUOUS`: If the pipeline uses continuous execution, the pipeline processes new data as it arrives in the source table to keep vector index fresh. .. py:attribute:: CONTINUOUS :value: "CONTINUOUS" @@ -177,11 +172,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: VectorIndexType There are 2 types of Vector Search indexes: - - - `DELTA_SYNC`: An index that automatically syncs with a source Delta Table, automatically and - incrementally updating the index as the underlying data in the Delta Table changes. - - `DIRECT_ACCESS`: An index that supports direct read and write of vectors and metadata through - our REST and SDK APIs. With this model, the user manages index updates. + - `DELTA_SYNC`: An index that automatically syncs with a source Delta Table, automatically and incrementally updating the index as the underlying data in the Delta Table changes. - `DIRECT_ACCESS`: An index that supports direct read and write of vectors and metadata through our REST and SDK APIs. With this model, the user manages index updates. .. 
py:attribute:: DELTA_SYNC :value: "DELTA_SYNC" diff --git a/docs/dbdataclasses/workspace.rst b/docs/dbdataclasses/workspace.rst index 7821544b..fdedd26c 100644 --- a/docs/dbdataclasses/workspace.rst +++ b/docs/dbdataclasses/workspace.rst @@ -106,16 +106,8 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: ImportFormat This specifies the format of the file to be imported. - - The value is case sensitive. - - - `AUTO`: The item is imported depending on an analysis of the item's extension and the header - content provided in the request. If the item is imported as a notebook, then the item's - extension is automatically removed. - `SOURCE`: The notebook or directory is imported as source - code. - `HTML`: The notebook is imported as an HTML file. - `JUPYTER`: The notebook is imported - as a Jupyter/IPython Notebook file. - `DBC`: The notebook is imported in Databricks archive - format. Required for directories. - `R_MARKDOWN`: The notebook is imported from R Markdown - format. + The value is case sensitive. + - `AUTO`: The item is imported depending on an analysis of the item's extension and the header content provided in the request. If the item is imported as a notebook, then the item's extension is automatically removed. - `SOURCE`: The notebook or directory is imported as source code. - `HTML`: The notebook is imported as an HTML file. - `JUPYTER`: The notebook is imported as a Jupyter/IPython Notebook file. - `DBC`: The notebook is imported in Databricks archive format. Required for directories. - `R_MARKDOWN`: The notebook is imported from R Markdown format. .. py:attribute:: AUTO :value: "AUTO" @@ -182,10 +174,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: ObjectType The type of the object in workspace. - - - `NOTEBOOK`: document that contains runnable code, visualizations, and explanatory text. 
- - `DIRECTORY`: directory - `LIBRARY`: library - `FILE`: file - `REPO`: repository - `DASHBOARD`: - Lakeview dashboard + - `NOTEBOOK`: document that contains runnable code, visualizations, and explanatory text. - `DIRECTORY`: directory - `LIBRARY`: library - `FILE`: file - `REPO`: repository - `DASHBOARD`: Lakeview dashboard .. py:attribute:: DASHBOARD :value: "DASHBOARD" diff --git a/docs/gen-client-docs.py b/docs/gen-client-docs.py index 1f805ce7..170d7200 100644 --- a/docs/gen-client-docs.py +++ b/docs/gen-client-docs.py @@ -4,6 +4,7 @@ import inspect import json import os.path +import re import subprocess import importlib from dataclasses import dataclass, is_dataclass @@ -17,7 +18,6 @@ __dir__ = os.path.dirname(__file__) __examples__ = Path(f'{__dir__}/../examples').absolute() - @dataclass class Package: name: str @@ -158,7 +158,7 @@ def dataclass_rst(self, cls) -> str: '', ] if clss.__doc__ is not None: - out.append(f' {clss.__doc__}') + out.append(f' {self._get_enum_doc(clss)}') out.append('') for v in clss.__members__.keys(): out.append(f' .. py:attribute:: {v}') @@ -173,6 +173,23 @@ def dataclass_rst(self, cls) -> str: ] return "\n".join(out) + @staticmethod + def _get_enum_doc(cls) -> str: + stripped = [] + for line in cls.__doc__.split('\n'): + stripped.append(line.strip()) + result = [] + current = [] + for line in stripped: + if line == '': + if len(current) > 0: + result.append(' '.join(current)) + current = [] + else: + current.append(line) + if len(current) > 0: + result.append(' '.join(current)) + return '\n '.join(result) class Generator: packages = [