diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 1079283d..4800af05 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -0a1949ba96f71680dad30e06973eaae85b1307bb \ No newline at end of file +a1b6c1ecfaab6635911d3c060a8dd797ac6b2d4d \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index 0dc018d9..d852a5d4 100755 --- a/.gitattributes +++ b/.gitattributes @@ -134,9 +134,9 @@ examples/log_delivery/list_log_delivery.py linguist-generated=true examples/metastores/assign_metastores.py linguist-generated=true examples/metastores/create_metastores.py linguist-generated=true examples/metastores/current_metastores.py linguist-generated=true -examples/metastores/enable_optimization_metastores.py linguist-generated=true examples/metastores/get_metastores.py linguist-generated=true examples/metastores/list_metastores.py linguist-generated=true +examples/metastores/maintenance_metastores.py linguist-generated=true examples/metastores/summary_metastores.py linguist-generated=true examples/metastores/unassign_metastores.py linguist-generated=true examples/metastores/update_metastores.py linguist-generated=true @@ -235,16 +235,12 @@ examples/token_management/list_create_obo_token_on_aws.py linguist-generated=tru examples/tokens/create_tokens.py linguist-generated=true examples/tokens/get_tokens.py linguist-generated=true examples/tokens/list_tokens.py linguist-generated=true -examples/users/create_account_users.py linguist-generated=true examples/users/create_clusters_api_integration.py linguist-generated=true -examples/users/create_workspace_users.py linguist-generated=true -examples/users/delete_account_users.py linguist-generated=true +examples/users/create_users.py linguist-generated=true examples/users/delete_clusters_api_integration.py linguist-generated=true -examples/users/delete_workspace_users.py linguist-generated=true -examples/users/get_account_users.py linguist-generated=true 
-examples/users/get_workspace_users.py linguist-generated=true -examples/users/list_workspace_users.py linguist-generated=true -examples/users/patch_account_users.py linguist-generated=true +examples/users/delete_users.py linguist-generated=true +examples/users/get_users.py linguist-generated=true +examples/users/list_users.py linguist-generated=true examples/volumes/create_volumes.py linguist-generated=true examples/volumes/list_volumes.py linguist-generated=true examples/volumes/read_volumes.py linguist-generated=true diff --git a/CHANGELOG.md b/CHANGELOG.md index fe3f2357..b7baeac7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,39 @@ # Version changelog +## 0.3.0 + +* Fixed serialization of lists of enum values ([#248](https://github.com/databricks/databricks-sdk-py/pull/248)). +* Fixed examples that used incorrect keyword argument names. (https://github.com/databricks/databricks-sdk-go/pull/560) +* Handled nested query parameters in ApiClient.do() ([#249](https://github.com/databricks/databricks-sdk-py/pull/249)). +* Improved access of `__annotations__` ([#239](https://github.com/databricks/databricks-sdk-py/pull/239)). + +API Changes: + + * Changed `create()` method for [a.account_metastore_assignments](https://databricks-sdk-py.readthedocs.io/en/latest/account/account_metastore_assignments.html) account-level service to no longer return `databricks.sdk.service.catalog.CreateMetastoreAssignmentsResponseItemList` dataclass. + * Added `connection_name` field for `databricks.sdk.service.catalog.CreateCatalog`. + * Added `access_point` field for `databricks.sdk.service.catalog.CreateExternalLocation`. + * Added `encryption_details` field for `databricks.sdk.service.catalog.CreateExternalLocation`. + * Removed `databricks.sdk.service.catalog.CreateMetastoreAssignmentsResponseItem` dataclass. + * Added `access_point` field for `databricks.sdk.service.catalog.ExternalLocationInfo`. 
+ * Added `encryption_details` field for `databricks.sdk.service.catalog.ExternalLocationInfo`. + * Added `access_point` field for `databricks.sdk.service.catalog.TableInfo`. + * Added `encryption_details` field for `databricks.sdk.service.catalog.TableInfo`. + * Added `access_point` field for `databricks.sdk.service.catalog.UpdateExternalLocation`. + * Added `encryption_details` field for `databricks.sdk.service.catalog.UpdateExternalLocation`. + * Added `access_point` field for `databricks.sdk.service.catalog.VolumeInfo`. + * Added `encryption_details` field for `databricks.sdk.service.catalog.VolumeInfo`. + * Added `databricks.sdk.service.catalog.EncryptionDetails` dataclass. + * Added `databricks.sdk.service.catalog.SseEncryptionDetails` dataclass. + * Added `databricks.sdk.service.catalog.SseEncryptionDetailsAlgorithm` dataclass. + * Added [a.account_network_policy](https://databricks-sdk-py.readthedocs.io/en/latest/account/account_network_policy.html) account-level service. + * Added `databricks.sdk.service.settings.AccountNetworkPolicyMessage` dataclass. + * Added `databricks.sdk.service.settings.DeleteAccountNetworkPolicyRequest` dataclass. + * Added `databricks.sdk.service.settings.DeleteAccountNetworkPolicyResponse` dataclass. + * Added `databricks.sdk.service.settings.ReadAccountNetworkPolicyRequest` dataclass. + * Added `databricks.sdk.service.settings.UpdateAccountNetworkPolicyRequest` dataclass. + +OpenAPI SHA: a1b6c1ecfaab6635911d3c060a8dd797ac6b2d4d, Date: 2023-07-27 + ## 0.2.1 * Support older versions of `urllib3` and Databricks Runtime with regards to `DEFAULT_METHOD_WHITELIST` change to `DEFAULT_ALLOWED_METHODS` ([#240](https://github.com/databricks/databricks-sdk-py/pull/240)). 
diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py index c1c3d189..6d90a51f 100755 --- a/databricks/sdk/__init__.py +++ b/databricks/sdk/__init__.py @@ -44,6 +44,7 @@ WorkspacesAPI) from databricks.sdk.service.serving import ServingEndpointsAPI from databricks.sdk.service.settings import (AccountIpAccessListsAPI, + AccountNetworkPolicyAPI, AccountSettingsAPI, IpAccessListsAPI, TokenManagementAPI, TokensAPI, @@ -244,6 +245,7 @@ def __init__(self, self.log_delivery = LogDeliveryAPI(self.api_client) self.metastore_assignments = AccountMetastoreAssignmentsAPI(self.api_client) self.metastores = AccountMetastoresAPI(self.api_client) + self.network_policy = AccountNetworkPolicyAPI(self.api_client) self.networks = NetworksAPI(self.api_client) self.o_auth_enrollment = OAuthEnrollmentAPI(self.api_client) self.private_access = PrivateAccessAPI(self.api_client) diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py index 9f6b03a7..c9e211cd 100755 --- a/databricks/sdk/service/catalog.py +++ b/databricks/sdk/service/catalog.py @@ -452,6 +452,7 @@ class ConnectionType(Enum): class CreateCatalog: name: str comment: Optional[str] = None + connection_name: Optional[str] = None properties: Optional['Dict[str,str]'] = None provider_name: Optional[str] = None share_name: Optional[str] = None @@ -460,6 +461,7 @@ class CreateCatalog: def as_dict(self) -> dict: body = {} if self.comment is not None: body['comment'] = self.comment + if self.connection_name is not None: body['connection_name'] = self.connection_name if self.name is not None: body['name'] = self.name if self.properties: body['properties'] = self.properties if self.provider_name is not None: body['provider_name'] = self.provider_name @@ -470,6 +472,7 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'CreateCatalog': return cls(comment=d.get('comment', None), + connection_name=d.get('connection_name', None), name=d.get('name', None), 
properties=d.get('properties', None), provider_name=d.get('provider_name', None), @@ -514,14 +517,18 @@ class CreateExternalLocation: name: str url: str credential_name: str + access_point: Optional[str] = None comment: Optional[str] = None + encryption_details: Optional['EncryptionDetails'] = None read_only: Optional[bool] = None skip_validation: Optional[bool] = None def as_dict(self) -> dict: body = {} + if self.access_point is not None: body['access_point'] = self.access_point if self.comment is not None: body['comment'] = self.comment if self.credential_name is not None: body['credential_name'] = self.credential_name + if self.encryption_details: body['encryption_details'] = self.encryption_details.as_dict() if self.name is not None: body['name'] = self.name if self.read_only is not None: body['read_only'] = self.read_only if self.skip_validation is not None: body['skip_validation'] = self.skip_validation @@ -530,8 +537,10 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'CreateExternalLocation': - return cls(comment=d.get('comment', None), + return cls(access_point=d.get('access_point', None), + comment=d.get('comment', None), credential_name=d.get('credential_name', None), + encryption_details=_from_dict(d, 'encryption_details', EncryptionDetails), name=d.get('name', None), read_only=d.get('read_only', None), skip_validation=d.get('skip_validation', None), @@ -683,26 +692,6 @@ def from_dict(cls, d: Dict[str, any]) -> 'CreateMetastoreAssignment': workspace_id=d.get('workspace_id', None)) -@dataclass -class CreateMetastoreAssignmentsResponseItem: - message: Optional[str] = None - metastore_assignment: Optional['MetastoreAssignment'] = None - status_code: Optional[int] = None - - def as_dict(self) -> dict: - body = {} - if self.message is not None: body['message'] = self.message - if self.metastore_assignment: body['metastore_assignment'] = self.metastore_assignment.as_dict() - if self.status_code is not None: 
body['status_code'] = self.status_code - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'CreateMetastoreAssignmentsResponseItem': - return cls(message=d.get('message', None), - metastore_assignment=_from_dict(d, 'metastore_assignment', MetastoreAssignment), - status_code=d.get('status_code', None)) - - @dataclass class CreateSchema: name: str @@ -1124,13 +1113,31 @@ class EnableSchemaName(Enum): OPERATIONAL_DATA = 'operational_data' +@dataclass +class EncryptionDetails: + """Encryption options that apply to clients connecting to cloud storage.""" + + sse_encryption_details: Optional['SseEncryptionDetails'] = None + + def as_dict(self) -> dict: + body = {} + if self.sse_encryption_details: body['sse_encryption_details'] = self.sse_encryption_details.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'EncryptionDetails': + return cls(sse_encryption_details=_from_dict(d, 'sse_encryption_details', SseEncryptionDetails)) + + @dataclass class ExternalLocationInfo: + access_point: Optional[str] = None comment: Optional[str] = None created_at: Optional[int] = None created_by: Optional[str] = None credential_id: Optional[str] = None credential_name: Optional[str] = None + encryption_details: Optional['EncryptionDetails'] = None metastore_id: Optional[str] = None name: Optional[str] = None owner: Optional[str] = None @@ -1141,11 +1148,13 @@ class ExternalLocationInfo: def as_dict(self) -> dict: body = {} + if self.access_point is not None: body['access_point'] = self.access_point if self.comment is not None: body['comment'] = self.comment if self.created_at is not None: body['created_at'] = self.created_at if self.created_by is not None: body['created_by'] = self.created_by if self.credential_id is not None: body['credential_id'] = self.credential_id if self.credential_name is not None: body['credential_name'] = self.credential_name + if self.encryption_details: body['encryption_details'] = 
self.encryption_details.as_dict() if self.metastore_id is not None: body['metastore_id'] = self.metastore_id if self.name is not None: body['name'] = self.name if self.owner is not None: body['owner'] = self.owner @@ -1157,11 +1166,13 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'ExternalLocationInfo': - return cls(comment=d.get('comment', None), + return cls(access_point=d.get('access_point', None), + comment=d.get('comment', None), created_at=d.get('created_at', None), created_by=d.get('created_by', None), credential_id=d.get('credential_id', None), credential_name=d.get('credential_name', None), + encryption_details=_from_dict(d, 'encryption_details', EncryptionDetails), metastore_id=d.get('metastore_id', None), name=d.get('name', None), owner=d.get('owner', None), @@ -1986,8 +1997,10 @@ class Privilege(Enum): ALL_PRIVILEGES = 'ALL_PRIVILEGES' CREATE = 'CREATE' CREATE_CATALOG = 'CREATE_CATALOG' + CREATE_CONNECTION = 'CREATE_CONNECTION' CREATE_EXTERNAL_LOCATION = 'CREATE_EXTERNAL_LOCATION' CREATE_EXTERNAL_TABLE = 'CREATE_EXTERNAL_TABLE' + CREATE_FOREIGN_CATALOG = 'CREATE_FOREIGN_CATALOG' CREATE_FUNCTION = 'CREATE_FUNCTION' CREATE_MANAGED_STORAGE = 'CREATE_MANAGED_STORAGE' CREATE_MATERIALIZED_VIEW = 'CREATE_MATERIALIZED_VIEW' @@ -2007,6 +2020,7 @@ class Privilege(Enum): SET_SHARE_PERMISSION = 'SET_SHARE_PERMISSION' USAGE = 'USAGE' USE_CATALOG = 'USE_CATALOG' + USE_CONNECTION = 'USE_CONNECTION' USE_MARKETPLACE_ASSETS = 'USE_MARKETPLACE_ASSETS' USE_PROVIDER = 'USE_PROVIDER' USE_RECIPIENT = 'USE_RECIPIENT' @@ -2113,6 +2127,7 @@ class SecurableType(Enum): """The type of Unity Catalog securable""" CATALOG = 'catalog' + CONNECTION = 'connection' EXTERNAL_LOCATION = 'external_location' FUNCTION = 'function' METASTORE = 'metastore' @@ -2125,6 +2140,32 @@ class SecurableType(Enum): TABLE = 'table' +@dataclass +class SseEncryptionDetails: + """Server-Side Encryption properties for clients communicating with AWS s3.""" + + algorithm: 
'SseEncryptionDetailsAlgorithm' + aws_kms_key_arn: Optional[str] = None + + def as_dict(self) -> dict: + body = {} + if self.algorithm is not None: body['algorithm'] = self.algorithm.value + if self.aws_kms_key_arn is not None: body['aws_kms_key_arn'] = self.aws_kms_key_arn + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'SseEncryptionDetails': + return cls(algorithm=_enum(d, 'algorithm', SseEncryptionDetailsAlgorithm), + aws_kms_key_arn=d.get('aws_kms_key_arn', None)) + + +class SseEncryptionDetailsAlgorithm(Enum): + """The type of key encryption to use (affects headers from s3 client).""" + + AWS_SSE_KMS = 'AWS_SSE_KMS' + AWS_SSE_S3 = 'AWS_SSE_S3' + + @dataclass class StorageCredentialInfo: aws_iam_role: Optional['AwsIamRole'] = None @@ -2267,6 +2308,7 @@ def from_dict(cls, d: Dict[str, any]) -> 'TableDependency': @dataclass class TableInfo: + access_point: Optional[str] = None catalog_name: Optional[str] = None columns: Optional['List[ColumnInfo]'] = None comment: Optional[str] = None @@ -2278,6 +2320,7 @@ class TableInfo: delta_runtime_properties_kvpairs: Optional['DeltaRuntimePropertiesKvPairs'] = None effective_auto_maintenance_flag: Optional['EffectiveAutoMaintenanceFlag'] = None enable_auto_maintenance: Optional['EnableAutoMaintenance'] = None + encryption_details: Optional['EncryptionDetails'] = None full_name: Optional[str] = None metastore_id: Optional[str] = None name: Optional[str] = None @@ -2298,6 +2341,7 @@ class TableInfo: def as_dict(self) -> dict: body = {} + if self.access_point is not None: body['access_point'] = self.access_point if self.catalog_name is not None: body['catalog_name'] = self.catalog_name if self.columns: body['columns'] = [v.as_dict() for v in self.columns] if self.comment is not None: body['comment'] = self.comment @@ -2313,6 +2357,7 @@ def as_dict(self) -> dict: body['effective_auto_maintenance_flag'] = self.effective_auto_maintenance_flag.as_dict() if self.enable_auto_maintenance is not None: 
body['enable_auto_maintenance'] = self.enable_auto_maintenance.value + if self.encryption_details: body['encryption_details'] = self.encryption_details.as_dict() if self.full_name is not None: body['full_name'] = self.full_name if self.metastore_id is not None: body['metastore_id'] = self.metastore_id if self.name is not None: body['name'] = self.name @@ -2335,7 +2380,8 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'TableInfo': - return cls(catalog_name=d.get('catalog_name', None), + return cls(access_point=d.get('access_point', None), + catalog_name=d.get('catalog_name', None), columns=_repeated(d, 'columns', ColumnInfo), comment=d.get('comment', None), created_at=d.get('created_at', None), @@ -2348,6 +2394,7 @@ def from_dict(cls, d: Dict[str, any]) -> 'TableInfo': effective_auto_maintenance_flag=_from_dict(d, 'effective_auto_maintenance_flag', EffectiveAutoMaintenanceFlag), enable_auto_maintenance=_enum(d, 'enable_auto_maintenance', EnableAutoMaintenance), + encryption_details=_from_dict(d, 'encryption_details', EncryptionDetails), full_name=d.get('full_name', None), metastore_id=d.get('metastore_id', None), name=d.get('name', None), @@ -2464,8 +2511,10 @@ def from_dict(cls, d: Dict[str, any]) -> 'UpdateConnection': @dataclass class UpdateExternalLocation: + access_point: Optional[str] = None comment: Optional[str] = None credential_name: Optional[str] = None + encryption_details: Optional['EncryptionDetails'] = None force: Optional[bool] = None name: Optional[str] = None owner: Optional[str] = None @@ -2474,8 +2523,10 @@ class UpdateExternalLocation: def as_dict(self) -> dict: body = {} + if self.access_point is not None: body['access_point'] = self.access_point if self.comment is not None: body['comment'] = self.comment if self.credential_name is not None: body['credential_name'] = self.credential_name + if self.encryption_details: body['encryption_details'] = self.encryption_details.as_dict() if self.force is not None: 
body['force'] = self.force if self.name is not None: body['name'] = self.name if self.owner is not None: body['owner'] = self.owner @@ -2485,8 +2536,10 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'UpdateExternalLocation': - return cls(comment=d.get('comment', None), + return cls(access_point=d.get('access_point', None), + comment=d.get('comment', None), credential_name=d.get('credential_name', None), + encryption_details=_from_dict(d, 'encryption_details', EncryptionDetails), force=d.get('force', None), name=d.get('name', None), owner=d.get('owner', None), @@ -2857,10 +2910,12 @@ class ValidationResultResult(Enum): @dataclass class VolumeInfo: + access_point: Optional[str] = None catalog_name: Optional[str] = None comment: Optional[str] = None created_at: Optional[int] = None created_by: Optional[str] = None + encryption_details: Optional['EncryptionDetails'] = None full_name: Optional[str] = None metastore_id: Optional[str] = None name: Optional[str] = None @@ -2874,10 +2929,12 @@ class VolumeInfo: def as_dict(self) -> dict: body = {} + if self.access_point is not None: body['access_point'] = self.access_point if self.catalog_name is not None: body['catalog_name'] = self.catalog_name if self.comment is not None: body['comment'] = self.comment if self.created_at is not None: body['created_at'] = self.created_at if self.created_by is not None: body['created_by'] = self.created_by + if self.encryption_details: body['encryption_details'] = self.encryption_details.as_dict() if self.full_name is not None: body['full_name'] = self.full_name if self.metastore_id is not None: body['metastore_id'] = self.metastore_id if self.name is not None: body['name'] = self.name @@ -2892,10 +2949,12 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'VolumeInfo': - return cls(catalog_name=d.get('catalog_name', None), + return cls(access_point=d.get('access_point', None), + catalog_name=d.get('catalog_name', None), 
comment=d.get('comment', None), created_at=d.get('created_at', None), created_by=d.get('created_by', None), + encryption_details=_from_dict(d, 'encryption_details', EncryptionDetails), full_name=d.get('full_name', None), metastore_id=d.get('metastore_id', None), name=d.get('name', None), @@ -2925,11 +2984,10 @@ def create(self, metastore_id: str, *, metastore_assignment: Optional[CreateMetastoreAssignment] = None, - **kwargs) -> Iterator[CreateMetastoreAssignmentsResponseItem]: + **kwargs): """Assigns a workspace to a metastore. - Creates an assignment to a metastore for a workspace Please add a header - X-Databricks-Account-Console-API-Version: 2.0 to access this API. + Creates an assignment to a metastore for a workspace :param workspace_id: int Workspace ID. @@ -2937,7 +2995,7 @@ def create(self, Unity Catalog metastore ID :param metastore_assignment: :class:`CreateMetastoreAssignment` (optional) - :returns: Iterator over :class:`CreateMetastoreAssignmentsResponseItem` + """ request = kwargs.get('request', None) if not request: # request is not given through keyed args @@ -2945,18 +3003,15 @@ def create(self, metastore_id=metastore_id, workspace_id=workspace_id) body = request.as_dict() - - json = self._api.do( + self._api.do( 'POST', f'/api/2.0/accounts/{self._api.account_id}/workspaces/{request.workspace_id}/metastores/{request.metastore_id}', body=body) - return [CreateMetastoreAssignmentsResponseItem.from_dict(v) for v in json] def delete(self, workspace_id: int, metastore_id: str, **kwargs): """Delete a metastore assignment. - Deletes a metastore assignment to a workspace, leaving the workspace with no metastore. Please add a - header X-Databricks-Account-Console-API-Version: 2.0 to access this API. + Deletes a metastore assignment to a workspace, leaving the workspace with no metastore. :param workspace_id: int Workspace ID. 
@@ -2980,8 +3035,7 @@ def get(self, workspace_id: int, **kwargs) -> AccountsMetastoreAssignment: Gets the metastore assignment, if any, for the workspace specified by ID. If the workspace is assigned a metastore, the mappig will be returned. If no metastore is assigned to the workspace, the assignment - will not be found and a 404 returned. Please add a header X-Databricks-Account-Console-API-Version: - 2.0 to access this API. + will not be found and a 404 returned. :param workspace_id: int Workspace ID. @@ -2999,8 +3053,7 @@ def get(self, workspace_id: int, **kwargs) -> AccountsMetastoreAssignment: def list(self, metastore_id: str, **kwargs) -> Iterator[MetastoreAssignment]: """Get all workspaces assigned to a metastore. - Gets a list of all Databricks workspace IDs that have been assigned to given metastore. Please add a - header X-Databricks-Account-Console-API-Version: 2.0 to access this API + Gets a list of all Databricks workspace IDs that have been assigned to given metastore. :param metastore_id: str Unity Catalog metastore ID @@ -3024,7 +3077,7 @@ def update(self, """Updates a metastore assignment to a workspaces. Updates an assignment to a metastore for a workspace. Currently, only the default catalog may be - updated. Please add a header X-Databricks-Account-Console-API-Version: 2.0 to access this API. + updated. :param workspace_id: int Workspace ID. @@ -3056,8 +3109,7 @@ def __init__(self, api_client): def create(self, *, metastore_info: Optional[CreateMetastore] = None, **kwargs) -> AccountsMetastoreInfo: """Create metastore. - Creates a Unity Catalog metastore. Please add a header X-Databricks-Account-Console-API-Version: 2.0 - to access this API. + Creates a Unity Catalog metastore. :param metastore_info: :class:`CreateMetastore` (optional) @@ -3074,8 +3126,7 @@ def create(self, *, metastore_info: Optional[CreateMetastore] = None, **kwargs) def delete(self, metastore_id: str, *, force: Optional[bool] = None, **kwargs): """Delete a metastore. 
- Deletes a Unity Catalog metastore for an account, both specified by ID. Please add a header - X-Databricks-Account-Console-API-Version: 2.0 to access this API. + Deletes a Unity Catalog metastore for an account, both specified by ID. :param metastore_id: str Unity Catalog metastore ID @@ -3098,8 +3149,7 @@ def delete(self, metastore_id: str, *, force: Optional[bool] = None, **kwargs): def get(self, metastore_id: str, **kwargs) -> AccountsMetastoreInfo: """Get a metastore. - Gets a Unity Catalog metastore from an account, both specified by ID. Please add a header - X-Databricks-Account-Console-API-Version: 2.0 to access this API. + Gets a Unity Catalog metastore from an account, both specified by ID. :param metastore_id: str Unity Catalog metastore ID @@ -3117,8 +3167,7 @@ def get(self, metastore_id: str, **kwargs) -> AccountsMetastoreInfo: def list(self) -> ListMetastoresResponse: """Get all metastores associated with an account. - Gets all Unity Catalog metastores associated with an account specified by ID. Please add a header - X-Databricks-Account-Console-API-Version: 2.0 to access this API. + Gets all Unity Catalog metastores associated with an account specified by ID. :returns: :class:`ListMetastoresResponse` """ @@ -3133,8 +3182,7 @@ def update(self, **kwargs) -> AccountsMetastoreInfo: """Update a metastore. - Updates an existing Unity Catalog metastore. Please add a header - X-Databricks-Account-Console-API-Version: 2.0 to access this API. + Updates an existing Unity Catalog metastore. :param metastore_id: str Unity Catalog metastore ID @@ -3309,6 +3357,7 @@ def create(self, name: str, *, comment: Optional[str] = None, + connection_name: Optional[str] = None, properties: Optional[Dict[str, str]] = None, provider_name: Optional[str] = None, share_name: Optional[str] = None, @@ -3323,6 +3372,8 @@ def create(self, Name of catalog. :param comment: str (optional) User-provided free-form text description. 
+ :param connection_name: str (optional) + The name of the connection to an external data source. :param properties: Dict[str,str] (optional) A map of key-value properties attached to the securable. :param provider_name: str (optional) @@ -3339,6 +3390,7 @@ def create(self, request = kwargs.get('request', None) if not request: # request is not given through keyed args request = CreateCatalog(comment=comment, + connection_name=connection_name, name=name, properties=properties, provider_name=provider_name, @@ -3590,7 +3642,9 @@ def create(self, url: str, credential_name: str, *, + access_point: Optional[str] = None, comment: Optional[str] = None, + encryption_details: Optional[EncryptionDetails] = None, read_only: Optional[bool] = None, skip_validation: Optional[bool] = None, **kwargs) -> ExternalLocationInfo: @@ -3606,8 +3660,12 @@ def create(self, Path URL of the external location. :param credential_name: str Name of the storage credential used with this location. + :param access_point: str (optional) + The AWS access point to use when accessing s3 for this external location. :param comment: str (optional) User-provided free-form text description. + :param encryption_details: :class:`EncryptionDetails` (optional) + Encryption options that apply to clients connecting to cloud storage. :param read_only: bool (optional) Indicates whether the external location is read-only. 
:param skip_validation: bool (optional) @@ -3617,8 +3675,10 @@ def create(self, """ request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = CreateExternalLocation(comment=comment, + request = CreateExternalLocation(access_point=access_point, + comment=comment, credential_name=credential_name, + encryption_details=encryption_details, name=name, read_only=read_only, skip_validation=skip_validation, @@ -3684,8 +3744,10 @@ def list(self) -> Iterator[ExternalLocationInfo]: def update(self, name: str, *, + access_point: Optional[str] = None, comment: Optional[str] = None, credential_name: Optional[str] = None, + encryption_details: Optional[EncryptionDetails] = None, force: Optional[bool] = None, owner: Optional[str] = None, read_only: Optional[bool] = None, @@ -3699,10 +3761,14 @@ def update(self, :param name: str Name of the external location. + :param access_point: str (optional) + The AWS access point to use when accessing s3 for this external location. :param comment: str (optional) User-provided free-form text description. :param credential_name: str (optional) Name of the storage credential used with this location. + :param encryption_details: :class:`EncryptionDetails` (optional) + Encryption options that apply to clients connecting to cloud storage. :param force: bool (optional) Force update even if changing url invalidates dependent external tables or mounts. 
:param owner: str (optional) @@ -3716,8 +3782,10 @@ def update(self, """ request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = UpdateExternalLocation(comment=comment, + request = UpdateExternalLocation(access_point=access_point, + comment=comment, credential_name=credential_name, + encryption_details=encryption_details, force=force, name=name, owner=owner, diff --git a/databricks/sdk/service/provisioning.py b/databricks/sdk/service/provisioning.py index 26ca3799..5c2b9584 100755 --- a/databricks/sdk/service/provisioning.py +++ b/databricks/sdk/service/provisioning.py @@ -1936,10 +1936,9 @@ def create(self, :param location: str (optional) The Google Cloud region of the workspace data plane in your Google account. For example, `us-east4`. :param managed_services_customer_managed_key_id: str (optional) - The ID of the workspace's managed services encryption key configuration object. This is used to - encrypt the workspace's notebook and secret data in the control plane, in addition to Databricks SQL - queries and query history. The provided key configuration object property `use_cases` must contain - `MANAGED_SERVICES`. + The ID of the workspace's managed services encryption key configuration object. This is used to help + protect and control access to the workspace's notebooks, secrets, Databricks SQL queries, and query + history. The provided key configuration object property `use_cases` must contain `MANAGED_SERVICES`. :param network_id: str (optional) :param pricing_tier: :class:`PricingTier` (optional) The pricing tier of the workspace. For pricing tier information, see [AWS Pricing]. 
diff --git a/databricks/sdk/service/settings.py b/databricks/sdk/service/settings.py index 9cc0aa7a..ca144902 100755 --- a/databricks/sdk/service/settings.py +++ b/databricks/sdk/service/settings.py @@ -12,6 +12,21 @@ # all definitions in this file are in alphabetical order +@dataclass +class AccountNetworkPolicyMessage: + serverless_internet_access_enabled: Optional[bool] = None + + def as_dict(self) -> dict: + body = {} + if self.serverless_internet_access_enabled is not None: + body['serverless_internet_access_enabled'] = self.serverless_internet_access_enabled + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'AccountNetworkPolicyMessage': + return cls(serverless_internet_access_enabled=d.get('serverless_internet_access_enabled', None)) + + @dataclass class CreateIpAccessList: label: str @@ -122,6 +137,27 @@ class DeleteAccountIpAccessListRequest: ip_access_list_id: str +@dataclass +class DeleteAccountNetworkPolicyRequest: + """Delete Account Network Policy""" + + etag: str + + +@dataclass +class DeleteAccountNetworkPolicyResponse: + etag: str + + def as_dict(self) -> dict: + body = {} + if self.etag is not None: body['etag'] = self.etag + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'DeleteAccountNetworkPolicyResponse': + return cls(etag=d.get('etag', None)) + + @dataclass class DeleteIpAccessListRequest: """Delete access list""" @@ -365,6 +401,13 @@ def from_dict(cls, d: Dict[str, any]) -> 'PublicTokenInfo': token_id=d.get('token_id', None)) +@dataclass +class ReadAccountNetworkPolicyRequest: + """Get Account Network Policy""" + + etag: str + + @dataclass class ReadPersonalComputeSettingRequest: """Get Personal Compute setting""" @@ -447,6 +490,25 @@ def from_dict(cls, d: Dict[str, any]) -> 'TokenInfo': token_id=d.get('token_id', None)) +@dataclass +class UpdateAccountNetworkPolicyRequest: + """Update Account Network Policy""" + + allow_missing: Optional[bool] = None + setting: 
Optional['AccountNetworkPolicyMessage'] = None + + def as_dict(self) -> dict: + body = {} + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.setting: body['setting'] = self.setting.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'UpdateAccountNetworkPolicyRequest': + return cls(allow_missing=d.get('allow_missing', None), + setting=_from_dict(d, 'setting', AccountNetworkPolicyMessage)) + + @dataclass class UpdateIpAccessList: label: str @@ -711,6 +773,98 @@ def update(self, body=body) +class AccountNetworkPolicyAPI: + """Network policy is a set of rules that defines what can be accessed from your Databricks network. E.g.: You + can choose to block your SQL UDF to access internet from your Databricks serverless clusters. + + There is only one instance of this setting per account. Since this setting has a default value, this + setting is present on all accounts even though it's never set on a given account. Deletion reverts the + value of the setting back to the default value.""" + + def __init__(self, api_client): + self._api = api_client + + def delete_account_network_policy(self, etag: str, **kwargs) -> DeleteAccountNetworkPolicyResponse: + """Delete Account Network Policy. + + Reverts back all the account network policies back to default. + + :param etag: str + etag used for versioning. The response is at least as fresh as the eTag provided. This is used for + optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting + each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern + to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET + request, and pass it with the DELETE request to identify the rule set version you are deleting. 
+ + :returns: :class:`DeleteAccountNetworkPolicyResponse` + """ + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = DeleteAccountNetworkPolicyRequest(etag=etag) + + query = {} + if etag: query['etag'] = request.etag + + json = self._api.do( + 'DELETE', + f'/api/2.0/accounts/{self._api.account_id}/settings/types/network_policy/names/default', + query=query) + return DeleteAccountNetworkPolicyResponse.from_dict(json) + + def read_account_network_policy(self, etag: str, **kwargs) -> AccountNetworkPolicyMessage: + """Get Account Network Policy. + + Gets the value of Account level Network Policy. + + :param etag: str + etag used for versioning. The response is at least as fresh as the eTag provided. This is used for + optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting + each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern + to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET + request, and pass it with the DELETE request to identify the rule set version you are deleting. + + :returns: :class:`AccountNetworkPolicyMessage` + """ + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = ReadAccountNetworkPolicyRequest(etag=etag) + + query = {} + if etag: query['etag'] = request.etag + + json = self._api.do( + 'GET', + f'/api/2.0/accounts/{self._api.account_id}/settings/types/network_policy/names/default', + query=query) + return AccountNetworkPolicyMessage.from_dict(json) + + def update_account_network_policy(self, + *, + allow_missing: Optional[bool] = None, + setting: Optional[AccountNetworkPolicyMessage] = None, + **kwargs) -> AccountNetworkPolicyMessage: + """Update Account Network Policy. + + Updates the policy content of Account level Network Policy. 
+ + :param allow_missing: bool (optional) + This should always be set to true for Settings RPCs. Added for AIP compliance. + :param setting: :class:`AccountNetworkPolicyMessage` (optional) + + :returns: :class:`AccountNetworkPolicyMessage` + """ + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = UpdateAccountNetworkPolicyRequest(allow_missing=allow_missing, setting=setting) + body = request.as_dict() + + json = self._api.do( + 'PATCH', + f'/api/2.0/accounts/{self._api.account_id}/settings/types/network_policy/names/default', + body=body) + return AccountNetworkPolicyMessage.from_dict(json) + + class AccountSettingsAPI: """The Personal Compute enablement setting lets you control which users can use the Personal Compute default policy to create compute resources. By default all users in all workspaces have access (ON), but you can diff --git a/databricks/sdk/service/sharing.py b/databricks/sdk/service/sharing.py index 5a07c9e7..0052704d 100755 --- a/databricks/sdk/service/sharing.py +++ b/databricks/sdk/service/sharing.py @@ -627,8 +627,10 @@ class Privilege(Enum): ALL_PRIVILEGES = 'ALL_PRIVILEGES' CREATE = 'CREATE' CREATE_CATALOG = 'CREATE_CATALOG' + CREATE_CONNECTION = 'CREATE_CONNECTION' CREATE_EXTERNAL_LOCATION = 'CREATE_EXTERNAL_LOCATION' CREATE_EXTERNAL_TABLE = 'CREATE_EXTERNAL_TABLE' + CREATE_FOREIGN_CATALOG = 'CREATE_FOREIGN_CATALOG' CREATE_FUNCTION = 'CREATE_FUNCTION' CREATE_MANAGED_STORAGE = 'CREATE_MANAGED_STORAGE' CREATE_MATERIALIZED_VIEW = 'CREATE_MATERIALIZED_VIEW' @@ -648,6 +650,7 @@ class Privilege(Enum): SET_SHARE_PERMISSION = 'SET_SHARE_PERMISSION' USAGE = 'USAGE' USE_CATALOG = 'USE_CATALOG' + USE_CONNECTION = 'USE_CONNECTION' USE_MARKETPLACE_ASSETS = 'USE_MARKETPLACE_ASSETS' USE_PROVIDER = 'USE_PROVIDER' USE_RECIPIENT = 'USE_RECIPIENT' @@ -1328,7 +1331,8 @@ def update(self, class ProvidersAPI: - """Databricks Providers REST API""" + """A data provider is an object representing the 
organization in the real world who shares the data. A + provider contains shares which further contain the shared data.""" def __init__(self, api_client): self._api = api_client @@ -1486,7 +1490,13 @@ def update(self, class RecipientActivationAPI: - """Databricks Recipient Activation REST API""" + """The Recipient Activation API is only applicable in the open sharing model where the recipient object has + the authentication type of `TOKEN`. The data recipient follows the activation link shared by the data + provider to download the credential file that includes the access token. The recipient will then use the + credential file to establish a secure connection with the provider to receive the shared data. + + Note that you can download the credential file only once. Recipients should treat the downloaded + credential as a secret and must not share it outside of their organization.""" def __init__(self, api_client): self._api = api_client @@ -1528,7 +1538,19 @@ def retrieve_token(self, activation_url: str, **kwargs) -> RetrieveTokenResponse class RecipientsAPI: - """Databricks Recipients REST API""" + """A recipient is an object you create using :method:recipients/create to represent an organization which you + want to allow access shares. The way how sharing works differs depending on whether or not your recipient + has access to a Databricks workspace that is enabled for Unity Catalog: + + - For recipients with access to a Databricks workspace that is enabled for Unity Catalog, you can create a + recipient object along with a unique sharing identifier you get from the recipient. The sharing identifier + is the key identifier that enables the secure connection. This sharing mode is called + **Databricks-to-Databricks sharing**. + + - For recipients without access to a Databricks workspace that is enabled for Unity Catalog, when you + create a recipient object, Databricks generates an activation link you can send to the recipient. 
The + recipient follows the activation link to download the credential file, and then uses the credential file + to establish a secure connection to receive the shared data. This sharing mode is called **open sharing**.""" def __init__(self, api_client): self._api = api_client @@ -1735,7 +1757,10 @@ def update(self, class SharesAPI: - """Databricks Shares REST API""" + """A share is a container instantiated with :method:shares/create. Once created you can iteratively register + a collection of existing data assets defined within the metastore using :method:shares/update. You can + register data assets under their original name, qualified by their original schema, or provide alternate + exposed names.""" def __init__(self, api_client): self._api = api_client diff --git a/databricks/sdk/version.py b/databricks/sdk/version.py index fc79d63d..0404d810 100644 --- a/databricks/sdk/version.py +++ b/databricks/sdk/version.py @@ -1 +1 @@ -__version__ = '0.2.1' +__version__ = '0.3.0' diff --git a/docs/account/account-settings.rst b/docs/account/account-settings.rst index 1feecca1..afed5089 100644 --- a/docs/account/account-settings.rst +++ b/docs/account/account-settings.rst @@ -8,4 +8,5 @@ Manage security settings for Accounts and Workspaces :maxdepth: 1 ip_access_lists + network_policy settings \ No newline at end of file diff --git a/docs/account/budgets.rst b/docs/account/budgets.rst index 4914ab32..108d33d1 100644 --- a/docs/account/budgets.rst +++ b/docs/account/budgets.rst @@ -27,7 +27,7 @@ Budgets alerts=[billing.BudgetAlert(email_notifications=["admin@example.com"], min_percentage=50)])) # cleanup - a.budgets.delete(delete=created.budget.budget_id) + a.budgets.delete(budget_id=created.budget.budget_id) Create a new budget. 
@@ -72,10 +72,10 @@ Budgets target_amount="100", alerts=[billing.BudgetAlert(email_notifications=["admin@example.com"], min_percentage=50)])) - by_id = a.budgets.get(get=created.budget.budget_id) + by_id = a.budgets.get(budget_id=created.budget.budget_id) # cleanup - a.budgets.delete(delete=created.budget.budget_id) + a.budgets.delete(budget_id=created.budget.budget_id) Get budget and its status. @@ -141,7 +141,7 @@ Budgets ])) # cleanup - a.budgets.delete(delete=created.budget.budget_id) + a.budgets.delete(budget_id=created.budget.budget_id) Modify budget. diff --git a/docs/account/credentials.rst b/docs/account/credentials.rst index 0c2510cb..8dd5a13f 100644 --- a/docs/account/credentials.rst +++ b/docs/account/credentials.rst @@ -27,7 +27,7 @@ Credential configurations role_arn=os.environ["TEST_CROSSACCOUNT_ARN"]))) # cleanup - a.credentials.delete(delete=role.credentials_id) + a.credentials.delete(credentials_id=role.credentials_id) Create credential configuration. @@ -83,10 +83,10 @@ Credential configurations aws_credentials=provisioning.CreateCredentialAwsCredentials(sts_role=provisioning.CreateCredentialStsRole( role_arn=os.environ["TEST_CROSSACCOUNT_ARN"]))) - by_id = a.credentials.get(get=role.credentials_id) + by_id = a.credentials.get(credentials_id=role.credentials_id) # cleanup - a.credentials.delete(delete=role.credentials_id) + a.credentials.delete(credentials_id=role.credentials_id) Get credential configuration. diff --git a/docs/account/encryption_keys.rst b/docs/account/encryption_keys.rst index 633251d5..7b0b5aab 100644 --- a/docs/account/encryption_keys.rst +++ b/docs/account/encryption_keys.rst @@ -34,7 +34,7 @@ Key configurations use_cases=[provisioning.KeyUseCase.MANAGED_SERVICES]) # cleanup - a.encryption_keys.delete(delete=created.customer_managed_key_id) + a.encryption_keys.delete(customer_managed_key_id=created.customer_managed_key_id) Create encryption key configuration. 
@@ -90,10 +90,10 @@ Key configurations key_arn=os.environ["TEST_MANAGED_KMS_KEY_ARN"], key_alias=os.environ["TEST_STORAGE_KMS_KEY_ALIAS"]), use_cases=[provisioning.KeyUseCase.MANAGED_SERVICES]) - by_id = a.encryption_keys.get(get=created.customer_managed_key_id) + by_id = a.encryption_keys.get(customer_managed_key_id=created.customer_managed_key_id) # cleanup - a.encryption_keys.delete(delete=created.customer_managed_key_id) + a.encryption_keys.delete(customer_managed_key_id=created.customer_managed_key_id) Get encryption key configuration. diff --git a/docs/account/groups.rst b/docs/account/groups.rst index 4595ed45..134becf8 100644 --- a/docs/account/groups.rst +++ b/docs/account/groups.rst @@ -24,7 +24,7 @@ Account Groups group = w.groups.create(display_name=f'sdk-{time.time_ns()}') # cleanup - w.groups.delete(delete=group.id) + w.groups.delete(id=group.id) Create a new group. @@ -59,7 +59,7 @@ Account Groups group = w.groups.create(display_name=f'sdk-{time.time_ns()}') - w.groups.delete(delete=group.id) + w.groups.delete(id=group.id) Delete a group. @@ -85,10 +85,10 @@ Account Groups group = w.groups.create(display_name=f'sdk-{time.time_ns()}') - fetch = w.groups.get(get=group.id) + fetch = w.groups.get(id=group.id) # cleanup - w.groups.delete(delete=group.id) + w.groups.delete(id=group.id) Get group details. diff --git a/docs/account/ip_access_lists.rst b/docs/account/ip_access_lists.rst index de65aa37..370a25c1 100644 --- a/docs/account/ip_access_lists.rst +++ b/docs/account/ip_access_lists.rst @@ -39,7 +39,7 @@ Account IP Access Lists list_type=settings.ListType.BLOCK) # cleanup - w.ip_access_lists.delete(delete=created.ip_access_list.list_id) + w.ip_access_lists.delete(ip_access_list_id=created.ip_access_list.list_id) Create access list. 
@@ -96,10 +96,10 @@ Account IP Access Lists ip_addresses=["1.0.0.0/16"], list_type=settings.ListType.BLOCK) - by_id = w.ip_access_lists.get(get=created.ip_access_list.list_id) + by_id = w.ip_access_lists.get(ip_access_list_id=created.ip_access_list.list_id) # cleanup - w.ip_access_lists.delete(delete=created.ip_access_list.list_id) + w.ip_access_lists.delete(ip_access_list_id=created.ip_access_list.list_id) Get IP access list. @@ -154,7 +154,7 @@ Account IP Access Lists enabled=False) # cleanup - w.ip_access_lists.delete(delete=created.ip_access_list.list_id) + w.ip_access_lists.delete(ip_access_list_id=created.ip_access_list.list_id) Replace access list. diff --git a/docs/account/log_delivery.rst b/docs/account/log_delivery.rst index fe63af8a..587ef5f3 100644 --- a/docs/account/log_delivery.rst +++ b/docs/account/log_delivery.rst @@ -79,8 +79,8 @@ Log delivery configurations output_format=billing.OutputFormat.JSON)) # cleanup - a.storage.delete(delete=bucket.storage_configuration_id) - a.credentials.delete(delete=creds.credentials_id) + a.storage.delete(storage_configuration_id=bucket.storage_configuration_id) + a.credentials.delete(credentials_id=creds.credentials_id) a.log_delivery.patch_status(log_delivery_configuration_id=created.log_delivery_configuration.config_id, status=billing.LogDeliveryConfigStatus.DISABLED) @@ -141,11 +141,11 @@ Log delivery configurations log_type=billing.LogType.AUDIT_LOGS, output_format=billing.OutputFormat.JSON)) - by_id = a.log_delivery.get(get=created.log_delivery_configuration.config_id) + by_id = a.log_delivery.get(log_delivery_configuration_id=created.log_delivery_configuration.config_id) # cleanup - a.storage.delete(delete=bucket.storage_configuration_id) - a.credentials.delete(delete=creds.credentials_id) + a.storage.delete(storage_configuration_id=bucket.storage_configuration_id) + a.credentials.delete(credentials_id=creds.credentials_id) 
a.log_delivery.patch_status(log_delivery_configuration_id=created.log_delivery_configuration.config_id, status=billing.LogDeliveryConfigStatus.DISABLED) diff --git a/docs/account/metastore_assignments.rst b/docs/account/metastore_assignments.rst index c2c2cb7f..574fd225 100644 --- a/docs/account/metastore_assignments.rst +++ b/docs/account/metastore_assignments.rst @@ -8,8 +8,7 @@ Account Metastore Assignments Assigns a workspace to a metastore. - Creates an assignment to a metastore for a workspace Please add a header - X-Databricks-Account-Console-API-Version: 2.0 to access this API. + Creates an assignment to a metastore for a workspace :param workspace_id: int Workspace ID. @@ -17,15 +16,14 @@ Account Metastore Assignments Unity Catalog metastore ID :param metastore_assignment: :class:`CreateMetastoreAssignment` (optional) - :returns: Iterator over :class:`CreateMetastoreAssignmentsResponseItem` + .. py:method:: delete(workspace_id, metastore_id) Delete a metastore assignment. - Deletes a metastore assignment to a workspace, leaving the workspace with no metastore. Please add a - header X-Databricks-Account-Console-API-Version: 2.0 to access this API. + Deletes a metastore assignment to a workspace, leaving the workspace with no metastore. :param workspace_id: int Workspace ID. @@ -41,8 +39,7 @@ Account Metastore Assignments Gets the metastore assignment, if any, for the workspace specified by ID. If the workspace is assigned a metastore, the mappig will be returned. If no metastore is assigned to the workspace, the assignment - will not be found and a 404 returned. Please add a header X-Databricks-Account-Console-API-Version: - 2.0 to access this API. + will not be found and a 404 returned. :param workspace_id: int Workspace ID. @@ -54,8 +51,7 @@ Account Metastore Assignments Get all workspaces assigned to a metastore. - Gets a list of all Databricks workspace IDs that have been assigned to given metastore. 
Please add a - header X-Databricks-Account-Console-API-Version: 2.0 to access this API + Gets a list of all Databricks workspace IDs that have been assigned to given metastore. :param metastore_id: str Unity Catalog metastore ID @@ -68,7 +64,7 @@ Account Metastore Assignments Updates a metastore assignment to a workspaces. Updates an assignment to a metastore for a workspace. Currently, only the default catalog may be - updated. Please add a header X-Databricks-Account-Console-API-Version: 2.0 to access this API. + updated. :param workspace_id: int Workspace ID. diff --git a/docs/account/metastores.rst b/docs/account/metastores.rst index 0b20dc9d..4625ef88 100644 --- a/docs/account/metastores.rst +++ b/docs/account/metastores.rst @@ -27,8 +27,7 @@ Account Metastores Create metastore. - Creates a Unity Catalog metastore. Please add a header X-Databricks-Account-Console-API-Version: 2.0 - to access this API. + Creates a Unity Catalog metastore. :param metastore_info: :class:`CreateMetastore` (optional) @@ -39,8 +38,7 @@ Account Metastores Delete a metastore. - Deletes a Unity Catalog metastore for an account, both specified by ID. Please add a header - X-Databricks-Account-Console-API-Version: 2.0 to access this API. + Deletes a Unity Catalog metastore for an account, both specified by ID. :param metastore_id: str Unity Catalog metastore ID @@ -67,15 +65,14 @@ Account Metastores storage_root="s3://%s/%s" % (os.environ["TEST_BUCKET"], f'sdk-{time.time_ns()}')) - _ = w.metastores.get(get=created.metastore_id) + _ = w.metastores.get(id=created.metastore_id) # cleanup w.metastores.delete(id=created.metastore_id, force=True) Get a metastore. - Gets a Unity Catalog metastore from an account, both specified by ID. Please add a header - X-Databricks-Account-Console-API-Version: 2.0 to access this API. + Gets a Unity Catalog metastore from an account, both specified by ID. 
:param metastore_id: str Unity Catalog metastore ID @@ -97,8 +94,7 @@ Account Metastores Get all metastores associated with an account. - Gets all Unity Catalog metastores associated with an account specified by ID. Please add a header - X-Databricks-Account-Console-API-Version: 2.0 to access this API. + Gets all Unity Catalog metastores associated with an account specified by ID. :returns: :class:`ListMetastoresResponse` @@ -127,8 +123,7 @@ Account Metastores Update a metastore. - Updates an existing Unity Catalog metastore. Please add a header - X-Databricks-Account-Console-API-Version: 2.0 to access this API. + Updates an existing Unity Catalog metastore. :param metastore_id: str Unity Catalog metastore ID diff --git a/docs/account/network_policy.rst b/docs/account/network_policy.rst new file mode 100644 index 00000000..17b60ac4 --- /dev/null +++ b/docs/account/network_policy.rst @@ -0,0 +1,55 @@ +Network Policy +============== +.. py:class:: AccountNetworkPolicyAPI + + Network policy is a set of rules that defines what can be accessed from your Databricks network. E.g.: You + can choose to block your SQL UDF to access internet from your Databricks serverless clusters. + + There is only one instance of this setting per account. Since this setting has a default value, this + setting is present on all accounts even though it's never set on a given account. Deletion reverts the + value of the setting back to the default value. + + .. py:method:: delete_account_network_policy(etag) + + Delete Account Network Policy. + + Reverts back all the account network policies back to default. + + :param etag: str + etag used for versioning. The response is at least as fresh as the eTag provided. This is used for + optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting + each other. 
It is strongly suggested that systems make use of the etag in the read -> delete pattern + to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET + request, and pass it with the DELETE request to identify the rule set version you are deleting. + + :returns: :class:`DeleteAccountNetworkPolicyResponse` + + + .. py:method:: read_account_network_policy(etag) + + Get Account Network Policy. + + Gets the value of Account level Network Policy. + + :param etag: str + etag used for versioning. The response is at least as fresh as the eTag provided. This is used for + optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting + each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern + to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET + request, and pass it with the DELETE request to identify the rule set version you are deleting. + + :returns: :class:`AccountNetworkPolicyMessage` + + + .. py:method:: update_account_network_policy( [, allow_missing, setting]) + + Update Account Network Policy. + + Updates the policy content of Account level Network Policy. + + :param allow_missing: bool (optional) + This should always be set to true for Settings RPCs. Added for AIP compliance. + :param setting: :class:`AccountNetworkPolicyMessage` (optional) + + :returns: :class:`AccountNetworkPolicyMessage` + \ No newline at end of file diff --git a/docs/account/networks.rst b/docs/account/networks.rst index 42c1b601..a498c7af 100644 --- a/docs/account/networks.rst +++ b/docs/account/networks.rst @@ -84,7 +84,7 @@ Network configurations hex(time.time_ns())[2:]], security_group_ids=[hex(time.time_ns())[2:]]) - by_id = a.networks.get(get=netw.network_id) + by_id = a.networks.get(network_id=netw.network_id) Get a network configuration. 
diff --git a/docs/account/private_access.rst b/docs/account/private_access.rst index c9da5eb4..98ea23d0 100644 --- a/docs/account/private_access.rst +++ b/docs/account/private_access.rst @@ -21,7 +21,7 @@ Private Access Settings region=os.environ["AWS_REGION"]) # cleanup - a.private_access.delete(delete=created.private_access_settings_id) + a.private_access.delete(private_access_settings_id=created.private_access_settings_id) Create private access settings. @@ -103,10 +103,10 @@ Private Access Settings created = a.private_access.create(private_access_settings_name=f'sdk-{time.time_ns()}', region=os.environ["AWS_REGION"]) - by_id = a.private_access.get(get=created.private_access_settings_id) + by_id = a.private_access.get(private_access_settings_id=created.private_access_settings_id) # cleanup - a.private_access.delete(delete=created.private_access_settings_id) + a.private_access.delete(private_access_settings_id=created.private_access_settings_id) Get a private access settings object. @@ -164,7 +164,7 @@ Private Access Settings region=os.environ["AWS_REGION"]) # cleanup - a.private_access.delete(delete=created.private_access_settings_id) + a.private_access.delete(private_access_settings_id=created.private_access_settings_id) Replace private access settings. diff --git a/docs/account/service_principals.rst b/docs/account/service_principals.rst index 497ec8db..8381d1cb 100644 --- a/docs/account/service_principals.rst +++ b/docs/account/service_principals.rst @@ -27,7 +27,7 @@ Account Service Principals groups=[iam.ComplexValue(value=groups["admins"])]) # cleanup - w.service_principals.delete(delete=spn.id) + w.service_principals.delete(id=spn.id) Create a service principal. 
@@ -75,10 +75,10 @@ Account Service Principals created = w.service_principals.create(display_name=f'sdk-{time.time_ns()}') - by_id = w.service_principals.get(get=created.id) + by_id = w.service_principals.get(id=created.id) # cleanup - w.service_principals.delete(delete=created.id) + w.service_principals.delete(id=created.id) Get service principal details. @@ -165,7 +165,7 @@ Account Service Principals roles=[iam.ComplexValue(value="xyz")]) # cleanup - w.service_principals.delete(delete=created.id) + w.service_principals.delete(id=created.id) Replace service principal. diff --git a/docs/account/storage.rst b/docs/account/storage.rst index f130e94c..71351f9d 100644 --- a/docs/account/storage.rst +++ b/docs/account/storage.rst @@ -26,7 +26,7 @@ Storage configurations root_bucket_info=provisioning.RootBucketInfo(bucket_name=os.environ["TEST_ROOT_BUCKET"])) # cleanup - a.storage.delete(delete=storage.storage_configuration_id) + a.storage.delete(storage_configuration_id=storage.storage_configuration_id) Create new storage configuration. @@ -77,7 +77,7 @@ Storage configurations storage = a.storage.create(storage_configuration_name=f'sdk-{time.time_ns()}', root_bucket_info=provisioning.RootBucketInfo(bucket_name=f'sdk-{time.time_ns()}')) - by_id = a.storage.get(get=storage.storage_configuration_id) + by_id = a.storage.get(storage_configuration_id=storage.storage_configuration_id) Get storage configuration. 
diff --git a/docs/account/storage_credentials.rst b/docs/account/storage_credentials.rst index 17f30768..f2e22ef8 100644 --- a/docs/account/storage_credentials.rst +++ b/docs/account/storage_credentials.rst @@ -77,10 +77,10 @@ Account Storage Credentials name=f'sdk-{time.time_ns()}', aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"])) - by_name = w.storage_credentials.get(get=created.name) + by_name = w.storage_credentials.get(name=created.name) # cleanup - w.storage_credentials.delete(delete=created.name) + w.storage_credentials.delete(name=created.name) Gets the named storage credential. diff --git a/docs/account/users.rst b/docs/account/users.rst index a87b6283..97dcae9f 100644 --- a/docs/account/users.rst +++ b/docs/account/users.rst @@ -67,7 +67,7 @@ Account Users other_owner = w.users.create(user_name=f'sdk-{time.time_ns()}@example.com') - w.users.delete(delete=other_owner.id) + w.users.delete(id=other_owner.id) Delete a user. diff --git a/docs/account/vpc_endpoints.rst b/docs/account/vpc_endpoints.rst index bae67845..495fbce2 100644 --- a/docs/account/vpc_endpoints.rst +++ b/docs/account/vpc_endpoints.rst @@ -22,7 +22,7 @@ VPC Endpoint Configurations vpc_endpoint_name=f'sdk-{time.time_ns()}') # cleanup - a.vpc_endpoints.delete(delete=created.vpc_endpoint_id) + a.vpc_endpoints.delete(vpc_endpoint_id=created.vpc_endpoint_id) Create VPC endpoint configuration. @@ -87,10 +87,10 @@ VPC Endpoint Configurations region=os.environ["AWS_REGION"], vpc_endpoint_name=f'sdk-{time.time_ns()}') - by_id = a.vpc_endpoints.get(get=created.vpc_endpoint_id) + by_id = a.vpc_endpoints.get(vpc_endpoint_id=created.vpc_endpoint_id) # cleanup - a.vpc_endpoints.delete(delete=created.vpc_endpoint_id) + a.vpc_endpoints.delete(vpc_endpoint_id=created.vpc_endpoint_id) Get a VPC endpoint configuration. 
diff --git a/docs/account/workspaces.rst b/docs/account/workspaces.rst index f90202c1..2e9347da 100644 --- a/docs/account/workspaces.rst +++ b/docs/account/workspaces.rst @@ -38,9 +38,9 @@ Workspaces storage_configuration_id=storage.storage_configuration_id).result() # cleanup - a.storage.delete(delete=storage.storage_configuration_id) - a.credentials.delete(delete=role.credentials_id) - a.workspaces.delete(delete=created.workspace_id) + a.storage.delete(storage_configuration_id=storage.storage_configuration_id) + a.credentials.delete(credentials_id=role.credentials_id) + a.workspaces.delete(workspace_id=created.workspace_id) Create a new workspace. @@ -93,10 +93,9 @@ Workspaces :param location: str (optional) The Google Cloud region of the workspace data plane in your Google account. For example, `us-east4`. :param managed_services_customer_managed_key_id: str (optional) - The ID of the workspace's managed services encryption key configuration object. This is used to - encrypt the workspace's notebook and secret data in the control plane, in addition to Databricks SQL - queries and query history. The provided key configuration object property `use_cases` must contain - `MANAGED_SERVICES`. + The ID of the workspace's managed services encryption key configuration object. This is used to help + protect and control access to the workspace's notebooks, secrets, Databricks SQL queries, and query + history. The provided key configuration object property `use_cases` must contain `MANAGED_SERVICES`. :param network_id: str (optional) :param pricing_tier: :class:`PricingTier` (optional) The pricing tier of the workspace. For pricing tier information, see [AWS Pricing]. 
@@ -168,12 +167,12 @@ Workspaces credentials_id=role.credentials_id, storage_configuration_id=storage.storage_configuration_id).result() - by_id = a.workspaces.get(get=created.workspace_id) + by_id = a.workspaces.get(workspace_id=created.workspace_id) # cleanup - a.storage.delete(delete=storage.storage_configuration_id) - a.credentials.delete(delete=role.credentials_id) - a.workspaces.delete(delete=created.workspace_id) + a.storage.delete(storage_configuration_id=storage.storage_configuration_id) + a.credentials.delete(credentials_id=role.credentials_id) + a.workspaces.delete(workspace_id=created.workspace_id) Get a workspace. @@ -254,10 +253,10 @@ Workspaces _ = a.workspaces.update(workspace_id=created.workspace_id, credentials_id=update_role.credentials_id).result() # cleanup - a.storage.delete(delete=storage.storage_configuration_id) - a.credentials.delete(delete=role.credentials_id) - a.credentials.delete(delete=update_role.credentials_id) - a.workspaces.delete(delete=created.workspace_id) + a.storage.delete(storage_configuration_id=storage.storage_configuration_id) + a.credentials.delete(credentials_id=role.credentials_id) + a.credentials.delete(credentials_id=update_role.credentials_id) + a.workspaces.delete(workspace_id=created.workspace_id) Update workspace configuration. diff --git a/docs/workspace/alerts.rst b/docs/workspace/alerts.rst index 0778ee93..01b1cad9 100644 --- a/docs/workspace/alerts.rst +++ b/docs/workspace/alerts.rst @@ -32,8 +32,8 @@ Alerts query_id=query.id) # cleanup - w.queries.delete(delete=query.id) - w.alerts.delete(delete=alert.id) + w.queries.delete(query_id=query.id) + w.alerts.delete(alert_id=alert.id) Create an alert. 
@@ -91,11 +91,11 @@ Alerts name=f'sdk-{time.time_ns()}', query_id=query.id) - by_id = w.alerts.get(get=alert.id) + by_id = w.alerts.get(alert_id=alert.id) # cleanup - w.queries.delete(delete=query.id) - w.alerts.delete(delete=alert.id) + w.queries.delete(query_id=query.id) + w.alerts.delete(alert_id=alert.id) Get an alert. @@ -155,8 +155,8 @@ Alerts query_id=query.id) # cleanup - w.queries.delete(delete=query.id) - w.alerts.delete(delete=alert.id) + w.queries.delete(query_id=query.id) + w.alerts.delete(alert_id=alert.id) Update an alert. diff --git a/docs/workspace/catalogs.rst b/docs/workspace/catalogs.rst index 6ac1f049..a9818cc4 100644 --- a/docs/workspace/catalogs.rst +++ b/docs/workspace/catalogs.rst @@ -9,7 +9,7 @@ Catalogs the workspaces in a Databricks account. Users in different workspaces can share access to the same data, depending on privileges granted centrally in Unity Catalog. - .. py:method:: create(name [, comment, properties, provider_name, share_name, storage_root]) + .. py:method:: create(name [, comment, connection_name, properties, provider_name, share_name, storage_root]) Usage: @@ -35,6 +35,8 @@ Catalogs Name of catalog. :param comment: str (optional) User-provided free-form text description. + :param connection_name: str (optional) + The name of the connection to an external data source. :param properties: Dict[str,str] (optional) A map of key-value properties attached to the securable. 
:param provider_name: str (optional) @@ -78,7 +80,7 @@ Catalogs created = w.catalogs.create(name=f'sdk-{time.time_ns()}') - _ = w.catalogs.get(get=created.name) + _ = w.catalogs.get(name=created.name) # cleanup w.catalogs.delete(name=created.name, force=True) diff --git a/docs/workspace/cluster_policies.rst b/docs/workspace/cluster_policies.rst index e86d25b1..534e3928 100644 --- a/docs/workspace/cluster_policies.rst +++ b/docs/workspace/cluster_policies.rst @@ -43,7 +43,7 @@ Cluster Policies """) # cleanup - w.cluster_policies.delete(delete=created.policy_id) + w.cluster_policies.delete(policy_id=created.policy_id) Create a new policy. @@ -108,7 +108,7 @@ Cluster Policies } """) - policy = w.cluster_policies.get(get=created.policy_id) + policy = w.cluster_policies.get(policy_id=created.policy_id) w.cluster_policies.edit(policy_id=policy.policy_id, name=policy.name, @@ -121,7 +121,7 @@ Cluster Policies """) # cleanup - w.cluster_policies.delete(delete=created.policy_id) + w.cluster_policies.delete(policy_id=created.policy_id) Update a cluster policy. @@ -177,10 +177,10 @@ Cluster Policies } """) - policy = w.cluster_policies.get(get=created.policy_id) + policy = w.cluster_policies.get(policy_id=created.policy_id) # cleanup - w.cluster_policies.delete(delete=created.policy_id) + w.cluster_policies.delete(policy_id=created.policy_id) Get entity. diff --git a/docs/workspace/clusters.rst b/docs/workspace/clusters.rst index eb511eb5..7243f761 100644 --- a/docs/workspace/clusters.rst +++ b/docs/workspace/clusters.rst @@ -52,8 +52,8 @@ Clusters w.clusters.change_owner(cluster_id=clstr.cluster_id, owner_username=other_owner.user_name) # cleanup - w.users.delete(delete=other_owner.id) - w.clusters.permanent_delete(permanent_delete=clstr.cluster_id) + w.users.delete(id=other_owner.id) + w.clusters.permanent_delete(cluster_id=clstr.cluster_id) Change cluster owner. 
@@ -91,7 +91,7 @@ Clusters num_workers=1).result() # cleanup - w.clusters.permanent_delete(permanent_delete=clstr.cluster_id) + w.clusters.permanent_delete(cluster_id=clstr.cluster_id) Create new cluster. @@ -229,10 +229,10 @@ Clusters autotermination_minutes=15, num_workers=1).result() - _ = w.clusters.delete(delete=clstr.cluster_id).result() + _ = w.clusters.delete(cluster_id=clstr.cluster_id).result() # cleanup - w.clusters.permanent_delete(permanent_delete=clstr.cluster_id) + w.clusters.permanent_delete(cluster_id=clstr.cluster_id) Terminate cluster. @@ -279,7 +279,7 @@ Clusters num_workers=2).result() # cleanup - w.clusters.permanent_delete(permanent_delete=clstr.cluster_id) + w.clusters.permanent_delete(cluster_id=clstr.cluster_id) Update cluster configuration. @@ -456,7 +456,7 @@ Clusters events = w.clusters.events(cluster_id=clstr.cluster_id) # cleanup - w.clusters.permanent_delete(permanent_delete=clstr.cluster_id) + w.clusters.permanent_delete(cluster_id=clstr.cluster_id) List cluster activity events. @@ -507,10 +507,10 @@ Clusters autotermination_minutes=15, num_workers=1).result() - by_id = w.clusters.get(get=clstr.cluster_id) + by_id = w.clusters.get(cluster_id=clstr.cluster_id) # cleanup - w.clusters.permanent_delete(permanent_delete=clstr.cluster_id) + w.clusters.permanent_delete(cluster_id=clstr.cluster_id) Get cluster info. @@ -623,10 +623,10 @@ Clusters autotermination_minutes=15, num_workers=1).result() - w.clusters.pin(pin=clstr.cluster_id) + w.clusters.pin(cluster_id=clstr.cluster_id) # cleanup - w.clusters.permanent_delete(permanent_delete=clstr.cluster_id) + w.clusters.permanent_delete(cluster_id=clstr.cluster_id) Pin cluster. @@ -665,7 +665,7 @@ Clusters by_id = w.clusters.resize(cluster_id=clstr.cluster_id, num_workers=1).result() # cleanup - w.clusters.permanent_delete(permanent_delete=clstr.cluster_id) + w.clusters.permanent_delete(cluster_id=clstr.cluster_id) Resize cluster. 
@@ -718,7 +718,7 @@ Clusters _ = w.clusters.restart(cluster_id=clstr.cluster_id).result() # cleanup - w.clusters.permanent_delete(permanent_delete=clstr.cluster_id) + w.clusters.permanent_delete(cluster_id=clstr.cluster_id) Restart cluster. @@ -823,10 +823,10 @@ Clusters autotermination_minutes=15, num_workers=1).result() - _ = w.clusters.start(start=clstr.cluster_id).result() + _ = w.clusters.start(cluster_id=clstr.cluster_id).result() # cleanup - w.clusters.permanent_delete(permanent_delete=clstr.cluster_id) + w.clusters.permanent_delete(cluster_id=clstr.cluster_id) Start terminated cluster. @@ -868,10 +868,10 @@ Clusters autotermination_minutes=15, num_workers=1).result() - w.clusters.unpin(unpin=clstr.cluster_id) + w.clusters.unpin(cluster_id=clstr.cluster_id) # cleanup - w.clusters.permanent_delete(permanent_delete=clstr.cluster_id) + w.clusters.permanent_delete(cluster_id=clstr.cluster_id) Unpin cluster. diff --git a/docs/workspace/dashboards.rst b/docs/workspace/dashboards.rst index 6983366c..04852edd 100644 --- a/docs/workspace/dashboards.rst +++ b/docs/workspace/dashboards.rst @@ -23,7 +23,7 @@ Dashboards created = w.dashboards.create(name=f'sdk-{time.time_ns()}') # cleanup - w.dashboards.delete(delete=created.id) + w.dashboards.delete(dashboard_id=created.id) Create a dashboard object. @@ -53,10 +53,10 @@ Dashboards created = w.dashboards.create(name=f'sdk-{time.time_ns()}') - w.dashboards.delete(delete=created.id) + w.dashboards.delete(dashboard_id=created.id) # cleanup - w.dashboards.delete(delete=created.id) + w.dashboards.delete(dashboard_id=created.id) Remove a dashboard. @@ -82,10 +82,10 @@ Dashboards created = w.dashboards.create(name=f'sdk-{time.time_ns()}') - by_id = w.dashboards.get(get=created.id) + by_id = w.dashboards.get(dashboard_id=created.id) # cleanup - w.dashboards.delete(delete=created.id) + w.dashboards.delete(dashboard_id=created.id) Retrieve a definition. 
@@ -142,7 +142,7 @@ Dashboards w.dashboards.restore(dashboard_id=created.id) # cleanup - w.dashboards.delete(delete=created.id) + w.dashboards.delete(dashboard_id=created.id) Restore a dashboard. diff --git a/docs/workspace/external_locations.rst b/docs/workspace/external_locations.rst index d0baf87a..c188fc20 100644 --- a/docs/workspace/external_locations.rst +++ b/docs/workspace/external_locations.rst @@ -13,7 +13,7 @@ External Locations To create external locations, you must be a metastore admin or a user with the **CREATE_EXTERNAL_LOCATION** privilege. - .. py:method:: create(name, url, credential_name [, comment, read_only, skip_validation]) + .. py:method:: create(name, url, credential_name [, access_point, comment, encryption_details, read_only, skip_validation]) Usage: @@ -50,8 +50,12 @@ External Locations Path URL of the external location. :param credential_name: str Name of the storage credential used with this location. + :param access_point: str (optional) + The AWS access point to use when accessing s3 for this external location. :param comment: str (optional) User-provided free-form text description. + :param encryption_details: :class:`EncryptionDetails` (optional) + Encryption options that apply to clients connecting to cloud storage. :param read_only: bool (optional) Indicates whether the external location is read-only. :param skip_validation: bool (optional) @@ -135,7 +139,7 @@ External Locations :returns: Iterator over :class:`ExternalLocationInfo` - .. py:method:: update(name [, comment, credential_name, force, owner, read_only, url]) + ..
py:method:: update(name [, access_point, comment, credential_name, encryption_details, force, owner, read_only, url]) Usage: @@ -162,8 +166,8 @@ External Locations url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f'sdk-{time.time_ns()}')) # cleanup - w.storage_credentials.delete(delete=credential.name) - w.external_locations.delete(delete=created.name) + w.storage_credentials.delete(name=credential.name) + w.external_locations.delete(name=created.name) Update an external location. @@ -173,10 +177,14 @@ External Locations :param name: str Name of the external location. + :param access_point: str (optional) + The AWS access point to use when accessing s3 for this external location. :param comment: str (optional) User-provided free-form text description. :param credential_name: str (optional) Name of the storage credential used with this location. + :param encryption_details: :class:`EncryptionDetails` (optional) + Encryption options that apply to clients connecting to cloud storage. :param force: bool (optional) Force update even if changing url invalidates dependent external tables or mounts. :param owner: str (optional) diff --git a/docs/workspace/git_credentials.rst b/docs/workspace/git_credentials.rst index 6552dd34..9c0eb3e4 100644 --- a/docs/workspace/git_credentials.rst +++ b/docs/workspace/git_credentials.rst @@ -21,7 +21,7 @@ Git Credentials cr = w.git_credentials.create(git_provider="gitHub", git_username="test", personal_access_token="test") # cleanup - w.git_credentials.delete(delete=cr.credential_id) + w.git_credentials.delete(credential_id=cr.credential_id) Create a credential entry.
@@ -65,10 +65,10 @@ Git Credentials cr = w.git_credentials.create(git_provider="gitHub", git_username="test", personal_access_token="test") - by_id = w.git_credentials.get(get=cr.credential_id) + by_id = w.git_credentials.get(credential_id=cr.credential_id) # cleanup - w.git_credentials.delete(delete=cr.credential_id) + w.git_credentials.delete(credential_id=cr.credential_id) Get a credential entry. @@ -119,7 +119,7 @@ Git Credentials personal_access_token=f'sdk-{time.time_ns()}') # cleanup - w.git_credentials.delete(delete=cr.credential_id) + w.git_credentials.delete(credential_id=cr.credential_id) Update a credential. diff --git a/docs/workspace/global_init_scripts.rst b/docs/workspace/global_init_scripts.rst index 504168d5..7a399c7b 100644 --- a/docs/workspace/global_init_scripts.rst +++ b/docs/workspace/global_init_scripts.rst @@ -29,7 +29,7 @@ Global Init Scripts position=10) # cleanup - w.global_init_scripts.delete(delete=created.script_id) + w.global_init_scripts.delete(script_id=created.script_id) Create init script. @@ -85,10 +85,10 @@ Global Init Scripts enabled=True, position=10) - by_id = w.global_init_scripts.get(get=created.script_id) + by_id = w.global_init_scripts.get(script_id=created.script_id) # cleanup - w.global_init_scripts.delete(delete=created.script_id) + w.global_init_scripts.delete(script_id=created.script_id) Get an init script. @@ -144,7 +144,7 @@ Global Init Scripts script=base64.b64encode(("echo 2").encode()).decode()) # cleanup - w.global_init_scripts.delete(delete=created.script_id) + w.global_init_scripts.delete(script_id=created.script_id) Update init script. 
diff --git a/docs/workspace/grants.rst b/docs/workspace/grants.rst index c3b16e3e..09f2df95 100644 --- a/docs/workspace/grants.rst +++ b/docs/workspace/grants.rst @@ -39,14 +39,14 @@ Grants table_full_name = "%s.%s.%s" % (created_catalog.name, created_schema.name, table_name) - created_table = w.tables.get(get=table_full_name) + created_table = w.tables.get(full_name=table_full_name) - grants = w.grants.get_effective(get_effective=catalog.SecurableType.TABLE) + grants = w.grants.get_effective(securable_type=catalog.SecurableType.TABLE, full_name=created_table.full_name) # cleanup - w.schemas.delete(delete=created_schema.full_name) + w.schemas.delete(full_name=created_schema.full_name) w.catalogs.delete(name=created_catalog.name, force=True) - w.tables.delete(delete=table_full_name) + w.tables.delete(full_name=table_full_name) Get permissions. @@ -89,14 +89,14 @@ Grants table_full_name = "%s.%s.%s" % (created_catalog.name, created_schema.name, table_name) - created_table = w.tables.get(get=table_full_name) + created_table = w.tables.get(full_name=table_full_name) - grants = w.grants.get_effective(get_effective=catalog.SecurableType.TABLE) + grants = w.grants.get_effective(securable_type=catalog.SecurableType.TABLE, full_name=created_table.full_name) # cleanup - w.schemas.delete(delete=created_schema.full_name) + w.schemas.delete(full_name=created_schema.full_name) w.catalogs.delete(name=created_catalog.name, force=True) - w.tables.delete(delete=table_full_name) + w.tables.delete(full_name=table_full_name) Get effective permissions. 
@@ -142,7 +142,7 @@ Grants account_level_group_name = os.environ["TEST_DATA_ENG_GROUP"] - created_table = w.tables.get(get=table_full_name) + created_table = w.tables.get(full_name=table_full_name) x = w.grants.update(full_name=created_table.full_name, securable_type=catalog.SecurableType.TABLE, @@ -152,9 +152,9 @@ Grants ]) # cleanup - w.schemas.delete(delete=created_schema.full_name) + w.schemas.delete(full_name=created_schema.full_name) w.catalogs.delete(name=created_catalog.name, force=True) - w.tables.delete(delete=table_full_name) + w.tables.delete(full_name=table_full_name) Update permissions. diff --git a/docs/workspace/groups.rst b/docs/workspace/groups.rst index 58a5c4b8..1ed4d645 100644 --- a/docs/workspace/groups.rst +++ b/docs/workspace/groups.rst @@ -24,7 +24,7 @@ Groups group = w.groups.create(display_name=f'sdk-{time.time_ns()}') # cleanup - w.groups.delete(delete=group.id) + w.groups.delete(id=group.id) Create a new group. @@ -59,7 +59,7 @@ Groups group = w.groups.create(display_name=f'sdk-{time.time_ns()}') - w.groups.delete(delete=group.id) + w.groups.delete(id=group.id) Delete a group. @@ -85,10 +85,10 @@ Groups group = w.groups.create(display_name=f'sdk-{time.time_ns()}') - fetch = w.groups.get(get=group.id) + fetch = w.groups.get(id=group.id) # cleanup - w.groups.delete(delete=group.id) + w.groups.delete(id=group.id) Get group details. diff --git a/docs/workspace/instance_pools.rst b/docs/workspace/instance_pools.rst index a61a9641..e4e92a65 100644 --- a/docs/workspace/instance_pools.rst +++ b/docs/workspace/instance_pools.rst @@ -34,7 +34,7 @@ Instance Pools created = w.instance_pools.create(instance_pool_name=f'sdk-{time.time_ns()}', node_type_id=smallest) # cleanup - w.instance_pools.delete(delete=created.instance_pool_id) + w.instance_pools.delete(instance_pool_id=created.instance_pool_id) Create a new instance pool. 
@@ -125,7 +125,7 @@ Instance Pools node_type_id=smallest) # cleanup - w.instance_pools.delete(delete=created.instance_pool_id) + w.instance_pools.delete(instance_pool_id=created.instance_pool_id) Edit an existing instance pool. @@ -201,10 +201,10 @@ Instance Pools created = w.instance_pools.create(instance_pool_name=f'sdk-{time.time_ns()}', node_type_id=smallest) - by_id = w.instance_pools.get(get=created.instance_pool_id) + by_id = w.instance_pools.get(instance_pool_id=created.instance_pool_id) # cleanup - w.instance_pools.delete(delete=created.instance_pool_id) + w.instance_pools.delete(instance_pool_id=created.instance_pool_id) Get instance pool information. diff --git a/docs/workspace/ip_access_lists.rst b/docs/workspace/ip_access_lists.rst index d0d6adc3..c53cbcb7 100644 --- a/docs/workspace/ip_access_lists.rst +++ b/docs/workspace/ip_access_lists.rst @@ -38,7 +38,7 @@ IP Access Lists list_type=settings.ListType.BLOCK) # cleanup - w.ip_access_lists.delete(delete=created.ip_access_list.list_id) + w.ip_access_lists.delete(ip_access_list_id=created.ip_access_list.list_id) Create access list. @@ -96,10 +96,10 @@ IP Access Lists ip_addresses=["1.0.0.0/16"], list_type=settings.ListType.BLOCK) - by_id = w.ip_access_lists.get(get=created.ip_access_list.list_id) + by_id = w.ip_access_lists.get(ip_access_list_id=created.ip_access_list.list_id) # cleanup - w.ip_access_lists.delete(delete=created.ip_access_list.list_id) + w.ip_access_lists.delete(ip_access_list_id=created.ip_access_list.list_id) Get access list. @@ -154,7 +154,7 @@ IP Access Lists enabled=False) # cleanup - w.ip_access_lists.delete(delete=created.ip_access_list.list_id) + w.ip_access_lists.delete(ip_access_list_id=created.ip_access_list.list_id) Replace access list. 
diff --git a/docs/workspace/jobs.rst b/docs/workspace/jobs.rst index f78f6184..cea15c51 100644 --- a/docs/workspace/jobs.rst +++ b/docs/workspace/jobs.rst @@ -46,10 +46,10 @@ Jobs timeout_seconds=0) ]) - w.jobs.cancel_all_runs(cancel_all_runs=created_job.job_id) + w.jobs.cancel_all_runs(job_id=created_job.job_id) # cleanup - w.jobs.delete(delete=created_job.job_id) + w.jobs.delete(job_id=created_job.job_id) Cancel all runs of a job. @@ -95,7 +95,7 @@ Jobs cancelled_run = w.jobs.cancel_run(run_id=run_now_response.response.run_id).result() # cleanup - w.jobs.delete(delete=created_job.job_id) + w.jobs.delete(job_id=created_job.job_id) Cancel a job run. @@ -139,7 +139,7 @@ Jobs ]) # cleanup - w.jobs.delete(delete=created_job.job_id) + w.jobs.delete(job_id=created_job.job_id) Create a new job. @@ -273,7 +273,7 @@ Jobs exported_view = w.jobs.export_run(run_id=run_by_id.tasks[0].run_id, views_to_export="CODE") # cleanup - w.jobs.delete(delete=created_job.job_id) + w.jobs.delete(job_id=created_job.job_id) Export and retrieve a job run. @@ -313,10 +313,10 @@ Jobs task_key=f'sdk-{time.time_ns()}') ]).result() - output = w.jobs.get_run_output(get_run_output=run.tasks[0].run_id) + output = w.jobs.get_run_output(run_id=run.tasks[0].run_id) # cleanup - w.jobs.delete_run(delete_run=run.run_id) + w.jobs.delete_run(run_id=run.run_id) Get a single job. @@ -354,10 +354,10 @@ Jobs task_key=f'sdk-{time.time_ns()}') ]).result() - output = w.jobs.get_run_output(get_run_output=run.tasks[0].run_id) + output = w.jobs.get_run_output(run_id=run.tasks[0].run_id) # cleanup - w.jobs.delete_run(delete_run=run.run_id) + w.jobs.delete_run(run_id=run.run_id) Get a single job run. 
@@ -397,10 +397,10 @@ Jobs task_key=f'sdk-{time.time_ns()}') ]).result() - output = w.jobs.get_run_output(get_run_output=run.tasks[0].run_id) + output = w.jobs.get_run_output(run_id=run.tasks[0].run_id) # cleanup - w.jobs.delete_run(delete_run=run.run_id) + w.jobs.delete_run(run_id=run.run_id) Get the output for a single run. @@ -528,7 +528,7 @@ Jobs run_id=run_now_response.response.run_id).result() # cleanup - w.jobs.delete(delete=created_job.job_id) + w.jobs.delete(job_id=created_job.job_id) Repair a job run. @@ -649,12 +649,12 @@ Jobs new_name = f'sdk-{time.time_ns()}' - by_id = w.jobs.get(get=created_job.job_id) + by_id = w.jobs.get(job_id=created_job.job_id) w.jobs.reset(job_id=by_id.job_id, new_settings=jobs.JobSettings(name=new_name, tasks=by_id.settings.tasks)) # cleanup - w.jobs.delete(delete=created_job.job_id) + w.jobs.delete(job_id=created_job.job_id) Overwrites all settings for a job. @@ -703,7 +703,7 @@ Jobs run_by_id = w.jobs.run_now(job_id=created_job.job_id).result() # cleanup - w.jobs.delete(delete=created_job.job_id) + w.jobs.delete(job_id=created_job.job_id) Trigger a new job run. @@ -825,7 +825,7 @@ Jobs ]).result() # cleanup - w.jobs.delete_run(delete_run=run.run_id) + w.jobs.delete_run(run_id=run.run_id) Create and trigger a one-time run. @@ -906,7 +906,7 @@ Jobs w.jobs.update(job_id=created_job.job_id, new_settings=jobs.JobSettings(name=new_name, max_concurrent_runs=5)) # cleanup - w.jobs.delete(delete=created_job.job_id) + w.jobs.delete(job_id=created_job.job_id) Partially update a job. 
diff --git a/docs/workspace/metastores.rst b/docs/workspace/metastores.rst index 2cf34301..ded6810d 100644 --- a/docs/workspace/metastores.rst +++ b/docs/workspace/metastores.rst @@ -173,7 +173,7 @@ Metastores storage_root="s3://%s/%s" % (os.environ["TEST_BUCKET"], f'sdk-{time.time_ns()}')) - _ = w.metastores.get(get=created.metastore_id) + _ = w.metastores.get(id=created.metastore_id) # cleanup w.metastores.delete(id=created.metastore_id, force=True) diff --git a/docs/workspace/permissions.rst b/docs/workspace/permissions.rst index f685b364..69211932 100644 --- a/docs/workspace/permissions.rst +++ b/docs/workspace/permissions.rst @@ -19,7 +19,7 @@ Permissions notebook_path = f'/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}' - obj = w.workspace.get_status(get_status=notebook_path) + obj = w.workspace.get_status(path=notebook_path) levels = w.permissions.get_permission_levels(request_object_type="notebooks", request_object_id="%d" % (obj.object_id)) @@ -50,7 +50,7 @@ Permissions notebook_path = f'/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}' - obj = w.workspace.get_status(get_status=notebook_path) + obj = w.workspace.get_status(path=notebook_path) levels = w.permissions.get_permission_levels(request_object_type="notebooks", request_object_id="%d" % (obj.object_id)) @@ -84,7 +84,7 @@ Permissions group = w.groups.create(display_name=f'sdk-{time.time_ns()}') - obj = w.workspace.get_status(get_status=notebook_path) + obj = w.workspace.get_status(path=notebook_path) w.permissions.set(request_object_type="notebooks", request_object_id="%d" % (obj.object_id), @@ -94,7 +94,7 @@ Permissions ]) # cleanup - w.groups.delete(delete=group.id) + w.groups.delete(id=group.id) Set permissions. 
diff --git a/docs/workspace/pipelines.rst b/docs/workspace/pipelines.rst index 90cd754b..bd92ee0a 100644 --- a/docs/workspace/pipelines.rst +++ b/docs/workspace/pipelines.rst @@ -43,7 +43,7 @@ Pipelines ]) # cleanup - w.pipelines.delete(delete=created.pipeline_id) + w.pipelines.delete(pipeline_id=created.pipeline_id) Create a pipeline. @@ -132,10 +132,10 @@ Pipelines }) ]) - by_id = w.pipelines.get(get=created.pipeline_id) + by_id = w.pipelines.get(pipeline_id=created.pipeline_id) # cleanup - w.pipelines.delete(delete=created.pipeline_id) + w.pipelines.delete(pipeline_id=created.pipeline_id) Get a pipeline. @@ -190,7 +190,7 @@ Pipelines events = w.pipelines.list_pipeline_events(pipeline_id=created.pipeline_id) # cleanup - w.pipelines.delete(delete=created.pipeline_id) + w.pipelines.delete(pipeline_id=created.pipeline_id) List pipeline events. @@ -367,7 +367,7 @@ Pipelines ]) # cleanup - w.pipelines.delete(delete=created.pipeline_id) + w.pipelines.delete(pipeline_id=created.pipeline_id) Edit a pipeline. diff --git a/docs/workspace/providers.rst b/docs/workspace/providers.rst index 241c316a..8a543c2e 100644 --- a/docs/workspace/providers.rst +++ b/docs/workspace/providers.rst @@ -2,7 +2,8 @@ Providers ========= .. py:class:: ProvidersAPI - Databricks Providers REST API + A data provider is an object representing the organization in the real world who shares the data. A + provider contains shares which further contain the shared data. .. py:method:: create(name, authentication_type [, comment, recipient_profile_str]) @@ -26,7 +27,7 @@ Providers created = w.providers.create(name=f'sdk-{time.time_ns()}', recipient_profile_str=public_share_recipient) # cleanup - w.providers.delete(delete=created.name) + w.providers.delete(name=created.name) Create an auth provider. 
@@ -79,10 +80,10 @@ Providers created = w.providers.create(name=f'sdk-{time.time_ns()}', recipient_profile_str=public_share_recipient) - _ = w.providers.get(get=created.name) + _ = w.providers.get(name=created.name) # cleanup - w.providers.delete(delete=created.name) + w.providers.delete(name=created.name) Get a provider. @@ -145,7 +146,7 @@ Providers shares = w.providers.list_shares(name=created.name) # cleanup - w.providers.delete(delete=created.name) + w.providers.delete(name=created.name) List shares by Provider. @@ -183,7 +184,7 @@ Providers _ = w.providers.update(name=created.name, comment="Comment for update") # cleanup - w.providers.delete(delete=created.name) + w.providers.delete(name=created.name) Update a provider. diff --git a/docs/workspace/queries.rst b/docs/workspace/queries.rst index 9c015020..bf6e3c21 100644 --- a/docs/workspace/queries.rst +++ b/docs/workspace/queries.rst @@ -26,7 +26,7 @@ Queries / Results query="SHOW TABLES") # cleanup - w.queries.delete(delete=query.id) + w.queries.delete(query_id=query.id) Create a new query definition. @@ -88,10 +88,10 @@ Queries / Results description="test query from Go SDK", query="SHOW TABLES") - by_id = w.queries.get(get=query.id) + by_id = w.queries.get(query_id=query.id) # cleanup - w.queries.delete(delete=query.id) + w.queries.delete(query_id=query.id) Get a query definition. @@ -171,7 +171,7 @@ Queries / Results query="SELECT 2+2") # cleanup - w.queries.delete(delete=query.id) + w.queries.delete(query_id=query.id) Change a query definition. diff --git a/docs/workspace/recipient_activation.rst b/docs/workspace/recipient_activation.rst index be9cd595..b8bec317 100644 --- a/docs/workspace/recipient_activation.rst +++ b/docs/workspace/recipient_activation.rst @@ -2,7 +2,13 @@ Recipient Activation ==================== .. 
py:class:: RecipientActivationAPI - Databricks Recipient Activation REST API + The Recipient Activation API is only applicable in the open sharing model where the recipient object has + the authentication type of `TOKEN`. The data recipient follows the activation link shared by the data + provider to download the credential file that includes the access token. The recipient will then use the + credential file to establish a secure connection with the provider to receive the shared data. + + Note that you can download the credential file only once. Recipients should treat the downloaded + credential as a secret and must not share it outside of their organization. .. py:method:: get_activation_url_info(activation_url) diff --git a/docs/workspace/recipients.rst b/docs/workspace/recipients.rst index 3242c810..4c09fc03 100644 --- a/docs/workspace/recipients.rst +++ b/docs/workspace/recipients.rst @@ -2,7 +2,19 @@ Recipients ========== .. py:class:: RecipientsAPI - Databricks Recipients REST API + A recipient is an object you create using :method:recipients/create to represent an organization which you + want to allow access shares. The way how sharing works differs depending on whether or not your recipient + has access to a Databricks workspace that is enabled for Unity Catalog: + + - For recipients with access to a Databricks workspace that is enabled for Unity Catalog, you can create a + recipient object along with a unique sharing identifier you get from the recipient. The sharing identifier + is the key identifier that enables the secure connection. This sharing mode is called + **Databricks-to-Databricks sharing**. + + - For recipients without access to a Databricks workspace that is enabled for Unity Catalog, when you + create a recipient object, Databricks generates an activation link you can send to the recipient. 
The + recipient follows the activation link to download the credential file, and then uses the credential file + to establish a secure connection to receive the shared data. This sharing mode is called **open sharing**. .. py:method:: create(name, authentication_type [, comment, data_recipient_global_metastore_id, ip_access_list, owner, properties_kvpairs, sharing_code]) @@ -19,7 +31,7 @@ Recipients created = w.recipients.create(name=f'sdk-{time.time_ns()}') # cleanup - w.recipients.delete(delete=created.name) + w.recipients.delete(name=created.name) Create a share recipient. @@ -75,10 +87,10 @@ Recipients created = w.recipients.create(name=f'sdk-{time.time_ns()}') - _ = w.recipients.get(get=created.name) + _ = w.recipients.get(name=created.name) # cleanup - w.recipients.delete(delete=created.name) + w.recipients.delete(name=created.name) Get a share recipient. @@ -136,7 +148,7 @@ Recipients recipient_info = w.recipients.rotate_token(name=created.name, existing_token_expire_in_seconds=0) # cleanup - w.recipients.delete(delete=created.name) + w.recipients.delete(name=created.name) Rotate a token. @@ -167,10 +179,10 @@ Recipients created = w.recipients.create(name=f'sdk-{time.time_ns()}') - share_permissions = w.recipients.share_permissions(share_permissions=created.name) + share_permissions = w.recipients.share_permissions(name=created.name) # cleanup - w.recipients.delete(delete=created.name) + w.recipients.delete(name=created.name) Get recipient share permissions. @@ -200,7 +212,7 @@ Recipients w.recipients.update(name=created.name, comment=f'sdk-{time.time_ns()}') # cleanup - w.recipients.delete(delete=created.name) + w.recipients.delete(name=created.name) Update a share recipient. 
diff --git a/docs/workspace/repos.rst b/docs/workspace/repos.rst index 47580bd9..fa20be14 100644 --- a/docs/workspace/repos.rst +++ b/docs/workspace/repos.rst @@ -29,7 +29,7 @@ Repos ri = w.repos.create(path=root, url="https://github.com/shreyas-goenka/empty-repo.git", provider="github") # cleanup - w.repos.delete(delete=ri.id) + w.repos.delete(repo_id=ri.id) Create a repo. @@ -79,10 +79,10 @@ Repos ri = w.repos.create(path=root, url="https://github.com/shreyas-goenka/empty-repo.git", provider="github") - by_id = w.repos.get(get=ri.id) + by_id = w.repos.get(repo_id=ri.id) # cleanup - w.repos.delete(delete=ri.id) + w.repos.delete(repo_id=ri.id) Get a repo. @@ -140,7 +140,7 @@ Repos w.repos.update(repo_id=ri.id, branch="foo") # cleanup - w.repos.delete(delete=ri.id) + w.repos.delete(repo_id=ri.id) Update a repo. diff --git a/docs/workspace/schemas.rst b/docs/workspace/schemas.rst index a8e6731e..54d0bb61 100644 --- a/docs/workspace/schemas.rst +++ b/docs/workspace/schemas.rst @@ -25,7 +25,7 @@ Schemas # cleanup w.catalogs.delete(name=created_catalog.name, force=True) - w.schemas.delete(delete=created_schema.full_name) + w.schemas.delete(full_name=created_schema.full_name) Create a schema. @@ -75,11 +75,11 @@ Schemas created = w.schemas.create(name=f'sdk-{time.time_ns()}', catalog_name=new_catalog.name) - _ = w.schemas.get(get=created.full_name) + _ = w.schemas.get(full_name=created.full_name) # cleanup w.catalogs.delete(name=new_catalog.name, force=True) - w.schemas.delete(delete=created.full_name) + w.schemas.delete(full_name=created.full_name) Get a schema. @@ -144,7 +144,7 @@ Schemas # cleanup w.catalogs.delete(name=new_catalog.name, force=True) - w.schemas.delete(delete=created.full_name) + w.schemas.delete(full_name=created.full_name) Update a schema. 
diff --git a/docs/workspace/secrets.rst b/docs/workspace/secrets.rst index 8dd83513..11bd0a94 100644 --- a/docs/workspace/secrets.rst +++ b/docs/workspace/secrets.rst @@ -32,7 +32,7 @@ Secret # cleanup w.secrets.delete_secret(scope=scope_name, key=key_name) - w.secrets.delete_scope(delete_scope=scope_name) + w.secrets.delete_scope(scope=scope_name) Create a new secret scope. @@ -138,11 +138,11 @@ Secret w.secrets.create_scope(scope=scope_name) - acls = w.secrets.list_acls(list_acls=scope_name) + acls = w.secrets.list_acls(scope=scope_name) # cleanup w.secrets.delete_secret(scope=scope_name, key=key_name) - w.secrets.delete_scope(delete_scope=scope_name) + w.secrets.delete_scope(scope=scope_name) Lists ACLs. @@ -196,11 +196,11 @@ Secret w.secrets.create_scope(scope=scope_name) - scrts = w.secrets.list_secrets(list_secrets=scope_name) + scrts = w.secrets.list_secrets(scope=scope_name) # cleanup w.secrets.delete_secret(scope=scope_name, key=key_name) - w.secrets.delete_scope(delete_scope=scope_name) + w.secrets.delete_scope(scope=scope_name) List secret keys. @@ -241,9 +241,9 @@ Secret w.secrets.put_acl(scope=scope_name, permission=workspace.AclPermission.MANAGE, principal=group.display_name) # cleanup - w.groups.delete(delete=group.id) + w.groups.delete(id=group.id) w.secrets.delete_secret(scope=scope_name, key=key_name) - w.secrets.delete_scope(delete_scope=scope_name) + w.secrets.delete_scope(scope=scope_name) Create/update an ACL. @@ -304,7 +304,7 @@ Secret # cleanup w.secrets.delete_secret(scope=scope_name, key=key_name) - w.secrets.delete_scope(delete_scope=scope_name) + w.secrets.delete_scope(scope=scope_name) Add a secret. 
diff --git a/docs/workspace/service_principals.rst b/docs/workspace/service_principals.rst index 34cbb0c0..5bf0e93d 100644 --- a/docs/workspace/service_principals.rst +++ b/docs/workspace/service_principals.rst @@ -27,7 +27,7 @@ Service Principals groups=[iam.ComplexValue(value=groups["admins"])]) # cleanup - w.service_principals.delete(delete=spn.id) + w.service_principals.delete(id=spn.id) Create a service principal. @@ -75,10 +75,10 @@ Service Principals created = w.service_principals.create(display_name=f'sdk-{time.time_ns()}') - by_id = w.service_principals.get(get=created.id) + by_id = w.service_principals.get(id=created.id) # cleanup - w.service_principals.delete(delete=created.id) + w.service_principals.delete(id=created.id) Get service principal details. @@ -165,7 +165,7 @@ Service Principals roles=[iam.ComplexValue(value="xyz")]) # cleanup - w.service_principals.delete(delete=created.id) + w.service_principals.delete(id=created.id) Replace service principal. diff --git a/docs/workspace/shares.rst b/docs/workspace/shares.rst index 8f458917..b961e689 100644 --- a/docs/workspace/shares.rst +++ b/docs/workspace/shares.rst @@ -2,7 +2,10 @@ Shares ====== .. py:class:: SharesAPI - Databricks Shares REST API + A share is a container instantiated with :method:shares/create. Once created you can iteratively register + a collection of existing data assets defined within the metastore using :method:shares/update. You can + register data assets under their original name, qualified by their original schema, or provide alternate + exposed names. .. py:method:: create(name [, comment]) @@ -19,7 +22,7 @@ Shares created_share = w.shares.create(name=f'sdk-{time.time_ns()}') # cleanup - w.shares.delete(delete=created_share.name) + w.shares.delete(name=created_share.name) Create a share. 
@@ -60,10 +63,10 @@ Shares created_share = w.shares.create(name=f'sdk-{time.time_ns()}') - _ = w.shares.get(get=created_share.name) + _ = w.shares.get(name=created_share.name) # cleanup - w.shares.delete(delete=created_share.name) + w.shares.delete(name=created_share.name) Get a share. @@ -148,10 +151,10 @@ Shares ]) # cleanup - w.schemas.delete(delete=created_schema.full_name) + w.schemas.delete(full_name=created_schema.full_name) w.catalogs.delete(name=created_catalog.name, force=True) - w.tables.delete(delete=table_full_name) - w.shares.delete(delete=created_share.name) + w.tables.delete(full_name=table_full_name) + w.shares.delete(name=created_share.name) Update a share. diff --git a/docs/workspace/storage_credentials.rst b/docs/workspace/storage_credentials.rst index eed1b363..997a3090 100644 --- a/docs/workspace/storage_credentials.rst +++ b/docs/workspace/storage_credentials.rst @@ -98,10 +98,10 @@ Storage Credentials name=f'sdk-{time.time_ns()}', aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"])) - by_name = w.storage_credentials.get(get=created.name) + by_name = w.storage_credentials.get(name=created.name) # cleanup - w.storage_credentials.delete(delete=created.name) + w.storage_credentials.delete(name=created.name) Get a credential. diff --git a/docs/workspace/tables.rst b/docs/workspace/tables.rst index 7508cfcf..d323ba0f 100644 --- a/docs/workspace/tables.rst +++ b/docs/workspace/tables.rst @@ -52,12 +52,12 @@ Tables table_full_name = "%s.%s.%s" % (created_catalog.name, created_schema.name, table_name) - created_table = w.tables.get(get=table_full_name) + created_table = w.tables.get(full_name=table_full_name) # cleanup - w.schemas.delete(delete=created_schema.full_name) + w.schemas.delete(full_name=created_schema.full_name) w.catalogs.delete(name=created_catalog.name, force=True) - w.tables.delete(delete=table_full_name) + w.tables.delete(full_name=table_full_name) Get a table. 
@@ -93,7 +93,7 @@ Tables all_tables = w.tables.list(catalog_name=created_catalog.name, schema_name=created_schema.name) # cleanup - w.schemas.delete(delete=created_schema.full_name) + w.schemas.delete(full_name=created_schema.full_name) w.catalogs.delete(name=created_catalog.name, force=True) List tables. @@ -142,7 +142,7 @@ Tables schema_name_pattern=created_schema.name) # cleanup - w.schemas.delete(delete=created_schema.full_name) + w.schemas.delete(full_name=created_schema.full_name) w.catalogs.delete(name=created_catalog.name, force=True) List table summaries. diff --git a/docs/workspace/token_management.rst b/docs/workspace/token_management.rst index 87de081f..857874cd 100644 --- a/docs/workspace/token_management.rst +++ b/docs/workspace/token_management.rst @@ -26,8 +26,8 @@ Token management obo = w.token_management.create_obo_token(application_id=spn.application_id, lifetime_seconds=60) # cleanup - w.service_principals.delete(delete=spn.id) - w.token_management.delete(delete=obo.token_info.token_id) + w.service_principals.delete(id=spn.id) + w.token_management.delete(token_id=obo.token_info.token_id) Create on-behalf token. @@ -75,11 +75,11 @@ Token management obo = w.token_management.create_obo_token(application_id=spn.application_id, lifetime_seconds=60) - by_id = w.token_management.get(get=obo.token_info.token_id) + by_id = w.token_management.get(token_id=obo.token_info.token_id) # cleanup - w.service_principals.delete(delete=spn.id) - w.token_management.delete(delete=obo.token_info.token_id) + w.service_principals.delete(id=spn.id) + w.token_management.delete(token_id=obo.token_info.token_id) Get token info. 
diff --git a/docs/workspace/tokens.rst b/docs/workspace/tokens.rst index 15008d4f..e70ab8d4 100644 --- a/docs/workspace/tokens.rst +++ b/docs/workspace/tokens.rst @@ -20,7 +20,7 @@ Token token = w.tokens.create(comment=f'sdk-{time.time_ns()}', lifetime_seconds=300) # cleanup - w.tokens.delete(delete=token.token_info.token_id) + w.tokens.delete(token_id=token.token_info.token_id) Create a user token. diff --git a/docs/workspace/users.rst b/docs/workspace/users.rst index b6fda861..13580d49 100644 --- a/docs/workspace/users.rst +++ b/docs/workspace/users.rst @@ -67,7 +67,7 @@ Users other_owner = w.users.create(user_name=f'sdk-{time.time_ns()}@example.com') - w.users.delete(delete=other_owner.id) + w.users.delete(id=other_owner.id) Delete a user. diff --git a/docs/workspace/volumes.rst b/docs/workspace/volumes.rst index 7d9c6d89..e22e884c 100644 --- a/docs/workspace/volumes.rst +++ b/docs/workspace/volumes.rst @@ -45,9 +45,9 @@ Volumes volume_type=catalog.VolumeType.EXTERNAL) # cleanup - w.schemas.delete(delete=created_schema.full_name) + w.schemas.delete(full_name=created_schema.full_name) w.catalogs.delete(name=created_catalog.name, force=True) - w.volumes.delete(delete=created_volume.full_name) + w.volumes.delete(full_name_arg=created_volume.full_name) Create a Volume. @@ -117,7 +117,7 @@ Volumes all_volumes = w.volumes.list(catalog_name=created_catalog.name, schema_name=created_schema.name) # cleanup - w.schemas.delete(delete=created_schema.full_name) + w.schemas.delete(full_name=created_schema.full_name) w.catalogs.delete(name=created_catalog.name, force=True) List Volumes. 
@@ -175,12 +175,12 @@ Volumes storage_location=external_location.url, volume_type=catalog.VolumeType.EXTERNAL) - loaded_volume = w.volumes.read(read=created_volume.full_name) + loaded_volume = w.volumes.read(full_name_arg=created_volume.full_name) # cleanup - w.schemas.delete(delete=created_schema.full_name) + w.schemas.delete(full_name=created_schema.full_name) w.catalogs.delete(name=created_catalog.name, force=True) - w.volumes.delete(delete=created_volume.full_name) + w.volumes.delete(full_name_arg=created_volume.full_name) Get a Volume. @@ -231,14 +231,14 @@ Volumes storage_location=external_location.url, volume_type=catalog.VolumeType.EXTERNAL) - loaded_volume = w.volumes.read(read=created_volume.full_name) + loaded_volume = w.volumes.read(full_name_arg=created_volume.full_name) _ = w.volumes.update(full_name_arg=loaded_volume.full_name, comment="Updated volume comment") # cleanup - w.schemas.delete(delete=created_schema.full_name) + w.schemas.delete(full_name=created_schema.full_name) w.catalogs.delete(name=created_catalog.name, force=True) - w.volumes.delete(delete=created_volume.full_name) + w.volumes.delete(full_name_arg=created_volume.full_name) Update a Volume. diff --git a/docs/workspace/warehouses.rst b/docs/workspace/warehouses.rst index c3ba5e89..a843a10c 100644 --- a/docs/workspace/warehouses.rst +++ b/docs/workspace/warehouses.rst @@ -23,7 +23,7 @@ SQL Warehouses auto_stop_mins=10).result() # cleanup - w.warehouses.delete(delete=created.id) + w.warehouses.delete(id=created.id) Create a warehouse. @@ -125,7 +125,7 @@ SQL Warehouses auto_stop_mins=10) # cleanup - w.warehouses.delete(delete=created.id) + w.warehouses.delete(id=created.id) Update a warehouse. @@ -210,10 +210,10 @@ SQL Warehouses max_num_clusters=1, auto_stop_mins=10).result() - wh = w.warehouses.get(get=created.id) + wh = w.warehouses.get(id=created.id) # cleanup - w.warehouses.delete(delete=created.id) + w.warehouses.delete(id=created.id) Get warehouse info. 
diff --git a/docs/workspace/workspace.rst b/docs/workspace/workspace.rst index 4748f273..62447ff1 100644 --- a/docs/workspace/workspace.rst +++ b/docs/workspace/workspace.rst @@ -118,7 +118,7 @@ Workspace notebook_path = f'/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}' - obj = w.workspace.get_status(get_status=notebook_path) + obj = w.workspace.get_status(path=notebook_path) Get status. diff --git a/docs/workspace/workspace_bindings.rst b/docs/workspace/workspace_bindings.rst index 33e7c6ca..970375da 100644 --- a/docs/workspace/workspace_bindings.rst +++ b/docs/workspace/workspace_bindings.rst @@ -21,7 +21,7 @@ Workspace Bindings created = w.catalogs.create(name=f'sdk-{time.time_ns()}') - bindings = w.workspace_bindings.get(get=created.name) + bindings = w.workspace_bindings.get(name=created.name) # cleanup w.catalogs.delete(name=created.name, force=True) diff --git a/examples/alerts/create_alerts.py b/examples/alerts/create_alerts.py index 951a5334..72367ca8 100755 --- a/examples/alerts/create_alerts.py +++ b/examples/alerts/create_alerts.py @@ -17,5 +17,5 @@ query_id=query.id) # cleanup -w.queries.delete(delete=query.id) -w.alerts.delete(delete=alert.id) +w.queries.delete(query_id=query.id) +w.alerts.delete(alert_id=alert.id) diff --git a/examples/alerts/get_alerts.py b/examples/alerts/get_alerts.py index 4e9d5674..3c24e856 100755 --- a/examples/alerts/get_alerts.py +++ b/examples/alerts/get_alerts.py @@ -16,8 +16,8 @@ name=f'sdk-{time.time_ns()}', query_id=query.id) -by_id = w.alerts.get(get=alert.id) +by_id = w.alerts.get(alert_id=alert.id) # cleanup -w.queries.delete(delete=query.id) -w.alerts.delete(delete=alert.id) +w.queries.delete(query_id=query.id) +w.alerts.delete(alert_id=alert.id) diff --git a/examples/alerts/update_alerts.py b/examples/alerts/update_alerts.py index e454db50..130f7191 100755 --- a/examples/alerts/update_alerts.py +++ b/examples/alerts/update_alerts.py @@ -22,5 +22,5 @@ query_id=query.id) # cleanup 
-w.queries.delete(delete=query.id) -w.alerts.delete(delete=alert.id) +w.queries.delete(query_id=query.id) +w.alerts.delete(alert_id=alert.id) diff --git a/examples/budgets/create_budgets.py b/examples/budgets/create_budgets.py index 6c8d3f72..12f20786 100755 --- a/examples/budgets/create_budgets.py +++ b/examples/budgets/create_budgets.py @@ -14,4 +14,4 @@ alerts=[billing.BudgetAlert(email_notifications=["admin@example.com"], min_percentage=50)])) # cleanup -a.budgets.delete(delete=created.budget.budget_id) +a.budgets.delete(budget_id=created.budget.budget_id) diff --git a/examples/budgets/get_budgets.py b/examples/budgets/get_budgets.py index e5bda369..8640fc97 100755 --- a/examples/budgets/get_budgets.py +++ b/examples/budgets/get_budgets.py @@ -13,7 +13,7 @@ target_amount="100", alerts=[billing.BudgetAlert(email_notifications=["admin@example.com"], min_percentage=50)])) -by_id = a.budgets.get(get=created.budget.budget_id) +by_id = a.budgets.get(budget_id=created.budget.budget_id) # cleanup -a.budgets.delete(delete=created.budget.budget_id) +a.budgets.delete(budget_id=created.budget.budget_id) diff --git a/examples/budgets/update_budgets.py b/examples/budgets/update_budgets.py index 879b363a..1a0193b1 100755 --- a/examples/budgets/update_budgets.py +++ b/examples/budgets/update_budgets.py @@ -25,4 +25,4 @@ ])) # cleanup -a.budgets.delete(delete=created.budget.budget_id) +a.budgets.delete(budget_id=created.budget.budget_id) diff --git a/examples/catalogs/get_catalogs.py b/examples/catalogs/get_catalogs.py index adbda9ef..af9c4f66 100755 --- a/examples/catalogs/get_catalogs.py +++ b/examples/catalogs/get_catalogs.py @@ -6,7 +6,7 @@ created = w.catalogs.create(name=f'sdk-{time.time_ns()}') -_ = w.catalogs.get(get=created.name) +_ = w.catalogs.get(name=created.name) # cleanup w.catalogs.delete(name=created.name, force=True) diff --git a/examples/cluster_policies/create_cluster_policies.py b/examples/cluster_policies/create_cluster_policies.py index 6e059c6f..53d6ce96 
100755 --- a/examples/cluster_policies/create_cluster_policies.py +++ b/examples/cluster_policies/create_cluster_policies.py @@ -14,4 +14,4 @@ """) # cleanup -w.cluster_policies.delete(delete=created.policy_id) +w.cluster_policies.delete(policy_id=created.policy_id) diff --git a/examples/cluster_policies/edit_cluster_policies.py b/examples/cluster_policies/edit_cluster_policies.py index 2289137a..5c2777e1 100755 --- a/examples/cluster_policies/edit_cluster_policies.py +++ b/examples/cluster_policies/edit_cluster_policies.py @@ -13,7 +13,7 @@ } """) -policy = w.cluster_policies.get(get=created.policy_id) +policy = w.cluster_policies.get(policy_id=created.policy_id) w.cluster_policies.edit(policy_id=policy.policy_id, name=policy.name, @@ -26,4 +26,4 @@ """) # cleanup -w.cluster_policies.delete(delete=created.policy_id) +w.cluster_policies.delete(policy_id=created.policy_id) diff --git a/examples/cluster_policies/get_cluster_policies.py b/examples/cluster_policies/get_cluster_policies.py index 6d9dcbed..d93196be 100755 --- a/examples/cluster_policies/get_cluster_policies.py +++ b/examples/cluster_policies/get_cluster_policies.py @@ -13,7 +13,7 @@ } """) -policy = w.cluster_policies.get(get=created.policy_id) +policy = w.cluster_policies.get(policy_id=created.policy_id) # cleanup -w.cluster_policies.delete(delete=created.policy_id) +w.cluster_policies.delete(policy_id=created.policy_id) diff --git a/examples/clusters/change_owner_clusters_api_integration.py b/examples/clusters/change_owner_clusters_api_integration.py index c2fdb8a9..bea93c1c 100755 --- a/examples/clusters/change_owner_clusters_api_integration.py +++ b/examples/clusters/change_owner_clusters_api_integration.py @@ -20,5 +20,5 @@ w.clusters.change_owner(cluster_id=clstr.cluster_id, owner_username=other_owner.user_name) # cleanup -w.users.delete(delete=other_owner.id) -w.clusters.permanent_delete(permanent_delete=clstr.cluster_id) +w.users.delete(id=other_owner.id) 
+w.clusters.permanent_delete(cluster_id=clstr.cluster_id) diff --git a/examples/clusters/create_clusters_api_integration.py b/examples/clusters/create_clusters_api_integration.py index 08991088..9b6111a5 100755 --- a/examples/clusters/create_clusters_api_integration.py +++ b/examples/clusters/create_clusters_api_integration.py @@ -16,4 +16,4 @@ num_workers=1).result() # cleanup -w.clusters.permanent_delete(permanent_delete=clstr.cluster_id) +w.clusters.permanent_delete(cluster_id=clstr.cluster_id) diff --git a/examples/clusters/delete_clusters_api_integration.py b/examples/clusters/delete_clusters_api_integration.py index 2fbf5a95..3f61f0fe 100755 --- a/examples/clusters/delete_clusters_api_integration.py +++ b/examples/clusters/delete_clusters_api_integration.py @@ -15,7 +15,7 @@ autotermination_minutes=15, num_workers=1).result() -_ = w.clusters.delete(delete=clstr.cluster_id).result() +_ = w.clusters.delete(cluster_id=clstr.cluster_id).result() # cleanup -w.clusters.permanent_delete(permanent_delete=clstr.cluster_id) +w.clusters.permanent_delete(cluster_id=clstr.cluster_id) diff --git a/examples/clusters/edit_clusters_api_integration.py b/examples/clusters/edit_clusters_api_integration.py index f268d673..d58ad65e 100755 --- a/examples/clusters/edit_clusters_api_integration.py +++ b/examples/clusters/edit_clusters_api_integration.py @@ -23,4 +23,4 @@ num_workers=2).result() # cleanup -w.clusters.permanent_delete(permanent_delete=clstr.cluster_id) +w.clusters.permanent_delete(cluster_id=clstr.cluster_id) diff --git a/examples/clusters/events_clusters_api_integration.py b/examples/clusters/events_clusters_api_integration.py index de777d69..971f520e 100755 --- a/examples/clusters/events_clusters_api_integration.py +++ b/examples/clusters/events_clusters_api_integration.py @@ -18,4 +18,4 @@ events = w.clusters.events(cluster_id=clstr.cluster_id) # cleanup -w.clusters.permanent_delete(permanent_delete=clstr.cluster_id) 
+w.clusters.permanent_delete(cluster_id=clstr.cluster_id) diff --git a/examples/clusters/get_clusters_api_integration.py b/examples/clusters/get_clusters_api_integration.py index 7772d5b6..23e833cf 100755 --- a/examples/clusters/get_clusters_api_integration.py +++ b/examples/clusters/get_clusters_api_integration.py @@ -15,7 +15,7 @@ autotermination_minutes=15, num_workers=1).result() -by_id = w.clusters.get(get=clstr.cluster_id) +by_id = w.clusters.get(cluster_id=clstr.cluster_id) # cleanup -w.clusters.permanent_delete(permanent_delete=clstr.cluster_id) +w.clusters.permanent_delete(cluster_id=clstr.cluster_id) diff --git a/examples/clusters/pin_clusters_api_integration.py b/examples/clusters/pin_clusters_api_integration.py index 5dc2002e..b3b57097 100755 --- a/examples/clusters/pin_clusters_api_integration.py +++ b/examples/clusters/pin_clusters_api_integration.py @@ -15,7 +15,7 @@ autotermination_minutes=15, num_workers=1).result() -w.clusters.pin(pin=clstr.cluster_id) +w.clusters.pin(cluster_id=clstr.cluster_id) # cleanup -w.clusters.permanent_delete(permanent_delete=clstr.cluster_id) +w.clusters.permanent_delete(cluster_id=clstr.cluster_id) diff --git a/examples/clusters/resize_clusters_api_integration.py b/examples/clusters/resize_clusters_api_integration.py index 9e614e9e..96ca09ad 100755 --- a/examples/clusters/resize_clusters_api_integration.py +++ b/examples/clusters/resize_clusters_api_integration.py @@ -18,4 +18,4 @@ by_id = w.clusters.resize(cluster_id=clstr.cluster_id, num_workers=1).result() # cleanup -w.clusters.permanent_delete(permanent_delete=clstr.cluster_id) +w.clusters.permanent_delete(cluster_id=clstr.cluster_id) diff --git a/examples/clusters/restart_clusters_api_integration.py b/examples/clusters/restart_clusters_api_integration.py index ca3814ef..9ddd8cd2 100755 --- a/examples/clusters/restart_clusters_api_integration.py +++ b/examples/clusters/restart_clusters_api_integration.py @@ -18,4 +18,4 @@ _ = 
w.clusters.restart(cluster_id=clstr.cluster_id).result() # cleanup -w.clusters.permanent_delete(permanent_delete=clstr.cluster_id) +w.clusters.permanent_delete(cluster_id=clstr.cluster_id) diff --git a/examples/clusters/start_clusters_api_integration.py b/examples/clusters/start_clusters_api_integration.py index 78c85141..5870b68a 100755 --- a/examples/clusters/start_clusters_api_integration.py +++ b/examples/clusters/start_clusters_api_integration.py @@ -15,7 +15,7 @@ autotermination_minutes=15, num_workers=1).result() -_ = w.clusters.start(start=clstr.cluster_id).result() +_ = w.clusters.start(cluster_id=clstr.cluster_id).result() # cleanup -w.clusters.permanent_delete(permanent_delete=clstr.cluster_id) +w.clusters.permanent_delete(cluster_id=clstr.cluster_id) diff --git a/examples/clusters/unpin_clusters_api_integration.py b/examples/clusters/unpin_clusters_api_integration.py index 175903a4..d671a0e5 100755 --- a/examples/clusters/unpin_clusters_api_integration.py +++ b/examples/clusters/unpin_clusters_api_integration.py @@ -15,7 +15,7 @@ autotermination_minutes=15, num_workers=1).result() -w.clusters.unpin(unpin=clstr.cluster_id) +w.clusters.unpin(cluster_id=clstr.cluster_id) # cleanup -w.clusters.permanent_delete(permanent_delete=clstr.cluster_id) +w.clusters.permanent_delete(cluster_id=clstr.cluster_id) diff --git a/examples/credentials/create_credentials.py b/examples/credentials/create_credentials.py index 7c1afd5f..9885467c 100755 --- a/examples/credentials/create_credentials.py +++ b/examples/credentials/create_credentials.py @@ -12,4 +12,4 @@ role_arn=os.environ["TEST_CROSSACCOUNT_ARN"]))) # cleanup -a.credentials.delete(delete=role.credentials_id) +a.credentials.delete(credentials_id=role.credentials_id) diff --git a/examples/credentials/create_log_delivery.py b/examples/credentials/create_log_delivery.py index 265a03c1..28b521cd 100755 --- a/examples/credentials/create_log_delivery.py +++ b/examples/credentials/create_log_delivery.py @@ -12,4 +12,4 @@ 
role_arn=os.environ["TEST_LOGDELIVERY_ARN"]))) # cleanup -a.credentials.delete(delete=creds.credentials_id) +a.credentials.delete(credentials_id=creds.credentials_id) diff --git a/examples/credentials/create_workspaces.py b/examples/credentials/create_workspaces.py index 7c1afd5f..9885467c 100755 --- a/examples/credentials/create_workspaces.py +++ b/examples/credentials/create_workspaces.py @@ -12,4 +12,4 @@ role_arn=os.environ["TEST_CROSSACCOUNT_ARN"]))) # cleanup -a.credentials.delete(delete=role.credentials_id) +a.credentials.delete(credentials_id=role.credentials_id) diff --git a/examples/credentials/get_credentials.py b/examples/credentials/get_credentials.py index b0dd297d..847fc70b 100755 --- a/examples/credentials/get_credentials.py +++ b/examples/credentials/get_credentials.py @@ -11,7 +11,7 @@ aws_credentials=provisioning.CreateCredentialAwsCredentials(sts_role=provisioning.CreateCredentialStsRole( role_arn=os.environ["TEST_CROSSACCOUNT_ARN"]))) -by_id = a.credentials.get(get=role.credentials_id) +by_id = a.credentials.get(credentials_id=role.credentials_id) # cleanup -a.credentials.delete(delete=role.credentials_id) +a.credentials.delete(credentials_id=role.credentials_id) diff --git a/examples/dashboards/create_dashboards.py b/examples/dashboards/create_dashboards.py index f1988d1d..9ac5670a 100755 --- a/examples/dashboards/create_dashboards.py +++ b/examples/dashboards/create_dashboards.py @@ -7,4 +7,4 @@ created = w.dashboards.create(name=f'sdk-{time.time_ns()}') # cleanup -w.dashboards.delete(delete=created.id) +w.dashboards.delete(dashboard_id=created.id) diff --git a/examples/dashboards/delete_dashboards.py b/examples/dashboards/delete_dashboards.py index 87416276..8c5806ec 100755 --- a/examples/dashboards/delete_dashboards.py +++ b/examples/dashboards/delete_dashboards.py @@ -6,7 +6,7 @@ created = w.dashboards.create(name=f'sdk-{time.time_ns()}') -w.dashboards.delete(delete=created.id) +w.dashboards.delete(dashboard_id=created.id) # cleanup 
-w.dashboards.delete(delete=created.id) +w.dashboards.delete(dashboard_id=created.id) diff --git a/examples/dashboards/get_dashboards.py b/examples/dashboards/get_dashboards.py index 0282a6cd..3aa5ae55 100755 --- a/examples/dashboards/get_dashboards.py +++ b/examples/dashboards/get_dashboards.py @@ -6,7 +6,7 @@ created = w.dashboards.create(name=f'sdk-{time.time_ns()}') -by_id = w.dashboards.get(get=created.id) +by_id = w.dashboards.get(dashboard_id=created.id) # cleanup -w.dashboards.delete(delete=created.id) +w.dashboards.delete(dashboard_id=created.id) diff --git a/examples/dashboards/restore_dashboards.py b/examples/dashboards/restore_dashboards.py index b721f93a..7a34049d 100755 --- a/examples/dashboards/restore_dashboards.py +++ b/examples/dashboards/restore_dashboards.py @@ -9,4 +9,4 @@ w.dashboards.restore(dashboard_id=created.id) # cleanup -w.dashboards.delete(delete=created.id) +w.dashboards.delete(dashboard_id=created.id) diff --git a/examples/databricks/must_tokens.py b/examples/databricks/must_tokens.py index f3d5196b..3c1c6c6d 100755 --- a/examples/databricks/must_tokens.py +++ b/examples/databricks/must_tokens.py @@ -10,4 +10,4 @@ new_workspace_client(databricks.Config(host=w.config.host, token=token.token_value, auth_type="pat"))) # cleanup -w.tokens.delete(delete=token.token_info.token_id) +w.tokens.delete(token_id=token.token_info.token_id) diff --git a/examples/encryption_keys/create_encryption_keys.py b/examples/encryption_keys/create_encryption_keys.py index f3f4b131..83201e13 100755 --- a/examples/encryption_keys/create_encryption_keys.py +++ b/examples/encryption_keys/create_encryption_keys.py @@ -10,4 +10,4 @@ use_cases=[provisioning.KeyUseCase.MANAGED_SERVICES]) # cleanup -a.encryption_keys.delete(delete=created.customer_managed_key_id) +a.encryption_keys.delete(customer_managed_key_id=created.customer_managed_key_id) diff --git a/examples/encryption_keys/get_encryption_keys.py b/examples/encryption_keys/get_encryption_keys.py index 
cf68f288..9b325dc2 100755 --- a/examples/encryption_keys/get_encryption_keys.py +++ b/examples/encryption_keys/get_encryption_keys.py @@ -9,7 +9,7 @@ key_arn=os.environ["TEST_MANAGED_KMS_KEY_ARN"], key_alias=os.environ["TEST_STORAGE_KMS_KEY_ALIAS"]), use_cases=[provisioning.KeyUseCase.MANAGED_SERVICES]) -by_id = a.encryption_keys.get(get=created.customer_managed_key_id) +by_id = a.encryption_keys.get(customer_managed_key_id=created.customer_managed_key_id) # cleanup -a.encryption_keys.delete(delete=created.customer_managed_key_id) +a.encryption_keys.delete(customer_managed_key_id=created.customer_managed_key_id) diff --git a/examples/external_locations/create_external_locations_on_aws.py b/examples/external_locations/create_external_locations_on_aws.py index 61ba2336..f136e305 100755 --- a/examples/external_locations/create_external_locations_on_aws.py +++ b/examples/external_locations/create_external_locations_on_aws.py @@ -15,5 +15,5 @@ url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f'sdk-{time.time_ns()}')) # cleanup -w.storage_credentials.delete(delete=credential.name) -w.external_locations.delete(delete=created.name) +w.storage_credentials.delete(name=credential.name) +w.external_locations.delete(name=created.name) diff --git a/examples/external_locations/get_external_locations_on_aws.py b/examples/external_locations/get_external_locations_on_aws.py index ef90d4a5..8189fc0d 100755 --- a/examples/external_locations/get_external_locations_on_aws.py +++ b/examples/external_locations/get_external_locations_on_aws.py @@ -14,8 +14,8 @@ credential_name=credential.name, url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f'sdk-{time.time_ns()}')) -_ = w.external_locations.get(get=created.name) +_ = w.external_locations.get(name=created.name) # cleanup -w.storage_credentials.delete(delete=credential.name) -w.external_locations.delete(delete=created.name) +w.storage_credentials.delete(name=credential.name) +w.external_locations.delete(name=created.name) diff --git 
a/examples/external_locations/update_external_locations_on_aws.py b/examples/external_locations/update_external_locations_on_aws.py index ae2b0ca0..8d0e8e60 100755 --- a/examples/external_locations/update_external_locations_on_aws.py +++ b/examples/external_locations/update_external_locations_on_aws.py @@ -19,5 +19,5 @@ url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f'sdk-{time.time_ns()}')) # cleanup -w.storage_credentials.delete(delete=credential.name) -w.external_locations.delete(delete=created.name) +w.storage_credentials.delete(name=credential.name) +w.external_locations.delete(name=created.name) diff --git a/examples/git_credentials/create_git_credentials.py b/examples/git_credentials/create_git_credentials.py index 8c85306b..d505d22a 100755 --- a/examples/git_credentials/create_git_credentials.py +++ b/examples/git_credentials/create_git_credentials.py @@ -5,4 +5,4 @@ cr = w.git_credentials.create(git_provider="gitHub", git_username="test", personal_access_token="test") # cleanup -w.git_credentials.delete(delete=cr.credential_id) +w.git_credentials.delete(credential_id=cr.credential_id) diff --git a/examples/git_credentials/get_git_credentials.py b/examples/git_credentials/get_git_credentials.py index 5a02732b..a42965b8 100755 --- a/examples/git_credentials/get_git_credentials.py +++ b/examples/git_credentials/get_git_credentials.py @@ -4,7 +4,7 @@ cr = w.git_credentials.create(git_provider="gitHub", git_username="test", personal_access_token="test") -by_id = w.git_credentials.get(get=cr.credential_id) +by_id = w.git_credentials.get(credential_id=cr.credential_id) # cleanup -w.git_credentials.delete(delete=cr.credential_id) +w.git_credentials.delete(credential_id=cr.credential_id) diff --git a/examples/git_credentials/update_git_credentials.py b/examples/git_credentials/update_git_credentials.py index 2d4e6118..c877fefc 100755 --- a/examples/git_credentials/update_git_credentials.py +++ b/examples/git_credentials/update_git_credentials.py @@ -12,4 +12,4 @@ 
personal_access_token=f'sdk-{time.time_ns()}') # cleanup -w.git_credentials.delete(delete=cr.credential_id) +w.git_credentials.delete(credential_id=cr.credential_id) diff --git a/examples/global_init_scripts/create_global_init_scripts.py b/examples/global_init_scripts/create_global_init_scripts.py index dbc290d4..6058bbf5 100755 --- a/examples/global_init_scripts/create_global_init_scripts.py +++ b/examples/global_init_scripts/create_global_init_scripts.py @@ -11,4 +11,4 @@ position=10) # cleanup -w.global_init_scripts.delete(delete=created.script_id) +w.global_init_scripts.delete(script_id=created.script_id) diff --git a/examples/global_init_scripts/get_global_init_scripts.py b/examples/global_init_scripts/get_global_init_scripts.py index 812abe91..fe12d41b 100755 --- a/examples/global_init_scripts/get_global_init_scripts.py +++ b/examples/global_init_scripts/get_global_init_scripts.py @@ -10,7 +10,7 @@ enabled=True, position=10) -by_id = w.global_init_scripts.get(get=created.script_id) +by_id = w.global_init_scripts.get(script_id=created.script_id) # cleanup -w.global_init_scripts.delete(delete=created.script_id) +w.global_init_scripts.delete(script_id=created.script_id) diff --git a/examples/global_init_scripts/update_global_init_scripts.py b/examples/global_init_scripts/update_global_init_scripts.py index f311aedf..a53e410a 100755 --- a/examples/global_init_scripts/update_global_init_scripts.py +++ b/examples/global_init_scripts/update_global_init_scripts.py @@ -15,4 +15,4 @@ script=base64.b64encode(("echo 2").encode()).decode()) # cleanup -w.global_init_scripts.delete(delete=created.script_id) +w.global_init_scripts.delete(script_id=created.script_id) diff --git a/examples/grants/get_effective_tables.py b/examples/grants/get_effective_tables.py index ce846186..b7958d71 100755 --- a/examples/grants/get_effective_tables.py +++ b/examples/grants/get_effective_tables.py @@ -19,11 +19,11 @@ table_full_name = "%s.%s.%s" % (created_catalog.name, created_schema.name, 
table_name) -created_table = w.tables.get(get=table_full_name) +created_table = w.tables.get(full_name=table_full_name) -grants = w.grants.get_effective(get_effective=catalog.SecurableType.TABLE) +grants = w.grants.get_effective(securable_type=catalog.SecurableType.TABLE, full_name=created_table.full_name) # cleanup -w.schemas.delete(delete=created_schema.full_name) +w.schemas.delete(full_name=created_schema.full_name) w.catalogs.delete(name=created_catalog.name, force=True) -w.tables.delete(delete=table_full_name) +w.tables.delete(full_name=table_full_name) diff --git a/examples/grants/update_tables.py b/examples/grants/update_tables.py index ea7b14c2..3bba5dc6 100755 --- a/examples/grants/update_tables.py +++ b/examples/grants/update_tables.py @@ -21,7 +21,7 @@ account_level_group_name = os.environ["TEST_DATA_ENG_GROUP"] -created_table = w.tables.get(get=table_full_name) +created_table = w.tables.get(full_name=table_full_name) x = w.grants.update(full_name=created_table.full_name, securable_type=catalog.SecurableType.TABLE, @@ -31,6 +31,6 @@ ]) # cleanup -w.schemas.delete(delete=created_schema.full_name) +w.schemas.delete(full_name=created_schema.full_name) w.catalogs.delete(name=created_catalog.name, force=True) -w.tables.delete(delete=table_full_name) +w.tables.delete(full_name=table_full_name) diff --git a/examples/groups/create_generic_permissions.py b/examples/groups/create_generic_permissions.py index 12ee00bb..1ba53cbb 100755 --- a/examples/groups/create_generic_permissions.py +++ b/examples/groups/create_generic_permissions.py @@ -7,4 +7,4 @@ group = w.groups.create(display_name=f'sdk-{time.time_ns()}') # cleanup -w.groups.delete(delete=group.id) +w.groups.delete(id=group.id) diff --git a/examples/groups/create_groups.py b/examples/groups/create_groups.py index 12ee00bb..1ba53cbb 100755 --- a/examples/groups/create_groups.py +++ b/examples/groups/create_groups.py @@ -7,4 +7,4 @@ group = w.groups.create(display_name=f'sdk-{time.time_ns()}') # cleanup 
-w.groups.delete(delete=group.id) +w.groups.delete(id=group.id) diff --git a/examples/groups/create_secrets.py b/examples/groups/create_secrets.py index 12ee00bb..1ba53cbb 100755 --- a/examples/groups/create_secrets.py +++ b/examples/groups/create_secrets.py @@ -7,4 +7,4 @@ group = w.groups.create(display_name=f'sdk-{time.time_ns()}') # cleanup -w.groups.delete(delete=group.id) +w.groups.delete(id=group.id) diff --git a/examples/groups/delete_generic_permissions.py b/examples/groups/delete_generic_permissions.py index 3c080960..261d5c77 100755 --- a/examples/groups/delete_generic_permissions.py +++ b/examples/groups/delete_generic_permissions.py @@ -6,4 +6,4 @@ group = w.groups.create(display_name=f'sdk-{time.time_ns()}') -w.groups.delete(delete=group.id) +w.groups.delete(id=group.id) diff --git a/examples/groups/delete_groups.py b/examples/groups/delete_groups.py index 805a2ed5..0033bdbb 100755 --- a/examples/groups/delete_groups.py +++ b/examples/groups/delete_groups.py @@ -6,7 +6,7 @@ group = w.groups.create(display_name=f'sdk-{time.time_ns()}') -w.groups.delete(delete=group.id) +w.groups.delete(id=group.id) # cleanup -w.groups.delete(delete=group.id) +w.groups.delete(id=group.id) diff --git a/examples/groups/delete_secrets.py b/examples/groups/delete_secrets.py index 3c080960..261d5c77 100755 --- a/examples/groups/delete_secrets.py +++ b/examples/groups/delete_secrets.py @@ -6,4 +6,4 @@ group = w.groups.create(display_name=f'sdk-{time.time_ns()}') -w.groups.delete(delete=group.id) +w.groups.delete(id=group.id) diff --git a/examples/groups/get_groups.py b/examples/groups/get_groups.py index 07f5a0b0..e6d176a8 100755 --- a/examples/groups/get_groups.py +++ b/examples/groups/get_groups.py @@ -6,7 +6,7 @@ group = w.groups.create(display_name=f'sdk-{time.time_ns()}') -fetch = w.groups.get(get=group.id) +fetch = w.groups.get(id=group.id) # cleanup -w.groups.delete(delete=group.id) +w.groups.delete(id=group.id) diff --git 
a/examples/instance_pools/create_instance_pools.py b/examples/instance_pools/create_instance_pools.py index eab741f5..de027a22 100755 --- a/examples/instance_pools/create_instance_pools.py +++ b/examples/instance_pools/create_instance_pools.py @@ -9,4 +9,4 @@ created = w.instance_pools.create(instance_pool_name=f'sdk-{time.time_ns()}', node_type_id=smallest) # cleanup -w.instance_pools.delete(delete=created.instance_pool_id) +w.instance_pools.delete(instance_pool_id=created.instance_pool_id) diff --git a/examples/instance_pools/edit_instance_pools.py b/examples/instance_pools/edit_instance_pools.py index ffa1469d..69a8186e 100755 --- a/examples/instance_pools/edit_instance_pools.py +++ b/examples/instance_pools/edit_instance_pools.py @@ -13,4 +13,4 @@ node_type_id=smallest) # cleanup -w.instance_pools.delete(delete=created.instance_pool_id) +w.instance_pools.delete(instance_pool_id=created.instance_pool_id) diff --git a/examples/instance_pools/get_instance_pools.py b/examples/instance_pools/get_instance_pools.py index 80ab3bd6..4eeb201b 100755 --- a/examples/instance_pools/get_instance_pools.py +++ b/examples/instance_pools/get_instance_pools.py @@ -8,7 +8,7 @@ created = w.instance_pools.create(instance_pool_name=f'sdk-{time.time_ns()}', node_type_id=smallest) -by_id = w.instance_pools.get(get=created.instance_pool_id) +by_id = w.instance_pools.get(instance_pool_id=created.instance_pool_id) # cleanup -w.instance_pools.delete(delete=created.instance_pool_id) +w.instance_pools.delete(instance_pool_id=created.instance_pool_id) diff --git a/examples/ip_access_lists/create_ip_access_lists.py b/examples/ip_access_lists/create_ip_access_lists.py index a9fc2b03..a52b80f9 100755 --- a/examples/ip_access_lists/create_ip_access_lists.py +++ b/examples/ip_access_lists/create_ip_access_lists.py @@ -10,4 +10,4 @@ list_type=settings.ListType.BLOCK) # cleanup -w.ip_access_lists.delete(delete=created.ip_access_list.list_id) 
+w.ip_access_lists.delete(ip_access_list_id=created.ip_access_list.list_id) diff --git a/examples/ip_access_lists/get_ip_access_lists.py b/examples/ip_access_lists/get_ip_access_lists.py index 8efd99c8..85d7929e 100755 --- a/examples/ip_access_lists/get_ip_access_lists.py +++ b/examples/ip_access_lists/get_ip_access_lists.py @@ -9,7 +9,7 @@ ip_addresses=["1.0.0.0/16"], list_type=settings.ListType.BLOCK) -by_id = w.ip_access_lists.get(get=created.ip_access_list.list_id) +by_id = w.ip_access_lists.get(ip_access_list_id=created.ip_access_list.list_id) # cleanup -w.ip_access_lists.delete(delete=created.ip_access_list.list_id) +w.ip_access_lists.delete(ip_access_list_id=created.ip_access_list.list_id) diff --git a/examples/ip_access_lists/replace_ip_access_lists.py b/examples/ip_access_lists/replace_ip_access_lists.py index e7383adb..a61f5f81 100755 --- a/examples/ip_access_lists/replace_ip_access_lists.py +++ b/examples/ip_access_lists/replace_ip_access_lists.py @@ -16,4 +16,4 @@ enabled=False) # cleanup -w.ip_access_lists.delete(delete=created.ip_access_list.list_id) +w.ip_access_lists.delete(ip_access_list_id=created.ip_access_list.list_id) diff --git a/examples/jobs/cancel_all_runs_jobs_api_full_integration.py b/examples/jobs/cancel_all_runs_jobs_api_full_integration.py index 0db3989e..260f6f88 100755 --- a/examples/jobs/cancel_all_runs_jobs_api_full_integration.py +++ b/examples/jobs/cancel_all_runs_jobs_api_full_integration.py @@ -20,7 +20,7 @@ timeout_seconds=0) ]) -w.jobs.cancel_all_runs(cancel_all_runs=created_job.job_id) +w.jobs.cancel_all_runs(job_id=created_job.job_id) # cleanup -w.jobs.delete(delete=created_job.job_id) +w.jobs.delete(job_id=created_job.job_id) diff --git a/examples/jobs/cancel_run_jobs_api_full_integration.py b/examples/jobs/cancel_run_jobs_api_full_integration.py index 97ab36be..9cb4f75c 100755 --- a/examples/jobs/cancel_run_jobs_api_full_integration.py +++ b/examples/jobs/cancel_run_jobs_api_full_integration.py @@ -25,4 +25,4 @@ 
cancelled_run = w.jobs.cancel_run(run_id=run_now_response.response.run_id).result() # cleanup -w.jobs.delete(delete=created_job.job_id) +w.jobs.delete(job_id=created_job.job_id) diff --git a/examples/jobs/create_jobs_api_full_integration.py b/examples/jobs/create_jobs_api_full_integration.py index 0c6b5036..1f508288 100755 --- a/examples/jobs/create_jobs_api_full_integration.py +++ b/examples/jobs/create_jobs_api_full_integration.py @@ -21,4 +21,4 @@ ]) # cleanup -w.jobs.delete(delete=created_job.job_id) +w.jobs.delete(job_id=created_job.job_id) diff --git a/examples/jobs/export_run_jobs_api_full_integration.py b/examples/jobs/export_run_jobs_api_full_integration.py index 85ad9e26..61396119 100755 --- a/examples/jobs/export_run_jobs_api_full_integration.py +++ b/examples/jobs/export_run_jobs_api_full_integration.py @@ -25,4 +25,4 @@ exported_view = w.jobs.export_run(run_id=run_by_id.tasks[0].run_id, views_to_export="CODE") # cleanup -w.jobs.delete(delete=created_job.job_id) +w.jobs.delete(job_id=created_job.job_id) diff --git a/examples/jobs/get_jobs_api_full_integration.py b/examples/jobs/get_jobs_api_full_integration.py index fde67087..3a30f179 100755 --- a/examples/jobs/get_jobs_api_full_integration.py +++ b/examples/jobs/get_jobs_api_full_integration.py @@ -20,7 +20,7 @@ timeout_seconds=0) ]) -by_id = w.jobs.get(get=created_job.job_id) +by_id = w.jobs.get(job_id=created_job.job_id) # cleanup -w.jobs.delete(delete=created_job.job_id) +w.jobs.delete(job_id=created_job.job_id) diff --git a/examples/jobs/get_run_output_jobs_api_full_integration.py b/examples/jobs/get_run_output_jobs_api_full_integration.py index 9b3e6888..80457741 100755 --- a/examples/jobs/get_run_output_jobs_api_full_integration.py +++ b/examples/jobs/get_run_output_jobs_api_full_integration.py @@ -18,7 +18,7 @@ task_key=f'sdk-{time.time_ns()}') ]).result() -output = w.jobs.get_run_output(get_run_output=run.tasks[0].run_id) +output = w.jobs.get_run_output(run_id=run.tasks[0].run_id) # cleanup 
-w.jobs.delete_run(delete_run=run.run_id) +w.jobs.delete_run(run_id=run.run_id) diff --git a/examples/jobs/repair_run_jobs_api_full_integration.py b/examples/jobs/repair_run_jobs_api_full_integration.py index ddabd29f..b90cbc1d 100755 --- a/examples/jobs/repair_run_jobs_api_full_integration.py +++ b/examples/jobs/repair_run_jobs_api_full_integration.py @@ -28,4 +28,4 @@ run_id=run_now_response.response.run_id).result() # cleanup -w.jobs.delete(delete=created_job.job_id) +w.jobs.delete(job_id=created_job.job_id) diff --git a/examples/jobs/reset_jobs_api_full_integration.py b/examples/jobs/reset_jobs_api_full_integration.py index f66437ff..3215fe6f 100755 --- a/examples/jobs/reset_jobs_api_full_integration.py +++ b/examples/jobs/reset_jobs_api_full_integration.py @@ -22,9 +22,9 @@ new_name = f'sdk-{time.time_ns()}' -by_id = w.jobs.get(get=created_job.job_id) +by_id = w.jobs.get(job_id=created_job.job_id) w.jobs.reset(job_id=by_id.job_id, new_settings=jobs.JobSettings(name=new_name, tasks=by_id.settings.tasks)) # cleanup -w.jobs.delete(delete=created_job.job_id) +w.jobs.delete(job_id=created_job.job_id) diff --git a/examples/jobs/run_now_jobs_api_full_integration.py b/examples/jobs/run_now_jobs_api_full_integration.py index 74ce781e..3896990b 100755 --- a/examples/jobs/run_now_jobs_api_full_integration.py +++ b/examples/jobs/run_now_jobs_api_full_integration.py @@ -23,4 +23,4 @@ run_by_id = w.jobs.run_now(job_id=created_job.job_id).result() # cleanup -w.jobs.delete(delete=created_job.job_id) +w.jobs.delete(job_id=created_job.job_id) diff --git a/examples/jobs/submit_jobs_api_full_integration.py b/examples/jobs/submit_jobs_api_full_integration.py index c9a142ec..5769d767 100755 --- a/examples/jobs/submit_jobs_api_full_integration.py +++ b/examples/jobs/submit_jobs_api_full_integration.py @@ -19,4 +19,4 @@ ]).result() # cleanup -w.jobs.delete_run(delete_run=run.run_id) +w.jobs.delete_run(run_id=run.run_id) diff --git a/examples/jobs/update_jobs_api_full_integration.py 
b/examples/jobs/update_jobs_api_full_integration.py index bec9cf4a..61194b8e 100755 --- a/examples/jobs/update_jobs_api_full_integration.py +++ b/examples/jobs/update_jobs_api_full_integration.py @@ -25,4 +25,4 @@ w.jobs.update(job_id=created_job.job_id, new_settings=jobs.JobSettings(name=new_name, max_concurrent_runs=5)) # cleanup -w.jobs.delete(delete=created_job.job_id) +w.jobs.delete(job_id=created_job.job_id) diff --git a/examples/log_delivery/create_log_delivery.py b/examples/log_delivery/create_log_delivery.py index 53d0f289..36edc03a 100755 --- a/examples/log_delivery/create_log_delivery.py +++ b/examples/log_delivery/create_log_delivery.py @@ -22,7 +22,7 @@ output_format=billing.OutputFormat.JSON)) # cleanup -a.storage.delete(delete=bucket.storage_configuration_id) -a.credentials.delete(delete=creds.credentials_id) +a.storage.delete(storage_configuration_id=bucket.storage_configuration_id) +a.credentials.delete(credentials_id=creds.credentials_id) a.log_delivery.patch_status(log_delivery_configuration_id=created.log_delivery_configuration.config_id, status=billing.LogDeliveryConfigStatus.DISABLED) diff --git a/examples/log_delivery/get_log_delivery.py b/examples/log_delivery/get_log_delivery.py index 37eb5f48..af3f6192 100755 --- a/examples/log_delivery/get_log_delivery.py +++ b/examples/log_delivery/get_log_delivery.py @@ -21,10 +21,10 @@ log_type=billing.LogType.AUDIT_LOGS, output_format=billing.OutputFormat.JSON)) -by_id = a.log_delivery.get(get=created.log_delivery_configuration.config_id) +by_id = a.log_delivery.get(log_delivery_configuration_id=created.log_delivery_configuration.config_id) # cleanup -a.storage.delete(delete=bucket.storage_configuration_id) -a.credentials.delete(delete=creds.credentials_id) +a.storage.delete(storage_configuration_id=bucket.storage_configuration_id) +a.credentials.delete(credentials_id=creds.credentials_id) a.log_delivery.patch_status(log_delivery_configuration_id=created.log_delivery_configuration.config_id, 
status=billing.LogDeliveryConfigStatus.DISABLED) diff --git a/examples/metastores/get_metastores.py b/examples/metastores/get_metastores.py index 873b246b..38ce05c0 100755 --- a/examples/metastores/get_metastores.py +++ b/examples/metastores/get_metastores.py @@ -9,7 +9,7 @@ storage_root="s3://%s/%s" % (os.environ["TEST_BUCKET"], f'sdk-{time.time_ns()}')) -_ = w.metastores.get(get=created.metastore_id) +_ = w.metastores.get(id=created.metastore_id) # cleanup w.metastores.delete(id=created.metastore_id, force=True) diff --git a/examples/networks/get_networks.py b/examples/networks/get_networks.py index 608ea129..4ee37c81 100755 --- a/examples/networks/get_networks.py +++ b/examples/networks/get_networks.py @@ -10,4 +10,4 @@ hex(time.time_ns())[2:]], security_group_ids=[hex(time.time_ns())[2:]]) -by_id = a.networks.get(get=netw.network_id) +by_id = a.networks.get(network_id=netw.network_id) diff --git a/examples/permissions/get_generic_permissions.py b/examples/permissions/get_generic_permissions.py index 9861b795..c103bbdd 100755 --- a/examples/permissions/get_generic_permissions.py +++ b/examples/permissions/get_generic_permissions.py @@ -6,6 +6,6 @@ notebook_path = f'/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}' -obj = w.workspace.get_status(get_status=notebook_path) +obj = w.workspace.get_status(path=notebook_path) _ = w.permissions.get(request_object_type="notebooks", request_object_id="%d" % (obj.object_id)) diff --git a/examples/permissions/get_permission_levels_generic_permissions.py b/examples/permissions/get_permission_levels_generic_permissions.py index 17610641..4dcd5bd2 100755 --- a/examples/permissions/get_permission_levels_generic_permissions.py +++ b/examples/permissions/get_permission_levels_generic_permissions.py @@ -6,7 +6,7 @@ notebook_path = f'/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}' -obj = w.workspace.get_status(get_status=notebook_path) +obj = w.workspace.get_status(path=notebook_path) levels = 
w.permissions.get_permission_levels(request_object_type="notebooks", request_object_id="%d" % (obj.object_id)) diff --git a/examples/permissions/set_generic_permissions.py b/examples/permissions/set_generic_permissions.py index 94451dd0..dfc54c07 100755 --- a/examples/permissions/set_generic_permissions.py +++ b/examples/permissions/set_generic_permissions.py @@ -9,14 +9,14 @@ group = w.groups.create(display_name=f'sdk-{time.time_ns()}') -obj = w.workspace.get_status(get_status=notebook_path) +obj = w.workspace.get_status(path=notebook_path) -_ = w.permissions.set(request_object_type="notebooks", - request_object_id="%d" % (obj.object_id), - access_control_list=[ - iam.AccessControlRequest(group_name=group.display_name, - permission_level=iam.PermissionLevel.CAN_RUN) - ]) +w.permissions.set(request_object_type="notebooks", + request_object_id="%d" % (obj.object_id), + access_control_list=[ + iam.AccessControlRequest(group_name=group.display_name, + permission_level=iam.PermissionLevel.CAN_RUN) + ]) # cleanup -w.groups.delete(delete=group.id) +w.groups.delete(id=group.id) diff --git a/examples/pipelines/create_pipelines.py b/examples/pipelines/create_pipelines.py index aea0f3fb..5a2b3933 100755 --- a/examples/pipelines/create_pipelines.py +++ b/examples/pipelines/create_pipelines.py @@ -22,4 +22,4 @@ ]) # cleanup -w.pipelines.delete(delete=created.pipeline_id) +w.pipelines.delete(pipeline_id=created.pipeline_id) diff --git a/examples/pipelines/get_pipelines.py b/examples/pipelines/get_pipelines.py index caee87a9..6222b4c8 100755 --- a/examples/pipelines/get_pipelines.py +++ b/examples/pipelines/get_pipelines.py @@ -21,7 +21,7 @@ }) ]) -by_id = w.pipelines.get(get=created.pipeline_id) +by_id = w.pipelines.get(pipeline_id=created.pipeline_id) # cleanup -w.pipelines.delete(delete=created.pipeline_id) +w.pipelines.delete(pipeline_id=created.pipeline_id) diff --git a/examples/pipelines/list_pipeline_events_pipelines.py 
b/examples/pipelines/list_pipeline_events_pipelines.py index 7e910cad..c4150187 100755 --- a/examples/pipelines/list_pipeline_events_pipelines.py +++ b/examples/pipelines/list_pipeline_events_pipelines.py @@ -24,4 +24,4 @@ events = w.pipelines.list_pipeline_events(pipeline_id=created.pipeline_id) # cleanup -w.pipelines.delete(delete=created.pipeline_id) +w.pipelines.delete(pipeline_id=created.pipeline_id) diff --git a/examples/pipelines/update_pipelines.py b/examples/pipelines/update_pipelines.py index 3e15b771..656d9007 100755 --- a/examples/pipelines/update_pipelines.py +++ b/examples/pipelines/update_pipelines.py @@ -35,4 +35,4 @@ ]) # cleanup -w.pipelines.delete(delete=created.pipeline_id) +w.pipelines.delete(pipeline_id=created.pipeline_id) diff --git a/examples/private_access/create_private_access.py b/examples/private_access/create_private_access.py index 3445ac01..5cf99643 100755 --- a/examples/private_access/create_private_access.py +++ b/examples/private_access/create_private_access.py @@ -9,4 +9,4 @@ region=os.environ["AWS_REGION"]) # cleanup -a.private_access.delete(delete=created.private_access_settings_id) +a.private_access.delete(private_access_settings_id=created.private_access_settings_id) diff --git a/examples/private_access/get_private_access.py b/examples/private_access/get_private_access.py index 29372352..2c4eed20 100755 --- a/examples/private_access/get_private_access.py +++ b/examples/private_access/get_private_access.py @@ -8,7 +8,7 @@ created = a.private_access.create(private_access_settings_name=f'sdk-{time.time_ns()}', region=os.environ["AWS_REGION"]) -by_id = a.private_access.get(get=created.private_access_settings_id) +by_id = a.private_access.get(private_access_settings_id=created.private_access_settings_id) # cleanup -a.private_access.delete(delete=created.private_access_settings_id) +a.private_access.delete(private_access_settings_id=created.private_access_settings_id) diff --git a/examples/private_access/replace_private_access.py 
b/examples/private_access/replace_private_access.py index ad25dc79..8cb6f2fa 100755 --- a/examples/private_access/replace_private_access.py +++ b/examples/private_access/replace_private_access.py @@ -13,4 +13,4 @@ region=os.environ["AWS_REGION"]) # cleanup -a.private_access.delete(delete=created.private_access_settings_id) +a.private_access.delete(private_access_settings_id=created.private_access_settings_id) diff --git a/examples/providers/create_providers.py b/examples/providers/create_providers.py index 05ee208c..cee06e5d 100755 --- a/examples/providers/create_providers.py +++ b/examples/providers/create_providers.py @@ -14,4 +14,4 @@ created = w.providers.create(name=f'sdk-{time.time_ns()}', recipient_profile_str=public_share_recipient) # cleanup -w.providers.delete(delete=created.name) +w.providers.delete(name=created.name) diff --git a/examples/providers/get_providers.py b/examples/providers/get_providers.py index d0735c09..ba86bc5e 100755 --- a/examples/providers/get_providers.py +++ b/examples/providers/get_providers.py @@ -13,7 +13,7 @@ created = w.providers.create(name=f'sdk-{time.time_ns()}', recipient_profile_str=public_share_recipient) -_ = w.providers.get(get=created.name) +_ = w.providers.get(name=created.name) # cleanup -w.providers.delete(delete=created.name) +w.providers.delete(name=created.name) diff --git a/examples/providers/list_shares_providers.py b/examples/providers/list_shares_providers.py index f0f632a0..b2e88593 100755 --- a/examples/providers/list_shares_providers.py +++ b/examples/providers/list_shares_providers.py @@ -16,4 +16,4 @@ shares = w.providers.list_shares(name=created.name) # cleanup -w.providers.delete(delete=created.name) +w.providers.delete(name=created.name) diff --git a/examples/providers/update_providers.py b/examples/providers/update_providers.py index f84ef757..66b68c35 100755 --- a/examples/providers/update_providers.py +++ b/examples/providers/update_providers.py @@ -16,4 +16,4 @@ _ = 
w.providers.update(name=created.name, comment="Comment for update") # cleanup -w.providers.delete(delete=created.name) +w.providers.delete(name=created.name) diff --git a/examples/queries/create_alerts.py b/examples/queries/create_alerts.py index 346d19fc..37d71ac6 100755 --- a/examples/queries/create_alerts.py +++ b/examples/queries/create_alerts.py @@ -12,4 +12,4 @@ query="SELECT 1") # cleanup -w.queries.delete(delete=query.id) +w.queries.delete(query_id=query.id) diff --git a/examples/queries/create_queries.py b/examples/queries/create_queries.py index 60cc416c..c8d5ac93 100755 --- a/examples/queries/create_queries.py +++ b/examples/queries/create_queries.py @@ -12,4 +12,4 @@ query="SHOW TABLES") # cleanup -w.queries.delete(delete=query.id) +w.queries.delete(query_id=query.id) diff --git a/examples/queries/get_queries.py b/examples/queries/get_queries.py index b31a33fd..d29b7598 100755 --- a/examples/queries/get_queries.py +++ b/examples/queries/get_queries.py @@ -11,7 +11,7 @@ description="test query from Go SDK", query="SHOW TABLES") -by_id = w.queries.get(get=query.id) +by_id = w.queries.get(query_id=query.id) # cleanup -w.queries.delete(delete=query.id) +w.queries.delete(query_id=query.id) diff --git a/examples/queries/update_queries.py b/examples/queries/update_queries.py index aa16a638..85a9609a 100755 --- a/examples/queries/update_queries.py +++ b/examples/queries/update_queries.py @@ -18,4 +18,4 @@ query="SELECT 2+2") # cleanup -w.queries.delete(delete=query.id) +w.queries.delete(query_id=query.id) diff --git a/examples/recipients/create_recipients.py b/examples/recipients/create_recipients.py index b19d5a6b..4c01e2f5 100755 --- a/examples/recipients/create_recipients.py +++ b/examples/recipients/create_recipients.py @@ -7,4 +7,4 @@ created = w.recipients.create(name=f'sdk-{time.time_ns()}') # cleanup -w.recipients.delete(delete=created.name) +w.recipients.delete(name=created.name) diff --git a/examples/recipients/get_recipients.py 
b/examples/recipients/get_recipients.py index 1d0cd3c9..4e8998e4 100755 --- a/examples/recipients/get_recipients.py +++ b/examples/recipients/get_recipients.py @@ -6,7 +6,7 @@ created = w.recipients.create(name=f'sdk-{time.time_ns()}') -_ = w.recipients.get(get=created.name) +_ = w.recipients.get(name=created.name) # cleanup -w.recipients.delete(delete=created.name) +w.recipients.delete(name=created.name) diff --git a/examples/recipients/rotate_token_recipients.py b/examples/recipients/rotate_token_recipients.py index 398e0cf4..5abc2af9 100755 --- a/examples/recipients/rotate_token_recipients.py +++ b/examples/recipients/rotate_token_recipients.py @@ -9,4 +9,4 @@ recipient_info = w.recipients.rotate_token(name=created.name, existing_token_expire_in_seconds=0) # cleanup -w.recipients.delete(delete=created.name) +w.recipients.delete(name=created.name) diff --git a/examples/recipients/share_permissions_recipients.py b/examples/recipients/share_permissions_recipients.py index 03ed26be..1f04c032 100755 --- a/examples/recipients/share_permissions_recipients.py +++ b/examples/recipients/share_permissions_recipients.py @@ -6,7 +6,7 @@ created = w.recipients.create(name=f'sdk-{time.time_ns()}') -share_permissions = w.recipients.share_permissions(share_permissions=created.name) +share_permissions = w.recipients.share_permissions(name=created.name) # cleanup -w.recipients.delete(delete=created.name) +w.recipients.delete(name=created.name) diff --git a/examples/recipients/update_recipients.py b/examples/recipients/update_recipients.py index 5fa0f5e9..959266dd 100755 --- a/examples/recipients/update_recipients.py +++ b/examples/recipients/update_recipients.py @@ -9,4 +9,4 @@ w.recipients.update(name=created.name, comment=f'sdk-{time.time_ns()}') # cleanup -w.recipients.delete(delete=created.name) +w.recipients.delete(name=created.name) diff --git a/examples/repos/create_repos.py b/examples/repos/create_repos.py index ec785863..b3a10094 100755 --- 
a/examples/repos/create_repos.py +++ b/examples/repos/create_repos.py @@ -9,4 +9,4 @@ ri = w.repos.create(path=root, url="https://github.com/shreyas-goenka/empty-repo.git", provider="github") # cleanup -w.repos.delete(delete=ri.id) +w.repos.delete(repo_id=ri.id) diff --git a/examples/repos/get_repos.py b/examples/repos/get_repos.py index f5491418..fcb6c69c 100755 --- a/examples/repos/get_repos.py +++ b/examples/repos/get_repos.py @@ -8,7 +8,7 @@ ri = w.repos.create(path=root, url="https://github.com/shreyas-goenka/empty-repo.git", provider="github") -by_id = w.repos.get(get=ri.id) +by_id = w.repos.get(repo_id=ri.id) # cleanup -w.repos.delete(delete=ri.id) +w.repos.delete(repo_id=ri.id) diff --git a/examples/repos/update_repos.py b/examples/repos/update_repos.py index 596263a2..afb94375 100755 --- a/examples/repos/update_repos.py +++ b/examples/repos/update_repos.py @@ -11,4 +11,4 @@ w.repos.update(repo_id=ri.id, branch="foo") # cleanup -w.repos.delete(delete=ri.id) +w.repos.delete(repo_id=ri.id) diff --git a/examples/schemas/create_schemas.py b/examples/schemas/create_schemas.py index a5841e0b..7bc3bb19 100755 --- a/examples/schemas/create_schemas.py +++ b/examples/schemas/create_schemas.py @@ -10,4 +10,4 @@ # cleanup w.catalogs.delete(name=new_catalog.name, force=True) -w.schemas.delete(delete=created.full_name) +w.schemas.delete(full_name=created.full_name) diff --git a/examples/schemas/create_shares.py b/examples/schemas/create_shares.py index 03b33e17..88dfa1bc 100755 --- a/examples/schemas/create_shares.py +++ b/examples/schemas/create_shares.py @@ -10,4 +10,4 @@ # cleanup w.catalogs.delete(name=created_catalog.name, force=True) -w.schemas.delete(delete=created_schema.full_name) +w.schemas.delete(full_name=created_schema.full_name) diff --git a/examples/schemas/create_tables.py b/examples/schemas/create_tables.py index 03b33e17..88dfa1bc 100755 --- a/examples/schemas/create_tables.py +++ b/examples/schemas/create_tables.py @@ -10,4 +10,4 @@ # cleanup 
w.catalogs.delete(name=created_catalog.name, force=True) -w.schemas.delete(delete=created_schema.full_name) +w.schemas.delete(full_name=created_schema.full_name) diff --git a/examples/schemas/create_volumes.py b/examples/schemas/create_volumes.py index 03b33e17..88dfa1bc 100755 --- a/examples/schemas/create_volumes.py +++ b/examples/schemas/create_volumes.py @@ -10,4 +10,4 @@ # cleanup w.catalogs.delete(name=created_catalog.name, force=True) -w.schemas.delete(delete=created_schema.full_name) +w.schemas.delete(full_name=created_schema.full_name) diff --git a/examples/schemas/get_schemas.py b/examples/schemas/get_schemas.py index 7ec3fa2a..2b9c0a58 100755 --- a/examples/schemas/get_schemas.py +++ b/examples/schemas/get_schemas.py @@ -8,8 +8,8 @@ created = w.schemas.create(name=f'sdk-{time.time_ns()}', catalog_name=new_catalog.name) -_ = w.schemas.get(get=created.full_name) +_ = w.schemas.get(full_name=created.full_name) # cleanup w.catalogs.delete(name=new_catalog.name, force=True) -w.schemas.delete(delete=created.full_name) +w.schemas.delete(full_name=created.full_name) diff --git a/examples/schemas/update_schemas.py b/examples/schemas/update_schemas.py index e22e0a86..b95a7b1c 100755 --- a/examples/schemas/update_schemas.py +++ b/examples/schemas/update_schemas.py @@ -12,4 +12,4 @@ # cleanup w.catalogs.delete(name=new_catalog.name, force=True) -w.schemas.delete(delete=created.full_name) +w.schemas.delete(full_name=created.full_name) diff --git a/examples/secrets/create_scope_secrets.py b/examples/secrets/create_scope_secrets.py index 89952dca..f881d011 100755 --- a/examples/secrets/create_scope_secrets.py +++ b/examples/secrets/create_scope_secrets.py @@ -12,4 +12,4 @@ # cleanup w.secrets.delete_secret(scope=scope_name, key=key_name) -w.secrets.delete_scope(delete_scope=scope_name) +w.secrets.delete_scope(scope=scope_name) diff --git a/examples/secrets/list_acls_secrets.py b/examples/secrets/list_acls_secrets.py index 1031475e..97873f18 100755 --- 
a/examples/secrets/list_acls_secrets.py +++ b/examples/secrets/list_acls_secrets.py @@ -10,8 +10,8 @@ w.secrets.create_scope(scope=scope_name) -acls = w.secrets.list_acls(list_acls=scope_name) +acls = w.secrets.list_acls(scope=scope_name) # cleanup w.secrets.delete_secret(scope=scope_name, key=key_name) -w.secrets.delete_scope(delete_scope=scope_name) +w.secrets.delete_scope(scope=scope_name) diff --git a/examples/secrets/list_secrets_secrets.py b/examples/secrets/list_secrets_secrets.py index 9d623655..29acb4d1 100755 --- a/examples/secrets/list_secrets_secrets.py +++ b/examples/secrets/list_secrets_secrets.py @@ -10,8 +10,8 @@ w.secrets.create_scope(scope=scope_name) -scrts = w.secrets.list_secrets(list_secrets=scope_name) +scrts = w.secrets.list_secrets(scope=scope_name) # cleanup w.secrets.delete_secret(scope=scope_name, key=key_name) -w.secrets.delete_scope(delete_scope=scope_name) +w.secrets.delete_scope(scope=scope_name) diff --git a/examples/secrets/put_acl_secrets.py b/examples/secrets/put_acl_secrets.py index 35c61f88..ffc83fa5 100755 --- a/examples/secrets/put_acl_secrets.py +++ b/examples/secrets/put_acl_secrets.py @@ -16,6 +16,6 @@ w.secrets.put_acl(scope=scope_name, permission=workspace.AclPermission.MANAGE, principal=group.display_name) # cleanup -w.groups.delete(delete=group.id) +w.groups.delete(id=group.id) w.secrets.delete_secret(scope=scope_name, key=key_name) -w.secrets.delete_scope(delete_scope=scope_name) +w.secrets.delete_scope(scope=scope_name) diff --git a/examples/secrets/put_secret_secrets.py b/examples/secrets/put_secret_secrets.py index 4e6b066e..233e3174 100755 --- a/examples/secrets/put_secret_secrets.py +++ b/examples/secrets/put_secret_secrets.py @@ -14,4 +14,4 @@ # cleanup w.secrets.delete_secret(scope=scope_name, key=key_name) -w.secrets.delete_scope(delete_scope=scope_name) +w.secrets.delete_scope(scope=scope_name) diff --git a/examples/service_principals/create_create_obo_token_on_aws.py 
b/examples/service_principals/create_create_obo_token_on_aws.py index 168f0365..9a8ba9b5 100755 --- a/examples/service_principals/create_create_obo_token_on_aws.py +++ b/examples/service_principals/create_create_obo_token_on_aws.py @@ -11,4 +11,4 @@ groups=[iam.ComplexValue(value=groups["admins"])]) # cleanup -w.service_principals.delete(delete=spn.id) +w.service_principals.delete(id=spn.id) diff --git a/examples/service_principals/create_service_principals_on_aws.py b/examples/service_principals/create_service_principals_on_aws.py index d288c9da..8d0923cf 100755 --- a/examples/service_principals/create_service_principals_on_aws.py +++ b/examples/service_principals/create_service_principals_on_aws.py @@ -7,4 +7,4 @@ created = w.service_principals.create(display_name=f'sdk-{time.time_ns()}') # cleanup -w.service_principals.delete(delete=created.id) +w.service_principals.delete(id=created.id) diff --git a/examples/service_principals/get_service_principals_on_aws.py b/examples/service_principals/get_service_principals_on_aws.py index fc486355..28cd35a2 100755 --- a/examples/service_principals/get_service_principals_on_aws.py +++ b/examples/service_principals/get_service_principals_on_aws.py @@ -6,7 +6,7 @@ created = w.service_principals.create(display_name=f'sdk-{time.time_ns()}') -by_id = w.service_principals.get(get=created.id) +by_id = w.service_principals.get(id=created.id) # cleanup -w.service_principals.delete(delete=created.id) +w.service_principals.delete(id=created.id) diff --git a/examples/service_principals/update_service_principals_on_aws.py b/examples/service_principals/update_service_principals_on_aws.py index c8d7d878..9c9c0a95 100755 --- a/examples/service_principals/update_service_principals_on_aws.py +++ b/examples/service_principals/update_service_principals_on_aws.py @@ -12,4 +12,4 @@ roles=[iam.ComplexValue(value="xyz")]) # cleanup -w.service_principals.delete(delete=created.id) +w.service_principals.delete(id=created.id) diff --git 
a/examples/shares/create_shares.py b/examples/shares/create_shares.py index cc115be2..ae491e56 100755 --- a/examples/shares/create_shares.py +++ b/examples/shares/create_shares.py @@ -7,4 +7,4 @@ created_share = w.shares.create(name=f'sdk-{time.time_ns()}') # cleanup -w.shares.delete(delete=created_share.name) +w.shares.delete(name=created_share.name) diff --git a/examples/shares/get_shares.py b/examples/shares/get_shares.py index ee9e4669..a010bc75 100755 --- a/examples/shares/get_shares.py +++ b/examples/shares/get_shares.py @@ -6,7 +6,7 @@ created_share = w.shares.create(name=f'sdk-{time.time_ns()}') -_ = w.shares.get(get=created_share.name) +_ = w.shares.get(name=created_share.name) # cleanup -w.shares.delete(delete=created_share.name) +w.shares.delete(name=created_share.name) diff --git a/examples/shares/update_shares.py b/examples/shares/update_shares.py index 0c9bdac4..ae01ddc1 100755 --- a/examples/shares/update_shares.py +++ b/examples/shares/update_shares.py @@ -29,7 +29,7 @@ ]) # cleanup -w.schemas.delete(delete=created_schema.full_name) +w.schemas.delete(full_name=created_schema.full_name) w.catalogs.delete(name=created_catalog.name, force=True) -w.tables.delete(delete=table_full_name) -w.shares.delete(delete=created_share.name) +w.tables.delete(full_name=table_full_name) +w.shares.delete(name=created_share.name) diff --git a/examples/statement_execution/execute_shares.py b/examples/statement_execution/execute_shares.py index bea32d42..96e8b8d2 100755 --- a/examples/statement_execution/execute_shares.py +++ b/examples/statement_execution/execute_shares.py @@ -17,5 +17,5 @@ statement="CREATE TABLE %s AS SELECT 2+2 as four" % (table_name)).result() # cleanup -w.schemas.delete(delete=created_schema.full_name) +w.schemas.delete(full_name=created_schema.full_name) w.catalogs.delete(name=created_catalog.name, force=True) diff --git a/examples/statement_execution/execute_tables.py b/examples/statement_execution/execute_tables.py index bea32d42..96e8b8d2 100755 
--- a/examples/statement_execution/execute_tables.py +++ b/examples/statement_execution/execute_tables.py @@ -17,5 +17,5 @@ statement="CREATE TABLE %s AS SELECT 2+2 as four" % (table_name)).result() # cleanup -w.schemas.delete(delete=created_schema.full_name) +w.schemas.delete(full_name=created_schema.full_name) w.catalogs.delete(name=created_catalog.name, force=True) diff --git a/examples/storage/create_log_delivery.py b/examples/storage/create_log_delivery.py index 8f2f7ca9..466b944e 100755 --- a/examples/storage/create_log_delivery.py +++ b/examples/storage/create_log_delivery.py @@ -9,4 +9,4 @@ root_bucket_info=provisioning.RootBucketInfo(bucket_name=f'sdk-{time.time_ns()}')) # cleanup -a.storage.delete(delete=bucket.storage_configuration_id) +a.storage.delete(storage_configuration_id=bucket.storage_configuration_id) diff --git a/examples/storage/create_workspaces.py b/examples/storage/create_workspaces.py index 2f208a6d..e8c3bb4e 100755 --- a/examples/storage/create_workspaces.py +++ b/examples/storage/create_workspaces.py @@ -11,4 +11,4 @@ root_bucket_info=provisioning.RootBucketInfo(bucket_name=os.environ["TEST_ROOT_BUCKET"])) # cleanup -a.storage.delete(delete=storage.storage_configuration_id) +a.storage.delete(storage_configuration_id=storage.storage_configuration_id) diff --git a/examples/storage/get_storage.py b/examples/storage/get_storage.py index bafb0952..47c521b7 100755 --- a/examples/storage/get_storage.py +++ b/examples/storage/get_storage.py @@ -8,4 +8,4 @@ storage = a.storage.create(storage_configuration_name=f'sdk-{time.time_ns()}', root_bucket_info=provisioning.RootBucketInfo(bucket_name=f'sdk-{time.time_ns()}')) -by_id = a.storage.get(get=storage.storage_configuration_id) +by_id = a.storage.get(storage_configuration_id=storage.storage_configuration_id) diff --git a/examples/storage_credentials/create_external_locations_on_aws.py b/examples/storage_credentials/create_external_locations_on_aws.py index 4c977350..7d2ba29e 100755 --- 
a/examples/storage_credentials/create_external_locations_on_aws.py +++ b/examples/storage_credentials/create_external_locations_on_aws.py @@ -11,4 +11,4 @@ aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"])) # cleanup -w.storage_credentials.delete(delete=credential.name) +w.storage_credentials.delete(name=credential.name) diff --git a/examples/storage_credentials/create_storage_credentials_on_aws.py b/examples/storage_credentials/create_storage_credentials_on_aws.py index d39ad002..b20d7dc9 100755 --- a/examples/storage_credentials/create_storage_credentials_on_aws.py +++ b/examples/storage_credentials/create_storage_credentials_on_aws.py @@ -11,4 +11,4 @@ aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"])) # cleanup -w.storage_credentials.delete(delete=created.name) +w.storage_credentials.delete(name=created.name) diff --git a/examples/storage_credentials/get_storage_credentials_on_aws.py b/examples/storage_credentials/get_storage_credentials_on_aws.py index 036feee6..58f3ff4a 100755 --- a/examples/storage_credentials/get_storage_credentials_on_aws.py +++ b/examples/storage_credentials/get_storage_credentials_on_aws.py @@ -10,7 +10,7 @@ name=f'sdk-{time.time_ns()}', aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"])) -by_name = w.storage_credentials.get(get=created.name) +by_name = w.storage_credentials.get(name=created.name) # cleanup -w.storage_credentials.delete(delete=created.name) +w.storage_credentials.delete(name=created.name) diff --git a/examples/storage_credentials/update_storage_credentials_on_aws.py b/examples/storage_credentials/update_storage_credentials_on_aws.py index ced265c5..90ebf253 100755 --- a/examples/storage_credentials/update_storage_credentials_on_aws.py +++ b/examples/storage_credentials/update_storage_credentials_on_aws.py @@ -16,4 +16,4 @@ aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"])) # 
cleanup -w.storage_credentials.delete(delete=created.name) +w.storage_credentials.delete(name=created.name) diff --git a/examples/tables/get_tables.py b/examples/tables/get_tables.py index a510ce18..7c81faf6 100755 --- a/examples/tables/get_tables.py +++ b/examples/tables/get_tables.py @@ -18,9 +18,9 @@ table_full_name = "%s.%s.%s" % (created_catalog.name, created_schema.name, table_name) -created_table = w.tables.get(get=table_full_name) +created_table = w.tables.get(full_name=table_full_name) # cleanup -w.schemas.delete(delete=created_schema.full_name) +w.schemas.delete(full_name=created_schema.full_name) w.catalogs.delete(name=created_catalog.name, force=True) -w.tables.delete(delete=table_full_name) +w.tables.delete(full_name=table_full_name) diff --git a/examples/tables/list_summaries_tables.py b/examples/tables/list_summaries_tables.py index 7f843e65..d3e64fd0 100755 --- a/examples/tables/list_summaries_tables.py +++ b/examples/tables/list_summaries_tables.py @@ -12,5 +12,5 @@ schema_name_pattern=created_schema.name) # cleanup -w.schemas.delete(delete=created_schema.full_name) +w.schemas.delete(full_name=created_schema.full_name) w.catalogs.delete(name=created_catalog.name, force=True) diff --git a/examples/tables/list_tables.py b/examples/tables/list_tables.py index 3c78bace..6c14faa1 100755 --- a/examples/tables/list_tables.py +++ b/examples/tables/list_tables.py @@ -11,5 +11,5 @@ all_tables = w.tables.list(catalog_name=created_catalog.name, schema_name=created_schema.name) # cleanup -w.schemas.delete(delete=created_schema.full_name) +w.schemas.delete(full_name=created_schema.full_name) w.catalogs.delete(name=created_catalog.name, force=True) diff --git a/examples/token_management/create_obo_token_create_obo_token_on_aws.py b/examples/token_management/create_obo_token_create_obo_token_on_aws.py index f29a956c..881827b8 100755 --- a/examples/token_management/create_obo_token_create_obo_token_on_aws.py +++ 
b/examples/token_management/create_obo_token_create_obo_token_on_aws.py @@ -13,5 +13,5 @@ obo = w.token_management.create_obo_token(application_id=spn.application_id, lifetime_seconds=60) # cleanup -w.service_principals.delete(delete=spn.id) -w.token_management.delete(delete=obo.token_info.token_id) +w.service_principals.delete(id=spn.id) +w.token_management.delete(token_id=obo.token_info.token_id) diff --git a/examples/token_management/get_create_obo_token_on_aws.py b/examples/token_management/get_create_obo_token_on_aws.py index a55d0ccc..d47d60a3 100755 --- a/examples/token_management/get_create_obo_token_on_aws.py +++ b/examples/token_management/get_create_obo_token_on_aws.py @@ -12,8 +12,8 @@ obo = w.token_management.create_obo_token(application_id=spn.application_id, lifetime_seconds=60) -by_id = w.token_management.get(get=obo.token_info.token_id) +by_id = w.token_management.get(token_id=obo.token_info.token_id) # cleanup -w.service_principals.delete(delete=spn.id) -w.token_management.delete(delete=obo.token_info.token_id) +w.service_principals.delete(id=spn.id) +w.token_management.delete(token_id=obo.token_info.token_id) diff --git a/examples/tokens/create_tokens.py b/examples/tokens/create_tokens.py index 9df99e89..e7545d2e 100755 --- a/examples/tokens/create_tokens.py +++ b/examples/tokens/create_tokens.py @@ -7,4 +7,4 @@ token = w.tokens.create(comment=f'sdk-{time.time_ns()}', lifetime_seconds=300) # cleanup -w.tokens.delete(delete=token.token_info.token_id) +w.tokens.delete(token_id=token.token_info.token_id) diff --git a/examples/tokens/get_tokens.py b/examples/tokens/get_tokens.py index 0cdd3456..fade24f2 100755 --- a/examples/tokens/get_tokens.py +++ b/examples/tokens/get_tokens.py @@ -6,7 +6,7 @@ token = w.tokens.create(comment=f'sdk-{time.time_ns()}', lifetime_seconds=300) -by_name = w.tokens.get(get=token.token_info.comment) +by_name = w.tokens.get(comment=token.token_info.comment) # cleanup -w.tokens.delete(delete=token.token_info.token_id) 
+w.tokens.delete(token_id=token.token_info.token_id) diff --git a/examples/users/create_clusters_api_integration.py b/examples/users/create_clusters_api_integration.py index a652418e..020c1874 100755 --- a/examples/users/create_clusters_api_integration.py +++ b/examples/users/create_clusters_api_integration.py @@ -7,4 +7,4 @@ other_owner = w.users.create(user_name=f'sdk-{time.time_ns()}@example.com') # cleanup -w.users.delete(delete=other_owner.id) +w.users.delete(id=other_owner.id) diff --git a/examples/users/create_users.py b/examples/users/create_users.py new file mode 100755 index 00000000..4a348cf3 --- /dev/null +++ b/examples/users/create_users.py @@ -0,0 +1,7 @@ +import time + +from databricks.sdk import WorkspaceClient + +w = WorkspaceClient() + +user = w.users.create(display_name=f'sdk-{time.time_ns()}', user_name=f'sdk-{time.time_ns()}@example.com') diff --git a/examples/users/delete_clusters_api_integration.py b/examples/users/delete_clusters_api_integration.py index 823bfc37..fecdb036 100755 --- a/examples/users/delete_clusters_api_integration.py +++ b/examples/users/delete_clusters_api_integration.py @@ -6,4 +6,4 @@ other_owner = w.users.create(user_name=f'sdk-{time.time_ns()}@example.com') -w.users.delete(delete=other_owner.id) +w.users.delete(id=other_owner.id) diff --git a/examples/users/delete_users.py b/examples/users/delete_users.py new file mode 100755 index 00000000..81004b1a --- /dev/null +++ b/examples/users/delete_users.py @@ -0,0 +1,9 @@ +import time + +from databricks.sdk import WorkspaceClient + +w = WorkspaceClient() + +user = w.users.create(display_name=f'sdk-{time.time_ns()}', user_name=f'sdk-{time.time_ns()}@example.com') + +w.users.delete(id=user.id) diff --git a/examples/users/get_users.py b/examples/users/get_users.py new file mode 100755 index 00000000..55919ff7 --- /dev/null +++ b/examples/users/get_users.py @@ -0,0 +1,9 @@ +import time + +from databricks.sdk import WorkspaceClient + +w = WorkspaceClient() + +user = 
w.users.create(display_name=f'sdk-{time.time_ns()}', user_name=f'sdk-{time.time_ns()}@example.com') + +fetch = w.users.get(id=user.id) diff --git a/examples/users/list_users.py b/examples/users/list_users.py new file mode 100755 index 00000000..fbc477cb --- /dev/null +++ b/examples/users/list_users.py @@ -0,0 +1,8 @@ +from databricks.sdk import WorkspaceClient +from databricks.sdk.service import iam + +w = WorkspaceClient() + +all_users = w.users.list(attributes="id,userName", + sort_by="userName", + sort_order=iam.ListSortOrder.DESCENDING) diff --git a/examples/volumes/create_volumes.py b/examples/volumes/create_volumes.py index ec1fcd6f..660451bf 100755 --- a/examples/volumes/create_volumes.py +++ b/examples/volumes/create_volumes.py @@ -28,6 +28,6 @@ volume_type=catalog.VolumeType.EXTERNAL) # cleanup -w.schemas.delete(delete=created_schema.full_name) +w.schemas.delete(full_name=created_schema.full_name) w.catalogs.delete(name=created_catalog.name, force=True) -w.volumes.delete(delete=created_volume.full_name) +w.volumes.delete(full_name_arg=created_volume.full_name) diff --git a/examples/volumes/list_volumes.py b/examples/volumes/list_volumes.py index c070f6a7..32e39912 100755 --- a/examples/volumes/list_volumes.py +++ b/examples/volumes/list_volumes.py @@ -11,5 +11,5 @@ all_volumes = w.volumes.list(catalog_name=created_catalog.name, schema_name=created_schema.name) # cleanup -w.schemas.delete(delete=created_schema.full_name) +w.schemas.delete(full_name=created_schema.full_name) w.catalogs.delete(name=created_catalog.name, force=True) diff --git a/examples/volumes/read_volumes.py b/examples/volumes/read_volumes.py index 4ae83a31..3051ddd6 100755 --- a/examples/volumes/read_volumes.py +++ b/examples/volumes/read_volumes.py @@ -27,9 +27,9 @@ storage_location=external_location.url, volume_type=catalog.VolumeType.EXTERNAL) -loaded_volume = w.volumes.read(read=created_volume.full_name) +loaded_volume = w.volumes.read(full_name_arg=created_volume.full_name) # cleanup 
-w.schemas.delete(delete=created_schema.full_name) +w.schemas.delete(full_name=created_schema.full_name) w.catalogs.delete(name=created_catalog.name, force=True) -w.volumes.delete(delete=created_volume.full_name) +w.volumes.delete(full_name_arg=created_volume.full_name) diff --git a/examples/volumes/update_volumes.py b/examples/volumes/update_volumes.py index 14e461b4..6becccb1 100755 --- a/examples/volumes/update_volumes.py +++ b/examples/volumes/update_volumes.py @@ -27,11 +27,11 @@ storage_location=external_location.url, volume_type=catalog.VolumeType.EXTERNAL) -loaded_volume = w.volumes.read(read=created_volume.full_name) +loaded_volume = w.volumes.read(full_name_arg=created_volume.full_name) _ = w.volumes.update(full_name_arg=loaded_volume.full_name, comment="Updated volume comment") # cleanup -w.schemas.delete(delete=created_schema.full_name) +w.schemas.delete(full_name=created_schema.full_name) w.catalogs.delete(name=created_catalog.name, force=True) -w.volumes.delete(delete=created_volume.full_name) +w.volumes.delete(full_name_arg=created_volume.full_name) diff --git a/examples/vpc_endpoints/create_vpc_endpoints.py b/examples/vpc_endpoints/create_vpc_endpoints.py index ecee7a63..75033189 100755 --- a/examples/vpc_endpoints/create_vpc_endpoints.py +++ b/examples/vpc_endpoints/create_vpc_endpoints.py @@ -10,4 +10,4 @@ vpc_endpoint_name=f'sdk-{time.time_ns()}') # cleanup -a.vpc_endpoints.delete(delete=created.vpc_endpoint_id) +a.vpc_endpoints.delete(vpc_endpoint_id=created.vpc_endpoint_id) diff --git a/examples/vpc_endpoints/get_vpc_endpoints.py b/examples/vpc_endpoints/get_vpc_endpoints.py index 8c311b01..d49d3268 100755 --- a/examples/vpc_endpoints/get_vpc_endpoints.py +++ b/examples/vpc_endpoints/get_vpc_endpoints.py @@ -9,7 +9,7 @@ region=os.environ["AWS_REGION"], vpc_endpoint_name=f'sdk-{time.time_ns()}') -by_id = a.vpc_endpoints.get(get=created.vpc_endpoint_id) +by_id = a.vpc_endpoints.get(vpc_endpoint_id=created.vpc_endpoint_id) # cleanup 
-a.vpc_endpoints.delete(delete=created.vpc_endpoint_id) +a.vpc_endpoints.delete(vpc_endpoint_id=created.vpc_endpoint_id) diff --git a/examples/warehouses/create_sql_warehouses.py b/examples/warehouses/create_sql_warehouses.py index b6ae1d11..15e8f474 100755 --- a/examples/warehouses/create_sql_warehouses.py +++ b/examples/warehouses/create_sql_warehouses.py @@ -10,4 +10,4 @@ auto_stop_mins=10).result() # cleanup -w.warehouses.delete(delete=created.id) +w.warehouses.delete(id=created.id) diff --git a/examples/warehouses/edit_sql_warehouses.py b/examples/warehouses/edit_sql_warehouses.py index b1c62a7a..0e3c8e8f 100755 --- a/examples/warehouses/edit_sql_warehouses.py +++ b/examples/warehouses/edit_sql_warehouses.py @@ -16,4 +16,4 @@ auto_stop_mins=10) # cleanup -w.warehouses.delete(delete=created.id) +w.warehouses.delete(id=created.id) diff --git a/examples/warehouses/get_sql_warehouses.py b/examples/warehouses/get_sql_warehouses.py index 96fbe346..7b59844c 100755 --- a/examples/warehouses/get_sql_warehouses.py +++ b/examples/warehouses/get_sql_warehouses.py @@ -9,7 +9,7 @@ max_num_clusters=1, auto_stop_mins=10).result() -wh = w.warehouses.get(get=created.id) +wh = w.warehouses.get(id=created.id) # cleanup -w.warehouses.delete(delete=created.id) +w.warehouses.delete(id=created.id) diff --git a/examples/workspace/get_status_generic_permissions.py b/examples/workspace/get_status_generic_permissions.py index 8d7c3e80..44723bcb 100755 --- a/examples/workspace/get_status_generic_permissions.py +++ b/examples/workspace/get_status_generic_permissions.py @@ -6,4 +6,4 @@ notebook_path = f'/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}' -obj = w.workspace.get_status(get_status=notebook_path) +obj = w.workspace.get_status(path=notebook_path) diff --git a/examples/workspace/get_status_workspace_integration.py b/examples/workspace/get_status_workspace_integration.py index 039692d1..3ccd249c 100755 --- a/examples/workspace/get_status_workspace_integration.py +++ 
b/examples/workspace/get_status_workspace_integration.py @@ -6,4 +6,4 @@ notebook = f'/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}' -get_status_response = w.workspace.get_status(get_status=notebook) +get_status_response = w.workspace.get_status(path=notebook) diff --git a/examples/workspace_assignment/list_workspace_assignment_on_aws.py b/examples/workspace_assignment/list_workspace_assignment_on_aws.py index b4facae2..15a7aa95 100755 --- a/examples/workspace_assignment/list_workspace_assignment_on_aws.py +++ b/examples/workspace_assignment/list_workspace_assignment_on_aws.py @@ -6,4 +6,4 @@ workspace_id = os.environ["TEST_WORKSPACE_ID"] -all = a.workspace_assignment.list(list=workspace_id) +all = a.workspace_assignment.list(workspace_id=workspace_id) diff --git a/examples/workspace_bindings/get_catalog_workspace_bindings.py b/examples/workspace_bindings/get_catalog_workspace_bindings.py index 17c1e315..651ab75a 100755 --- a/examples/workspace_bindings/get_catalog_workspace_bindings.py +++ b/examples/workspace_bindings/get_catalog_workspace_bindings.py @@ -6,7 +6,7 @@ created = w.catalogs.create(name=f'sdk-{time.time_ns()}') -bindings = w.workspace_bindings.get(get=created.name) +bindings = w.workspace_bindings.get(name=created.name) # cleanup w.catalogs.delete(name=created.name, force=True) diff --git a/examples/workspaces/create_workspaces.py b/examples/workspaces/create_workspaces.py index 5457c1ec..bf820677 100755 --- a/examples/workspaces/create_workspaces.py +++ b/examples/workspaces/create_workspaces.py @@ -21,6 +21,6 @@ storage_configuration_id=storage.storage_configuration_id).result() # cleanup -a.storage.delete(delete=storage.storage_configuration_id) -a.credentials.delete(delete=role.credentials_id) -a.workspaces.delete(delete=created.workspace_id) +a.storage.delete(storage_configuration_id=storage.storage_configuration_id) +a.credentials.delete(credentials_id=role.credentials_id) +a.workspaces.delete(workspace_id=created.workspace_id) 
diff --git a/examples/workspaces/get_workspaces.py b/examples/workspaces/get_workspaces.py index a82894b0..809a1f37 100755 --- a/examples/workspaces/get_workspaces.py +++ b/examples/workspaces/get_workspaces.py @@ -20,9 +20,9 @@ credentials_id=role.credentials_id, storage_configuration_id=storage.storage_configuration_id).result() -by_id = a.workspaces.get(get=created.workspace_id) +by_id = a.workspaces.get(workspace_id=created.workspace_id) # cleanup -a.storage.delete(delete=storage.storage_configuration_id) -a.credentials.delete(delete=role.credentials_id) -a.workspaces.delete(delete=created.workspace_id) +a.storage.delete(storage_configuration_id=storage.storage_configuration_id) +a.credentials.delete(credentials_id=role.credentials_id) +a.workspaces.delete(workspace_id=created.workspace_id) diff --git a/examples/workspaces/update_workspaces.py b/examples/workspaces/update_workspaces.py index 78ddf30a..f766e1b5 100755 --- a/examples/workspaces/update_workspaces.py +++ b/examples/workspaces/update_workspaces.py @@ -28,7 +28,7 @@ _ = a.workspaces.update(workspace_id=created.workspace_id, credentials_id=update_role.credentials_id).result() # cleanup -a.storage.delete(delete=storage.storage_configuration_id) -a.credentials.delete(delete=role.credentials_id) -a.credentials.delete(delete=update_role.credentials_id) -a.workspaces.delete(delete=created.workspace_id) +a.storage.delete(storage_configuration_id=storage.storage_configuration_id) +a.credentials.delete(credentials_id=role.credentials_id) +a.credentials.delete(credentials_id=update_role.credentials_id) +a.workspaces.delete(workspace_id=created.workspace_id)