From adff1585165acee9984dae3bae5605ad1ab72867 Mon Sep 17 00:00:00 2001
From: Serge Smertin
Date: Fri, 8 Sep 2023 14:05:27 -0700
Subject: [PATCH] Migrate cluster policies to new fixtures

---
 tests/integration/conftest.py | 47 +------------------------
 tests/integration/test_e2e.py | 12 +++++++
 tests/integration/utils.py    | 64 +----------------------------------
 3 files changed, 14 insertions(+), 109 deletions(-)

diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py
index cd89e5644e..79a0aeafbc 100644
--- a/tests/integration/conftest.py
+++ b/tests/integration/conftest.py
@@ -9,7 +9,6 @@
 import pytest
 from databricks.sdk import AccountClient, WorkspaceClient
 from databricks.sdk.core import Config
-from databricks.sdk.service.compute import CreatePolicyResponse
 from databricks.sdk.service.iam import AccessControlRequest, PermissionLevel
 from databricks.sdk.service.workspace import ObjectInfo, ObjectType

@@ -19,21 +18,14 @@
 from databricks.labs.ucx.providers.mixins.sql import StatementExecutionExt
 from databricks.labs.ucx.utils import ThreadedExecution

-from .utils import (
-    EnvironmentInfo,
-    InstanceProfile,
-    WorkspaceObjects,
-    _set_random_permissions,
-)
+from .utils import EnvironmentInfo, InstanceProfile, WorkspaceObjects

 logging.getLogger("tests").setLevel("DEBUG")
 logging.getLogger("databricks.labs.ucx").setLevel("DEBUG")

 logger = logging.getLogger(__name__)

-NUM_TEST_GROUPS = int(os.environ.get("NUM_TEST_GROUPS", 5))
 NUM_TEST_INSTANCE_PROFILES = int(os.environ.get("NUM_TEST_INSTANCE_PROFILES", 3))
-NUM_TEST_CLUSTER_POLICIES = int(os.environ.get("NUM_TEST_CLUSTER_POLICIES", 3))
 NUM_TEST_TOKENS = int(os.environ.get("NUM_TEST_TOKENS", 3))
 NUM_THREADS = int(os.environ.get("NUM_TEST_THREADS", 20))

@@ -242,41 +234,6 @@ def instance_profiles(env: EnvironmentInfo, ws: WorkspaceClient) -> list[Instanc
     logger.debug("Test instance profiles deleted")


-@pytest.fixture
-def cluster_policies(env: EnvironmentInfo, ws: WorkspaceClient) -> list[CreatePolicyResponse]:
-    logger.debug("Creating test cluster policies")
-
-    test_cluster_policies: list[CreatePolicyResponse] = [
-        ws.cluster_policies.create(
-            name=f"{env.test_uid}-test-{i}",
-            definition="""
-            {
-                "spark_version": {
-                    "type": "unlimited",
-                    "defaultValue": "auto:latest-lts"
-                }
-            }
-            """,
-        )
-        for i in range(NUM_TEST_CLUSTER_POLICIES)
-    ]
-
-    _set_random_permissions(
-        test_cluster_policies,
-        "policy_id",
-        RequestObjectType.CLUSTER_POLICIES,
-        env,
-        ws,
-        permission_levels=[PermissionLevel.CAN_USE],
-    )
-
-    yield test_cluster_policies
-
-    logger.debug("Deleting test instance pools")
-    executables = [partial(ws.cluster_policies.delete, p.policy_id) for p in test_cluster_policies]
-    Threader(executables).run()
-
-
 @pytest.fixture
 def tokens(ws: WorkspaceClient, env: EnvironmentInfo) -> list[AccessControlRequest]:
     logger.debug("Adding token-level permissions to groups")
@@ -352,14 +309,12 @@ def workspace_objects(ws: WorkspaceClient, env: EnvironmentInfo) -> WorkspaceObj

 @pytest.fixture
 def verifiable_objects(
-    cluster_policies,
     tokens,
     workspace_objects,
 ) -> list[tuple[list, str, RequestObjectType | None]]:
     _verifiable_objects = [
         (workspace_objects, "workspace_objects", None),
         (tokens, "tokens", RequestObjectType.AUTHORIZATION),
-        (cluster_policies, "policy_id", RequestObjectType.CLUSTER_POLICIES),
     ]
     yield _verifiable_objects

diff --git a/tests/integration/test_e2e.py b/tests/integration/test_e2e.py
index 060bbb43fb..f3704de8fe 100644
--- a/tests/integration/test_e2e.py
+++ b/tests/integration/test_e2e.py
@@ -134,6 +134,8 @@ def test_e2e(
     make_instance_pool_permissions,
     make_cluster,
     make_cluster_permissions,
+    make_cluster_policy,
+    make_cluster_policy_permissions,
     make_model,
     make_registered_model_permissions,
     make_experiment,
@@ -170,6 +172,16 @@ def test_e2e(
         ([cluster], "cluster_id", RequestObjectType.CLUSTERS),
     )

+    cluster_policy = make_cluster_policy()
+    make_cluster_policy_permissions(
+        object_id=cluster_policy.policy_id,
+        permission_level=random.choice([PermissionLevel.CAN_USE]),
+        group_name=ws_group.display_name,
+    )
+    verifiable_objects.append(
+        ([cluster_policy], "policy_id", RequestObjectType.CLUSTER_POLICIES),
+    )
+
     model = make_model()
     make_registered_model_permissions(
         object_id=model.id,
diff --git a/tests/integration/utils.py b/tests/integration/utils.py
index 13872ef1f1..2275b0071f 100644
--- a/tests/integration/utils.py
+++ b/tests/integration/utils.py
@@ -1,16 +1,9 @@
 import logging
-import random
 from dataclasses import dataclass
-from typing import Any

-from databricks.sdk import WorkspaceClient
-from databricks.sdk.service.compute import ClusterSpec, DataSecurityMode
-from databricks.sdk.service.iam import AccessControlRequest, Group, PermissionLevel
-from databricks.sdk.service.jobs import JobCluster, PythonWheelTask, Task
+from databricks.sdk.service.iam import Group
 from databricks.sdk.service.workspace import ObjectInfo

-from databricks.labs.ucx.inventory.types import RequestObjectType
-
 logger = logging.getLogger(__name__)


@@ -26,61 +19,6 @@ class EnvironmentInfo:
     groups: list[tuple[Group, Group]]


-def _set_random_permissions(
-    objects: list[Any],
-    id_attribute: str,
-    request_object_type: RequestObjectType,
-    env: EnvironmentInfo,
-    ws: WorkspaceClient,
-    permission_levels: list[PermissionLevel],
-    num_acls: int | None = 3,
-):
-    def get_random_ws_group() -> Group:
-        return random.choice([g[0] for g in env.groups])
-
-    def get_random_permission_level() -> PermissionLevel:
-        return random.choice(permission_levels)
-
-    for _object in objects:
-        acl_req = [
-            AccessControlRequest(
-                group_name=get_random_ws_group().display_name, permission_level=get_random_permission_level()
-            )
-            for _ in range(num_acls)
-        ]
-
-        ws.permissions.update(
-            request_object_type=request_object_type,
-            request_object_id=getattr(_object, id_attribute),
-            access_control_list=acl_req,
-        )
-
-
-def _get_basic_job_cluster() -> JobCluster:
-    return JobCluster(
-        job_cluster_key="default",
-        new_cluster=ClusterSpec(
-            spark_version="13.2.x-scala2.12",
-            node_type_id="i3.xlarge",
-            driver_node_type_id="i3.xlarge",
-            num_workers=0,
-            spark_conf={"spark.master": "local[*, 4]", "spark.databricks.cluster.profile": "singleNode"},
-            custom_tags={
-                "ResourceClass": "SingleNode",
-            },
-            data_security_mode=DataSecurityMode.SINGLE_USER,
-        ),
-    )
-
-
-def _get_basic_task() -> Task:
-    return Task(
-        task_key="test",
-        python_wheel_task=PythonWheelTask(entry_point="main", package_name="some-pkg"),
-        job_cluster_key="default",
-    )
-
-
 @dataclass
 class WorkspaceObjects:
     root_dir: ObjectInfo
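Illustration of the new pattern (not part of the patch): instead of one bulk fixture that provisions NUM_TEST_CLUSTER_POLICIES policies and sprays random ACLs on them, each test now creates exactly one policy via make_cluster_policy and grants a permission via make_cluster_policy_permissions, with teardown handled by the fixtures. The sketch below is a minimal standalone test assuming the fixture semantics visible in the test_e2e.py hunk above: make_cluster_policy() returns an object exposing policy_id, make_cluster_policy_permissions takes object_id, permission_level, and group_name, and the ws_group fixture name is likewise taken from that hunk.

    from databricks.sdk.service.iam import PermissionLevel

    from databricks.labs.ucx.inventory.types import RequestObjectType


    def test_cluster_policy_acl(ws_group, make_cluster_policy, make_cluster_policy_permissions):
        # One throwaway policy per test; the fixture deletes it on teardown,
        # replacing the bulk cluster_policies fixture removed above.
        policy = make_cluster_policy()
        # CAN_USE is the only permission level cluster policies support, which
        # is why the patch's random.choice draws from a one-element list.
        make_cluster_policy_permissions(
            object_id=policy.policy_id,
            permission_level=PermissionLevel.CAN_USE,
            group_name=ws_group.display_name,
        )
        # Registered the same way test_e2e appends entries to verifiable_objects:
        verifiable_object = ([policy], "policy_id", RequestObjectType.CLUSTER_POLICIES)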