Migrate cluster policies to new fixtures #174

Merged 1 commit on Sep 8, 2023
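Migrates the integration tests off the bulk cluster_policies fixture in tests/integration/conftest.py and onto the new make_cluster_policy and make_cluster_policy_permissions fixtures wired into test_e2e, and removes the _set_random_permissions, _get_basic_job_cluster, and _get_basic_task helpers from tests/integration/utils.py.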
47 changes: 1 addition & 46 deletions tests/integration/conftest.py
@@ -9,7 +9,6 @@
import pytest
from databricks.sdk import AccountClient, WorkspaceClient
from databricks.sdk.core import Config
from databricks.sdk.service.compute import CreatePolicyResponse
from databricks.sdk.service.iam import AccessControlRequest, PermissionLevel
from databricks.sdk.service.workspace import ObjectInfo, ObjectType

@@ -19,21 +18,14 @@
from databricks.labs.ucx.providers.mixins.sql import StatementExecutionExt
from databricks.labs.ucx.utils import ThreadedExecution

from .utils import (
    EnvironmentInfo,
    InstanceProfile,
    WorkspaceObjects,
    _set_random_permissions,
)
from .utils import EnvironmentInfo, InstanceProfile, WorkspaceObjects

logging.getLogger("tests").setLevel("DEBUG")
logging.getLogger("databricks.labs.ucx").setLevel("DEBUG")

logger = logging.getLogger(__name__)

NUM_TEST_GROUPS = int(os.environ.get("NUM_TEST_GROUPS", 5))
NUM_TEST_INSTANCE_PROFILES = int(os.environ.get("NUM_TEST_INSTANCE_PROFILES", 3))
NUM_TEST_CLUSTER_POLICIES = int(os.environ.get("NUM_TEST_CLUSTER_POLICIES", 3))
NUM_TEST_TOKENS = int(os.environ.get("NUM_TEST_TOKENS", 3))

NUM_THREADS = int(os.environ.get("NUM_TEST_THREADS", 20))
@@ -242,41 +234,6 @@ def instance_profiles(env: EnvironmentInfo, ws: WorkspaceClient) -> list[InstanceProfile]:
    logger.debug("Test instance profiles deleted")


@pytest.fixture
def cluster_policies(env: EnvironmentInfo, ws: WorkspaceClient) -> list[CreatePolicyResponse]:
    logger.debug("Creating test cluster policies")

    test_cluster_policies: list[CreatePolicyResponse] = [
        ws.cluster_policies.create(
            name=f"{env.test_uid}-test-{i}",
            definition="""
            {
                "spark_version": {
                    "type": "unlimited",
                    "defaultValue": "auto:latest-lts"
                }
            }
            """,
        )
        for i in range(NUM_TEST_CLUSTER_POLICIES)
    ]

    _set_random_permissions(
        test_cluster_policies,
        "policy_id",
        RequestObjectType.CLUSTER_POLICIES,
        env,
        ws,
        permission_levels=[PermissionLevel.CAN_USE],
    )

    yield test_cluster_policies

    logger.debug("Deleting test cluster policies")
    executables = [partial(ws.cluster_policies.delete, p.policy_id) for p in test_cluster_policies]
    Threader(executables).run()


@pytest.fixture
def tokens(ws: WorkspaceClient, env: EnvironmentInfo) -> list[AccessControlRequest]:
    logger.debug("Adding token-level permissions to groups")
@@ -352,14 +309,12 @@ def workspace_objects(ws: WorkspaceClient, env: EnvironmentInfo) -> WorkspaceObjects:

@pytest.fixture
def verifiable_objects(
    cluster_policies,
    tokens,
    workspace_objects,
) -> list[tuple[list, str, RequestObjectType | None]]:
    _verifiable_objects = [
        (workspace_objects, "workspace_objects", None),
        (tokens, "tokens", RequestObjectType.AUTHORIZATION),
        (cluster_policies, "policy_id", RequestObjectType.CLUSTER_POLICIES),
    ]
    yield _verifiable_objects

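Each entry yielded by verifiable_objects is a (objects, id_attribute, request_object_type) tuple. The consumer of these tuples lives in the e2e test and is not part of this diff; the following is only a rough sketch of such a consumer, assuming the ACLs are read back with ws.permissions.get (an assumed call pattern mirroring the ws.permissions.update usage above, not code from this PR):

from databricks.sdk import WorkspaceClient

from databricks.labs.ucx.inventory.types import RequestObjectType


def _verify_migrated_permissions(
    ws: WorkspaceClient,
    objects: list,
    id_attribute: str,
    request_object_type: RequestObjectType | None,
) -> None:
    # Entries without a request object type (the workspace_objects tuple) are
    # verified through a different code path, so skip them here.
    if request_object_type is None:
        return
    for _object in objects:
        # Read back the ACLs on each object and check that grants made to the
        # original workspace-local groups survived the group migration.
        permissions = ws.permissions.get(
            request_object_type=request_object_type,
            request_object_id=getattr(_object, id_attribute),
        )
        assert permissions.access_control_list is not None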
12 changes: 12 additions & 0 deletions tests/integration/test_e2e.py
@@ -134,6 +134,8 @@ def test_e2e(
    make_instance_pool_permissions,
    make_cluster,
    make_cluster_permissions,
    make_cluster_policy,
    make_cluster_policy_permissions,
    make_model,
    make_registered_model_permissions,
    make_experiment,
@@ -170,6 +172,16 @@
        ([cluster], "cluster_id", RequestObjectType.CLUSTERS),
    )

    cluster_policy = make_cluster_policy()
    make_cluster_policy_permissions(
        object_id=cluster_policy.policy_id,
        permission_level=random.choice([PermissionLevel.CAN_USE]),
        group_name=ws_group.display_name,
    )
    verifiable_objects.append(
        ([cluster_policy], "policy_id", RequestObjectType.CLUSTER_POLICIES),
    )

    model = make_model()
    make_registered_model_permissions(
        object_id=model.id,
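The make_cluster_policy and make_cluster_policy_permissions factory fixtures used above are defined outside this diff. A minimal sketch of what they plausibly look like, inferred from the call sites here and from the removed cluster_policies fixture (the ws fixture, naming scheme, defaults, and cleanup behavior are assumptions, not code from this PR):

import json

import pytest
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.iam import AccessControlRequest

from databricks.labs.ucx.inventory.types import RequestObjectType


@pytest.fixture
def make_cluster_policy(ws: WorkspaceClient):
    # Factory fixture: each call creates one policy, and everything created is
    # deleted again once the test finishes.
    created = []

    def create(**kwargs):
        kwargs.setdefault("name", f"test-policy-{len(created)}")  # hypothetical naming scheme
        kwargs.setdefault(
            "definition",
            json.dumps({"spark_version": {"type": "unlimited", "defaultValue": "auto:latest-lts"}}),
        )
        policy = ws.cluster_policies.create(**kwargs)
        created.append(policy)
        return policy

    yield create

    for policy in created:
        ws.cluster_policies.delete(policy.policy_id)


@pytest.fixture
def make_cluster_policy_permissions(ws: WorkspaceClient):
    # Grants one explicitly chosen permission level to one group, replacing the
    # randomized ACLs that _set_random_permissions used to assign.
    def create(*, object_id: str, permission_level, group_name: str):
        return ws.permissions.update(
            request_object_type=RequestObjectType.CLUSTER_POLICIES,
            request_object_id=object_id,
            access_control_list=[
                AccessControlRequest(group_name=group_name, permission_level=permission_level)
            ],
        )

    return create

With factories like these, each test grants deterministic, per-test permissions instead of relying on the session-wide random ACLs that _set_random_permissions sprayed across NUM_TEST_CLUSTER_POLICIES policies.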
64 changes: 1 addition & 63 deletions tests/integration/utils.py
@@ -1,16 +1,9 @@
import logging
import random
from dataclasses import dataclass
from typing import Any

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.compute import ClusterSpec, DataSecurityMode
from databricks.sdk.service.iam import AccessControlRequest, Group, PermissionLevel
from databricks.sdk.service.jobs import JobCluster, PythonWheelTask, Task
from databricks.sdk.service.iam import Group
from databricks.sdk.service.workspace import ObjectInfo

from databricks.labs.ucx.inventory.types import RequestObjectType

logger = logging.getLogger(__name__)


@@ -26,61 +19,6 @@ class EnvironmentInfo:
    groups: list[tuple[Group, Group]]


def _set_random_permissions(
    objects: list[Any],
    id_attribute: str,
    request_object_type: RequestObjectType,
    env: EnvironmentInfo,
    ws: WorkspaceClient,
    permission_levels: list[PermissionLevel],
    num_acls: int | None = 3,
):
    def get_random_ws_group() -> Group:
        return random.choice([g[0] for g in env.groups])

    def get_random_permission_level() -> PermissionLevel:
        return random.choice(permission_levels)

    for _object in objects:
        acl_req = [
            AccessControlRequest(
                group_name=get_random_ws_group().display_name, permission_level=get_random_permission_level()
            )
            for _ in range(num_acls)
        ]

        ws.permissions.update(
            request_object_type=request_object_type,
            request_object_id=getattr(_object, id_attribute),
            access_control_list=acl_req,
        )


def _get_basic_job_cluster() -> JobCluster:
    return JobCluster(
        job_cluster_key="default",
        new_cluster=ClusterSpec(
            spark_version="13.2.x-scala2.12",
            node_type_id="i3.xlarge",
            driver_node_type_id="i3.xlarge",
            num_workers=0,
            spark_conf={"spark.master": "local[*, 4]", "spark.databricks.cluster.profile": "singleNode"},
            custom_tags={
                "ResourceClass": "SingleNode",
            },
            data_security_mode=DataSecurityMode.SINGLE_USER,
        ),
    )


def _get_basic_task() -> Task:
    return Task(
        task_key="test",
        python_wheel_task=PythonWheelTask(entry_point="main", package_name="some-pkg"),
        job_cluster_key="default",
    )


@dataclass
class WorkspaceObjects:
    root_dir: ObjectInfo