Skip to content

Commit

Permalink
Merge pull request #1009 from fractal-analytics-platform/978-add-user_id-to-query-parameters-of-jobdatasetworkflow-monitoring-endpoints
Browse files Browse the repository at this point in the history

Add `user_id` as monitoring query parameter
  • Loading branch information
tcompa authored Nov 23, 2023
2 parents 407f415 + 1e5504c commit 2f35a24
Show file tree
Hide file tree
Showing 3 changed files with 53 additions and 6 deletions.
2 changes: 1 addition & 1 deletion CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
* API:
* Major endpoint changes:
* Add trailing slash to _all_ endpoints' paths (\#1003).
* Add new monitoring endpoints restricted to superusers at `/monitoring` (\#947).
* Add new monitoring endpoints restricted to superusers at `/monitoring` (\#947, \#1009).
* Add new `GET` endpoints `api/v1/job/` and `api/v1/project/{project_id}/workflow/{workflow_id}/job/` (\#969, \#1003).
* Add new `GET` endpoints `api/v1/dataset/` and `api/v1/workflow/` (\#988, \#1003).
* Add new `GET` endpoint `api/v1/project/{project_id}/dataset/` (\#993).
Expand Down
17 changes: 16 additions & 1 deletion fractal_server/app/routes/monitoring.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,12 +16,12 @@
from ..models import JobStatusType
from ..models import Project
from ..models import Workflow
from ..models.security import UserOAuth as User
from ..schemas import ApplyWorkflowRead
from ..schemas import DatasetRead
from ..schemas import ProjectRead
from ..schemas import WorkflowRead
from ..security import current_active_superuser
from ..security import User


router_monitoring = APIRouter()
Expand Down Expand Up @@ -60,6 +60,7 @@ async def monitor_project(
@router_monitoring.get("/workflow/", response_model=list[WorkflowRead])
async def monitor_workflow(
id: Optional[int] = None,
user_id: Optional[int] = None,
project_id: Optional[int] = None,
name_contains: Optional[str] = None,
user: User = Depends(current_active_superuser),
Expand All @@ -76,6 +77,10 @@ async def monitor_workflow(
"""
stm = select(Workflow)

if user_id is not None:
stm = stm.join(Project).where(
Project.user_list.any(User.id == user_id)
)
if id is not None:
stm = stm.where(Workflow.id == id)
if project_id is not None:
Expand All @@ -96,6 +101,7 @@ async def monitor_workflow(
@router_monitoring.get("/dataset/", response_model=list[DatasetRead])
async def monitor_dataset(
id: Optional[int] = None,
user_id: Optional[int] = None,
project_id: Optional[int] = None,
name_contains: Optional[str] = None,
type: Optional[str] = None,
Expand All @@ -114,6 +120,10 @@ async def monitor_dataset(
"""
stm = select(Dataset)

if user_id is not None:
stm = stm.join(Project).where(
Project.user_list.any(User.id == user_id)
)
if id is not None:
stm = stm.where(Dataset.id == id)
if project_id is not None:
Expand All @@ -136,6 +146,7 @@ async def monitor_dataset(
@router_monitoring.get("/job/", response_model=list[ApplyWorkflowRead])
async def monitor_job(
id: Optional[int] = None,
user_id: Optional[int] = None,
project_id: Optional[int] = None,
input_dataset_id: Optional[int] = None,
output_dataset_id: Optional[int] = None,
Expand Down Expand Up @@ -173,6 +184,10 @@ async def monitor_job(

if id is not None:
stm = stm.where(ApplyWorkflow.id == id)
if user_id is not None:
stm = stm.join(Project).where(
Project.user_list.any(User.id == user_id)
)
if project_id is not None:
stm = stm.where(ApplyWorkflow.project_id == project_id)
if input_dataset_id is not None:
Expand Down
40 changes: 36 additions & 4 deletions tests/test_monitoring.py
Original file line number Diff line number Diff line change
Expand Up @@ -92,14 +92,24 @@ async def test_monitor_workflow(

async with MockCurrentUser(
persist=True, user_kwargs={"is_superuser": True}
):
) as superuser:
project3 = await project_factory(superuser)
await workflow_factory(project_id=project3.id, name="super")

# get all workflows
res = await client.get(f"{PREFIX}/workflow/")
assert res.status_code == 200
assert len(res.json()) == 4

# get workflows by user_id
res = await client.get(f"{PREFIX}/workflow/?user_id={user.id}")
assert res.status_code == 200
assert len(res.json()) == 3

# get workflows by id
res = await client.get(f"{PREFIX}/workflow/?id={workflow1a.id}")
res = await client.get(
f"{PREFIX}/workflow/?user_id={user.id}&id={workflow1a.id}"
)
assert res.status_code == 200
assert len(res.json()) == 1
assert res.json()[0]["name"] == workflow1a.name
Expand Down Expand Up @@ -169,10 +179,22 @@ async def test_monitor_dataset(

async with MockCurrentUser(
persist=True, user_kwargs={"is_superuser": True}
):
) as superuser:
super_project = await project_factory(superuser)
await dataset_factory(
project_id=super_project.id,
name="super-d",
type="zarr",
)

# get all datasets
res = await client.get(f"{PREFIX}/dataset/")
assert res.status_code == 200
assert len(res.json()) == 4

# get datasets by user_id
res = await client.get(f"{PREFIX}/dataset/?user_id={user.id}")
assert res.status_code == 200
assert len(res.json()) == 3

# get datasets by id
Expand Down Expand Up @@ -206,7 +228,9 @@ async def test_monitor_dataset(
assert len(res.json()) == 0

# get datasets by type
res = await client.get(f"{PREFIX}/dataset/?type=zarr")
res = await client.get(
f"{PREFIX}/dataset/?type=zarr&user_id={user.id}"
)
assert res.status_code == 200
assert len(res.json()) == 2
res = await client.get(f"{PREFIX}/dataset/?type=image")
Expand Down Expand Up @@ -270,6 +294,14 @@ async def test_monitor_job(
assert res.status_code == 200
assert len(res.json()) == 2

# get jobs by user_id
res = await client.get(f"{PREFIX}/job/?user_id={user.id}")
assert res.status_code == 200
assert len(res.json()) == 2
res = await client.get(f"{PREFIX}/job/?user_id={user.id + 1}")
assert res.status_code == 200
assert len(res.json()) == 0

# get jobs by id
res = await client.get(f"{PREFIX}/job/?id={job1.id}")
assert res.status_code == 200
Expand Down

0 comments on commit 2f35a24

Please sign in to comment.