diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index a08ad1a1168..04cc6ad341f 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -4,33 +4,33 @@ # files and folders recursively -.codeclimate.yml @sanderegg, @pcrespov +.codeclimate.yml @sanderegg @pcrespov .travis.yml @sanderegg -Makefile @pcrespov, @sanderegg +Makefile @pcrespov @sanderegg # NOTE: '/' denotes the root of the repository -/api/ @sanderegg, @pcrespov -/ci/ @sanderegg, @pcrespov +/api/ @sanderegg @pcrespov +/ci/ @sanderegg @pcrespov /docs/ @pcrespov -/packages/models-library/ @sanderegg, @pcrespov -/packages/pytest-simcore/ @pcrespov, @sanderegg -/packages/service-integration/ @pcrespov, @sanderegg, @KZzizzle +/packages/models-library/ @sanderegg @pcrespov +/packages/pytest-simcore/ @pcrespov @sanderegg +/packages/service-integration/ @pcrespov @sanderegg @KZzizzle /packages/service-library/ @pcrespov -/packages/settings-library/ @pcrespov, @sanderegg +/packages/settings-library/ @pcrespov @sanderegg /requirements/ @pcrespov -/scripts/demo/ @odeimaiz, @pcrespov +/scripts/demo/ @odeimaiz @pcrespov /scripts/json-schema-to-openapi-schema @sanderegg -/scripts/template-projects/ @odeimaiz, @pcrespov +/scripts/template-projects/ @odeimaiz @pcrespov /services/api-server/ @pcrespov -/services/catalog/ @pcrespov, @sanderegg -/services/director*/ @sanderegg, @pcrespov +/services/catalog/ @pcrespov @sanderegg +/services/director*/ @sanderegg @pcrespov /services/dynamic-sidecar/ @GitHK /services/migration/ @pcrespov -/services/storage/ @mguidon, @pcrespov +/services/storage/ @mguidon @pcrespov /services/static-webserver @GitHK -/services/web/client/ @odeimaiz, @ignapas -/services/web/server/ @pcrespov, @sanderegg, @GitHK +/services/web/client/ @odeimaiz @ignapas +/services/web/server/ @pcrespov @sanderegg @GitHK /tests/environment-setup/ @pcrespov /tests/performance/ @pcrespov /tests/public-api/ @pcrespov diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 
af1ab194244..93c66f221d3 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -11,6 +11,9 @@ Consider prefix your PR message with an emoticon ⚗️ experimental ⬆️ upgrades dependencies 📝 documentation + 🗑️ deprecated + ⚰️ remove dead code + 🔥 remove code or files or from https://gitmoji.dev/ and append (⚠️ devops) if changes in devops configuration required before deploying @@ -25,16 +28,15 @@ and append (⚠️ devops) if changes in devops configuration required before de ## How to test +- Covered by CI diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 65b6a5518eb..c8d3d2ce56c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -13,10 +13,10 @@ repos: - id: check-merge-conflict # NOTE: Keep order as pycln (remove unused imports), then isort (sort them) and black (final formatting) - repo: https://github.com/hadialqattan/pycln - rev: v1.1.0 + rev: v1.2.4 hooks: - id: pycln - args: [--all] + args: [--expand-stars] - repo: https://github.com/PyCQA/isort rev: 5.6.4 hooks: diff --git a/packages/models-library/src/models_library/settings/services_common.py b/packages/models-library/src/models_library/settings/services_common.py deleted file mode 100644 index 62c0b3717a5..00000000000 --- a/packages/models-library/src/models_library/settings/services_common.py +++ /dev/null @@ -1,41 +0,0 @@ -from pydantic import BaseSettings, Field, PositiveInt - -_MINUTE = 60 -_HOUR = 60 * _MINUTE - - -class ServicesCommonSettings(BaseSettings): - # set this interval to 1 hour - director_dynamic_service_save_timeout: PositiveInt = Field( - _HOUR, - description=( - "When stopping a dynamic service, if it has " - "big payloads it is important to have longer timeouts." 
- ), - ) - webserver_director_stop_service_timeout: PositiveInt = Field( - _HOUR + 10, - description=( - "The below will try to help explaining what is happening: " - "webserver -(stop_service)-> director-v* -(save_state)-> service_x" - "- webserver requests stop_service and uses a 01:00:10 timeout" - "- director-v* requests save_state and uses a 01:00:00 timeout" - "The +10 seconds is used to make sure the director replies" - ), - ) - storage_service_upload_download_timeout: PositiveInt = Field( - _HOUR, - description=( - "When dynamic services upload and download data from storage, " - "sometimes very big payloads are involved. In order to handle " - "such payloads it is required to have long timeouts which " - "allow the service to finish the operation." - ), - ) - restart_containers_timeout: PositiveInt = Field( - 1 * _MINUTE, description="timeout of containers restart" - ) - - class Config: - env_prefix = "SERVICES_COMMON_" - case_sensitive = False diff --git a/packages/postgres-database/src/simcore_postgres_database/models/users.py b/packages/postgres-database/src/simcore_postgres_database/models/users.py index 51bbaf220bd..297372aee33 100644 --- a/packages/postgres-database/src/simcore_postgres_database/models/users.py +++ b/packages/postgres-database/src/simcore_postgres_database/models/users.py @@ -4,15 +4,24 @@ - Users they have a role within the framework that provides them different access levels to it """ -import itertools from enum import Enum +from functools import total_ordering import sqlalchemy as sa from sqlalchemy.sql import func from .base import metadata +_USER_ROLE_TO_LEVEL = { + "ANONYMOUS": 0, + "GUEST": 10, + "USER": 20, + "TESTER": 30, + "ADMIN": 100, +} + +@total_ordering class UserRole(Enum): """SORTED enumeration of user roles @@ -36,11 +45,14 @@ class UserRole(Enum): TESTER = "TESTER" ADMIN = "ADMIN" - @classmethod - def super_users(cls): - return list(itertools.takewhile(lambda e: e != cls.USER, cls)) + @property + def 
privilege_level(self) -> int: + return _USER_ROLE_TO_LEVEL[self.name] - # TODO: add comparison https://portingguide.readthedocs.io/en/latest/comparisons.html + def __lt__(self, other) -> bool: + if self.__class__ is other.__class__: + return self.privilege_level < other.privilege_level + return NotImplemented class UserStatus(Enum): diff --git a/packages/postgres-database/tests/test_users.py b/packages/postgres-database/tests/test_users.py new file mode 100644 index 00000000000..dc60f5ed5ff --- /dev/null +++ b/packages/postgres-database/tests/test_users.py @@ -0,0 +1,50 @@ +from simcore_postgres_database.models.users import _USER_ROLE_TO_LEVEL, UserRole + + +def test_user_role_to_level_map_in_sync(): + # If fails, then update _USER_ROLE_TO_LEVEL map + assert set(_USER_ROLE_TO_LEVEL.keys()) == set(UserRole.__members__.keys()) + + +def test_user_role_comparison(): + + assert UserRole.ANONYMOUS < UserRole.ADMIN + assert UserRole.GUEST < UserRole.ADMIN + assert UserRole.USER < UserRole.ADMIN + assert UserRole.TESTER < UserRole.ADMIN + assert UserRole.ADMIN == UserRole.ADMIN + + assert UserRole.ANONYMOUS < UserRole.TESTER + assert UserRole.GUEST < UserRole.TESTER + assert UserRole.USER < UserRole.TESTER + assert UserRole.TESTER == UserRole.TESTER + assert UserRole.ADMIN > UserRole.TESTER + + assert UserRole.ANONYMOUS < UserRole.USER + assert UserRole.GUEST < UserRole.USER + assert UserRole.USER == UserRole.USER + assert UserRole.TESTER > UserRole.USER + assert UserRole.ADMIN > UserRole.USER + + assert UserRole.ANONYMOUS < UserRole.GUEST + assert UserRole.GUEST == UserRole.GUEST + assert UserRole.USER > UserRole.GUEST + assert UserRole.TESTER > UserRole.GUEST + assert UserRole.ADMIN > UserRole.GUEST + + assert UserRole.ANONYMOUS == UserRole.ANONYMOUS + assert UserRole.GUEST > UserRole.ANONYMOUS + assert UserRole.USER > UserRole.ANONYMOUS + assert UserRole.TESTER > UserRole.ANONYMOUS + assert UserRole.ADMIN > UserRole.ANONYMOUS + + # < and > + assert UserRole.TESTER < 
UserRole.ADMIN + assert UserRole.ADMIN > UserRole.TESTER + + # >=, == and <= + assert UserRole.TESTER <= UserRole.ADMIN + assert UserRole.ADMIN >= UserRole.TESTER + + assert UserRole.ADMIN <= UserRole.ADMIN + assert UserRole.ADMIN == UserRole.ADMIN diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/utils_login.py b/packages/pytest-simcore/src/pytest_simcore/helpers/utils_login.py index a8bcfc162dc..0805a3c5b03 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/utils_login.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/utils_login.py @@ -1,5 +1,5 @@ import re -from typing import Any, Dict +from typing import TypedDict from aiohttp import web from aiohttp.test_utils import TestClient @@ -12,9 +12,22 @@ from .utils_assert import assert_status -# WARNING: UserDict is already in https://docs.python.org/3/library/collections.html#collections.UserDict -# TODO: move this to future simcore_service_webserver.users_models.py -AUserDict = Dict[str, Any] + +# WARNING: DO NOT use UserDict is already in https://docs.python.org/3/library/collections.html#collections.UserDictclass UserRowDict(TypedDict): +# NOTE: this is modified dict version of packages/postgres-database/src/simcore_postgres_database/models/users.py for testing purposes +class _UserInfoDictRequired(TypedDict, total=True): + id: int + name: str + email: str + raw_password: str + status: UserStatus + role: UserRole + + +class UserInfoDict(_UserInfoDictRequired, total=False): + created_ip: int + password_hash: str + TEST_MARKS = re.compile(r"TEST (\w+):(.*)") @@ -37,7 +50,7 @@ def parse_link(text): return URL(link).path -async def create_user(db: AsyncpgStorage, data=None) -> AUserDict: +async def create_user(db: AsyncpgStorage, data=None) -> UserInfoDict: data = data or {} password = get_random_string(10) params = { @@ -56,7 +69,7 @@ async def create_user(db: AsyncpgStorage, data=None) -> AUserDict: async def log_client_in( client: TestClient, user_data=None, *, 
enable_check=True -) -> AUserDict: +) -> UserInfoDict: # creates user directly in db db: AsyncpgStorage = get_plugin_storage(client.app) cfg: LoginOptions = get_plugin_options(client.app) @@ -99,7 +112,7 @@ def __init__(self, client, params=None, *, check_if_succeeds=True): self.client = client self.enable_check = check_if_succeeds - async def __aenter__(self) -> AUserDict: + async def __aenter__(self) -> UserInfoDict: self.user = await log_client_in( self.client, self.params, enable_check=self.enable_check ) diff --git a/services/web/server/tests/unit/isolated/scicrunch/_citations.py b/packages/pytest-simcore/src/pytest_simcore/helpers/utils_scicrunch_citations.py similarity index 100% rename from services/web/server/tests/unit/isolated/scicrunch/_citations.py rename to packages/pytest-simcore/src/pytest_simcore/helpers/utils_scicrunch_citations.py diff --git a/services/web/server/tests/unit/with_dbs/_helpers.py b/packages/pytest-simcore/src/pytest_simcore/helpers/utils_webserver_projects.py similarity index 98% rename from services/web/server/tests/unit/with_dbs/_helpers.py rename to packages/pytest-simcore/src/pytest_simcore/helpers/utils_webserver_projects.py index 824695c98a2..bab370a068f 100644 --- a/services/web/server/tests/unit/with_dbs/_helpers.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/utils_webserver_projects.py @@ -34,6 +34,7 @@ class ExpectedResponse(NamedTuple): ] def __str__(self) -> str: + # pylint: disable=no-member items = ",".join(f"{k}={v.__name__}" for k, v in self._asdict().items()) return f"{self.__class__.__name__}({items})" diff --git a/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py b/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py index 661b8d293f3..d8f8dcee4d8 100644 --- a/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py +++ b/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py 
@@ -8,10 +8,10 @@ import pytest from aiohttp import web -from aioresponses import aioresponses as AioResponsesMock from aioresponses.core import CallbackResult from models_library.api_schemas_storage import FileMetaData from models_library.projects_state import RunningState +from pytest_simcore.aioresponses_mocker import AioResponsesMock from yarl import URL pytest_plugins = [ @@ -130,7 +130,7 @@ def get_computation_cb(url, **kwargs) -> CallbackResult: @pytest.fixture -async def director_v2_service_mock( +async def director_v2_service_responses_mock( aioresponses_mocker: AioResponsesMock, ) -> AioResponsesMock: """mocks responses of director-v2""" diff --git a/packages/service-library/src/servicelib/utils.py b/packages/service-library/src/servicelib/utils.py index 2f993135197..5ca3f28d690 100644 --- a/packages/service-library/src/servicelib/utils.py +++ b/packages/service-library/src/servicelib/utils.py @@ -61,17 +61,17 @@ def search_osparc_repo_dir(start: Union[str, Path], max_iterations=8) -> Optiona # FUTURES +def log_exception_callback(fut: asyncio.Future): + try: + fut.result() + except Exception: # pylint: disable=broad-except + logger.exception("Error occured while running task!") + + def fire_and_forget_task( obj: Union[Coroutine, asyncio.Future, Awaitable] ) -> asyncio.Future: future = asyncio.ensure_future(obj) - - def log_exception_callback(fut: asyncio.Future): - try: - fut.result() - except Exception: # pylint: disable=broad-except - logger.exception("Error occured while running task!") - future.add_done_callback(log_exception_callback) return future diff --git a/packages/settings-library/src/settings_library/email.py b/packages/settings-library/src/settings_library/email.py index 52cce3a2885..30822f9c9cf 100644 --- a/packages/settings-library/src/settings_library/email.py +++ b/packages/settings-library/src/settings_library/email.py @@ -1,6 +1,6 @@ from typing import Optional -from pydantic import root_validator +from pydantic import root_validator 
from pydantic.fields import Field from pydantic.types import SecretStr @@ -11,14 +11,15 @@ class SMTPSettings(BaseCustomSettings): """Simple Mail Transfer Protocol""" + # TODO: NameEmail or EmailStr SMTP_SENDER: str = "@".join(["O2SPARC support "]) SMTP_HOST: str SMTP_PORT: PortInt SMTP_TLS_ENABLED: bool = Field(False, description="Enables Secure Mode") - SMTP_USERNAME: Optional[str] - SMTP_PASSWORD: Optional[SecretStr] + SMTP_USERNAME: Optional[str] = Field(None, min_length=1) + SMTP_PASSWORD: Optional[SecretStr] = Field(None, min_length=1) @root_validator @classmethod diff --git a/packages/settings-library/tests/test_email.py b/packages/settings-library/tests/test_email.py index c6367b231c3..53f06f68a49 100644 --- a/packages/settings-library/tests/test_email.py +++ b/packages/settings-library/tests/test_email.py @@ -1,7 +1,7 @@ from typing import Any, Dict -from pydantic import ValidationError import pytest +from pydantic import ValidationError from settings_library.email import SMTPSettings @@ -53,6 +53,13 @@ def test_smtp_configuration_ok(cfg: Dict[str, Any]): "SMTP_TLS_ENABLED": True, "SMTP_USERNAME": "test", }, + { + "SMTP_HOST": "test", + "SMTP_PORT": 113, + "SMTP_USERNAME": "", + "SMTP_PASSWORD": "test", + "SMTP_TLS_ENABLED": True, + }, ], ) def test_smtp_configuration_fails(cfg: Dict[str, Any]): diff --git a/scripts/pydeps/.pydeps b/scripts/pydeps/.pydeps new file mode 100644 index 00000000000..0784096edcb --- /dev/null +++ b/scripts/pydeps/.pydeps @@ -0,0 +1,11 @@ +[pydeps] +max_bacon = 2 +no_show = True +verbose = 0 +pylib = False +exclude = + os + re + sys + collections + __future__ diff --git a/scripts/pydeps/Dockerfile b/scripts/pydeps/Dockerfile new file mode 100644 index 00000000000..cc1f79d029e --- /dev/null +++ b/scripts/pydeps/Dockerfile @@ -0,0 +1,37 @@ +# NOTE: This is a first step towards a devcontainer +# to perform operations like pip-compile or auto-formatting +# that preserves identical environment across developer machines +# +# Python 
version can be upgraded if: +# - Has been patched several times (avoid using the very first release for production) +# - Can be installed with pyenv (SEE pyenv install --list ) +# +# +ARG PYTHON_VERSION="3.8.10" +FROM python:${PYTHON_VERSION}-slim-buster as base + + +RUN apt-get update \ + && apt-get -y install --no-install-recommends\ + make \ + git \ + gawk \ + apt-utils \ + strace \ + graphviz \ + && rm -rf /var/lib/apt/lists/* \ + && apt-get clean + +ARG HOME_DIR +RUN mkdir -p ${HOME_DIR} +COPY .pydeps ${HOME_DIR}/.pydeps + +RUN pip --no-cache-dir install --upgrade \ + pip~=21.3 \ + wheel \ + setuptools + + +# devenv +RUN pip install \ + pydeps diff --git a/scripts/pydeps/Makefile b/scripts/pydeps/Makefile new file mode 100644 index 00000000000..f6e2fe5362f --- /dev/null +++ b/scripts/pydeps/Makefile @@ -0,0 +1,72 @@ +# Recipes to upgrade requirements in batch +# +# Examples of usage +# +# - Full upgrade of all packages +# make touch packages +# +# - Upgrade one library in all packages +# make packages upgrade=name-of-package +# +# +# WARNING: this batch MUST ONLY be used for non-services. Services are INDEPENDENT +# and to upgrade them, use services/{servicesname}/requirements/Makefile +# +.DEFAULT_GOAL := help + +PYTHON_VERSION:=3.8.10 + +# locations +REPODIR := $(shell git rev-parse --show-toplevel) +PACKAGES_DIR := $(abspath $(REPODIR)/packages) +SERVICES_DIR := $(abspath $(REPODIR)/services) +APP_NAME :=$(notdir $(CURDIR)) +IMAGE_NAME :=local/${APP_NAME}-devkit:${PYTHON_VERSION} + +# tools +MAKE_C := $(MAKE) --directory + + +# SEE https://medium.com/faun/set-current-host-user-for-docker-container-4e521cef9ffc +.PHONY: build +build build-nc: ## builds tooling image ${IMAGE_NAME} + docker build $(if $(findstring -nc,$@),--no-cache,) \ + --build-arg PYTHON_VERSION="${PYTHON_VERSION}" \ + --build-arg HOME_DIR="/home/$(USER)" \ + --tag ${IMAGE_NAME} \ + . 
+ + +.PHONY: shell +shell: build ## Mounts REPODIR and open shell in ${IMAGE_NAME} (mostly for interactive debug/use) + # pydeps docs https://pydeps.readthedocs.io/en/latest/#usage + docker run -it \ + --workdir="/home/$(USER)/osparc-simcore" \ + --volume="/etc/group:/etc/group:ro" \ + --volume="/etc/passwd:/etc/passwd:ro" \ + --volume=$(REPODIR):/home/$(USER)/osparc-simcore \ + --user=$(shell id -u):$(shell id -g) \ + --entrypoint=/bin/bash \ + ${IMAGE_NAME} + +# Examples: +# - SEE https://pydeps.readthedocs.io/en/latest/#usage +# +# pydeps services/web/server/src/simcore_service_webserver --only "simcore_service_webserver.projects" --no-show --cluster +# +# +# + + +.PHONY: help +# thanks to https://marmelab.com/blog/2016/02/29/auto-documented-makefile.html +help: ## this colorful help + @echo "Recipes for '$(notdir $(CURDIR))':" + @echo "" + @awk --posix 'BEGIN {FS = ":.*?## "} /^[[:alpha:][:space:]_-]+:.*?## / {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST) + @echo "" + + +.PHONY: guard-% +guard-%: + @if [ "${${*}}" = "" ]; then echo "Environment variable $* not set"; exit 1; fi diff --git a/services/catalog/tests/unit/with_dbs/conftest.py b/services/catalog/tests/unit/with_dbs/conftest.py index 234bcc8ac9c..9da58fa85b4 100644 --- a/services/catalog/tests/unit/with_dbs/conftest.py +++ b/services/catalog/tests/unit/with_dbs/conftest.py @@ -2,7 +2,6 @@ # pylint:disable=unused-argument # pylint:disable=redefined-outer-name - import itertools import random from typing import Any, AsyncIterator, Callable, Dict, Iterable, Iterator, List, Tuple diff --git a/services/web/server/src/simcore_service_webserver/activity/plugin.py b/services/web/server/src/simcore_service_webserver/activity/plugin.py index 2cf1f3ffaf7..3b979a43dc9 100644 --- a/services/web/server/src/simcore_service_webserver/activity/plugin.py +++ b/services/web/server/src/simcore_service_webserver/activity/plugin.py @@ -6,7 +6,6 @@ iter_path_operations, map_handlers_with_operations, ) 
-from simcore_service_webserver.activity.settings import get_plugin_settings from .._constants import APP_OPENAPI_SPECS_KEY from . import handlers diff --git a/services/web/server/src/simcore_service_webserver/director/director_api.py b/services/web/server/src/simcore_service_webserver/director/director_api.py index e5ccbbe058e..4292f495be3 100644 --- a/services/web/server/src/simcore_service_webserver/director/director_api.py +++ b/services/web/server/src/simcore_service_webserver/director/director_api.py @@ -9,7 +9,6 @@ from typing import Any, Dict, List, Optional, Tuple from aiohttp import ClientSession, web -from models_library.settings.services_common import ServicesCommonSettings from servicelib.aiohttp.client_session import get_client_session from servicelib.utils import logged_gather from yarl import URL @@ -90,11 +89,14 @@ async def stop_service( # stopping a service can take a lot of time # bumping the stop command timeout to 1 hour # this will allow to sava bigger datasets from the services + + settings: DirectorSettings = get_plugin_settings(app) + url = api_endpoint / "running_interactive_services" / service_uuid async with session.delete( url, ssl=False, - timeout=ServicesCommonSettings().webserver_director_stop_service_timeout, + timeout=settings.DIRECTOR_STOP_SERVICE_TIMEOUT, params={"save_state": "true" if save_state else "false"}, ) as resp: if resp.status == 404: diff --git a/services/web/server/src/simcore_service_webserver/director/plugin.py b/services/web/server/src/simcore_service_webserver/director/plugin.py index 23274f049e5..6fdbc1b88b6 100644 --- a/services/web/server/src/simcore_service_webserver/director/plugin.py +++ b/services/web/server/src/simcore_service_webserver/director/plugin.py @@ -4,6 +4,7 @@ """ import logging +import warnings from aiohttp import web from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup @@ -21,5 +22,8 @@ logger=logger, ) def setup_director(app: web.Application): + warnings.warn( + 
f"{__name__} plugin is deprecated, use director-v2 plugin instead", + DeprecationWarning, + ) get_plugin_settings(app) - # TODO: init some client diff --git a/services/web/server/src/simcore_service_webserver/director/settings.py b/services/web/server/src/simcore_service_webserver/director/settings.py index 5f70da33e7f..87381f12f37 100644 --- a/services/web/server/src/simcore_service_webserver/director/settings.py +++ b/services/web/server/src/simcore_service_webserver/director/settings.py @@ -2,12 +2,16 @@ from aiohttp.web import Application from models_library.basic_types import PortInt, VersionTag +from pydantic import Field, PositiveInt from settings_library.base import BaseCustomSettings from settings_library.utils_service import DEFAULT_AIOHTTP_PORT, MixinServiceSettings from yarl import URL from .._constants import APP_SETTINGS_KEY +_MINUTE = 60 +_HOUR = 60 * _MINUTE + class DirectorSettings(BaseCustomSettings, MixinServiceSettings): DIRECTOR_HOST: str = "director" @@ -18,6 +22,26 @@ class DirectorSettings(BaseCustomSettings, MixinServiceSettings): def base_url(self) -> URL: return URL(self._build_api_base_url(prefix="DIRECTOR")) + # DESIGN NOTE: + # - Timeouts are typically used in clients (total/read/connection timeouts) or asyncio calls + # - Mostly in floats (aiohttp.Client/) but sometimes in ints + # - Typically in seconds but occasionally in ms + DIRECTOR_STOP_SERVICE_TIMEOUT: PositiveInt = Field( + _HOUR + 10, + description=( + "Timeout on stop service request (seconds)" + "ANE: The below will try to help explaining what is happening: " + "webserver -(stop_service)-> director-v* -(save_state)-> service_x" + "- webserver requests stop_service and uses a 01:00:10 timeout" + "- director-v* requests save_state and uses a 01:00:00 timeout" + "The +10 seconds is used to make sure the director replies" + ), + envs=[ + "DIRECTOR_STOP_SERVICE_TIMEOUT", + "webserver_director_stop_service_timeout", # TODO: deprecated. 
rm when deveops give OK + ], + ) + def get_plugin_settings(app: Application) -> DirectorSettings: settings = app[APP_SETTINGS_KEY].WEBSERVER_DIRECTOR diff --git a/services/web/server/src/simcore_service_webserver/director_v2.py b/services/web/server/src/simcore_service_webserver/director_v2.py index 049381ca44f..767236cf5d4 100644 --- a/services/web/server/src/simcore_service_webserver/director_v2.py +++ b/services/web/server/src/simcore_service_webserver/director_v2.py @@ -29,7 +29,7 @@ def setup_director_v2(app: web.Application): # dependencies setup_rest(app) - # lcietn + # client set_client(app, DirectorV2ApiClient(app)) # routes diff --git a/services/web/server/src/simcore_service_webserver/director_v2_api.py b/services/web/server/src/simcore_service_webserver/director_v2_api.py index b3a5806da14..e7fd3f01248 100644 --- a/services/web/server/src/simcore_service_webserver/director_v2_api.py +++ b/services/web/server/src/simcore_service_webserver/director_v2_api.py @@ -15,16 +15,15 @@ create_or_update_pipeline, delete_pipeline, get_computation_task, - get_service_state, - get_services, + get_dynamic_service_state, + get_dynamic_services, is_healthy, is_pipeline_running, - request_retrieve_dyn_service, - restart, - retrieve, - start_service, - stop_service, - stop_services, + restart_dynamic_service, + retrieve_dynamic_service_inputs, + start_dynamic_service, + stop_dynamic_service, + stop_dynamic_services_in_project, ) # director-v2 module internal API @@ -34,16 +33,15 @@ "delete_pipeline", "DirectorServiceError", "get_computation_task", + "get_dynamic_service_state", + "get_dynamic_services", "get_project_run_policy", - "get_service_state", - "get_services", "is_healthy", "is_pipeline_running", - "request_retrieve_dyn_service", - "restart", - "retrieve", + "restart_dynamic_service", + "retrieve_dynamic_service_inputs", "set_project_run_policy", - "start_service", - "stop_service", - "stop_services", + "start_dynamic_service", + "stop_dynamic_service", + 
"stop_dynamic_services_in_project", ) diff --git a/services/web/server/src/simcore_service_webserver/director_v2_core.py b/services/web/server/src/simcore_service_webserver/director_v2_core.py index 98b47bc53db..c1b4ecf3842 100644 --- a/services/web/server/src/simcore_service_webserver/director_v2_core.py +++ b/services/web/server/src/simcore_service_webserver/director_v2_core.py @@ -7,7 +7,6 @@ from aiohttp import ClientTimeout, web from models_library.projects import ProjectID from models_library.projects_pipeline import ComputationTask -from models_library.settings.services_common import ServicesCommonSettings from models_library.users import UserID from pydantic.types import PositiveInt from servicelib.logging_utils import log_decorator @@ -30,9 +29,7 @@ _APP_DIRECTOR_V2_CLIENT_KEY = f"{__name__}.DirectorV2ApiClient" SERVICE_HEALTH_CHECK_TIMEOUT = ClientTimeout(total=2, connect=1) # type:ignore -SERVICE_RETRIEVE_HTTP_TIMEOUT = ClientTimeout( - total=60 * 60, connect=None, sock_connect=5 # type:ignore -) + DEFAULT_RETRY_POLICY = dict( wait=wait_random(0, 1), stop=stop_after_attempt(2), @@ -276,30 +273,11 @@ async def delete_pipeline( ) -@log_decorator(logger=log) -async def request_retrieve_dyn_service( - app: web.Application, service_uuid: str, port_keys: List[str] -) -> None: - settings: DirectorV2Settings = get_plugin_settings(app) - backend_url = settings.base_url / f"dynamic_services/{service_uuid}:retrieve" - body = {"port_keys": port_keys} - - try: - await _request_director_v2( - app, "POST", backend_url, data=body, timeout=SERVICE_RETRIEVE_HTTP_TIMEOUT - ) - except DirectorServiceError as exc: - log.warning( - "Unable to call :retrieve endpoint on service %s, keys: [%s]: error: [%s:%s]", - service_uuid, - port_keys, - exc.status, - exc.reason, - ) +## DYNAMIC SERVICES --------------------------------------------------------------------------- @log_decorator(logger=log) -async def start_service( +async def start_dynamic_service( app: web.Application, 
user_id: PositiveInt, project_id: str, @@ -345,7 +323,7 @@ async def start_service( @log_decorator(logger=log) -async def get_services( +async def get_dynamic_services( app: web.Application, user_id: Optional[PositiveInt] = None, project_id: Optional[str] = None, @@ -368,22 +346,23 @@ async def get_services( @log_decorator(logger=log) -async def stop_service( - app: web.Application, service_uuid: str, save_state: Optional[bool] = True +async def stop_dynamic_service( + app: web.Application, service_uuid: str, save_state: bool = True ) -> None: # stopping a service can take a lot of time # bumping the stop command timeout to 1 hour # this will allow to sava bigger datasets from the services - # TODO: PC -> ANE: all settings MUST be in app[APP_SETTINGS_KEY] - - timeout = ServicesCommonSettings().webserver_director_stop_service_timeout - settings: DirectorV2Settings = get_plugin_settings(app) backend_url = (settings.base_url / f"dynamic_services/{service_uuid}").update_query( save_state="true" if save_state else "false", ) + await _request_director_v2( - app, "DELETE", backend_url, expected_status=web.HTTPNoContent, timeout=timeout + app, + "DELETE", + backend_url, + expected_status=web.HTTPNoContent, + timeout=settings.DIRECTOR_V2_STOP_SERVICE_TIMEOUT, ) @@ -407,28 +386,28 @@ async def list_running_dynamic_services( @log_decorator(logger=log) -async def stop_services( +async def stop_dynamic_services_in_project( app: web.Application, user_id: Optional[PositiveInt] = None, project_id: Optional[str] = None, - save_state: Optional[bool] = True, + save_state: bool = True, ) -> None: - """Stops all services in parallel""" - running_dynamic_services = await get_services( + """Stops ALL dynamic services within the project in parallel""" + running_dynamic_services = await get_dynamic_services( app, user_id=user_id, project_id=project_id ) - services_to_stop = [ - stop_service( + _stop_dynamic_service_coros = [ + stop_dynamic_service( app=app, 
service_uuid=service["service_uuid"], save_state=save_state ) for service in running_dynamic_services ] - await logged_gather(*services_to_stop) + await logged_gather(*_stop_dynamic_service_coros) @log_decorator(logger=log) -async def get_service_state(app: web.Application, node_uuid: str) -> DataType: +async def get_dynamic_service_state(app: web.Application, node_uuid: str) -> DataType: settings: DirectorV2Settings = get_plugin_settings(app) backend_url = settings.base_url / f"dynamic_services/{node_uuid}" @@ -441,38 +420,26 @@ async def get_service_state(app: web.Application, node_uuid: str) -> DataType: @log_decorator(logger=log) -async def retrieve( - app: web.Application, node_uuid: str, port_keys: List[str] -) -> DataBody: - # when triggering retrieve endpoint - # this will allow to sava bigger datasets from the services - # TODO: PC -> ANE: all settings MUST be in app[APP_SETTINGS_KEY] - timeout = ServicesCommonSettings().storage_service_upload_download_timeout - +async def retrieve_dynamic_service_inputs( + app: web.Application, service_uuid: str, port_keys: List[str] +) -> DataType: + """Pulls data from connections to the dynamic service inputs""" settings: DirectorV2Settings = get_plugin_settings(app) - backend_url = settings.base_url / "dynamic_services" / f"{node_uuid}:retrieve" - body = dict(port_keys=port_keys) + backend_url = settings.base_url / f"dynamic_services/{service_uuid}:retrieve" - retry_result = await _request_director_v2( + result = await _request_director_v2( app, "POST", backend_url, - expected_status=web.HTTPOk, - data=body, - timeout=timeout, + data={"port_keys": port_keys}, + timeout=settings.get_service_retrieve_timeout(), ) - - assert isinstance(retry_result, dict) # nosec - return retry_result + assert isinstance(result, dict) # nosec + return result @log_decorator(logger=log) -async def restart(app: web.Application, node_uuid: str) -> None: - # when triggering retrieve endpoint - # this will allow to sava bigger datasets from the 
services - # TODO: PC -> ANE: all settings MUST be in app[APP_SETTINGS_KEY] - timeout = ServicesCommonSettings().restart_containers_timeout - +async def restart_dynamic_service(app: web.Application, node_uuid: str) -> None: settings: DirectorV2Settings = get_plugin_settings(app) backend_url = settings.base_url / f"dynamic_services/{node_uuid}:restart" @@ -481,5 +448,5 @@ async def restart(app: web.Application, node_uuid: str) -> None: "POST", backend_url, expected_status=web.HTTPOk, - timeout=timeout, + timeout=settings.DIRECTOR_V2_RESTART_DYNAMIC_SERVICE_TIMEOUT, ) diff --git a/services/web/server/src/simcore_service_webserver/director_v2_settings.py b/services/web/server/src/simcore_service_webserver/director_v2_settings.py index 2fb416920fc..b7c0bd21e89 100644 --- a/services/web/server/src/simcore_service_webserver/director_v2_settings.py +++ b/services/web/server/src/simcore_service_webserver/director_v2_settings.py @@ -3,8 +3,9 @@ from functools import cached_property -from aiohttp import ClientSession, web +from aiohttp import ClientSession, ClientTimeout, web from models_library.basic_types import PortInt, VersionTag +from pydantic import Field, PositiveInt from servicelib.aiohttp.application_keys import APP_CLIENT_SESSION_KEY from settings_library.base import BaseCustomSettings from settings_library.utils_service import DEFAULT_FASTAPI_PORT, MixinServiceSettings @@ -12,8 +13,8 @@ from ._constants import APP_SETTINGS_KEY -SERVICE_NAME = "director-v2" -CONFIG_SECTION_NAME = SERVICE_NAME +_MINUTE = 60 +_HOUR = 60 * _MINUTE class DirectorV2Settings(BaseCustomSettings, MixinServiceSettings): @@ -25,6 +26,62 @@ class DirectorV2Settings(BaseCustomSettings, MixinServiceSettings): def base_url(self) -> URL: return URL(self._build_api_base_url(prefix="DIRECTOR_V2")) + # DESIGN NOTE: + # - Timeouts are typically used in clients (total/read/connection timeouts) or asyncio calls + # - Mostly in floats (aiohttp.Client/) but sometimes in ints + # - Typically in seconds 
but occasionally in ms + DIRECTOR_V2_STOP_SERVICE_TIMEOUT: PositiveInt = Field( + _HOUR + 10, + description=( + "Timeout on stop service request (seconds)" + "ANE: The below will try to help explaining what is happening: " + "webserver -(stop_service)-> director-v* -(save_state)-> service_x" + "- webserver requests stop_service and uses a 01:00:10 timeout" + "- director-v* requests save_state and uses a 01:00:00 timeout" + "The +10 seconds is used to make sure the director replies" + ), + envs=[ + "DIRECTOR_V2_STOP_SERVICE_TIMEOUT", + # TODO: below this line are deprecated. rm when deveops give OK + "WEBSERVER_DIRECTOR_STOP_SERVICE_TIMEOUT", + "webserver_director_stop_service_timeout", + ], + ) + + DIRECTOR_V2_RESTART_DYNAMIC_SERVICE_TIMEOUT: PositiveInt = Field( + 1 * _MINUTE, + description="timeout of containers restart", + envs=[ + "DIRECTOR_V2_RESTART_DYNAMIC_SERVICE_TIMEOUT", + # TODO: below this line are deprecated. rm when deveops give OK + "SERVICES_COMMON_RESTART_CONTAINERS_TIMEOUT", + "SERVICES_COMMON_restart_containers_timeout", + ], + ) + + DIRECTOR_V2_STORAGE_SERVICE_UPLOAD_DOWNLOAD_TIMEOUT: PositiveInt = Field( + _HOUR, + description=( + "When dynamic services upload and download data from storage, " + "sometimes very big payloads are involved. In order to handle " + "such payloads it is required to have long timeouts which " + "allow the service to finish the operation." + ), + envs=[ + "DIRECTOR_V2_DYNAMIC_SERVICE_DATA_UPLOAD_DOWNLOAD_TIMEOUT", + # TODO: below this line are deprecated. 
rm when deveops give OK + "SERVICES_COMMON_STORAGE_SERVICE_UPLOAD_DOWNLOAD_TIMEOUT", + "SERVICES_COMMON_storage_service_upload_download_timeout", + ], + ) + + def get_service_retrieve_timeout(self) -> ClientTimeout: + return ClientTimeout( + total=self.DIRECTOR_V2_STORAGE_SERVICE_UPLOAD_DOWNLOAD_TIMEOUT, + connect=None, + sock_connect=5, + ) + def get_plugin_settings(app: web.Application) -> DirectorV2Settings: settings = app[APP_SETTINGS_KEY].WEBSERVER_DIRECTOR_V2 diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector_core.py b/services/web/server/src/simcore_service_webserver/garbage_collector_core.py index 3f85054e3ae..6ce94a3af4c 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector_core.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector_core.py @@ -12,6 +12,7 @@ from aioredlock import Aioredlock from servicelib.utils import logged_gather from simcore_postgres_database.errors import DatabaseError +from simcore_postgres_database.models.users import UserRole from . 
import director_v2_api, users_exceptions from .db_models import GroupType @@ -23,10 +24,15 @@ get_project_for_user, get_workbench_node_ids_from_project_uuid, is_node_id_present_in_any_project_workbench, - remove_project_interactive_services, + remove_project_dynamic_services, ) from .projects.projects_db import APP_PROJECT_DBAPI, ProjectAccessRights -from .projects.projects_exceptions import ProjectNotFoundError +from .projects.projects_exceptions import ( + ProjectDeleteError, + ProjectLockError, + ProjectNotFoundError, + ProjectValidationError, +) from .redis import get_redis_lock_manager from .resource_manager.registry import RedisResourceRegistry, get_registry from .users_api import ( @@ -34,8 +40,9 @@ get_guest_user_ids_and_names, get_user, get_user_id_from_gid, - is_user_guest, + get_user_role, ) +from .users_exceptions import UserNotFoundError from .users_to_groups_api import get_users_for_gid _DATABASE_ERRORS = ( @@ -192,30 +199,28 @@ async def remove_disconnected_user_resources( # inform that the project can be closed on the backend side # try: - await remove_project_interactive_services( + await remove_project_dynamic_services( user_id=int(dead_key["user_id"]), project_uuid=resource_value, app=app, - user_name={ - "first_name": "garbage", - "last_name": "collector", - }, + notify_users=True, ) - except ProjectNotFoundError as err: + except ( + ProjectNotFoundError, + UserNotFoundError, + ProjectLockError, + ) as err: logger.warning( - ( - "Could not remove project interactive services user_id=%s " - "project_uuid=%s. 
Check the logs above for details [%s]" - ), - user_id, + "Could not remove dynamic services for project with %s project_uuid=%s [%s]", + f"{user_id=}", resource_value, err, ) - # ONLY GUESTS: if this user was a GUEST also remove it from the database + # ENFORCE REMOVAL if GUEST: if this user was a GUEST also remove it from the database # with the only associated project owned - await remove_guest_user_with_all_its_resources( + await remove_all_resources_if_guest( app=app, user_id=int(dead_key["user_id"]), ) @@ -290,7 +295,7 @@ async def remove_users_manually_marked_as_guests( f"{guest_user_id=}", f"{guest_user_name=}", ) - await remove_guest_user_with_all_its_resources( + await remove_all_resources_if_guest( app=app, user_id=guest_user_id, ) @@ -313,7 +318,9 @@ async def _remove_single_orphaned_service( ) logger.info(message) try: - await director_v2_api.stop_service(app, service_uuid, save_state=False) + await director_v2_api.stop_dynamic_service( + app, service_uuid, save_state=False + ) except (ServiceNotFoundError, DirectorException) as err: logger.warning("Error while stopping service: %s", err) return @@ -349,14 +356,16 @@ async def _remove_single_orphaned_service( # let's be conservative here. # 1. opened project disappeared from redis? # 2. something bad happened when closing a project? - user_id = int(interactive_service.get("user_id", -1)) - is_invalid_user_id = user_id <= 0 - save_state = True - - if is_invalid_user_id or await is_user_guest(app, user_id): + try: + user_role: UserRole = await get_user_role( + app, user_id=int(interactive_service.get("user_id", -1)) + ) + save_state = user_role > UserRole.GUEST + except UserNotFoundError: + # Don't know the user! 
save_state = False - await director_v2_api.stop_service(app, service_uuid, save_state) + await director_v2_api.stop_dynamic_service(app, service_uuid, save_state) except (ServiceNotFoundError, DirectorException) as err: logger.warning("Error while stopping service: %s", err) @@ -389,7 +398,7 @@ async def remove_orphaned_services( running_interactive_services: List[Dict[str, Any]] = [] try: - running_interactive_services = await director_v2_api.get_services(app) + running_interactive_services = await director_v2_api.get_dynamic_services(app) except director_v2_api.DirectorServiceError: logger.debug(("Could not fetch running_interactive_services")) @@ -417,26 +426,26 @@ async def remove_orphaned_services( logger.debug("Finished orphaned services removal") -async def remove_guest_user_with_all_its_resources( - app: web.Application, user_id: int -) -> None: - """Removes a GUEST user with all its associated projects and S3/MinIO files""" - +async def remove_all_resources_if_guest(app: web.Application, user_id: int) -> None: + """If user is <=GUEST, all associated projects and S3/MinIO files will be removed""" try: - if not await is_user_guest(app, user_id): - return + user_role = await get_user_role(app, user_id) + if user_role <= UserRole.GUEST: + # NOTE: This if-statement acts as a safety barrier to avoid removing resources + # from over-guest users (i.e. 
real users) - logger.debug( - "Deleting all projects of user with %s because it is a GUEST", - f"{user_id=}", - ) - await remove_all_projects_for_user(app=app, user_id=user_id) + logger.debug( + "Deleting all projects of user with %s with %s", + f"{user_id=}", + f"{user_role=}", + ) + await remove_all_projects_for_user(app=app, user_id=user_id) - logger.debug( - "Deleting user %s because it is a GUEST", - f"{user_id=}", - ) - await remove_user(app=app, user_id=user_id) + logger.debug("Deleting user %s with %s", f"{user_id=}", f"{user_role=}") + await remove_user(app=app, user_id=user_id) + + except UserNotFoundError: + pass except _DATABASE_ERRORS as error: logger.warning( @@ -458,17 +467,11 @@ async def remove_all_projects_for_user(app: web.Application, user_id: int) -> No - if the project is not shared with any user but with groups of users, one of the users inside the group (which currently exists) will be picked as the new owner + + raise users_exceptions.UsersNotFoundError """ # recover user's primary_gid - try: - project_owner: Dict = await get_user(app=app, user_id=user_id) - except users_exceptions.UserNotFoundError: - logger.warning( - "Could not recover user data for user '%s', stopping removal of projects!", - f"{user_id=}", - ) - return - + project_owner: Dict = await get_user(app=app, user_id=user_id) user_primary_gid = int(project_owner["primary_gid"]) # fetch all projects for the user @@ -491,11 +494,12 @@ async def remove_all_projects_for_user(app: web.Application, user_id: int) -> No user_id=user_id, include_templates=True, ) - except web.HTTPNotFound: + except (ProjectNotFoundError, UserNotFoundError, ProjectValidationError) as err: logger.warning( - "Could not find project %s for user with %s to be removed. 
Skipping.", + "Skipping removal of %s for user %s due to %s.", f"{project_uuid=}", f"{user_id=}", + f"{err}", ) continue @@ -520,10 +524,11 @@ async def remove_all_projects_for_user(app: web.Application, user_id: int) -> No await delete_project(app, project_uuid, user_id) - except ProjectNotFoundError: + except ProjectDeleteError as err: logging.warning( - "Project with %s was not found, skipping removal", + "Failed to complete project deletion of %s: %s", f"{project_uuid=}", + err, ) else: diff --git a/services/web/server/src/simcore_service_webserver/meta_modeling_projects.py b/services/web/server/src/simcore_service_webserver/meta_modeling_projects.py index ecb1a23534d..caf50979bce 100644 --- a/services/web/server/src/simcore_service_webserver/meta_modeling_projects.py +++ b/services/web/server/src/simcore_service_webserver/meta_modeling_projects.py @@ -22,7 +22,7 @@ get_runnable_projects_ids, ) from .meta_modeling_version_control import CommitID, VersionControlForMetaModeling -from .projects.projects_handlers import RQ_REQUESTED_REPO_PROJECT_UUID_KEY +from .projects.projects_handlers_crud import RQ_REQUESTED_REPO_PROJECT_UUID_KEY log = logging.getLogger(__name__) diff --git a/services/web/server/src/simcore_service_webserver/projects/_core_delete.py b/services/web/server/src/simcore_service_webserver/projects/_core_delete.py new file mode 100644 index 00000000000..6e85852dd4f --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/projects/_core_delete.py @@ -0,0 +1,103 @@ +""" Core submodule: logic to delete a project (and all associated services, data, etc) + +""" + +import asyncio +import functools +import logging +from uuid import UUID + +from aiohttp import web + +from .. 
import director_v2_api +from ..storage_api import delete_data_folders_of_project +from ..users_exceptions import UserNotFoundError +from ._core_services import remove_project_dynamic_services +from .projects_db import APP_PROJECT_DBAPI, ProjectDBAPI +from .projects_exceptions import ( + ProjectDeleteError, + ProjectLockError, + ProjectNotFoundError, +) + +log = logging.getLogger(__name__) + +# helper to format task name when using fire&forget +# TODO: might use this to ensure only ONE task instance is fire&forget at a time +DELETE_PROJECT_TASK_NAME = "fire_and_forget.delete_project.project_uuid={0}.user_id={1}" + + +async def delete_project(app: web.Application, project_uuid: str, user_id: int) -> None: + """Stops dynamic services, deletes data and finally deletes project + + NOTE: this does NOT use fire&forget anymore. This is a decission of the caller to make. + + raises ProjectDeleteError + """ + log.debug( + "deleting project '%s' for user '%s' in database", + f"{project_uuid=}", + f"{user_id=}", + ) + db: ProjectDBAPI = app[APP_PROJECT_DBAPI] + + try: + + # TODO: tmp using invisible as a "deletion mark" + # Even if any of the steps below fail, the project will remain invisible + # TODO: see https://github.com/ITISFoundation/osparc-simcore/pull/2522 + # TODO: note that if any of the steps below fail, it might results in a + # services/projects/data that might be incosistent. The GC should + # be able to detect that and resolve it. 
+ # + + await db.set_hidden_flag(f"{project_uuid}", enabled=True) + + # stops dynamic services + # raises ProjectNotFoundError, UserNotFoundError, ProjectLockError + await remove_project_dynamic_services( + user_id, project_uuid, app, notify_users=False + ) + + # stops computational services + # raises DirectorServiceError + await director_v2_api.delete_pipeline(app, user_id, UUID(project_uuid)) + + # rm data from storage + await delete_data_folders_of_project(app, project_uuid, user_id) + + # rm project from database + await db.delete_user_project(user_id, project_uuid) + + except ProjectLockError as err: + raise ProjectDeleteError( + project_uuid, reason=f"Project currently in use {err}" + ) from err + + except (ProjectNotFoundError, UserNotFoundError) as err: + raise ProjectDeleteError( + project_uuid, reason=f"Invalid project state {err}" + ) from err + + +def create_delete_project_task( + app: web.Application, project_uuid: str, user_id: int, logger: logging.Logger +) -> asyncio.Task: + """helper to uniformly create 'delete_project' tasks + + These tasks then can be used for fire&forget + """ + + def _log_errors(fut: asyncio.Future): + try: + fut.result() + except Exception: # pylint: disable=broad-except + logger.exception("Error while deleting project") + + task = asyncio.create_task( + delete_project(app, project_uuid, user_id), + name=DELETE_PROJECT_TASK_NAME.format(project_uuid, user_id), + ) + + task.add_done_callback(functools.partial(_log_errors, log)) + return task diff --git a/services/web/server/src/simcore_service_webserver/projects/_core_get.py b/services/web/server/src/simcore_service_webserver/projects/_core_get.py new file mode 100644 index 00000000000..aaa76a0be4c --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/projects/_core_get.py @@ -0,0 +1,84 @@ +""" Core submodule: logic to get a project resource +""" + +import asyncio +import logging +from typing import Optional + +from aiohttp import web +from 
servicelib.aiohttp.application_keys import APP_JSONSCHEMA_SPECS_KEY +from servicelib.aiohttp.jsonschema_validation import validate_instance + +from ._core_states import add_project_states_for_user +from .project_models import ProjectDict +from .projects_db import APP_PROJECT_DBAPI, ProjectDBAPI + +log = logging.getLogger(__name__) + + +async def validate_project(app: web.Application, project: ProjectDict): + """ + raises ProjectValidationError + """ + project_schema = app[APP_JSONSCHEMA_SPECS_KEY]["projects"] + await asyncio.get_event_loop().run_in_executor( + None, validate_instance, project, project_schema + ) + + +async def get_project_for_user( + app: web.Application, + project_uuid: str, + user_id: int, + *, + include_templates: Optional[bool] = False, + include_state: Optional[bool] = False, +) -> ProjectDict: + """Returns a VALID project accessible to user + + :raises ProjectNotFoundError: if no match found + """ + db: ProjectDBAPI = app[APP_PROJECT_DBAPI] + assert db # nosec + + project: ProjectDict = {} + is_template = False + if include_templates: + project = await db.get_template_project(project_uuid) + is_template = bool(project) + + if not project: + project = await db.get_user_project(user_id, project_uuid) + + # adds state if it is not a template + if include_state: + project = await add_project_states_for_user(user_id, project, is_template, app) + + # TODO: how to handle when database has an invalid project schema??? + # Notice that db model does not include a check on project schema. + await validate_project(app, project) + return project + + +# NOTE: Needs refactoring after access-layer in storage. 
DO NOT USE but keep +# here since it documents well the concept +# +# async def clone_project( +# request: web.Request, project: Dict, user_id: int, forced_copy_project_id: str = "" +# ) -> Dict: +# """Clones both document and data folders of a project +# +# - document +# - get new identifiers for project and nodes +# - data folders +# - folder name composes as project_uuid/node_uuid +# - data is deep-copied to new folder corresponding to new identifiers +# - managed by storage uservice +# """ +# cloned_project, nodes_map = clone_project_document(project, forced_copy_project_id) +# +# updated_project = await copy_data_folders_from_project( +# request.app, project, cloned_project, nodes_map, user_id +# ) +# +# return updated_project diff --git a/services/web/server/src/simcore_service_webserver/projects/_core_nodes.py b/services/web/server/src/simcore_service_webserver/projects/_core_nodes.py new file mode 100644 index 00000000000..cdceaa09049 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/projects/_core_nodes.py @@ -0,0 +1,319 @@ +""" Core module: project's nodes + +""" +# TODO: separate plugin for sub-resources? + +import logging +from collections import defaultdict +from pprint import pformat +from typing import Any, Dict, List, Optional, Set, Tuple +from uuid import uuid4 + +from aiohttp import web +from models_library.projects_state import RunningState +from servicelib.json_serialization import json_dumps +from servicelib.utils import fire_and_forget_task, logged_gather + +from .. 
import director_v2_api +from ..socketio.events import ( + SOCKET_IO_NODE_UPDATED_EVENT, + SOCKET_IO_PROJECT_UPDATED_EVENT, + SocketMessageDict, + send_group_messages, + send_messages, +) +from ..storage_api import delete_data_folders_of_project_node +from ._core_states import add_project_states_for_user +from .projects_db import APP_PROJECT_DBAPI +from .projects_utils import extract_dns_without_default_port + +log = logging.getLogger(__name__) + + +def is_node_dynamic(node_key: str) -> bool: + return "/dynamic/" in node_key + + +async def add_project_node( + request: web.Request, + project_uuid: str, + user_id: int, + service_key: str, + service_version: str, + service_id: Optional[str], +) -> str: + log.debug( + "starting node %s:%s in project %s for user %s", + service_key, + service_version, + project_uuid, + user_id, + ) + node_uuid = service_id if service_id else str(uuid4()) + if is_node_dynamic(service_key): + await director_v2_api.start_dynamic_service( + request.app, + project_id=project_uuid, + user_id=user_id, + service_key=service_key, + service_version=service_version, + service_uuid=node_uuid, + request_dns=extract_dns_without_default_port(request.url), + request_scheme=request.headers.get("X-Forwarded-Proto", request.url.scheme), + ) + return node_uuid + + +async def get_project_node( + request: web.Request, project_uuid: str, user_id: int, node_id: str +): + log.debug( + "getting node %s in project %s for user %s", node_id, project_uuid, user_id + ) + + list_of_interactive_services = await director_v2_api.get_dynamic_services( + request.app, project_id=project_uuid, user_id=user_id + ) + # get the project if it is running + for service in list_of_interactive_services: + if service["service_uuid"] == node_id: + return service + # the service is not running, it's a computational service maybe + # TODO: find out if computational service is running if not throw a 404 since it's not around + return {"service_uuid": node_id, "service_state": "idle"} + + 
+async def delete_project_node( + request: web.Request, project_uuid: str, user_id: int, node_uuid: str +) -> None: + log.debug( + "deleting node %s in project %s for user %s", node_uuid, project_uuid, user_id + ) + + list_of_services = await director_v2_api.get_dynamic_services( + request.app, project_id=project_uuid, user_id=user_id + ) + # stop the service if it is running + for service in list_of_services: + if service["service_uuid"] == node_uuid: + log.error("deleting service=%s", service) + # no need to save the state of the node when deleting it + await director_v2_api.stop_dynamic_service( + request.app, + node_uuid, + save_state=False, + ) + break + # remove its data if any + await delete_data_folders_of_project_node( + request.app, project_uuid, node_uuid, user_id + ) + + +async def update_project_node_state( + app: web.Application, user_id: int, project_id: str, node_id: str, new_state: str +) -> Dict: + log.debug( + "updating node %s current state in project %s for user %s", + node_id, + project_id, + user_id, + ) + partial_workbench_data: Dict[str, Any] = { + node_id: {"state": {"currentStatus": new_state}}, + } + if RunningState(new_state) in [ + RunningState.PUBLISHED, + RunningState.PENDING, + RunningState.STARTED, + ]: + partial_workbench_data[node_id]["progress"] = 0 + elif RunningState(new_state) in [RunningState.SUCCESS, RunningState.FAILED]: + partial_workbench_data[node_id]["progress"] = 100 + + db = app[APP_PROJECT_DBAPI] + updated_project, _ = await db.patch_user_project_workbench( + partial_workbench_data=partial_workbench_data, + user_id=user_id, + project_uuid=project_id, + ) + updated_project = await add_project_states_for_user( + user_id=user_id, project=updated_project, is_template=False, app=app + ) + return updated_project + + +async def update_project_node_progress( + app: web.Application, user_id: int, project_id: str, node_id: str, progress: float +) -> Optional[Dict]: + log.debug( + "updating node %s progress in project %s for 
user %s with %s", + node_id, + project_id, + user_id, + progress, + ) + partial_workbench_data = { + node_id: {"progress": int(100.0 * float(progress) + 0.5)}, + } + db = app[APP_PROJECT_DBAPI] + updated_project, _ = await db.patch_user_project_workbench( + partial_workbench_data=partial_workbench_data, + user_id=user_id, + project_uuid=project_id, + ) + updated_project = await add_project_states_for_user( + user_id=user_id, project=updated_project, is_template=False, app=app + ) + return updated_project + + +async def update_project_node_outputs( + app: web.Application, + user_id: int, + project_id: str, + node_id: str, + new_outputs: Optional[Dict], + new_run_hash: Optional[str], +) -> Tuple[Dict, List[str]]: + """ + Updates outputs of a given node in a project with 'data' + """ + log.debug( + "updating node %s outputs in project %s for user %s with %s: run_hash [%s]", + node_id, + project_id, + user_id, + json_dumps(new_outputs), + new_run_hash, + ) + new_outputs = new_outputs or {} + + partial_workbench_data = { + node_id: {"outputs": new_outputs, "runHash": new_run_hash}, + } + + db = app[APP_PROJECT_DBAPI] + updated_project, changed_entries = await db.patch_user_project_workbench( + partial_workbench_data=partial_workbench_data, + user_id=user_id, + project_uuid=project_id, + ) + log.debug( + "patched project %s, following entries changed: %s", + project_id, + pformat(changed_entries), + ) + updated_project = await add_project_states_for_user( + user_id=user_id, project=updated_project, is_template=False, app=app + ) + + # changed entries come in the form of {node_uuid: {outputs: {changed_key1: value1, changed_key2: value2}}} + # we do want only the key names + changed_keys = changed_entries.get(node_id, {}).get("outputs", {}).keys() + return updated_project, changed_keys + + +async def get_workbench_node_ids_from_project_uuid( + app: web.Application, + project_uuid: str, +) -> Set[str]: + """Returns a set with all the node_ids from a project's workbench""" + 
db = app[APP_PROJECT_DBAPI] + return await db.get_all_node_ids_from_workbenches(project_uuid) + + +async def is_node_id_present_in_any_project_workbench( + app: web.Application, + node_id: str, +) -> bool: + """If the node_id is presnet in one of the projects' workbenche returns True""" + db = app[APP_PROJECT_DBAPI] + return node_id in await db.get_all_node_ids_from_workbenches() + + +async def notify_project_state_update( + app: web.Application, + project: Dict, + notify_only_user: Optional[int] = None, +) -> None: + messages: List[SocketMessageDict] = [ + { + "event_type": SOCKET_IO_PROJECT_UPDATED_EVENT, + "data": { + "project_uuid": project["uuid"], + "data": project["state"], + }, + } + ] + + if notify_only_user: + await send_messages(app, user_id=str(notify_only_user), messages=messages) + else: + rooms_to_notify = [ + f"{gid}" + for gid, rights in project["accessRights"].items() + if rights["read"] + ] + for room in rooms_to_notify: + await send_group_messages(app, room, messages) + + +async def notify_project_node_update( + app: web.Application, project: Dict, node_id: str +) -> None: + rooms_to_notify = [ + f"{gid}" for gid, rights in project["accessRights"].items() if rights["read"] + ] + + messages: List[SocketMessageDict] = [ + { + "event_type": SOCKET_IO_NODE_UPDATED_EVENT, + "data": { + "project_id": project["uuid"], + "node_id": node_id, + "data": project["workbench"][node_id], + }, + } + ] + + for room in rooms_to_notify: + await send_group_messages(app, room, messages) + + +async def post_trigger_connected_service_retrieve(**kwargs) -> None: + await fire_and_forget_task(trigger_connected_service_retrieve(**kwargs)) + + +async def trigger_connected_service_retrieve( + app: web.Application, project: Dict, updated_node_uuid: str, changed_keys: List[str] +) -> None: + workbench = project["workbench"] + nodes_keys_to_update: Dict[str, List[str]] = defaultdict(list) + # find the nodes that need to retrieve data + for node_uuid, node in workbench.items(): 
+ # check this node is dynamic + if not is_node_dynamic(node["key"]): + continue + + # check whether this node has our updated node as linked inputs + node_inputs = node.get("inputs", {}) + for port_key, port_value in node_inputs.items(): + # we look for node port links, not values + if not isinstance(port_value, dict): + continue + + input_node_uuid = port_value.get("nodeUuid") + if input_node_uuid != updated_node_uuid: + continue + # so this node is linked to the updated one, now check if the port was changed? + linked_input_port = port_value.get("output") + if linked_input_port in changed_keys: + nodes_keys_to_update[node_uuid].append(port_key) + + # call /retrieve on the nodes + update_tasks = [ + director_v2_api.retrieve_dynamic_service_inputs(app, node, keys) + for node, keys in nodes_keys_to_update.items() + ] + await logged_gather(*update_tasks) diff --git a/services/web/server/src/simcore_service_webserver/projects/_core_notify.py b/services/web/server/src/simcore_service_webserver/projects/_core_notify.py new file mode 100644 index 00000000000..d840b9cb723 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/projects/_core_notify.py @@ -0,0 +1,126 @@ +""" Core submodule: utils for project events like notifications, or observer pattern, etc + +""" +import contextlib +import logging +from typing import List + +from aiohttp import web +from models_library.projects_state import ProjectState, ProjectStatus +from servicelib.observer import observe +from servicelib.utils import logged_gather + +from ..resource_manager.websocket_manager import PROJECT_ID_KEY, managed_resource +from ._core_get import get_project_for_user +from ._core_nodes import notify_project_state_update +from ._core_states import get_project_states_for_user +from .project_lock import UserNameDict, lock_project +from .projects_exceptions import ProjectLockError + +log = logging.getLogger(__name__) + + +async def retrieve_and_notify_project_locked_state( + user_id: int, + 
project_uuid: str, + app: web.Application, + notify_only_prj_user: bool = False, +): + project = await get_project_for_user(app, project_uuid, user_id, include_state=True) + await notify_project_state_update( + app, project, notify_only_user=user_id if notify_only_prj_user else None + ) + + +@observe(event="SIGNAL_USER_DISCONNECTED") +async def user_disconnected( + user_id: int, client_session_id: str, app: web.Application +) -> None: + # check if there is a project resource + with managed_resource(user_id, client_session_id, app) as rt: + list_projects: List[str] = await rt.find(PROJECT_ID_KEY) + + await logged_gather( + *[ + retrieve_and_notify_project_locked_state( + user_id, prj, app, notify_only_prj_user=True + ) + for prj in list_projects + ] + ) + + +@contextlib.asynccontextmanager +async def lock_project_and_notify_state_update( + app: web.Application, + project_uuid: str, + status: ProjectStatus, + user_id: int, + user_name: UserNameDict, + notify_users: bool = True, +): + """ + raises ProjectLockError + raises ProjectNotFoundError + raises UserNotFoundError + raises jsonschema.ValidationError + """ + + # FIXME: PC: I find this function very error prone. For instance, the requirements + # on the input parameters depend on the value of 'notify_users', i.e. changes dynamically. + # + # If notify_users=True, then project_uuid has to be defined in the database since + # the notification function the state which is in the database. On the other hand, + # locking relies on the project entry in redis. + # + # These two references to the project (redis and the db) are not in sync leading to some. An + # example is ``stop_service`` where incosistent states that heavily depend on the logic of the function. + # + # A suggestion would be to split this in two explicit functions where notifications is active or not + # but not leave that decision to a variable. 
+ # + try: + async with await lock_project( + app, + project_uuid, + status, + user_id, + user_name, + ): + log.debug( + "Project [%s] lock acquired with %s, %s, %s", + f"{project_uuid=}", + f"{status=}", + f"{user_id=}", + f"{notify_users=}", + ) + if notify_users: + # notifies as locked + await retrieve_and_notify_project_locked_state( + user_id, project_uuid, app + ) + + yield # none of the operations within the context can modify project + + log.debug( + "Project [%s] lock released", + f"{project_uuid=}", + ) + except ProjectLockError: + # someone else has already the lock? + # FIXME: this can raise as well + prj_states: ProjectState = await get_project_states_for_user( + user_id, project_uuid, app + ) + log.error( + "Project [%s] for %s already locked in state '%s'. Please check with support.", + f"{project_uuid=}", + f"{user_id=}", + f"{prj_states.locked.status=}", + ) + raise + + finally: + if notify_users: + # notifies as lock is released + await retrieve_and_notify_project_locked_state(user_id, project_uuid, app) diff --git a/services/web/server/src/simcore_service_webserver/projects/_core_open_close.py b/services/web/server/src/simcore_service_webserver/projects/_core_open_close.py new file mode 100644 index 00000000000..bc0a5ecd3a6 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/projects/_core_open_close.py @@ -0,0 +1,131 @@ +""" Core module: operations related with active project as open/close a project + +""" + +import logging +from typing import List + +from aiohttp import web +from models_library.projects_state import ProjectStatus +from servicelib.utils import fire_and_forget_task + +from ..resource_manager.websocket_manager import ( + PROJECT_ID_KEY, + UserSessionID, + managed_resource, +) +from ..users_api import get_user_name +from ._core_notify import lock_project_and_notify_state_update +from ._core_services import remove_project_dynamic_services +from ._core_states import user_has_another_client_open +from 
.projects_exceptions import ProjectLockError + +log = logging.getLogger(__name__) + + +async def _clean_user_disconnected_clients( + user_session_id_list: List[UserSessionID], app: web.Application +): + for user_session in user_session_id_list: + with managed_resource( + user_session.user_id, user_session.client_session_id, app + ) as rt: + if await rt.get_socket_id() is None: + log.debug( + "removing disconnected project of user %s/%s", + user_session.user_id, + user_session.client_session_id, + ) + await rt.remove(PROJECT_ID_KEY) + + +async def try_open_project_for_user( + user_id: int, project_uuid: str, client_session_id: str, app: web.Application +) -> bool: + try: + async with lock_project_and_notify_state_update( + app, + project_uuid, + ProjectStatus.OPENING, + user_id, + await get_user_name(app, user_id), + notify_users=False, + ): + + with managed_resource(user_id, client_session_id, app) as rt: + user_session_id_list: List[ + UserSessionID + ] = await rt.find_users_of_resource(PROJECT_ID_KEY, project_uuid) + + if not user_session_id_list: + # no one has the project so we lock it + await rt.add(PROJECT_ID_KEY, project_uuid) + return True + + set_user_ids = { + user_session.user_id for user_session in user_session_id_list + } + if set_user_ids.issubset({user_id}): + # we are the only user, remove this session from the list + if not await user_has_another_client_open( + [ + uid + for uid in user_session_id_list + if uid != UserSessionID(user_id, client_session_id) + ], + app, + ): + # steal the project + await rt.add(PROJECT_ID_KEY, project_uuid) + await _clean_user_disconnected_clients( + user_session_id_list, app + ) + return True + return False + + except ProjectLockError: + log.debug( + "project %s/%s is currently locked", f"{user_id=}", f"{project_uuid=}" + ) + return False + + +async def try_close_project_for_user( + user_id: int, + project_uuid: str, + client_session_id: str, + app: web.Application, +): + with managed_resource(user_id, 
client_session_id, app) as rt: + user_to_session_ids: List[UserSessionID] = await rt.find_users_of_resource( + PROJECT_ID_KEY, project_uuid + ) + # first check we have it opened now + if UserSessionID(user_id, client_session_id) not in user_to_session_ids: + # nothing to do the project is already closed + log.warning( + "project [%s] is already closed for user [%s].", + project_uuid, + user_id, + ) + return + # remove the project from our list of opened ones + log.debug( + "removing project [%s] from user [%s] resources", project_uuid, user_id + ) + await rt.remove(PROJECT_ID_KEY) + # check it is not opened by someone else + user_to_session_ids.remove(UserSessionID(user_id, client_session_id)) + log.debug("remaining user_to_session_ids: %s", user_to_session_ids) + if not user_to_session_ids: + # NOTE: depending on the garbage collector speed, it might already be removing it + # TODO: analyze what to do with errors in this f&f task + fire_and_forget_task( + remove_project_dynamic_services(user_id, project_uuid, app) + ) + else: + log.warning( + "project [%s] is used by other users: [%s]. This should not be possible", + project_uuid, + {user_session.user_id for user_session in user_to_session_ids}, + ) diff --git a/services/web/server/src/simcore_service_webserver/projects/_core_services.py b/services/web/server/src/simcore_service_webserver/projects/_core_services.py new file mode 100644 index 00000000000..2fa04fb19f5 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/projects/_core_services.py @@ -0,0 +1,120 @@ +""" Core submodule: dynamic services + +""" + +import logging +from contextlib import suppress +from typing import Dict + +from aiohttp import web +from models_library.projects_state import ProjectStatus +from pydantic.types import PositiveInt +from servicelib.utils import logged_gather + +from .. 
import director_v2_api +from ..users_api import UserRole, get_user_name, get_user_role +from ._core_nodes import is_node_dynamic +from ._core_notify import lock_project_and_notify_state_update +from .projects_utils import extract_dns_without_default_port + +log = logging.getLogger(__name__) + + +async def start_project_dynamic_services( + request: web.Request, project: Dict, user_id: PositiveInt +) -> None: + # first get the services if they already exist + log.debug( + "getting running interactive services of project %s for user %s", + f"{project['uuid']=}", + f"{user_id=}", + ) + running_services = await director_v2_api.get_dynamic_services( + request.app, user_id, project["uuid"] + ) + log.debug( + "Currently running services %s for user %s", + f"{running_services=}", + f"{user_id=}", + ) + + running_service_uuids = [x["service_uuid"] for x in running_services] + # now start them if needed + project_needed_services = { + service_uuid: service + for service_uuid, service in project["workbench"].items() + if is_node_dynamic(service["key"]) and service_uuid not in running_service_uuids + } + log.debug("Starting services: %s", f"{project_needed_services=}") + + start_service_tasks = [ + director_v2_api.start_dynamic_service( + request.app, + user_id=user_id, + project_id=project["uuid"], + service_key=service["key"], + service_version=service["version"], + service_uuid=service_uuid, + request_dns=extract_dns_without_default_port(request.url), + request_scheme=request.headers.get("X-Forwarded-Proto", request.url.scheme), + ) + for service_uuid, service in project_needed_services.items() + ] + results = await logged_gather(*start_service_tasks, reraise=True) + log.debug("Services start result %s", results) + for entry in results: + if entry: + # if the status is present in the results for the start_service + # it means that the API call failed + # also it is enforced that the status is different from 200 OK + if entry.get("status", 200) != 200: + log.error("Error 
while starting dynamic service %s", f"{entry=}") + + +async def remove_project_dynamic_services( + user_id: int, + project_uuid: str, + app: web.Application, + notify_users: bool = True, +) -> None: + """ + raises ProjectNotFoundError + raises UserNotFoundError + raises ProjectLockError: project is locked and therefore services cannot be stopped + """ + + log.debug( + "Removing project interactive services for %s and %s and %s", + f"{project_uuid=}", + f"{user_id=}", + f"{notify_users=}", + ) + + # can raise UserNotFoundError + user_name_data = await get_user_name(app, user_id) + user_role: UserRole = await get_user_role(app, user_id) + + # + # - during the closing process, which might take awhile, + # the project is locked so no one opens it at the same time + # - Users also might get notified + # - If project is already locked, just ignore + # + async with lock_project_and_notify_state_update( + app, + project_uuid, + ProjectStatus.CLOSING, + user_id, # required + user_name_data, + notify_users=notify_users, + ): + with suppress(director_v2_api.DirectorServiceError): + # FIXME: + # Here director exceptions are suppressed. 
+ # In case the service is not found to preserve old behavior + await director_v2_api.stop_dynamic_services_in_project( + app=app, + user_id=user_id, + project_id=project_uuid, + save_state=user_role > UserRole.GUEST, + ) diff --git a/services/web/server/src/simcore_service_webserver/projects/_core_states.py b/services/web/server/src/simcore_service_webserver/projects/_core_states.py new file mode 100644 index 00000000000..f89541af050 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/projects/_core_states.py @@ -0,0 +1,187 @@ +""" Core submodule: project states logic + +""" + +import json +import logging +from typing import Any, Dict, List, Optional +from uuid import UUID + +from aiohttp import web +from models_library.projects_pipeline import ComputationTask +from models_library.projects_state import ( + Owner, + ProjectLocked, + ProjectRunningState, + ProjectState, + ProjectStatus, + RunningState, +) +from servicelib.utils import logged_gather + +from .. import director_v2_api +from ..resource_manager.websocket_manager import ( + PROJECT_ID_KEY, + UserSessionID, + managed_resource, +) +from ..users_api import get_user_name +from .project_lock import UserNameDict, get_project_locked_state + +log = logging.getLogger(__name__) + + +async def user_has_another_client_open( + user_session_id_list: List[UserSessionID], app: web.Application +) -> bool: + # NOTE: this is not in _core_open_close because + # NOTE if there is an active socket in use, that means the client is active + for user_session in user_session_id_list: + with managed_resource( + user_session.user_id, user_session.client_session_id, app + ) as rt: + if await rt.get_socket_id() is not None: + return True + return False + + +async def _get_project_lock_state( + user_id: int, + project_uuid: str, + app: web.Application, +) -> ProjectLocked: + """returns the lock state of a project + 1. 
If a project is locked for any reason, first return the project as locked and STATUS defined by lock + 2. If a client_session_id is passed, then first check to see if the project is currently opened by this very user/tab combination, if yes returns the project as Locked and OPENED. + 3. If any other user than user_id is using the project (even disconnected before the TTL is finished) then the project is Locked and OPENED. + 4. If the same user is using the project with a valid socket id (meaning a tab is currently active) then the project is Locked and OPENED. + 5. If the same user is using the project with NO socket id (meaning there is no current tab active) then the project is Unlocked and OPENED. which means the user can open it again. + """ + log.debug( + "getting project [%s] lock state for user [%s]...", + f"{project_uuid=}", + f"{user_id=}", + ) + prj_locked_state: Optional[ProjectLocked] = await get_project_locked_state( + app, project_uuid + ) + if prj_locked_state: + log.debug( + "project [%s] is locked: %s", f"{project_uuid=}", f"{prj_locked_state=}" + ) + return prj_locked_state + + # let's now check if anyone has the project in use somehow + with managed_resource(user_id, None, app) as rt: + user_session_id_list: List[UserSessionID] = await rt.find_users_of_resource( + PROJECT_ID_KEY, project_uuid + ) + set_user_ids = {user_session.user_id for user_session in user_session_id_list} + + assert ( # nosec + len(set_user_ids) <= 1 + ) # nosec # NOTE: A project can only be opened by one user in one tab at the moment + + if not set_user_ids: + # no one has the project, so it is unlocked and closed. 
+ log.debug("project [%s] is not in use", f"{project_uuid=}") + return ProjectLocked(value=False, status=ProjectStatus.CLOSED) + + log.debug( + "project [%s] might be used by the following users: [%s]", + f"{project_uuid=}", + f"{set_user_ids=}", + ) + usernames: List[UserNameDict] = [ + await get_user_name(app, uid) for uid in set_user_ids + ] + # let's check if the project is opened by the same user, maybe already opened or closed in a orphaned session + if set_user_ids.issubset({user_id}): + if not await user_has_another_client_open(user_session_id_list, app): + # in this case the project is re-openable by the same user until it gets closed + log.debug( + "project [%s] is in use by the same user [%s] that is currently disconnected, so it is unlocked for this specific user and opened", + f"{project_uuid=}", + f"{set_user_ids=}", + ) + return ProjectLocked( + value=False, + owner=Owner(user_id=list(set_user_ids)[0], **usernames[0]), + status=ProjectStatus.OPENED, + ) + # the project is opened in another tab or browser, or by another user, both case resolves to the project being locked, and opened + log.debug( + "project [%s] is in use by another user [%s], so it is locked", + f"{project_uuid=}", + f"{set_user_ids=}", + ) + return ProjectLocked( + value=True, + owner=Owner(user_id=list(set_user_ids)[0], **usernames[0]), + status=ProjectStatus.OPENED, + ) + + +async def get_project_states_for_user( + user_id: int, project_uuid: str, app: web.Application +) -> ProjectState: + # for templates: the project is never locked and never opened. 
also the running state is always unknown + lock_state = ProjectLocked(value=False, status=ProjectStatus.CLOSED) + running_state = RunningState.UNKNOWN + + computation_task: Optional[ComputationTask] + + lock_state, computation_task = await logged_gather( + _get_project_lock_state(user_id, project_uuid, app), + director_v2_api.get_computation_task(app, user_id, UUID(project_uuid)), + ) + + if computation_task: + # get the running state + running_state = computation_task.state + + return ProjectState( + locked=lock_state, state=ProjectRunningState(value=running_state) + ) + + +async def add_project_states_for_user( + user_id: int, + project: Dict[str, Any], + is_template: bool, + app: web.Application, +) -> Dict[str, Any]: + log.debug( + "adding project states for %s with project %s", + f"{user_id=}", + f"{project['uuid']=}", + ) + # for templates: the project is never locked and never opened. also the running state is always unknown + lock_state = ProjectLocked(value=False, status=ProjectStatus.CLOSED) + running_state = RunningState.UNKNOWN + + if not is_template: + lock_state = await _get_project_lock_state(user_id, project["uuid"], app) + + if computation_task := await director_v2_api.get_computation_task( + app, user_id, project["uuid"] + ): + # get the running state + running_state = computation_task.state + # get the nodes individual states + for ( + node_id, + node_state, + ) in computation_task.pipeline_details.node_states.items(): + prj_node = project["workbench"].get(str(node_id)) + if prj_node is None: + continue + node_state_dict = json.loads( + node_state.json(by_alias=True, exclude_unset=True) + ) + prj_node.setdefault("state", {}).update(node_state_dict) + + project["state"] = ProjectState( + locked=lock_state, state=ProjectRunningState(value=running_state) + ).dict(by_alias=True, exclude_unset=True) + return project diff --git a/services/web/server/src/simcore_service_webserver/projects/plugin.py 
b/services/web/server/src/simcore_service_webserver/projects/plugin.py index 6fdad1b3582..f2e566dbadc 100644 --- a/services/web/server/src/simcore_service_webserver/projects/plugin.py +++ b/services/web/server/src/simcore_service_webserver/projects/plugin.py @@ -18,7 +18,12 @@ from .._constants import APP_OPENAPI_SPECS_KEY, APP_SETTINGS_KEY from .._resources import resources -from . import projects_handlers, projects_nodes_handlers, projects_tags_handlers +from . import ( + projects_handlers, + projects_handlers_crud, + projects_nodes_handlers, + projects_tags_handlers, +) from .projects_access import setup_projects_access from .projects_db import setup_projects_db @@ -77,6 +82,7 @@ def setup_projects(app: web.Application) -> bool: _create_routes( "project", specs, + projects_handlers_crud, projects_handlers, projects_nodes_handlers, projects_tags_handlers, diff --git a/services/web/server/src/simcore_service_webserver/projects/project_lock.py b/services/web/server/src/simcore_service_webserver/projects/project_lock.py index 7f8ada76c17..bbfbdcc4f7c 100644 --- a/services/web/server/src/simcore_service_webserver/projects/project_lock.py +++ b/services/web/server/src/simcore_service_webserver/projects/project_lock.py @@ -10,8 +10,8 @@ PROJECT_REDIS_LOCK_KEY: str = "project:{}" + ProjectLock = aioredlock.Lock -ProjectLockError = aioredlock.LockError async def lock_project( diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_api.py b/services/web/server/src/simcore_service_webserver/projects/projects_api.py index d5f5dc141c9..835b126b28a 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_api.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_api.py @@ -6,866 +6,38 @@ - return data and successful HTTP responses (or raise them) - upon failure raise errors that can be also HTTP reponses """ -# pylint: disable=too-many-arguments -import asyncio -import contextlib -import json -import 
logging -from collections import defaultdict -from contextlib import suppress -from pprint import pformat -from typing import Any, Dict, List, Optional, Set, Tuple -from uuid import UUID, uuid4 - -from aiohttp import web -from models_library.projects_state import ( - Owner, - ProjectLocked, - ProjectRunningState, - ProjectState, - ProjectStatus, - RunningState, -) -from pydantic.types import PositiveInt -from servicelib.aiohttp.application_keys import APP_JSONSCHEMA_SPECS_KEY -from servicelib.aiohttp.jsonschema_validation import validate_instance -from servicelib.json_serialization import json_dumps -from servicelib.observer import observe -from servicelib.utils import fire_and_forget_task, logged_gather - -from .. import director_v2_api -from ..resource_manager.websocket_manager import ( - PROJECT_ID_KEY, - UserSessionID, - managed_resource, -) -from ..socketio.events import ( - SOCKET_IO_NODE_UPDATED_EVENT, - SOCKET_IO_PROJECT_UPDATED_EVENT, - SocketMessageDict, - send_group_messages, - send_messages, +from typing import Tuple + +from ._core_delete import delete_project +from ._core_get import get_project_for_user, validate_project +from ._core_nodes import ( + get_workbench_node_ids_from_project_uuid, + is_node_id_present_in_any_project_workbench, + notify_project_node_update, + post_trigger_connected_service_retrieve, + update_project_node_outputs, + update_project_node_progress, + update_project_node_state, ) -from ..storage_api import ( - delete_data_folders_of_project, - delete_data_folders_of_project_node, +from ._core_notify import ( + notify_project_state_update, + retrieve_and_notify_project_locked_state, ) -from ..users_api import get_user_name, is_user_guest -from .project_lock import ( - ProjectLockError, - UserNameDict, - get_project_locked_state, - lock_project, +from ._core_services import remove_project_dynamic_services + +__all__: Tuple[str, ...] 
= ( + "delete_project", + "get_project_for_user", + "get_workbench_node_ids_from_project_uuid", + "is_node_id_present_in_any_project_workbench", + "notify_project_node_update", + "notify_project_state_update", + "post_trigger_connected_service_retrieve", + "remove_project_dynamic_services", + "retrieve_and_notify_project_locked_state", + "update_project_node_outputs", + "update_project_node_progress", + "update_project_node_state", + "validate_project", ) -from .projects_db import APP_PROJECT_DBAPI, ProjectDBAPI -from .projects_utils import extract_dns_without_default_port - -log = logging.getLogger(__name__) - -PROJECT_REDIS_LOCK_KEY: str = "project:{}" - - -def _is_node_dynamic(node_key: str) -> bool: - return "/dynamic/" in node_key - - -async def validate_project(app: web.Application, project: Dict): - project_schema = app[APP_JSONSCHEMA_SPECS_KEY]["projects"] - await asyncio.get_event_loop().run_in_executor( - None, validate_instance, project, project_schema - ) - - -async def get_project_for_user( - app: web.Application, - project_uuid: str, - user_id: int, - *, - include_templates: Optional[bool] = False, - include_state: Optional[bool] = False, -) -> Dict: - """Returns a VALID project accessible to user - - :raises ProjectNotFoundError: if no match found - :return: schema-compliant project data - :rtype: Dict - """ - db: ProjectDBAPI = app[APP_PROJECT_DBAPI] - assert db # nosec - - project: Dict = {} - is_template = False - if include_templates: - project = await db.get_template_project(project_uuid) - is_template = bool(project) - - if not project: - project = await db.get_user_project(user_id, project_uuid) - - # adds state if it is not a template - if include_state: - project = await add_project_states_for_user(user_id, project, is_template, app) - - # TODO: how to handle when database has an invalid project schema??? - # Notice that db model does not include a check on project schema. 
- await validate_project(app, project) - return project - - -# NOTE: Needs refactoring after access-layer in storage. DO NOT USE but keep -# here since it documents well the concept -# -# async def clone_project( -# request: web.Request, project: Dict, user_id: int, forced_copy_project_id: str = "" -# ) -> Dict: -# """Clones both document and data folders of a project -# -# - document -# - get new identifiers for project and nodes -# - data folders -# - folder name composes as project_uuid/node_uuid -# - data is deep-copied to new folder corresponding to new identifiers -# - managed by storage uservice -# """ -# cloned_project, nodes_map = clone_project_document(project, forced_copy_project_id) -# -# updated_project = await copy_data_folders_from_project( -# request.app, project, cloned_project, nodes_map, user_id -# ) -# -# return updated_project - - -async def start_project_interactive_services( - request: web.Request, project: Dict, user_id: PositiveInt -) -> None: - # first get the services if they already exist - log.debug( - "getting running interactive services of project %s for user %s", - f"{project['uuid']=}", - f"{user_id=}", - ) - running_services = await director_v2_api.get_services( - request.app, user_id, project["uuid"] - ) - log.debug( - "Currently running services %s for user %s", - f"{running_services=}", - f"{user_id=}", - ) - - running_service_uuids = [x["service_uuid"] for x in running_services] - # now start them if needed - project_needed_services = { - service_uuid: service - for service_uuid, service in project["workbench"].items() - if _is_node_dynamic(service["key"]) - and service_uuid not in running_service_uuids - } - log.debug("Starting services: %s", f"{project_needed_services=}") - - start_service_tasks = [ - director_v2_api.start_service( - request.app, - user_id=user_id, - project_id=project["uuid"], - service_key=service["key"], - service_version=service["version"], - service_uuid=service_uuid, - 
request_dns=extract_dns_without_default_port(request.url), - request_scheme=request.headers.get("X-Forwarded-Proto", request.url.scheme), - ) - for service_uuid, service in project_needed_services.items() - ] - results = await logged_gather(*start_service_tasks, reraise=True) - log.debug("Services start result %s", results) - for entry in results: - if entry: - # if the status is present in the results for the start_service - # it means that the API call failed - # also it is enforced that the status is different from 200 OK - if entry.get("status", 200) != 200: - log.error("Error while starting dynamic service %s", f"{entry=}") - - -async def delete_project(app: web.Application, project_uuid: str, user_id: int) -> None: - await _delete_project_from_db(app, project_uuid, user_id) - - async def _remove_services_and_data(): - await remove_project_interactive_services( - user_id, project_uuid, app, notify_users=False - ) - await delete_data_folders_of_project(app, project_uuid, user_id) - - fire_and_forget_task(_remove_services_and_data()) - - -@observe(event="SIGNAL_USER_DISCONNECTED") -async def user_disconnected( - user_id: int, client_session_id: str, app: web.Application -) -> None: - # check if there is a project resource - with managed_resource(user_id, client_session_id, app) as rt: - list_projects: List[str] = await rt.find(PROJECT_ID_KEY) - - await logged_gather( - *[ - retrieve_and_notify_project_locked_state( - user_id, prj, app, notify_only_prj_user=True - ) - for prj in list_projects - ] - ) - - -async def retrieve_and_notify_project_locked_state( - user_id: int, - project_uuid: str, - app: web.Application, - notify_only_prj_user: bool = False, -): - project = await get_project_for_user(app, project_uuid, user_id, include_state=True) - await notify_project_state_update( - app, project, notify_only_user=user_id if notify_only_prj_user else None - ) - - -@contextlib.asynccontextmanager -async def lock_with_notification( - app: web.Application, - 
project_uuid: str, - status: ProjectStatus, - user_id: int, - user_name: UserNameDict, - notify_users: bool = True, -): - try: - async with await lock_project( - app, - project_uuid, - status, - user_id, - user_name, - ): - log.debug( - "Project [%s] lock acquired", - f"{project_uuid=}", - ) - if notify_users: - await retrieve_and_notify_project_locked_state( - user_id, project_uuid, app - ) - yield - log.debug( - "Project [%s] lock released", - f"{project_uuid=}", - ) - except ProjectLockError: - # someone else has already the lock? - prj_states: ProjectState = await get_project_states_for_user( - user_id, project_uuid, app - ) - log.error( - "Project [%s] already locked in state '%s'. Please check with support.", - f"{project_uuid=}", - f"{prj_states.locked.status=}", - ) - raise - finally: - if notify_users: - await retrieve_and_notify_project_locked_state(user_id, project_uuid, app) - - -async def remove_project_interactive_services( - user_id: int, - project_uuid: str, - app: web.Application, - notify_users: bool = True, - user_name: Optional[UserNameDict] = None, -) -> None: - # NOTE: during the closing process, which might take awhile, - # the project is locked so no one opens it at the same time - log.debug( - "removing project interactive services for project [%s] and user [%s]", - project_uuid, - user_id, - ) - try: - async with lock_with_notification( - app, - project_uuid, - ProjectStatus.CLOSING, - user_id, - user_name or await get_user_name(app, user_id), - notify_users=notify_users, - ): - # save the state if the user is not a guest. if we do not know we save in any case. - with suppress(director_v2_api.DirectorServiceError): - # here director exceptions are suppressed. 
in case the service is not found to preserve old behavior - await director_v2_api.stop_services( - app=app, - user_id=user_id, - project_id=project_uuid, - save_state=not await is_user_guest(app, user_id) - if user_id - else True, - ) - except ProjectLockError: - pass - - -async def _delete_project_from_db( - app: web.Application, project_uuid: str, user_id: int -) -> None: - log.debug("deleting project '%s' for user '%s' in database", project_uuid, user_id) - db = app[APP_PROJECT_DBAPI] - await director_v2_api.delete_pipeline(app, user_id, UUID(project_uuid)) - await db.delete_user_project(user_id, project_uuid) - - -## PROJECT NODES ----------------------------------------------------- - - -async def add_project_node( - request: web.Request, - project_uuid: str, - user_id: int, - service_key: str, - service_version: str, - service_id: Optional[str], -) -> str: - log.debug( - "starting node %s:%s in project %s for user %s", - service_key, - service_version, - project_uuid, - user_id, - ) - node_uuid = service_id if service_id else str(uuid4()) - if _is_node_dynamic(service_key): - await director_v2_api.start_service( - request.app, - project_id=project_uuid, - user_id=user_id, - service_key=service_key, - service_version=service_version, - service_uuid=node_uuid, - request_dns=extract_dns_without_default_port(request.url), - request_scheme=request.headers.get("X-Forwarded-Proto", request.url.scheme), - ) - return node_uuid - - -async def get_project_node( - request: web.Request, project_uuid: str, user_id: int, node_id: str -): - log.debug( - "getting node %s in project %s for user %s", node_id, project_uuid, user_id - ) - - list_of_interactive_services = await director_v2_api.get_services( - request.app, project_id=project_uuid, user_id=user_id - ) - # get the project if it is running - for service in list_of_interactive_services: - if service["service_uuid"] == node_id: - return service - # the service is not running, it's a computational service maybe - # TODO: 
find out if computational service is running if not throw a 404 since it's not around - return {"service_uuid": node_id, "service_state": "idle"} - - -async def delete_project_node( - request: web.Request, project_uuid: str, user_id: int, node_uuid: str -) -> None: - log.debug( - "deleting node %s in project %s for user %s", node_uuid, project_uuid, user_id - ) - - list_of_services = await director_v2_api.get_services( - request.app, project_id=project_uuid, user_id=user_id - ) - # stop the service if it is running - for service in list_of_services: - if service["service_uuid"] == node_uuid: - log.error("deleting service=%s", service) - # no need to save the state of the node when deleting it - await director_v2_api.stop_service( - request.app, - node_uuid, - save_state=False, - ) - break - # remove its data if any - await delete_data_folders_of_project_node( - request.app, project_uuid, node_uuid, user_id - ) - - -async def update_project_node_state( - app: web.Application, user_id: int, project_id: str, node_id: str, new_state: str -) -> Dict: - log.debug( - "updating node %s current state in project %s for user %s", - node_id, - project_id, - user_id, - ) - partial_workbench_data: Dict[str, Any] = { - node_id: {"state": {"currentStatus": new_state}}, - } - if RunningState(new_state) in [ - RunningState.PUBLISHED, - RunningState.PENDING, - RunningState.STARTED, - ]: - partial_workbench_data[node_id]["progress"] = 0 - elif RunningState(new_state) in [RunningState.SUCCESS, RunningState.FAILED]: - partial_workbench_data[node_id]["progress"] = 100 - - db = app[APP_PROJECT_DBAPI] - updated_project, _ = await db.patch_user_project_workbench( - partial_workbench_data=partial_workbench_data, - user_id=user_id, - project_uuid=project_id, - ) - updated_project = await add_project_states_for_user( - user_id=user_id, project=updated_project, is_template=False, app=app - ) - return updated_project - - -async def update_project_node_progress( - app: web.Application, user_id: 
int, project_id: str, node_id: str, progress: float -) -> Optional[Dict]: - log.debug( - "updating node %s progress in project %s for user %s with %s", - node_id, - project_id, - user_id, - progress, - ) - partial_workbench_data = { - node_id: {"progress": int(100.0 * float(progress) + 0.5)}, - } - db = app[APP_PROJECT_DBAPI] - updated_project, _ = await db.patch_user_project_workbench( - partial_workbench_data=partial_workbench_data, - user_id=user_id, - project_uuid=project_id, - ) - updated_project = await add_project_states_for_user( - user_id=user_id, project=updated_project, is_template=False, app=app - ) - return updated_project - - -async def update_project_node_outputs( - app: web.Application, - user_id: int, - project_id: str, - node_id: str, - new_outputs: Optional[Dict], - new_run_hash: Optional[str], -) -> Tuple[Dict, List[str]]: - """ - Updates outputs of a given node in a project with 'data' - """ - log.debug( - "updating node %s outputs in project %s for user %s with %s: run_hash [%s]", - node_id, - project_id, - user_id, - json_dumps(new_outputs), - new_run_hash, - ) - new_outputs = new_outputs or {} - - partial_workbench_data = { - node_id: {"outputs": new_outputs, "runHash": new_run_hash}, - } - - db = app[APP_PROJECT_DBAPI] - updated_project, changed_entries = await db.patch_user_project_workbench( - partial_workbench_data=partial_workbench_data, - user_id=user_id, - project_uuid=project_id, - ) - log.debug( - "patched project %s, following entries changed: %s", - project_id, - pformat(changed_entries), - ) - updated_project = await add_project_states_for_user( - user_id=user_id, project=updated_project, is_template=False, app=app - ) - - # changed entries come in the form of {node_uuid: {outputs: {changed_key1: value1, changed_key2: value2}}} - # we do want only the key names - changed_keys = changed_entries.get(node_id, {}).get("outputs", {}).keys() - return updated_project, changed_keys - - -async def get_workbench_node_ids_from_project_uuid( 
- app: web.Application, - project_uuid: str, -) -> Set[str]: - """Returns a set with all the node_ids from a project's workbench""" - db = app[APP_PROJECT_DBAPI] - return await db.get_all_node_ids_from_workbenches(project_uuid) - - -async def is_node_id_present_in_any_project_workbench( - app: web.Application, - node_id: str, -) -> bool: - """If the node_id is presnet in one of the projects' workbenche returns True""" - db = app[APP_PROJECT_DBAPI] - return node_id in await db.get_all_node_ids_from_workbenches() - - -async def notify_project_state_update( - app: web.Application, - project: Dict, - notify_only_user: Optional[int] = None, -) -> None: - messages: List[SocketMessageDict] = [ - { - "event_type": SOCKET_IO_PROJECT_UPDATED_EVENT, - "data": { - "project_uuid": project["uuid"], - "data": project["state"], - }, - } - ] - - if notify_only_user: - await send_messages(app, user_id=str(notify_only_user), messages=messages) - else: - rooms_to_notify = [ - f"{gid}" - for gid, rights in project["accessRights"].items() - if rights["read"] - ] - for room in rooms_to_notify: - await send_group_messages(app, room, messages) - - -async def notify_project_node_update( - app: web.Application, project: Dict, node_id: str -) -> None: - rooms_to_notify = [ - f"{gid}" for gid, rights in project["accessRights"].items() if rights["read"] - ] - - messages: List[SocketMessageDict] = [ - { - "event_type": SOCKET_IO_NODE_UPDATED_EVENT, - "data": { - "project_id": project["uuid"], - "node_id": node_id, - "data": project["workbench"][node_id], - }, - } - ] - - for room in rooms_to_notify: - await send_group_messages(app, room, messages) - - -async def post_trigger_connected_service_retrieve(**kwargs) -> None: - await fire_and_forget_task(trigger_connected_service_retrieve(**kwargs)) - - -async def trigger_connected_service_retrieve( - app: web.Application, project: Dict, updated_node_uuid: str, changed_keys: List[str] -) -> None: - workbench = project["workbench"] - 
nodes_keys_to_update: Dict[str, List[str]] = defaultdict(list) - # find the nodes that need to retrieve data - for node_uuid, node in workbench.items(): - # check this node is dynamic - if not _is_node_dynamic(node["key"]): - continue - - # check whether this node has our updated node as linked inputs - node_inputs = node.get("inputs", {}) - for port_key, port_value in node_inputs.items(): - # we look for node port links, not values - if not isinstance(port_value, dict): - continue - - input_node_uuid = port_value.get("nodeUuid") - if input_node_uuid != updated_node_uuid: - continue - # so this node is linked to the updated one, now check if the port was changed? - linked_input_port = port_value.get("output") - if linked_input_port in changed_keys: - nodes_keys_to_update[node_uuid].append(port_key) - - # call /retrieve on the nodes - update_tasks = [ - director_v2_api.request_retrieve_dyn_service(app, node, keys) - for node, keys in nodes_keys_to_update.items() - ] - await logged_gather(*update_tasks) - - -# PROJECT STATE ------------------------------------------------------------------- - - -async def _user_has_another_client_open( - user_session_id_list: List[UserSessionID], app: web.Application -) -> bool: - # NOTE if there is an active socket in use, that means the client is active - for user_session in user_session_id_list: - with managed_resource( - user_session.user_id, user_session.client_session_id, app - ) as rt: - if await rt.get_socket_id() is not None: - return True - return False - - -async def _clean_user_disconnected_clients( - user_session_id_list: List[UserSessionID], app: web.Application -): - for user_session in user_session_id_list: - with managed_resource( - user_session.user_id, user_session.client_session_id, app - ) as rt: - if await rt.get_socket_id() is None: - log.debug( - "removing disconnected project of user %s/%s", - user_session.user_id, - user_session.client_session_id, - ) - await rt.remove(PROJECT_ID_KEY) - - -async def 
try_open_project_for_user( - user_id: int, project_uuid: str, client_session_id: str, app: web.Application -) -> bool: - try: - async with lock_with_notification( - app, - project_uuid, - ProjectStatus.OPENING, - user_id, - await get_user_name(app, user_id), - notify_users=False, - ): - - with managed_resource(user_id, client_session_id, app) as rt: - user_session_id_list: List[ - UserSessionID - ] = await rt.find_users_of_resource(PROJECT_ID_KEY, project_uuid) - - if not user_session_id_list: - # no one has the project so we lock it - await rt.add(PROJECT_ID_KEY, project_uuid) - return True - - set_user_ids = { - user_session.user_id for user_session in user_session_id_list - } - if set_user_ids.issubset({user_id}): - # we are the only user, remove this session from the list - if not await _user_has_another_client_open( - [ - uid - for uid in user_session_id_list - if uid != UserSessionID(user_id, client_session_id) - ], - app, - ): - # steal the project - await rt.add(PROJECT_ID_KEY, project_uuid) - await _clean_user_disconnected_clients( - user_session_id_list, app - ) - return True - return False - - except ProjectLockError: - # the project is currently locked - return False - - -async def try_close_project_for_user( - user_id: int, - project_uuid: str, - client_session_id: str, - app: web.Application, -): - with managed_resource(user_id, client_session_id, app) as rt: - user_to_session_ids: List[UserSessionID] = await rt.find_users_of_resource( - PROJECT_ID_KEY, project_uuid - ) - # first check we have it opened now - if UserSessionID(user_id, client_session_id) not in user_to_session_ids: - # nothing to do the project is already closed - log.warning( - "project [%s] is already closed for user [%s].", - project_uuid, - user_id, - ) - return - # remove the project from our list of opened ones - log.debug( - "removing project [%s] from user [%s] resources", project_uuid, user_id - ) - await rt.remove(PROJECT_ID_KEY) - # check it is not opened by someone else - 
user_to_session_ids.remove(UserSessionID(user_id, client_session_id)) - log.debug("remaining user_to_session_ids: %s", user_to_session_ids) - if not user_to_session_ids: - # NOTE: depending on the garbage collector speed, it might already be removing it - fire_and_forget_task( - remove_project_interactive_services(user_id, project_uuid, app) - ) - else: - log.warning( - "project [%s] is used by other users: [%s]. This should not be possible", - project_uuid, - {user_session.user_id for user_session in user_to_session_ids}, - ) - - -async def _get_project_lock_state( - user_id: int, - project_uuid: str, - app: web.Application, -) -> ProjectLocked: - """returns the lock state of a project - 1. If a project is locked for any reason, first return the project as locked and STATUS defined by lock - 2. If a client_session_id is passed, then first check to see if the project is currently opened by this very user/tab combination, if yes returns the project as Locked and OPENED. - 3. If any other user than user_id is using the project (even disconnected before the TTL is finished) then the project is Locked and OPENED. - 4. If the same user is using the project with a valid socket id (meaning a tab is currently active) then the project is Locked and OPENED. - 5. If the same user is using the project with NO socket id (meaning there is no current tab active) then the project is Unlocked and OPENED. which means the user can open it again. 
- """ - log.debug( - "getting project [%s] lock state for user [%s]...", - f"{project_uuid=}", - f"{user_id=}", - ) - prj_locked_state: Optional[ProjectLocked] = await get_project_locked_state( - app, project_uuid - ) - if prj_locked_state: - log.debug( - "project [%s] is locked: %s", f"{project_uuid=}", f"{prj_locked_state=}" - ) - return prj_locked_state - - # let's now check if anyone has the project in use somehow - with managed_resource(user_id, None, app) as rt: - user_session_id_list: List[UserSessionID] = await rt.find_users_of_resource( - PROJECT_ID_KEY, project_uuid - ) - set_user_ids = {user_session.user_id for user_session in user_session_id_list} - - assert ( # nosec - len(set_user_ids) <= 1 - ) # nosec # NOTE: A project can only be opened by one user in one tab at the moment - - if not set_user_ids: - # no one has the project, so it is unlocked and closed. - log.debug("project [%s] is not in use", f"{project_uuid=}") - return ProjectLocked(value=False, status=ProjectStatus.CLOSED) - - log.debug( - "project [%s] might be used by the following users: [%s]", - f"{project_uuid=}", - f"{set_user_ids=}", - ) - usernames: List[UserNameDict] = [ - await get_user_name(app, uid) for uid in set_user_ids - ] - # let's check if the project is opened by the same user, maybe already opened or closed in a orphaned session - if set_user_ids.issubset({user_id}): - if not await _user_has_another_client_open(user_session_id_list, app): - # in this case the project is re-openable by the same user until it gets closed - log.debug( - "project [%s] is in use by the same user [%s] that is currently disconnected, so it is unlocked for this specific user and opened", - f"{project_uuid=}", - f"{set_user_ids=}", - ) - return ProjectLocked( - value=False, - owner=Owner(user_id=list(set_user_ids)[0], **usernames[0]), - status=ProjectStatus.OPENED, - ) - # the project is opened in another tab or browser, or by another user, both case resolves to the project being locked, and opened 
- log.debug( - "project [%s] is in use by another user [%s], so it is locked", - f"{project_uuid=}", - f"{set_user_ids=}", - ) - return ProjectLocked( - value=True, - owner=Owner(user_id=list(set_user_ids)[0], **usernames[0]), - status=ProjectStatus.OPENED, - ) - - -async def get_project_states_for_user( - user_id: int, project_uuid: str, app: web.Application -) -> ProjectState: - # for templates: the project is never locked and never opened. also the running state is always unknown - lock_state = ProjectLocked(value=False, status=ProjectStatus.CLOSED) - running_state = RunningState.UNKNOWN - lock_state, computation_task = await logged_gather( - _get_project_lock_state(user_id, project_uuid, app), - director_v2_api.get_computation_task(app, user_id, UUID(project_uuid)), - ) - if computation_task: - # get the running state - running_state = computation_task.state - - return ProjectState( - locked=lock_state, state=ProjectRunningState(value=running_state) - ) - - -async def add_project_states_for_user( - user_id: int, - project: Dict[str, Any], - is_template: bool, - app: web.Application, -) -> Dict[str, Any]: - log.debug( - "adding project states for %s with project %s", - f"{user_id=}", - f"{project['uuid']=}", - ) - # for templates: the project is never locked and never opened. 
also the running state is always unknown - lock_state = ProjectLocked(value=False, status=ProjectStatus.CLOSED) - running_state = RunningState.UNKNOWN - - if not is_template: - lock_state = await _get_project_lock_state(user_id, project["uuid"], app) - - if computation_task := await director_v2_api.get_computation_task( - app, user_id, project["uuid"] - ): - # get the running state - running_state = computation_task.state - # get the nodes individual states - for ( - node_id, - node_state, - ) in computation_task.pipeline_details.node_states.items(): - prj_node = project["workbench"].get(str(node_id)) - if prj_node is None: - continue - node_state_dict = json.loads( - node_state.json(by_alias=True, exclude_unset=True) - ) - prj_node.setdefault("state", {}).update(node_state_dict) - - project["state"] = ProjectState( - locked=lock_state, state=ProjectRunningState(value=running_state) - ).dict(by_alias=True, exclude_unset=True) - return project diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_db.py b/services/web/server/src/simcore_service_webserver/projects/projects_db.py index 2b02f2a02ad..feb06a790aa 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_db.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_db.py @@ -53,6 +53,8 @@ DB_EXCLUSIVE_COLUMNS = ["type", "id", "published", "hidden"] SCHEMA_NON_NULL_KEYS = ["thumbnail"] +# TODO: REFACTOR!!! access rights need to be separated. See how was done in storage! + class ProjectAccessRights(Enum): OWNER = {"read": True, "write": True, "delete": True} @@ -762,6 +764,23 @@ async def delete_user_project(self, user_id: int, project_uuid: str): projects.delete().where(projects.c.uuid == project_uuid) ) + async def raise_if_cannot_delete(self, user_id: int, project_uuid: str): + """ + raises ProjectNotFoundError + raises ProjectInvalidRightsError + """ + # TODO: REFACTOR!!! access rights need to be separated. See how was done in storage! 
+ async with self.engine.acquire() as conn: + async with conn.begin() as _transaction: + project = await self._get_project( + conn, user_id, project_uuid, include_templates=True, for_update=True + ) + # if we have delete access we delete the project + user_groups: List[RowProxy] = await self.__load_user_groups( + conn, user_id + ) + _check_project_permissions(project, user_id, user_groups, "delete") + async def make_unique_project_uuid(self) -> str: """Generates a project identifier still not used in database @@ -862,6 +881,15 @@ async def update_project_without_checking_permissions( ) return result.rowcount == 1 + async def set_hidden_flag(self, project_uuid: str, enabled: bool): + async with self.engine.acquire() as conn: + stmt = ( + projects.update() + .values(hidden=enabled) + .where(projects.c.uuid == project_uuid) + ) + await conn.execute(stmt) + def setup_projects_db(app: web.Application): db = ProjectDBAPI(app) diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_exceptions.py b/services/web/server/src/simcore_service_webserver/projects/projects_exceptions.py index 77128f05c6c..33d28be8be2 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_exceptions.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_exceptions.py @@ -1,4 +1,6 @@ """Defines the different exceptions that may arise in the projects subpackage""" +import aioredlock +from jsonschema import ValidationError as JsonSchemaDataValidationError class ProjectsException(Exception): @@ -35,6 +37,16 @@ def __init__(self, project_uuid): self.project_uuid = project_uuid +class ProjectDeleteError(ProjectsException): + def __init__(self, project_uuid, reason="Unknown"): + super().__init__(f"Failed to complete deletion of {project_uuid=}: {reason}") + self.project_uuid = project_uuid + + +# TODO: use double inheritance and implement classmethod to build from base instances or compose! 
+ProjectValidationError = JsonSchemaDataValidationError + + class NodeNotFoundError(ProjectsException): """Node was not found in project""" @@ -42,3 +54,6 @@ def __init__(self, project_uuid: str, node_uuid: str): super().__init__(f"Node {node_uuid} not found in project {project_uuid}") self.node_uuid = node_uuid self.project_uuid = project_uuid + + +ProjectLockError = aioredlock.LockError diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py b/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py index b2f83360126..2dcbec63104 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py @@ -1,313 +1,35 @@ -""" Handlers for CRUD operations on /projects/ +""" Handlers for on /projects collections + +Imports in standard methods (projects_handlers_crud) and +extends with + - custom methods (https://google.aip.dev/121) + - singleton resources (https://google.aip.dev/156) + - ... """ -import asyncio import json import logging -from typing import Any, Coroutine, Dict, List, Optional, Set from aiohttp import web -from jsonschema import ValidationError -from models_library.projects import ProjectID -from models_library.projects_state import ProjectState, ProjectStatus -from models_library.rest_pagination import Page -from models_library.rest_pagination_utils import paginate_data +from models_library.projects_state import ProjectState from servicelib.aiohttp.web_exceptions_extension import HTTPLocked from servicelib.json_serialization import json_dumps -from servicelib.utils import logged_gather -from simcore_postgres_database.webserver_models import ProjectType as ProjectTypeDB -from .. 
import catalog, director_v2_api -from .._constants import RQ_PRODUCT_KEY from .._meta import api_version_prefix as VTAG from ..director_v2_core import DirectorServiceError from ..login.decorators import RQT_USERID_KEY, login_required from ..resource_manager.websocket_manager import PROJECT_ID_KEY, managed_resource -from ..rest_constants import RESPONSE_MODEL_POLICY -from ..security_api import check_permission from ..security_decorators import permission_required -from ..storage_api import copy_data_folders_from_project -from ..users_api import get_user_name -from . import projects_api -from .project_models import ProjectDict, ProjectTypeAPI -from .projects_db import APP_PROJECT_DBAPI, ProjectDBAPI -from .projects_exceptions import ProjectInvalidRightsError, ProjectNotFoundError -from .projects_utils import ( - any_node_inputs_changed, - clone_project_document, - get_project_unavailable_services, - project_uses_available_services, -) - -# When the user requests a project with a repo, the working copy might differ from -# the repo project. A middleware in the meta module (if active) will resolve -# the working copy and redirect to the appropriate project entrypoint. Nonetheless, the -# response needs to refer to the uuid of the request and this is passed through this request key -RQ_REQUESTED_REPO_PROJECT_UUID_KEY = f"{__name__}.RQT_REQUESTED_REPO_PROJECT_UUID_KEY" - -OVERRIDABLE_DOCUMENT_KEYS = [ - "name", - "description", - "thumbnail", - "prjOwner", - "accessRights", -] -# TODO: validate these against api/specs/webserver/v0/components/schemas/project-v0.0.1.json - +from . 
import _core_get, _core_notify, _core_open_close, _core_services, _core_states +from .projects_exceptions import ProjectNotFoundError +from .projects_handlers_crud import routes log = logging.getLogger(__name__) -routes = web.RouteTableDef() - - -@routes.post(f"/{VTAG}/projects") -@login_required -@permission_required("project.create") -@permission_required("services.pipeline.*") # due to update_pipeline_db -async def create_projects( - request: web.Request, -): # pylint: disable=too-many-branches, too-many-statements - user_id: int = request[RQT_USERID_KEY] - db: ProjectDBAPI = request.config_dict[APP_PROJECT_DBAPI] - template_uuid = request.query.get("from_template") - as_template = request.query.get("as_template") - copy_data: bool = bool( - request.query.get("copy_data", "true") in [1, "true", "True"] - ) - hidden: bool = bool(request.query.get("hidden", False)) - - new_project = {} - new_project_was_hidden_before_data_was_copied = hidden - try: - clone_data_coro: Optional[Coroutine] = None - source_project: Optional[Dict[str, Any]] = None - if as_template: # create template from - await check_permission(request, "project.template.create") - source_project = await projects_api.get_project_for_user( - request.app, - project_uuid=as_template, - user_id=user_id, - include_templates=False, - ) - elif template_uuid: # create from template - source_project = await db.get_template_project(template_uuid) - if not source_project: - raise web.HTTPNotFound( - reason="Invalid template uuid {}".format(template_uuid) - ) - if source_project: - # clone template as user project - new_project, nodes_map = clone_project_document( - source_project, - forced_copy_project_id=None, - clean_output_data=(copy_data == False), - ) - if template_uuid: - # remove template access rights - new_project["accessRights"] = {} - # the project is to be hidden until the data is copied - hidden = copy_data - clone_data_coro = ( - copy_data_folders_from_project( - request.app, source_project, 
new_project, nodes_map, user_id - ) - if copy_data - else None - ) - # FIXME: parameterized inputs should get defaults provided by service - - # overrides with body - if request.has_body: - predefined = await request.json() - if new_project: - for key in OVERRIDABLE_DOCUMENT_KEYS: - non_null_value = predefined.get(key) - if non_null_value: - new_project[key] = non_null_value - else: - # TODO: take skeleton and fill instead - new_project = predefined - - # re-validate data - await projects_api.validate_project(request.app, new_project) - - # update metadata (uuid, timestamps, ownership) and save - new_project = await db.add_project( - new_project, - user_id, - force_as_template=as_template is not None, - hidden=hidden, - ) - - # copies the project's DATA IF cloned - if clone_data_coro: - assert source_project # nosec - if as_template: - # we need to lock the original study while copying the data - async with projects_api.lock_with_notification( - request.app, - source_project["uuid"], - ProjectStatus.CLONING, - user_id, - await get_user_name(request.app, user_id), - ): - - await clone_data_coro - else: - await clone_data_coro - # unhide the project if needed since it is now complete - if not new_project_was_hidden_before_data_was_copied: - await db.update_project_without_checking_permissions( - new_project, new_project["uuid"], hidden=False - ) - - # This is a new project and every new graph needs to be reflected in the pipeline tables - await director_v2_api.create_or_update_pipeline( - request.app, user_id, new_project["uuid"] - ) - - # Appends state - new_project = await projects_api.add_project_states_for_user( - user_id=user_id, - project=new_project, - is_template=as_template is not None, - app=request.app, - ) - - except ValidationError as exc: - raise web.HTTPBadRequest(reason="Invalid project data") from exc - except ProjectNotFoundError as exc: - raise web.HTTPNotFound(reason="Project not found") from exc - except ProjectInvalidRightsError as exc: - raise 
web.HTTPUnauthorized from exc - except asyncio.CancelledError: - log.warning( - "cancelled creation of project for user '%s', cleaning up", f"{user_id=}" - ) - await projects_api.delete_project(request.app, new_project["uuid"], user_id) - raise - else: - log.debug("project created successfuly") - raise web.HTTPCreated( - text=json.dumps(new_project), content_type="application/json" - ) - - -@routes.get(f"/{VTAG}/projects") -@login_required -@permission_required("project.read") -async def list_projects(request: web.Request): - # TODO: implement all query parameters as - # in https://www.ibm.com/support/knowledgecenter/en/SSCRJU_3.2.0/com.ibm.swg.im.infosphere.streams.rest.api.doc/doc/restapis-queryparms-list.html - from servicelib.aiohttp.rest_utils import extract_and_validate - - user_id, product_name = request[RQT_USERID_KEY], request[RQ_PRODUCT_KEY] - _, query, _ = await extract_and_validate(request) - - project_type = ProjectTypeAPI(query["type"]) - offset = query["offset"] - limit = query["limit"] - show_hidden = query["show_hidden"] - - db: ProjectDBAPI = request.config_dict[APP_PROJECT_DBAPI] - - async def set_all_project_states( - projects: List[Dict[str, Any]], project_types: List[bool] - ): - await logged_gather( - *[ - projects_api.add_project_states_for_user( - user_id=user_id, - project=prj, - is_template=prj_type == ProjectTypeDB.TEMPLATE, - app=request.app, - ) - for prj, prj_type in zip(projects, project_types) - ], - reraise=True, - max_concurrency=100, - ) - - user_available_services: List[ - Dict - ] = await catalog.get_services_for_user_in_product( - request.app, user_id, product_name, only_key_versions=True - ) - - projects, project_types, total_number_projects = await db.load_projects( - user_id=user_id, - filter_by_project_type=ProjectTypeAPI.to_project_type_db(project_type), - filter_by_services=user_available_services, - offset=offset, - limit=limit, - include_hidden=show_hidden, - ) - await set_all_project_states(projects, project_types) - 
page = Page[ProjectDict].parse_obj( - paginate_data( - chunk=projects, - request_url=request.url, - total=total_number_projects, - limit=limit, - offset=offset, - ) - ) - return page.dict(**RESPONSE_MODEL_POLICY) - - -@routes.get(f"/{VTAG}/projects/{{project_uuid}}") -@login_required -@permission_required("project.read") -async def get_project(request: web.Request): - """Returns all projects accessible to a user (not necesarly owned)""" - # TODO: temporary hidden until get_handlers_from_namespace refactor to seek marked functions instead! - user_id, product_name = request[RQT_USERID_KEY], request[RQ_PRODUCT_KEY] - try: - project_uuid = request.match_info["project_id"] - except KeyError as err: - raise web.HTTPBadRequest(reason=f"Invalid request parameter {err}") from err - - user_available_services: List[ - Dict - ] = await catalog.get_services_for_user_in_product( - request.app, user_id, product_name, only_key_versions=True - ) - - try: - project = await projects_api.get_project_for_user( - request.app, - project_uuid=project_uuid, - user_id=user_id, - include_templates=True, - include_state=True, - ) - if not await project_uses_available_services(project, user_available_services): - unavilable_services = get_project_unavailable_services( - project, user_available_services - ) - formatted_services = ", ".join( - f"{service}:{version}" for service, version in unavilable_services - ) - # TODO: lack of permissions should be notified with https://httpstatuses.com/403 web.HTTPForbidden - raise web.HTTPNotFound( - reason=( - f"Project '{project_uuid}' uses unavailable services. 
Please ask " - f"for permission for the following services {formatted_services}" - ) - ) - - if new_uuid := request.get(RQ_REQUESTED_REPO_PROJECT_UUID_KEY): - project["uuid"] = new_uuid - - return {"data": project} - - except ProjectInvalidRightsError as exc: - raise web.HTTPForbidden( - reason=f"You do not have sufficient rights to read project {project_uuid}" - ) from exc - except ProjectNotFoundError as exc: - raise web.HTTPNotFound(reason=f"Project {project_uuid} not found") from exc +# +# Singleton resources https://google.aip.dev/156 +# @routes.get(f"/{VTAG}/projects/active") @@ -328,8 +50,7 @@ async def get_active_project(request: web.Request) -> web.Response: # get user's projects user_active_projects = await rt.find(PROJECT_ID_KEY) if user_active_projects: - - project = await projects_api.get_project_for_user( + project = await _core_get.get_project_for_user( request.app, project_uuid=user_active_projects[0], user_id=user_id, @@ -340,177 +61,12 @@ async def get_active_project(request: web.Request) -> web.Response: return web.json_response({"data": project}, dumps=json_dumps) except ProjectNotFoundError as exc: - raise web.HTTPNotFound(reason="Project not found") from exc - - -@routes.put(f"/{VTAG}/projects/{{project_uuid}}") -@login_required -@permission_required("project.update") -@permission_required("services.pipeline.*") # due to update_pipeline_db -async def replace_project(request: web.Request): - """Implements PUT /projects - - In a PUT request, the enclosed entity is considered to be a modified version of - the resource stored on the origin server, and the client is requesting that the - stored version be replaced. - - With PATCH, however, the enclosed entity contains a set of instructions describing how a - resource currently residing on the origin server should be modified to produce a new version. 
- - Also, another difference is that when you want to update a resource with PUT request, you have to send - the full payload as the request whereas with PATCH, you only send the parameters which you want to update. - - :raises web.HTTPNotFound: cannot find project id in repository - """ - user_id: int = request[RQT_USERID_KEY] - try: - project_uuid = ProjectID(request.match_info["project_id"]) - new_project = await request.json() - - # Prune state field (just in case) - new_project.pop("state", None) - - except AttributeError as err: - # NOTE: if new_project is not a dict, .pop will raise this error - raise web.HTTPBadRequest( - reason="Invalid request payload, expected a project model" - ) from err - except KeyError as err: - raise web.HTTPBadRequest(reason=f"Invalid request parameter {err}") from err - except json.JSONDecodeError as exc: - raise web.HTTPBadRequest(reason="Invalid request body") from exc - - db: ProjectDBAPI = request.config_dict[APP_PROJECT_DBAPI] - await check_permission( - request, - "project.update | project.workbench.node.inputs.update", - context={ - "dbapi": db, - "project_id": f"{project_uuid}", - "user_id": user_id, - "new_data": new_project, - }, - ) - - try: - await projects_api.validate_project(request.app, new_project) - - current_project = await projects_api.get_project_for_user( - request.app, - project_uuid=f"{project_uuid}", - user_id=user_id, - include_templates=True, - include_state=True, - ) - - if current_project["accessRights"] != new_project["accessRights"]: - await check_permission(request, "project.access_rights.update") - - if await director_v2_api.is_pipeline_running( - request.app, user_id, project_uuid - ): - - if any_node_inputs_changed(new_project, current_project): - # NOTE: This is a conservative measure that we take - # until nodeports logic is re-designed to tackle with this - # particular state. 
- # - # This measure avoid having a state with different node *links* in the - # comp-tasks table and the project's workbench column. - # The limitation is that nodeports only "sees" those in the comptask - # and this table does not add the new ones since it remains "blocked" - # for modification from that project while the pipeline runs. Therefore - # any extra link created while the pipeline is running can not - # be managed by nodeports because it basically "cannot see it" - # - # Responds https://httpstatuses.com/409: - # The request could not be completed due to a conflict with the current - # state of the target resource (i.e. pipeline is running). This code is used in - # situations where the user might be able to resolve the conflict - # and resubmit the request (front-end will show a pop-up with message below) - # - raise web.HTTPConflict( - reason=f"Project {project_uuid} cannot be modified while pipeline is still running." - ) - - new_project = await db.replace_user_project( - new_project, user_id, f"{project_uuid}", include_templates=True - ) - await director_v2_api.create_or_update_pipeline( - request.app, user_id, project_uuid - ) - # Appends state - new_project = await projects_api.add_project_states_for_user( - user_id=user_id, - project=new_project, - is_template=False, - app=request.app, - ) - - except ValidationError as exc: - raise web.HTTPBadRequest( - reason=f"Invalid project update: {exc.message}" - ) from exc - - except ProjectInvalidRightsError as exc: - raise web.HTTPForbidden( - reason="You do not have sufficient rights to save the project" - ) from exc - - except ProjectNotFoundError as exc: - raise web.HTTPNotFound from exc - - return {"data": new_project} - - -@routes.delete(f"/{VTAG}/projects/{{project_uuid}}") -@login_required -@permission_required("project.delete") -async def delete_project(request: web.Request): - # first check if the project exists - user_id: int = request[RQT_USERID_KEY] - try: - project_uuid = 
request.match_info["project_id"] - except KeyError as err: - raise web.HTTPBadRequest(reason=f"Invalid request parameter {err}") from err - - try: - await projects_api.get_project_for_user( - request.app, - project_uuid=project_uuid, - user_id=user_id, - include_templates=True, - ) - project_users: Set[int] = set() - with managed_resource(user_id, None, request.app) as rt: - project_users = { - user_session.user_id - for user_session in await rt.find_users_of_resource( - PROJECT_ID_KEY, project_uuid - ) - } - # that project is still in use - if user_id in project_users: - raise web.HTTPForbidden( - reason="Project is still open in another tab/browser. It cannot be deleted until it is closed." - ) - if project_users: - other_user_names = { - await get_user_name(request.app, x) for x in project_users - } - raise web.HTTPForbidden( - reason=f"Project is open by {other_user_names}. It cannot be deleted until the project is closed." - ) + raise web.HTTPNotFound(reason="No active project found") from exc - await projects_api.delete_project(request.app, project_uuid, user_id) - except ProjectInvalidRightsError as err: - raise web.HTTPForbidden( - reason="You do not have sufficient rights to delete this project" - ) from err - except ProjectNotFoundError as err: - raise web.HTTPNotFound(reason=f"Project {project_uuid} not found") from err - raise web.HTTPNoContent(content_type="application/json") +# +# Custom methods https://google.aip.dev/136 +# @routes.post(f"/{VTAG}/projects/{{project_uuid}}:open") @@ -527,7 +83,7 @@ async def open_project(request: web.Request) -> web.Response: raise web.HTTPBadRequest(reason="Invalid request body") from exc try: - project = await projects_api.get_project_for_user( + project = await _core_get.get_project_for_user( request.app, project_uuid=project_uuid, user_id=user_id, @@ -535,7 +91,7 @@ async def open_project(request: web.Request) -> web.Response: include_state=True, ) - if not await projects_api.try_open_project_for_user( + if not 
await _core_open_close.try_open_project_for_user( user_id, project_uuid=project_uuid, client_session_id=client_session_id, @@ -544,17 +100,17 @@ async def open_project(request: web.Request) -> web.Response: raise HTTPLocked(reason="Project is locked, try later") # user id opened project uuid - await projects_api.start_project_interactive_services(request, project, user_id) + await _core_services.start_project_dynamic_services(request, project, user_id) # notify users that project is now opened - project = await projects_api.add_project_states_for_user( + project = await _core_states.add_project_states_for_user( user_id=user_id, project=project, is_template=False, app=request.app, ) - await projects_api.notify_project_state_update(request.app, project) + await _core_notify.notify_project_state_update(request.app, project) return web.json_response({"data": project}, dumps=json_dumps) @@ -563,7 +119,7 @@ async def open_project(request: web.Request) -> web.Response: except DirectorServiceError as exc: # there was an issue while accessing the director-v2/director-v0 # ensure the project is closed again - await projects_api.try_close_project_for_user( + await _core_open_close.try_close_project_for_user( user_id=user_id, project_uuid=project_uuid, client_session_id=client_session_id, @@ -590,14 +146,14 @@ async def close_project(request: web.Request) -> web.Response: try: # ensure the project exists - await projects_api.get_project_for_user( + await _core_get.get_project_for_user( request.app, project_uuid=project_uuid, user_id=user_id, include_templates=False, include_state=False, ) - await projects_api.try_close_project_for_user( + await _core_open_close.try_close_project_for_user( user_id, project_uuid, client_session_id, request.app ) raise web.HTTPNoContent(content_type="application/json") @@ -617,7 +173,7 @@ async def state_project(request: web.Request) -> web.Response: project_uuid = path["project_id"] # check that project exists and queries state - 
validated_project = await projects_api.get_project_for_user( +    validated_project = await _core_get.get_project_for_user( request.app, project_uuid=project_uuid, user_id=user_id, diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_handlers_crud.py b/services/web/server/src/simcore_service_webserver/projects/projects_handlers_crud.py new file mode 100644 index 00000000000..68de57e0bc4 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/projects/projects_handlers_crud.py @@ -0,0 +1,515 @@ +""" Handlers for STANDARD methods on /projects collections + + +Standard methods are +- Get https://google.aip.dev/131 +- List https://google.aip.dev/132 +- Create https://google.aip.dev/133 +- Update https://google.aip.dev/134 +- Delete https://google.aip.dev/135 + +and the acronym CRUD stands for Create+Read(Get&List)+Update+Delete + +""" + +import asyncio +import json +import logging +from typing import Any, Coroutine, Dict, List, Optional, Set + +from aiohttp import web +from jsonschema import ValidationError +from models_library.projects import ProjectID +from models_library.projects_state import ProjectStatus +from models_library.rest_pagination import Page +from models_library.rest_pagination_utils import paginate_data +from servicelib.utils import logged_gather +from simcore_postgres_database.webserver_models import ProjectType as ProjectTypeDB + +from .. import catalog, director_v2_api +from .._constants import RQ_PRODUCT_KEY +from .._meta import api_version_prefix as VTAG +from ..login.decorators import RQT_USERID_KEY, login_required +from ..resource_manager.websocket_manager import PROJECT_ID_KEY, managed_resource +from ..rest_constants import RESPONSE_MODEL_POLICY +from ..security_api import check_permission +from ..security_decorators import permission_required +from ..storage_api import copy_data_folders_from_project +from ..users_api import get_user_name +from . 
import _core_get, _core_notify, _core_states +from ._core_delete import create_delete_project_task +from .project_models import ProjectDict, ProjectTypeAPI +from .projects_db import APP_PROJECT_DBAPI, ProjectDBAPI +from .projects_exceptions import ProjectInvalidRightsError, ProjectNotFoundError +from .projects_utils import ( + any_node_inputs_changed, + clone_project_document, + get_project_unavailable_services, + project_uses_available_services, +) + +# When the user requests a project with a repo, the working copy might differ from +# the repo project. A middleware in the meta module (if active) will resolve +# the working copy and redirect to the appropriate project entrypoint. Nonetheless, the +# response needs to refer to the uuid of the request and this is passed through this request key +RQ_REQUESTED_REPO_PROJECT_UUID_KEY = f"{__name__}.RQT_REQUESTED_REPO_PROJECT_UUID_KEY" + +OVERRIDABLE_DOCUMENT_KEYS = [ + "name", + "description", + "thumbnail", + "prjOwner", + "accessRights", +] +# TODO: validate these against api/specs/webserver/v0/components/schemas/project-v0.0.1.json + + +log = logging.getLogger(__name__) + +routes = web.RouteTableDef() + + +@routes.post(f"/{VTAG}/projects") +@login_required +@permission_required("project.create") +@permission_required("services.pipeline.*") # due to update_pipeline_db +async def create_projects( + request: web.Request, +): # pylint: disable=too-many-branches, too-many-statements + user_id: int = request[RQT_USERID_KEY] + db: ProjectDBAPI = request.config_dict[APP_PROJECT_DBAPI] + template_uuid = request.query.get("from_template") + as_template = request.query.get("as_template") + copy_data: bool = bool( + request.query.get("copy_data", "true") in [1, "true", "True"] + ) + hidden: bool = bool(request.query.get("hidden", False)) + + new_project = {} + new_project_was_hidden_before_data_was_copied = hidden + try: + clone_data_coro: Optional[Coroutine] = None + source_project: Optional[Dict[str, Any]] = None + if 
as_template: # create template from + await check_permission(request, "project.template.create") + source_project = await _core_get.get_project_for_user( + request.app, + project_uuid=as_template, + user_id=user_id, + include_templates=False, + ) + elif template_uuid: # create from template + source_project = await db.get_template_project(template_uuid) + if not source_project: + raise web.HTTPNotFound( + reason="Invalid template uuid {}".format(template_uuid) + ) + if source_project: + # clone template as user project + new_project, nodes_map = clone_project_document( + source_project, + forced_copy_project_id=None, + clean_output_data=(copy_data == False), + ) + if template_uuid: + # remove template access rights + new_project["accessRights"] = {} + # the project is to be hidden until the data is copied + hidden = copy_data + clone_data_coro = ( + copy_data_folders_from_project( + request.app, source_project, new_project, nodes_map, user_id + ) + if copy_data + else None + ) + # FIXME: parameterized inputs should get defaults provided by service + + # overrides with body + if request.can_read_body: + predefined = await request.json() + if new_project: + for key in OVERRIDABLE_DOCUMENT_KEYS: + non_null_value = predefined.get(key) + if non_null_value: + new_project[key] = non_null_value + else: + # TODO: take skeleton and fill instead + new_project = predefined + + # re-validate data + await _core_get.validate_project(request.app, new_project) + + # update metadata (uuid, timestamps, ownership) and save + new_project = await db.add_project( + new_project, + user_id, + force_as_template=as_template is not None, + hidden=hidden, + ) + + # copies the project's DATA IF cloned + if clone_data_coro: + assert source_project # nosec + if as_template: + # we need to lock the original study while copying the data + async with _core_notify.lock_project_and_notify_state_update( + request.app, + source_project["uuid"], + ProjectStatus.CLONING, + user_id, + await 
get_user_name(request.app, user_id), + ): + + await clone_data_coro + else: + await clone_data_coro + # unhide the project if needed since it is now complete + if not new_project_was_hidden_before_data_was_copied: + await db.update_project_without_checking_permissions( + new_project, new_project["uuid"], hidden=False + ) + + # This is a new project and every new graph needs to be reflected in the pipeline tables + await director_v2_api.create_or_update_pipeline( + request.app, user_id, new_project["uuid"] + ) + + # Appends state + new_project = await _core_states.add_project_states_for_user( + user_id=user_id, + project=new_project, + is_template=as_template is not None, + app=request.app, + ) + + except ValidationError as exc: + raise web.HTTPBadRequest(reason="Invalid project data") from exc + except ProjectNotFoundError as exc: + raise web.HTTPNotFound(reason="Project not found") from exc + except ProjectInvalidRightsError as exc: + raise web.HTTPUnauthorized from exc + except (asyncio.CancelledError, asyncio.TimeoutError): + log.warning( + "cancelled creation of project for user '%s', cleaning up", f"{user_id=}" + ) + # TODO: this is a temp solution that hides this project from the listing until + # the delete_project_task completes + # TODO: see https://github.com/ITISFoundation/osparc-simcore/pull/2522 + await db.set_hidden_flag(new_project["uuid"], enabled=True) + # fire+forget: this operation can be heavy, specially with data deletion + create_delete_project_task( + request.app, new_project["uuid"], user_id, logger=log + ) + raise + else: + log.debug("project created successfuly") + raise web.HTTPCreated( + text=json.dumps(new_project), content_type="application/json" + ) + + +@routes.get(f"/{VTAG}/projects") +@login_required +@permission_required("project.read") +async def list_projects(request: web.Request): + # TODO: implement all query parameters as + # in 
https://www.ibm.com/support/knowledgecenter/en/SSCRJU_3.2.0/com.ibm.swg.im.infosphere.streams.rest.api.doc/doc/restapis-queryparms-list.html + from servicelib.aiohttp.rest_utils import extract_and_validate + + user_id, product_name = request[RQT_USERID_KEY], request[RQ_PRODUCT_KEY] + _, query, _ = await extract_and_validate(request) + + project_type = ProjectTypeAPI(query["type"]) + offset = query["offset"] + limit = query["limit"] + show_hidden = query["show_hidden"] + + db: ProjectDBAPI = request.config_dict[APP_PROJECT_DBAPI] + + async def set_all_project_states( + projects: List[Dict[str, Any]], project_types: List[bool] + ): + await logged_gather( + *[ + _core_states.add_project_states_for_user( + user_id=user_id, + project=prj, + is_template=prj_type == ProjectTypeDB.TEMPLATE, + app=request.app, + ) + for prj, prj_type in zip(projects, project_types) + ], + reraise=True, + max_concurrency=100, + ) + + user_available_services: List[ + Dict + ] = await catalog.get_services_for_user_in_product( + request.app, user_id, product_name, only_key_versions=True + ) + + projects, project_types, total_number_projects = await db.load_projects( + user_id=user_id, + filter_by_project_type=ProjectTypeAPI.to_project_type_db(project_type), + filter_by_services=user_available_services, + offset=offset, + limit=limit, + include_hidden=show_hidden, + ) + await set_all_project_states(projects, project_types) + page = Page[ProjectDict].parse_obj( + paginate_data( + chunk=projects, + request_url=request.url, + total=total_number_projects, + limit=limit, + offset=offset, + ) + ) + return page.dict(**RESPONSE_MODEL_POLICY) + + +@routes.get(f"/{VTAG}/projects/{{project_uuid}}") +@login_required +@permission_required("project.read") +async def get_project(request: web.Request): + """Returns all projects accessible to a user (not necesarly owned)""" + # TODO: temporary hidden until get_handlers_from_namespace refactor to seek marked functions instead! 
+ user_id, product_name = request[RQT_USERID_KEY], request[RQ_PRODUCT_KEY] + try: + project_uuid = request.match_info["project_id"] + except KeyError as err: + raise web.HTTPBadRequest(reason=f"Invalid request parameter {err}") from err + + user_available_services: List[ + Dict + ] = await catalog.get_services_for_user_in_product( + request.app, user_id, product_name, only_key_versions=True + ) + + try: + project = await _core_get.get_project_for_user( + request.app, + project_uuid=project_uuid, + user_id=user_id, + include_templates=True, + include_state=True, + ) + if not await project_uses_available_services(project, user_available_services): + unavilable_services = get_project_unavailable_services( + project, user_available_services + ) + formatted_services = ", ".join( + f"{service}:{version}" for service, version in unavilable_services + ) + # TODO: lack of permissions should be notified with https://httpstatuses.com/403 web.HTTPForbidden + raise web.HTTPNotFound( + reason=( + f"Project '{project_uuid}' uses unavailable services. 
Please ask " + f"for permission for the following services {formatted_services}" + ) + ) + + if new_uuid := request.get(RQ_REQUESTED_REPO_PROJECT_UUID_KEY): + project["uuid"] = new_uuid + + return {"data": project} + + except ProjectInvalidRightsError as exc: + raise web.HTTPForbidden( + reason=f"You do not have sufficient rights to read project {project_uuid}" + ) from exc + except ProjectNotFoundError as exc: + raise web.HTTPNotFound(reason=f"Project {project_uuid} not found") from exc + + +@routes.put(f"/{VTAG}/projects/{{project_uuid}}") +@login_required +@permission_required("project.update") +@permission_required("services.pipeline.*") # due to update_pipeline_db +async def replace_project(request: web.Request): + """Implements PUT /projects + + In a PUT request, the enclosed entity is considered to be a modified version of + the resource stored on the origin server, and the client is requesting that the + stored version be replaced. + + With PATCH, however, the enclosed entity contains a set of instructions describing how a + resource currently residing on the origin server should be modified to produce a new version. + + Also, another difference is that when you want to update a resource with PUT request, you have to send + the full payload as the request whereas with PATCH, you only send the parameters which you want to update. 
+ + :raises web.HTTPNotFound: cannot find project id in repository + """ + user_id: int = request[RQT_USERID_KEY] + try: + project_uuid = ProjectID(request.match_info["project_id"]) + new_project = await request.json() + + # Prune state field (just in case) + new_project.pop("state", None) + + except AttributeError as err: + # NOTE: if new_project is not a dict, .pop will raise this error + raise web.HTTPBadRequest( + reason="Invalid request payload, expected a project model" + ) from err + except KeyError as err: + raise web.HTTPBadRequest(reason=f"Invalid request parameter {err}") from err + except json.JSONDecodeError as exc: + raise web.HTTPBadRequest(reason="Invalid request body") from exc + + db: ProjectDBAPI = request.config_dict[APP_PROJECT_DBAPI] + await check_permission( + request, + "project.update | project.workbench.node.inputs.update", + context={ + "dbapi": db, + "project_id": f"{project_uuid}", + "user_id": user_id, + "new_data": new_project, + }, + ) + + try: + await _core_get.validate_project(request.app, new_project) + + current_project = await _core_get.get_project_for_user( + request.app, + project_uuid=f"{project_uuid}", + user_id=user_id, + include_templates=True, + include_state=True, + ) + + if current_project["accessRights"] != new_project["accessRights"]: + await check_permission(request, "project.access_rights.update") + + if await director_v2_api.is_pipeline_running( + request.app, user_id, project_uuid + ): + + if any_node_inputs_changed(new_project, current_project): + # NOTE: This is a conservative measure that we take + # until nodeports logic is re-designed to tackle with this + # particular state. + # + # This measure avoid having a state with different node *links* in the + # comp-tasks table and the project's workbench column. 
+ # The limitation is that nodeports only "sees" those in the comptask + # and this table does not add the new ones since it remains "blocked" + # for modification from that project while the pipeline runs. Therefore + # any extra link created while the pipeline is running can not + # be managed by nodeports because it basically "cannot see it" + # + # Responds https://httpstatuses.com/409: + # The request could not be completed due to a conflict with the current + # state of the target resource (i.e. pipeline is running). This code is used in + # situations where the user might be able to resolve the conflict + # and resubmit the request (front-end will show a pop-up with message below) + # + raise web.HTTPConflict( + reason=f"Project {project_uuid} cannot be modified while pipeline is still running." + ) + + new_project = await db.replace_user_project( + new_project, user_id, f"{project_uuid}", include_templates=True + ) + await director_v2_api.create_or_update_pipeline( + request.app, user_id, project_uuid + ) + # Appends state + new_project = await _core_states.add_project_states_for_user( + user_id=user_id, + project=new_project, + is_template=False, + app=request.app, + ) + + except ValidationError as exc: + raise web.HTTPBadRequest( + reason=f"Invalid project update: {exc.message}" + ) from exc + + except ProjectInvalidRightsError as exc: + raise web.HTTPForbidden( + reason="You do not have sufficient rights to save the project" + ) from exc + + except ProjectNotFoundError as exc: + raise web.HTTPNotFound from exc + + return {"data": new_project} + + +@routes.delete(f"/{VTAG}/projects/{{project_uuid}}") +@login_required +@permission_required("project.delete") +async def delete_project(request: web.Request): + # first check if the project exists + user_id: int = request[RQT_USERID_KEY] + db: ProjectDBAPI = request.config_dict[APP_PROJECT_DBAPI] + + try: + project_uuid = request.match_info["project_id"] + except KeyError as err: + raise 
web.HTTPBadRequest(reason=f"Invalid request parameter {err}") from err + + try: + + # exists? + await _core_get.get_project_for_user( + request.app, + project_uuid=project_uuid, + user_id=user_id, + include_templates=True, + ) + + # has access? + # TODO: optimize since this also check existence and read access + await db.raise_if_cannot_delete(user_id, project_uuid) + + # in use? + project_users: Set[int] = set() + with managed_resource(user_id, None, request.app) as rt: + project_users = { + user_session.user_id + for user_session in await rt.find_users_of_resource( + PROJECT_ID_KEY, project_uuid + ) + } + if user_id in project_users: + raise web.HTTPForbidden( + reason="Project is still open in another tab/browser. It cannot be deleted until it is closed." + ) + if project_users: + other_user_names = { + await get_user_name(request.app, x) for x in project_users + } + raise web.HTTPForbidden( + reason=f"Project is open by {other_user_names}. It cannot be deleted until the project is closed." 
+ ) + + # DELETE --- + # TODO: this is a temp solution that hides this project from the listing until + # the delete_project_task completes + # TODO: see https://github.com/ITISFoundation/osparc-simcore/pull/2522 + await db.set_hidden_flag(f"{project_uuid}", enabled=True) + + # fire+forget: this operation can be heavy, specially with data deletion + task = create_delete_project_task( + request.app, project_uuid, user_id, logger=log + ) + log.debug("Spawned task %s to delete %s", task.get_name(), f"{project_uuid=}") + + except ProjectInvalidRightsError as err: + raise web.HTTPForbidden( + reason="You do not have sufficient rights to delete this project" + ) from err + except ProjectNotFoundError as err: + raise web.HTTPNotFound(reason=f"Project {project_uuid} not found") from err + + raise web.HTTPNoContent(content_type="application/json") diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_nodes_handlers.py b/services/web/server/src/simcore_service_webserver/projects/projects_nodes_handlers.py index 18a16c7e16b..d1b00e8c5f5 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_nodes_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_nodes_handlers.py @@ -13,7 +13,7 @@ from .._meta import api_version_prefix as VTAG from ..login.decorators import RQT_USERID_KEY, login_required from ..security_decorators import permission_required -from . import projects_api +from . 
import _core_get, _core_nodes from .projects_exceptions import ProjectNotFoundError log = logging.getLogger(__name__) @@ -38,14 +38,14 @@ async def create_node(request: web.Request) -> web.Response: try: # ensure the project exists - await projects_api.get_project_for_user( + await _core_get.get_project_for_user( request.app, project_uuid=project_uuid, user_id=user_id, include_templates=True, ) data = { - "node_id": await projects_api.add_project_node( + "node_id": await _core_nodes.add_project_node( request, project_uuid, user_id, @@ -77,7 +77,7 @@ async def get_node(request: web.Request) -> web.Response: try: # ensure the project exists - await projects_api.get_project_for_user( + await _core_get.get_project_for_user( request.app, project_uuid=project_uuid, user_id=user_id, @@ -85,7 +85,7 @@ async def get_node(request: web.Request) -> web.Response: ) # NOTE: for legacy services a redirect to director-v0 is made - reply: Union[Dict, List] = await director_v2_api.get_service_state( + reply: Union[Dict, List] = await director_v2_api.get_dynamic_service_state( app=request.app, node_uuid=node_uuid ) @@ -111,7 +111,9 @@ async def post_retrieve(request: web.Request) -> web.Response: raise web.HTTPBadRequest(reason=f"Invalid request parameter {err}") from err return web.json_response( - await director_v2_api.retrieve(request.app, node_uuid, port_keys), + await director_v2_api.retrieve_dynamic_service_inputs( + request.app, node_uuid, port_keys + ), dumps=json_dumps, ) @@ -125,7 +127,7 @@ async def post_restart(request: web.Request) -> web.Response: except KeyError as err: raise web.HTTPBadRequest(reason=f"Invalid request parameter {err}") from err - await director_v2_api.restart(request.app, node_uuid) + await director_v2_api.restart_dynamic_service(request.app, node_uuid) return web.HTTPNoContent() @@ -145,16 +147,14 @@ async def delete_node(request: web.Request) -> web.Response: try: # ensure the project exists - await projects_api.get_project_for_user( + await 
_core_get.get_project_for_user( request.app, project_uuid=project_uuid, user_id=user_id, include_templates=True, ) - await projects_api.delete_project_node( - request, project_uuid, user_id, node_uuid - ) + await _core_nodes.delete_project_node(request, project_uuid, user_id, node_uuid) raise web.HTTPNoContent(content_type="application/json") except ProjectNotFoundError as exc: diff --git a/services/web/server/src/simcore_service_webserver/users_api.py b/services/web/server/src/simcore_service_webserver/users_api.py index 38900989047..5609307094b 100644 --- a/services/web/server/src/simcore_service_webserver/users_api.py +++ b/services/web/server/src/simcore_service_webserver/users_api.py @@ -6,14 +6,13 @@ import logging from collections import deque -from typing import Any, Dict, List, Tuple, TypedDict +from typing import Any, Dict, List, Optional, Tuple, TypedDict import sqlalchemy as sa from aiohttp import web from aiopg.sa.engine import Engine from aiopg.sa.result import RowProxy from servicelib.aiohttp.application_keys import APP_DB_ENGINE_KEY -from simcore_postgres_database.errors import ProgrammingError from simcore_postgres_database.models.users import UserRole from sqlalchemy import and_, literal_column @@ -110,19 +109,20 @@ async def update_user_profile( assert resp.rowcount == 1 # nosec -async def is_user_guest(app: web.Application, user_id: int) -> bool: - """Returns True if the user exists and is a GUEST""" +async def get_user_role(app: web.Application, user_id: int) -> UserRole: + """Returns user's role + + raises UserNotFoundError + raises aiopg erros + """ engine: Engine = app[APP_DB_ENGINE_KEY] async with engine.acquire() as conn: - try: - user_role = await conn.scalar( - sa.select([users.c.role]).where(users.c.id == int(user_id)) - ) - return user_role == UserRole.GUEST - - except ProgrammingError as err: - logger.warning("Could not find user with %s [%s]", f"{user_id=}", err) - return False + user_role: Optional[RowProxy] = await conn.scalar( + 
sa.select([users.c.role]).where(users.c.id == int(user_id)) + ) + if user_role is None: + raise UserNotFoundError(uid=user_id) + return UserRole(user_role) async def get_guest_user_ids_and_names(app: web.Application) -> List[Tuple[int, str]]: @@ -163,6 +163,9 @@ class UserNameDict(TypedDict): async def get_user_name(app: web.Application, user_id: int) -> UserNameDict: + """ + raises UserNotFoundError + """ engine = app[APP_DB_ENGINE_KEY] async with engine.acquire() as conn: user_name = await conn.scalar( @@ -175,6 +178,9 @@ async def get_user_name(app: web.Application, user_id: int) -> UserNameDict: async def get_user(app: web.Application, user_id: int) -> Dict: + """ + raises UserNotFoundError + """ engine = app[APP_DB_ENGINE_KEY] async with engine.acquire() as conn: result = await conn.execute(sa.select([users]).where(users.c.id == user_id)) diff --git a/services/web/server/tests/conftest.py b/services/web/server/tests/conftest.py index 749dd101913..3c3a5e093c3 100644 --- a/services/web/server/tests/conftest.py +++ b/services/web/server/tests/conftest.py @@ -11,7 +11,7 @@ import pytest import simcore_service_webserver from _pytest.monkeypatch import MonkeyPatch -from pytest_simcore.helpers.utils_login import AUserDict, LoggedUser +from pytest_simcore.helpers.utils_login import LoggedUser, UserInfoDict from servicelib.json_serialization import json_dumps from simcore_service_webserver.application_settings_utils import convert_to_environ_vars from simcore_service_webserver.db_models import UserRole @@ -94,7 +94,7 @@ def fake_project(tests_data_dir: Path) -> ProjectDict: @pytest.fixture() -async def logged_user(client, user_role: UserRole) -> AsyncIterator[AUserDict]: +async def logged_user(client, user_role: UserRole) -> AsyncIterator[UserInfoDict]: """adds a user in db and logs in with client NOTE: `user_role` fixture is defined as a parametrization below!!! 
diff --git a/services/web/server/tests/integration/01/test_garbage_collection.py b/services/web/server/tests/integration/01/test_garbage_collection.py index b9afa5aeb6a..a2310543c15 100644 --- a/services/web/server/tests/integration/01/test_garbage_collection.py +++ b/services/web/server/tests/integration/01/test_garbage_collection.py @@ -15,9 +15,9 @@ import aioredis import pytest from aiohttp.test_utils import TestClient -from aioresponses import aioresponses from models_library.projects_state import RunningState -from pytest_simcore.helpers.utils_login import AUserDict, log_client_in +from pytest_simcore.aioresponses_mocker import AioResponsesMock +from pytest_simcore.helpers.utils_login import UserInfoDict, log_client_in from pytest_simcore.helpers.utils_projects import create_project, empty_project_data from servicelib.aiohttp.application import create_safe_application from settings_library.redis import RedisSettings @@ -64,6 +64,9 @@ WAIT_FOR_COMPLETE_GC_CYCLE = GARBAGE_COLLECTOR_INTERVAL + SERVICE_DELETION_DELAY + 2 +# FIXTURES ---------------------------------------------------------------------------------- + + @pytest.fixture(autouse=True) def __drop_and_recreate_postgres__(database_from_template_before_each_function) -> None: yield @@ -80,14 +83,15 @@ async def __delete_all_redis_keys__(redis_settings: RedisSettings): # do nothing on teadown -@pytest.fixture -async def director_v2_service_mock() -> AsyncIterable[aioresponses]: - """uses aioresponses to mock all calls of an aiohttpclient - WARNING: any request done through the client will go through aioresponses. It is - unfortunate but that means any valid request (like calling the test server) prefix must be set as passthrough. 
- Other than that it seems to behave nicely - """ - PASSTHROUGH_REQUESTS_PREFIXES = ["http://127.0.0.1", "ws://"] +@pytest.fixture(autouse=True) +async def director_v2_service_responses_mock_autouse( + director_v2_service_responses_mock: AioResponsesMock, +) -> AsyncIterable[AioResponsesMock]: + # NOTE: auto-mock under this test-suite module is justified since + # directorv2 service is not included in pytest_simcore_core_services_selection + # All requests to director-v2 service are intercepted and responses are patched + # as defined in director_v2_service_responses_mock + get_computation_pattern = re.compile( r"^http://[a-z\-_]*director-v2:[0-9]+/v2/computations/.*$" ) @@ -95,22 +99,17 @@ async def director_v2_service_mock() -> AsyncIterable[aioresponses]: # NOTE: GitHK I have to copy paste that fixture for some unclear reason for now. # I think this is due to some conflict between these non-pytest-simcore fixtures and the loop fixture being defined at different locations?? not sure.. 
# anyway I think this should disappear once the garbage collector moves to its own micro-service - with aioresponses(passthrough=PASSTHROUGH_REQUESTS_PREFIXES) as mock: - mock.get( - get_computation_pattern, - status=202, - payload={"state": str(RunningState.NOT_STARTED.value)}, - repeat=True, - ) - mock.delete(delete_computation_pattern, status=204, repeat=True) - yield mock - -@pytest.fixture(autouse=True) -async def auto_mock_director_v2( - director_v2_service_mock: aioresponses, -) -> aioresponses: - return director_v2_service_mock + director_v2_service_responses_mock.get( + get_computation_pattern, + status=202, + payload={"state": str(RunningState.NOT_STARTED.value)}, + repeat=True, + ) + director_v2_service_responses_mock.delete( + delete_computation_pattern, status=204, repeat=True + ) + yield director_v2_service_responses_mock @pytest.fixture @@ -162,7 +161,17 @@ def client( ) -################ utils +@pytest.fixture +def disable_garbage_collector_background_task(mocker): + """patch the setup of the garbage collector so we can call it manually""" + mocker.patch( + "simcore_service_webserver.garbage_collector.setup_garbage_collector", + autospec=True, + return_value="", + ) + + +# UTILS ---------------------------------------------------------------------------------- async def login_user(client: TestClient): @@ -177,7 +186,7 @@ async def login_guest_user(client: TestClient): async def new_project( client: TestClient, - user: AUserDict, + user: UserInfoDict, tests_data_dir: Path, access_rights: Optional[Dict[str, Any]] = None, ): @@ -196,7 +205,7 @@ async def new_project( async def get_template_project( client: TestClient, - user: AUserDict, + user: UserInfoDict, project_data: ProjectDict, access_rights=None, ): @@ -378,20 +387,11 @@ async def assert_one_owner_for_project( return True -################ end utils - - -@pytest.fixture -def mock_garbage_collector_task(mocker): - """patch the setup of the garbage collector so we can call it manually""" - 
mocker.patch( - "simcore_service_webserver.garbage_collector.setup_garbage_collector", - return_value="", - ) +# TESTS --------------------------------------------------------------------------- async def test_t1_while_guest_is_connected_no_resources_are_removed( - mock_garbage_collector_task, + disable_garbage_collector_background_task, client, socketio_client_factory: Callable, aiopg_engine, @@ -417,7 +417,7 @@ async def test_t1_while_guest_is_connected_no_resources_are_removed( async def test_t2_cleanup_resources_after_browser_is_closed( - mock_garbage_collector_task, + disable_garbage_collector_background_task, simcore_services_ready, client, socketio_client_factory: Callable, diff --git a/services/web/server/tests/integration/01/test_project_workflow.py b/services/web/server/tests/integration/01/test_project_workflow.py index ea41dda41d2..c9ab46c62ed 100644 --- a/services/web/server/tests/integration/01/test_project_workflow.py +++ b/services/web/server/tests/integration/01/test_project_workflow.py @@ -102,7 +102,7 @@ async def storage_subsystem_mock(mocker): # requests storage to copy data mock = mocker.patch( - "simcore_service_webserver.projects.projects_handlers.copy_data_folders_from_project" + "simcore_service_webserver.projects.projects_handlers_crud.copy_data_folders_from_project" ) async def _mock_copy_data_from_project(*args): @@ -111,9 +111,8 @@ async def _mock_copy_data_from_project(*args): mock.side_effect = _mock_copy_data_from_project # requests storage to delete data - # mock1 = mocker.patch('simcore_service_webserver.projects.projects_handlers.delete_data_folders_of_project', return_value=None) mock1 = mocker.patch( - "simcore_service_webserver.projects.projects_handlers.projects_api.delete_data_folders_of_project", + "simcore_service_webserver.projects._core_delete.delete_data_folders_of_project", return_value="", ) return mock, mock1 @@ -203,7 +202,7 @@ async def test_workflow( primary_group: Dict[str, str], standard_groups: List[Dict[str, 
str]], storage_subsystem_mock, - director_v2_service_mock, + director_v2_service_responses_mock, ): # empty list projects = await _request_list(client) diff --git a/services/web/server/tests/integration/conftest.py b/services/web/server/tests/integration/conftest.py index d6dc9ed7776..c8ae82fd9f1 100644 --- a/services/web/server/tests/integration/conftest.py +++ b/services/web/server/tests/integration/conftest.py @@ -27,7 +27,7 @@ from pytest_simcore.helpers import FIXTURE_CONFIG_CORE_SERVICES_SELECTION from pytest_simcore.helpers.utils_dict import ConfigDict from pytest_simcore.helpers.utils_docker import get_service_published_port -from pytest_simcore.helpers.utils_login import AUserDict, NewUser +from pytest_simcore.helpers.utils_login import NewUser, UserInfoDict from simcore_service_webserver.groups_api import ( add_user_in_group, create_user_group, @@ -169,14 +169,14 @@ def mock_orphaned_services(mocker: MockerFixture) -> mock.Mock: @pytest.fixture -async def primary_group(client, logged_user: AUserDict) -> Dict[str, str]: +async def primary_group(client, logged_user: UserInfoDict) -> Dict[str, str]: primary_group, _, _ = await list_user_groups(client.app, logged_user["id"]) return primary_group @pytest.fixture async def standard_groups( - client, logged_user: AUserDict + client, logged_user: UserInfoDict ) -> AsyncIterable[List[Dict[str, str]]]: # create a separate admin account to create some standard groups for the logged user sparc_group = { diff --git a/services/web/server/tests/unit/isolated/scicrunch/test_scicrunch__resolver.py b/services/web/server/tests/unit/isolated/scicrunch/test_scicrunch__resolver.py index e50099415a4..0ae31d4654b 100644 --- a/services/web/server/tests/unit/isolated/scicrunch/test_scicrunch__resolver.py +++ b/services/web/server/tests/unit/isolated/scicrunch/test_scicrunch__resolver.py @@ -6,16 +6,16 @@ import pytest from aiohttp import ClientSession from aiohttp.client import ClientTimeout -from 
simcore_service_webserver.scicrunch._resolver import ResolvedItem, resolve_rrid -from simcore_service_webserver.scicrunch.settings import SciCrunchSettings # FIXME: PC check the CELL_LINE_CITATIONS test please -from ._citations import ( # CELL_LINE_CITATIONS, +from pytest_simcore.helpers.utils_scicrunch_citations import ( # CELL_LINE_CITATIONS, ANTIBODY_CITATIONS, ORGANISM_CITATIONS, PLAMID_CITATIONS, TOOL_CITATIONS, ) +from simcore_service_webserver.scicrunch._resolver import ResolvedItem, resolve_rrid +from simcore_service_webserver.scicrunch.settings import SciCrunchSettings @pytest.mark.parametrize( diff --git a/services/web/server/tests/unit/isolated/scicrunch/test_scicrunch__rest.py b/services/web/server/tests/unit/isolated/scicrunch/test_scicrunch__rest.py index 925a7c0837d..867fd6b5b62 100644 --- a/services/web/server/tests/unit/isolated/scicrunch/test_scicrunch__rest.py +++ b/services/web/server/tests/unit/isolated/scicrunch/test_scicrunch__rest.py @@ -19,6 +19,10 @@ import pytest from aiohttp import ClientResponseError, ClientSession, web +from pytest_simcore.helpers.utils_scicrunch_citations import ( + NOT_TOOL_CITATIONS, + TOOL_CITATIONS, +) from simcore_service_webserver.scicrunch._rest import ( ListOfResourceHits, ResourceView, @@ -31,8 +35,6 @@ SciCrunchSettings, ) -from ._citations import NOT_TOOL_CITATIONS, TOOL_CITATIONS - SCICRUNCH_API_KEY = os.environ.get("SCICRUNCH_API_KEY") pytestmark = pytest.mark.skipif( diff --git a/services/web/server/tests/unit/isolated/test_security_access_model.py b/services/web/server/tests/unit/isolated/test_security_access_model.py index 484335ee656..74ef01afca8 100644 --- a/services/web/server/tests/unit/isolated/test_security_access_model.py +++ b/services/web/server/tests/unit/isolated/test_security_access_model.py @@ -85,13 +85,6 @@ def can_update_inputs(context): # TESTS ------------------------------------------------------------------------- -def test_roles(): - super_users = UserRole.super_users() - assert 
super_users - assert UserRole.USER not in super_users - assert all(r in UserRole for r in super_users) - - def test_unique_permissions(): # Limit for scalability. Test that unnecessary resources and/or actions are used # Enforce reusable permission layouts diff --git a/services/web/server/tests/unit/with_dbs/02/clusters/test_clusters_handlers.py b/services/web/server/tests/unit/with_dbs/02/clusters/test_clusters_handlers.py index 89cca92cea7..c0568a40015 100644 --- a/services/web/server/tests/unit/with_dbs/02/clusters/test_clusters_handlers.py +++ b/services/web/server/tests/unit/with_dbs/02/clusters/test_clusters_handlers.py @@ -12,7 +12,6 @@ import pytest import sqlalchemy as sa -from _helpers import ExpectedResponse, standard_role_response # type: ignore from aiohttp.test_utils import TestClient from faker import Faker from models_library.clusters import ( @@ -29,6 +28,10 @@ from models_library.users import GroupID from pytest_simcore.helpers.utils_assert import assert_status from pytest_simcore.helpers.utils_login import NewUser +from pytest_simcore.helpers.utils_webserver_projects import ( + ExpectedResponse, + standard_role_response, +) from simcore_postgres_database.models.cluster_to_groups import cluster_to_groups from simcore_postgres_database.models.clusters import clusters from simcore_postgres_database.models.users import UserRole diff --git a/services/web/server/tests/unit/with_dbs/02/test_studies_dispatcher_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_studies_dispatcher_handlers.py index ffbfd18a1b1..47847d2df13 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_studies_dispatcher_handlers.py +++ b/services/web/server/tests/unit/with_dbs/02/test_studies_dispatcher_handlers.py @@ -177,9 +177,9 @@ def app_cfg( @pytest.fixture(autouse=True) async def director_v2_automock( - director_v2_service_mock: aioresponses, + director_v2_service_responses_mock: aioresponses, ) -> Iterator[aioresponses]: - yield director_v2_service_mock + 
yield director_v2_service_responses_mock # REST-API ----------------------------------------------------------------------------------------------- @@ -288,7 +288,7 @@ def mocks_on_projects_api(mocker): All projects in this module are UNLOCKED """ mocker.patch( - "simcore_service_webserver.projects.projects_api._get_project_lock_state", + "simcore_service_webserver.projects._core_states._get_project_lock_state", return_value=ProjectLocked(value=False, status=ProjectStatus.CLOSED), ) diff --git a/services/web/server/tests/unit/with_dbs/02/test_studies_dispatcher_studies_access.py b/services/web/server/tests/unit/with_dbs/02/test_studies_dispatcher_studies_access.py index 616e3b95a20..4f06d4c72b9 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_studies_dispatcher_studies_access.py +++ b/services/web/server/tests/unit/with_dbs/02/test_studies_dispatcher_studies_access.py @@ -27,7 +27,7 @@ from simcore_service_webserver import catalog from simcore_service_webserver.log import setup_logging from simcore_service_webserver.projects.projects_api import delete_project -from simcore_service_webserver.users_api import delete_user, is_user_guest +from simcore_service_webserver.users_api import delete_user, get_user_role from yarl import URL SHARED_STUDY_UUID = "e2e38eee-c569-4e55-b104-70d159e49c87" @@ -122,8 +122,10 @@ async def unpublished_project(client, fake_project, tests_data_dir: Path): @pytest.fixture(autouse=True) -async def director_v2_mock(director_v2_service_mock) -> AsyncIterator[aioresponses]: - yield director_v2_service_mock +async def director_v2_mock( + director_v2_service_responses_mock, +) -> AsyncIterator[aioresponses]: + yield director_v2_service_responses_mock async def _get_user_projects(client): @@ -211,7 +213,7 @@ def mocks_on_projects_api(mocker) -> None: All projects in this module are UNLOCKED """ mocker.patch( - "simcore_service_webserver.projects.projects_api._get_project_lock_state", + 
"simcore_service_webserver.projects._core_states._get_project_lock_state", return_value=ProjectLocked(value=False, status=ProjectStatus.CLOSED), ) @@ -351,14 +353,14 @@ async def test_access_cookie_of_expired_user( resp = await client.get(me_url) data, _ = await assert_status(resp, web.HTTPOk) - assert await is_user_guest(app, data["id"]) + assert await get_user_role(app, data["id"]) == UserRole.GUEST async def enforce_garbage_collect_guest(uid): # Emulates garbage collector: # - GUEST user expired, cleaning it up # - client still holds cookie with its identifier nonetheless # - assert await is_user_guest(app, uid) + assert await get_user_role(app, uid) == UserRole.GUEST projects = await _get_user_projects(client) assert len(projects) == 1 @@ -381,7 +383,7 @@ async def enforce_garbage_collect_guest(uid): # as a guest user resp = await client.get(me_url) data, _ = await assert_status(resp, web.HTTPOk) - assert await is_user_guest(app, data["id"]) + assert await get_user_role(app, data["id"]) == UserRole.GUEST # But I am another user assert data["id"] != user_id diff --git a/services/web/server/tests/unit/with_dbs/03/test_director_v2.py b/services/web/server/tests/unit/with_dbs/03/test_director_v2.py index f02d7520b20..0518a36457e 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_director_v2.py +++ b/services/web/server/tests/unit/with_dbs/03/test_director_v2.py @@ -3,25 +3,31 @@ # pylint:disable=redefined-outer-name -from typing import AsyncIterator, Dict +from typing import Dict from uuid import UUID, uuid4 import pytest -from _helpers import ExpectedResponse, standard_role_response from aiohttp import web -from aioresponses import aioresponses from models_library.projects_state import RunningState from pydantic.types import PositiveInt +from pytest_simcore.aioresponses_mocker import AioResponsesMock from pytest_simcore.helpers.utils_assert import assert_status +from pytest_simcore.helpers.utils_webserver_projects import ( + ExpectedResponse, + 
standard_role_response, +) from simcore_service_webserver import director_v2_api from simcore_service_webserver.db_models import UserRole @pytest.fixture(autouse=True) -async def auto_mock_director_v2( - director_v2_service_mock: aioresponses, -) -> AsyncIterator[aioresponses]: - yield director_v2_service_mock +async def director_v2_service_responses_mock_autouse( + director_v2_service_responses_mock: AioResponsesMock, +) -> AioResponsesMock: + # NOTE: auto-mock under this test-suite is justified (under test_director_v2 test-suite) + # All requests to director-v2 service are intercepted and mocked + # as defined in director_v2_service_responses_mock + return director_v2_service_responses_mock @pytest.fixture diff --git a/services/web/server/tests/unit/with_dbs/04/test_storage.py b/services/web/server/tests/unit/with_dbs/04/test_storage.py index 7ce4b769caf..8449067cec2 100644 --- a/services/web/server/tests/unit/with_dbs/04/test_storage.py +++ b/services/web/server/tests/unit/with_dbs/04/test_storage.py @@ -20,7 +20,7 @@ def storage_server(event_loop, aiohttp_server, app_cfg): app = create_safe_application(cfg) async def _get_locs(request: web.Request): - assert not request.has_body + assert not request.can_read_body query = request.query assert query @@ -36,7 +36,7 @@ async def _get_locs(request: web.Request): ) async def _post_sync_meta_data(request: web.Request): - assert not request.has_body + assert not request.can_read_body query = request.query assert query @@ -50,7 +50,7 @@ async def _post_sync_meta_data(request: web.Request): ) async def _get_filemeta(request: web.Request): - assert not request.has_body + assert not request.can_read_body query = request.query assert query @@ -67,7 +67,7 @@ async def _get_filemeta(request: web.Request): ) async def _get_filtered_list(request: web.Request): - assert not request.has_body + assert not request.can_read_body query = request.query assert query @@ -84,7 +84,7 @@ async def _get_filtered_list(request: web.Request): 
) async def _get_datasets(request: web.Request): - assert not request.has_body + assert not request.can_read_body query = request.query assert query @@ -101,7 +101,7 @@ async def _get_datasets(request: web.Request): ) async def _get_datasets_meta(request: web.Request): - assert not request.has_body + assert not request.can_read_body query = request.query assert query diff --git a/services/web/server/tests/unit/with_dbs/06/conftest.py b/services/web/server/tests/unit/with_dbs/06/conftest.py index 70853126a1b..e1970e1759b 100644 --- a/services/web/server/tests/unit/with_dbs/06/conftest.py +++ b/services/web/server/tests/unit/with_dbs/06/conftest.py @@ -7,7 +7,7 @@ from typing import Callable, Dict, List, Optional, Union import pytest -from aioresponses import aioresponses +from pytest_simcore.aioresponses_mocker import AioResponsesMock from pytest_simcore.helpers.utils_projects import NewProject, delete_all_projects from servicelib.aiohttp.application import create_safe_application from simcore_service_webserver import catalog @@ -197,7 +197,10 @@ async def mocked_get_services_for_user(*args, **kwargs): @pytest.fixture(autouse=True) -async def director_v2_automock( - director_v2_service_mock: aioresponses, -) -> aioresponses: - yield director_v2_service_mock +async def director_v2_service_responses_mock_autouse( + director_v2_service_responses_mock: AioResponsesMock, +) -> AioResponsesMock: + # NOTE: auto-mock under this test-suite is justified (under unit/with_dbs/06) + # All requests to director-v2 service are intercepted and responses are patched + # as defined in director_v2_service_responses_mock + yield director_v2_service_responses_mock diff --git a/services/web/server/tests/unit/with_dbs/06/test_projects_open_close.py b/services/web/server/tests/unit/with_dbs/06/test_projects_open_close.py index a7f08e487a1..efb28cd07c3 100644 --- a/services/web/server/tests/unit/with_dbs/06/test_projects_open_close.py +++ 
b/services/web/server/tests/unit/with_dbs/06/test_projects_open_close.py @@ -12,7 +12,6 @@ import pytest import socketio -from _helpers import ExpectedResponse, standard_role_response from aiohttp import web from aiohttp.test_utils import TestClient, TestServer from models_library.projects_access import Owner @@ -26,9 +25,13 @@ from pytest_simcore.helpers.utils_assert import assert_status from pytest_simcore.helpers.utils_login import log_client_in from pytest_simcore.helpers.utils_projects import assert_get_same_project +from pytest_simcore.helpers.utils_webserver_projects import ( + ExpectedResponse, + standard_role_response, +) from servicelib.aiohttp.web_exceptions_extension import HTTPLocked from simcore_service_webserver.db_models import UserRole -from simcore_service_webserver.projects.projects_handlers import ( +from simcore_service_webserver.projects.projects_handlers_crud import ( OVERRIDABLE_DOCUMENT_KEYS, ) from simcore_service_webserver.socketio.events import SOCKET_IO_PROJECT_UPDATED_EVENT @@ -412,7 +415,9 @@ async def test_open_project( request_dns=request_dns, ) ) - mocked_director_v2_api["director_v2_api.start_service"].assert_has_calls(calls) + mocked_director_v2_api[ + "director_v2_api.start_dynamic_service" + ].assert_has_calls(calls) @pytest.mark.parametrize(*standard_role_response()) @@ -428,7 +433,7 @@ async def test_close_project( # POST /v0/projects/{project_id}:close fakes = fake_services(5) assert len(fakes) == 5 - mocked_director_v2_api["director_v2_core.get_services"].return_value = fakes + mocked_director_v2_api["director_v2_core.get_dynamic_services"].return_value = fakes # open project client_id = client_session_id_factory() @@ -436,10 +441,10 @@ async def test_close_project( resp = await client.post(url, json=client_id) if resp.status == web.HTTPOk.status_code: - mocked_director_v2_api["director_v2_api.get_services"].assert_any_call( + mocked_director_v2_api["director_v2_api.get_dynamic_services"].assert_any_call( client.server.app, 
logged_user["id"], user_project["uuid"] ) - mocked_director_v2_api["director_v2_core.get_services"].reset_mock() + mocked_director_v2_api["director_v2_core.get_dynamic_services"].reset_mock() # close project url = client.app.router["close_project"].url_for(project_id=user_project["uuid"]) @@ -458,7 +463,9 @@ async def test_close_project( project_id=user_project["uuid"], ), ] - mocked_director_v2_api["director_v2_core.get_services"].assert_has_calls(calls) + mocked_director_v2_api[ + "director_v2_core.get_dynamic_services" + ].assert_has_calls(calls) calls = [ call( @@ -468,7 +475,9 @@ async def test_close_project( ) for service in fakes ] - mocked_director_v2_api["director_v2_core.stop_service"].assert_has_calls(calls) + mocked_director_v2_api[ + "director_v2_core.stop_dynamic_service" + ].assert_has_calls(calls) @pytest.mark.parametrize( @@ -573,7 +582,7 @@ async def test_project_node_lifetime( ): mock_storage_api_delete_data_folders_of_project_node = mocker.patch( - "simcore_service_webserver.projects.projects_handlers.projects_api.delete_data_folders_of_project_node", + "simcore_service_webserver.projects._core_nodes.delete_data_folders_of_project_node", return_value="", ) @@ -584,34 +593,42 @@ async def test_project_node_lifetime( data, errors = await assert_status(resp, create_exp) node_id = "wrong_node_id" if resp.status == web.HTTPCreated.status_code: - mocked_director_v2_api["director_v2_api.start_service"].assert_called_once() + mocked_director_v2_api[ + "director_v2_api.start_dynamic_service" + ].assert_called_once() assert "node_id" in data node_id = data["node_id"] else: - mocked_director_v2_api["director_v2_api.start_service"].assert_not_called() + mocked_director_v2_api[ + "director_v2_api.start_dynamic_service" + ].assert_not_called() # create a new NOT dynamic node... 
- mocked_director_v2_api["director_v2_api.start_service"].reset_mock() + mocked_director_v2_api["director_v2_api.start_dynamic_service"].reset_mock() url = client.app.router["create_node"].url_for(project_id=user_project["uuid"]) body = {"service_key": "some/notdynamic/key", "service_version": "1.3.4"} resp = await client.post(url, json=body) data, errors = await assert_status(resp, create_exp) node_id_2 = "wrong_node_id" if resp.status == web.HTTPCreated.status_code: - mocked_director_v2_api["director_v2_api.start_service"].assert_not_called() + mocked_director_v2_api[ + "director_v2_api.start_dynamic_service" + ].assert_not_called() assert "node_id" in data node_id_2 = data["node_id"] else: - mocked_director_v2_api["director_v2_api.start_service"].assert_not_called() + mocked_director_v2_api[ + "director_v2_api.start_dynamic_service" + ].assert_not_called() # get the node state - mocked_director_v2_api["director_v2_api.get_services"].return_value = [ + mocked_director_v2_api["director_v2_api.get_dynamic_services"].return_value = [ {"service_uuid": node_id, "service_state": "running"} ] url = client.app.router["get_node"].url_for( project_id=user_project["uuid"], node_id=node_id ) - mocked_director_v2_api["director_v2_api.get_service_state"].return_value = { + mocked_director_v2_api["director_v2_api.get_dynamic_service_state"].return_value = { "service_state": "running" } resp = await client.get(url) @@ -621,12 +638,12 @@ async def test_project_node_lifetime( assert data["service_state"] == "running" # get the NOT dynamic node state - mocked_director_v2_api["director_v2_api.get_services"].return_value = [] + mocked_director_v2_api["director_v2_api.get_dynamic_services"].return_value = [] url = client.app.router["get_node"].url_for( project_id=user_project["uuid"], node_id=node_id_2 ) - mocked_director_v2_api["director_v2_api.get_service_state"].return_value = { + mocked_director_v2_api["director_v2_api.get_dynamic_service_state"].return_value = { "service_state": 
"idle" } resp = await client.get(url) @@ -636,7 +653,7 @@ async def test_project_node_lifetime( assert data["service_state"] == "idle" # delete the node - mocked_director_v2_api["director_v2_api.get_services"].return_value = [ + mocked_director_v2_api["director_v2_api.get_dynamic_services"].return_value = [ {"service_uuid": node_id} ] url = client.app.router["delete_node"].url_for( @@ -645,14 +662,18 @@ async def test_project_node_lifetime( resp = await client.delete(url) data, errors = await assert_status(resp, deletion_exp) if resp.status == web.HTTPNoContent.status_code: - mocked_director_v2_api["director_v2_api.stop_service"].assert_called_once() + mocked_director_v2_api[ + "director_v2_api.stop_dynamic_service" + ].assert_called_once() mock_storage_api_delete_data_folders_of_project_node.assert_called_once() else: - mocked_director_v2_api["director_v2_api.stop_service"].assert_not_called() + mocked_director_v2_api[ + "director_v2_api.stop_dynamic_service" + ].assert_not_called() mock_storage_api_delete_data_folders_of_project_node.assert_not_called() # delete the NOT dynamic node - mocked_director_v2_api["director_v2_api.stop_service"].reset_mock() + mocked_director_v2_api["director_v2_api.stop_dynamic_service"].reset_mock() mock_storage_api_delete_data_folders_of_project_node.reset_mock() # mock_director_api_get_running_services.return_value.set_result([{"service_uuid": node_id}]) url = client.app.router["delete_node"].url_for( @@ -661,10 +682,14 @@ async def test_project_node_lifetime( resp = await client.delete(url) data, errors = await assert_status(resp, deletion_exp) if resp.status == web.HTTPNoContent.status_code: - mocked_director_v2_api["director_v2_api.stop_service"].assert_not_called() + mocked_director_v2_api[ + "director_v2_api.stop_dynamic_service" + ].assert_not_called() mock_storage_api_delete_data_folders_of_project_node.assert_called_once() else: - mocked_director_v2_api["director_v2_api.stop_service"].assert_not_called() + 
mocked_director_v2_api[ + "director_v2_api.stop_dynamic_service" + ].assert_not_called() mock_storage_api_delete_data_folders_of_project_node.assert_not_called() diff --git a/services/web/server/tests/unit/with_dbs/07/test_users.py b/services/web/server/tests/unit/with_dbs/07/test_users.py index d848cc750d0..ca177d052f8 100644 --- a/services/web/server/tests/unit/with_dbs/07/test_users.py +++ b/services/web/server/tests/unit/with_dbs/07/test_users.py @@ -112,7 +112,9 @@ async def fake_tokens(logged_user, tokens_db): return all_tokens -# -------------------------------------------------------------------------- +# TESTS -------------------------------------------------------------------------- + + PREFIX = "/" + API_VERSION + "/me" diff --git a/services/web/server/tests/unit/with_dbs/08/test_groups.py b/services/web/server/tests/unit/with_dbs/08/test_groups.py index 2daa2a9584d..d57adfe1d0b 100644 --- a/services/web/server/tests/unit/with_dbs/08/test_groups.py +++ b/services/web/server/tests/unit/with_dbs/08/test_groups.py @@ -8,12 +8,12 @@ from typing import Callable, Dict, List import pytest -from _helpers import standard_role_response from aiohttp import web from aiohttp.test_utils import TestClient from pytest_simcore.helpers import utils_login from pytest_simcore.helpers.utils_assert import assert_status from pytest_simcore.helpers.utils_login import log_client_in +from pytest_simcore.helpers.utils_webserver_projects import standard_role_response from servicelib.aiohttp.application import create_safe_application from simcore_service_webserver.application_settings import setup_settings from simcore_service_webserver.db import setup_db diff --git a/services/web/server/tests/unit/with_dbs/09/conftest.py b/services/web/server/tests/unit/with_dbs/09/conftest.py index 149d47ea146..ee2f4ac78de 100644 --- a/services/web/server/tests/unit/with_dbs/09/conftest.py +++ b/services/web/server/tests/unit/with_dbs/09/conftest.py @@ -9,7 +9,6 @@ import pytest from aiohttp 
import web -from aioresponses import aioresponses from models_library.projects_access import Owner from models_library.projects_state import ( ProjectLocked, @@ -18,6 +17,7 @@ ProjectStatus, RunningState, ) +from pytest_simcore.aioresponses_mocker import AioResponsesMock from pytest_simcore.helpers.utils_assert import assert_status from pytest_simcore.helpers.utils_projects import NewProject, delete_all_projects from servicelib import async_utils @@ -121,7 +121,7 @@ def mocks_on_projects_api(mocker, logged_user) -> None: state=ProjectRunningState(value=RunningState.NOT_STARTED), ) mocker.patch( - "simcore_service_webserver.projects.projects_api._get_project_lock_state", + "simcore_service_webserver.projects._core_states._get_project_lock_state", return_value=state, ) @@ -238,10 +238,14 @@ async def mocked_get_services_for_user(*args, **kwargs): @pytest.fixture(autouse=True) -async def director_v2_automock( - director_v2_service_mock: aioresponses, -) -> aioresponses: - yield director_v2_service_mock +async def director_v2_service_responses_automock( + director_v2_service_responses_mock: AioResponsesMock, +) -> AioResponsesMock: + # NOTE: auto-mock under this test-suite is justified (under unit/with_dbs/09) + # The director-v2 service is not in place and this test-suite covers projects + # unit-testing. 
All requests to director-v2 service are intercepted and mocked + # as defined in director_v2_service_responses_mock + yield director_v2_service_responses_mock @pytest.fixture() diff --git a/services/web/server/tests/unit/with_dbs/09/test_projects_cancellations.py b/services/web/server/tests/unit/with_dbs/09/test_projects_cancellations.py index 503f9068ed0..d270a8c0434 100644 --- a/services/web/server/tests/unit/with_dbs/09/test_projects_cancellations.py +++ b/services/web/server/tests/unit/with_dbs/09/test_projects_cancellations.py @@ -6,11 +6,13 @@ from typing import Any, Callable, Dict, List, Tuple import pytest -from _helpers import ExpectedResponse # type: ignore -from _helpers import MockedStorageSubsystem # type: ignore -from _helpers import standard_role_response # type: ignore from aiohttp.test_utils import TestClient from pytest_simcore.helpers.utils_assert import assert_status +from pytest_simcore.helpers.utils_webserver_projects import ( + ExpectedResponse, + MockedStorageSubsystem, + standard_role_response, +) from simcore_postgres_database.models.users import UserRole from simcore_service_webserver._meta import api_version_prefix from tenacity._asyncio import AsyncRetrying diff --git a/services/web/server/tests/unit/with_dbs/09/test_projects_crud.py b/services/web/server/tests/unit/with_dbs/09/test_projects_crud.py index c3c375b2e5d..fed8e5d3e6e 100644 --- a/services/web/server/tests/unit/with_dbs/09/test_projects_crud.py +++ b/services/web/server/tests/unit/with_dbs/09/test_projects_crud.py @@ -10,19 +10,19 @@ from typing import Any, Callable, Dict, List, Optional, Tuple, Type, Union import pytest -from _helpers import ( # type: ignore - ExpectedResponse, - MockedStorageSubsystem, - standard_role_response, -) from aiohttp import web from aiohttp.test_utils import TestClient -from aioresponses import aioresponses from models_library.projects_state import ProjectState +from pytest_simcore.aioresponses_mocker import AioResponsesMock from 
pytest_simcore.helpers.utils_assert import assert_status +from pytest_simcore.helpers.utils_webserver_projects import ( + ExpectedResponse, + MockedStorageSubsystem, + standard_role_response, +) from simcore_service_webserver._meta import api_version_prefix from simcore_service_webserver.db_models import UserRole -from simcore_service_webserver.projects.projects_handlers import ( +from simcore_service_webserver.projects.projects_handlers_crud import ( OVERRIDABLE_DOCUMENT_KEYS, ) from simcore_service_webserver.utils import now_str, to_datetime @@ -268,7 +268,7 @@ async def test_list_projects( template_project: Dict[str, Any], expected: Type[web.HTTPException], catalog_subsystem_mock: Callable[[Optional[Union[List[Dict], Dict]]], None], - director_v2_service_mock: aioresponses, + director_v2_service_responses_mock: AioResponsesMock, ): catalog_subsystem_mock([user_project, template_project]) data, *_ = await _list_projects(client, expected) diff --git a/services/web/server/tests/unit/with_dbs/09/test_projects_delete.py b/services/web/server/tests/unit/with_dbs/09/test_projects_delete.py index 0e8bd1675fc..bdddfd89ae4 100644 --- a/services/web/server/tests/unit/with_dbs/09/test_projects_delete.py +++ b/services/web/server/tests/unit/with_dbs/09/test_projects_delete.py @@ -8,14 +8,16 @@ from unittest.mock import MagicMock, call import pytest -from _helpers import ExpectedResponse # type: ignore -from _helpers import MockedStorageSubsystem # type: ignore -from _helpers import standard_role_response # type: ignore from aiohttp import web # TESTS ----------------------------------------------------------------------------------------- from aiohttp.test_utils import TestClient from pytest_simcore.helpers.utils_assert import assert_status +from pytest_simcore.helpers.utils_webserver_projects import ( + ExpectedResponse, + MockedStorageSubsystem, + standard_role_response, +) from simcore_service_webserver._meta import api_version_prefix from 
simcore_service_webserver.db_models import UserRole from socketio.exceptions import ConnectionError as SocketConnectionError @@ -48,13 +50,15 @@ async def test_delete_project( # DELETE /v0/projects/{project_id} fakes = fake_services(5) - mocked_director_v2_api["director_v2_core.get_services"].return_value = fakes + mocked_director_v2_api["director_v2_core.get_dynamic_services"].return_value = fakes await _delete_project(client, user_project, expected.no_content) await asyncio.sleep(2) # let some time fly for the background tasks to run if expected.no_content == web.HTTPNoContent: - mocked_director_v2_api["director_v2_core.get_services"].assert_called_once() + mocked_director_v2_api[ + "director_v2_core.get_dynamic_services" + ].assert_called_once() expected_calls = [ call( @@ -64,9 +68,9 @@ async def test_delete_project( ) for service in fakes ] - mocked_director_v2_api["director_v2_core.stop_service"].assert_has_calls( - expected_calls - ) + mocked_director_v2_api[ + "director_v2_core.stop_dynamic_service" + ].assert_has_calls(expected_calls) # wait for the fire&forget to run await asyncio.sleep(2) diff --git a/services/web/server/tests/unit/with_dbs/09/test_projects_pagination.py b/services/web/server/tests/unit/with_dbs/09/test_projects_pagination.py index 11b121a7ba6..e11394821b2 100644 --- a/services/web/server/tests/unit/with_dbs/09/test_projects_pagination.py +++ b/services/web/server/tests/unit/with_dbs/09/test_projects_pagination.py @@ -4,21 +4,24 @@ # pylint: disable=unused-variable import asyncio -import uuid as uuidlib from copy import deepcopy from math import ceil from typing import Any, Callable, Dict, List, Optional, Tuple, Type, Union import pytest -from _helpers import ExpectedResponse, standard_role_response from aiohttp import web from aiohttp.test_utils import TestClient from aioresponses import aioresponses from models_library.projects_state import ProjectState +from pytest_simcore.aioresponses_mocker import AioResponsesMock from 
pytest_simcore.helpers.utils_assert import assert_status +from pytest_simcore.helpers.utils_webserver_projects import ( + ExpectedResponse, + standard_role_response, +) from simcore_service_webserver._meta import api_version_prefix from simcore_service_webserver.db_models import UserRole -from simcore_service_webserver.projects.projects_handlers import ( +from simcore_service_webserver.projects.projects_handlers_crud import ( OVERRIDABLE_DOCUMENT_KEYS, ) from simcore_service_webserver.utils import now_str, to_datetime @@ -237,7 +240,7 @@ async def test_list_projects_with_invalid_pagination_parameters( expected: ExpectedResponse, storage_subsystem_mock, catalog_subsystem_mock: Callable[[Optional[Union[List[Dict], Dict]]], None], - director_v2_service_mock: aioresponses, + director_v2_service_responses_mock: AioResponsesMock, project_db_cleaner, limit: int, offset: int, @@ -261,7 +264,7 @@ async def test_list_projects_with_pagination( expected: ExpectedResponse, storage_subsystem_mock, catalog_subsystem_mock: Callable[[Optional[Union[List[Dict], Dict]]], None], - director_v2_service_mock: aioresponses, + director_v2_service_responses_mock: aioresponses, project_db_cleaner, limit: int, ): diff --git a/services/web/server/tests/unit/with_dbs/10/meta_modeling/test_meta_modeling_iterations.py b/services/web/server/tests/unit/with_dbs/10/meta_modeling/test_meta_modeling_iterations.py index e48f9fe4ac5..dc03086e239 100644 --- a/services/web/server/tests/unit/with_dbs/10/meta_modeling/test_meta_modeling_iterations.py +++ b/services/web/server/tests/unit/with_dbs/10/meta_modeling/test_meta_modeling_iterations.py @@ -10,7 +10,7 @@ from faker import Faker from models_library.projects import Project from pytest_simcore.helpers.utils_assert import assert_status -from pytest_simcore.helpers.utils_login import AUserDict +from pytest_simcore.helpers.utils_login import UserInfoDict from pytest_simcore.simcore_webserver_projects_rest_api import ( NEW_PROJECT, 
REPLACE_PROJECT_ON_MODIFIED, @@ -42,7 +42,7 @@ @pytest.fixture -async def context_with_logged_user(client: TestClient, logged_user: AUserDict): +async def context_with_logged_user(client: TestClient, logged_user: UserInfoDict): yield diff --git a/services/web/server/tests/unit/with_dbs/10/test_resource_manager.py b/services/web/server/tests/unit/with_dbs/10/test_resource_manager.py index 0adba76714f..89f08336968 100644 --- a/services/web/server/tests/unit/with_dbs/10/test_resource_manager.py +++ b/services/web/server/tests/unit/with_dbs/10/test_resource_manager.py @@ -1,7 +1,6 @@ -# pylint:disable=unused-variable -# pylint:disable=unused-argument -# pylint:disable=redefined-outer-name - +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable import asyncio import json @@ -9,7 +8,7 @@ from asyncio import Future from copy import deepcopy from pathlib import Path -from typing import Any, Callable, Dict +from typing import Any, AsyncIterable, Callable, Dict from unittest import mock from unittest.mock import call @@ -17,14 +16,14 @@ import socketio import socketio.exceptions import sqlalchemy as sa -from _helpers import MockedStorageSubsystem # type: ignore from aiohttp import web from aiohttp.test_utils import TestClient from aioredis import Redis -from aioresponses import aioresponses from pytest_mock.plugin import MockerFixture +from pytest_simcore.aioresponses_mocker import AioResponsesMock from pytest_simcore.helpers.utils_assert import assert_status from pytest_simcore.helpers.utils_projects import NewProject +from pytest_simcore.helpers.utils_webserver_projects import MockedStorageSubsystem from servicelib.aiohttp.application import create_safe_application from servicelib.aiohttp.application_setup import is_setup_completed from simcore_service_webserver import garbage_collector_core @@ -35,9 +34,10 @@ from simcore_service_webserver.director_v2 import setup_director_v2 from 
simcore_service_webserver.login.plugin import setup_login from simcore_service_webserver.projects.plugin import setup_projects +from simcore_service_webserver.projects.project_models import ProjectDict from simcore_service_webserver.projects.projects_api import ( delete_project, - remove_project_interactive_services, + remove_project_dynamic_services, ) from simcore_service_webserver.projects.projects_exceptions import ProjectNotFoundError from simcore_service_webserver.resource_manager.plugin import setup_resource_manager @@ -140,7 +140,7 @@ def client( @pytest.fixture def mock_delete_data_folders_for_project(mocker): mocker.patch( - "simcore_service_webserver.projects.projects_api.delete_data_folders_of_project", + "simcore_service_webserver.projects._core_delete.delete_data_folders_of_project", return_value=None, ) @@ -158,7 +158,7 @@ async def empty_user_project( empty_project, logged_user, tests_data_dir: Path, -) -> Dict[str, Any]: +) -> AsyncIterable[ProjectDict]: project = empty_project() async with NewProject( project, client.app, user_id=logged_user["id"], tests_data_dir=tests_data_dir @@ -174,7 +174,7 @@ async def empty_user_project2( empty_project, logged_user, tests_data_dir: Path, -) -> Dict[str, Any]: +) -> AsyncIterable[ProjectDict]: project = empty_project() async with NewProject( project, client.app, user_id=logged_user["id"], tests_data_dir=tests_data_dir @@ -184,11 +184,6 @@ async def empty_user_project2( print("<----- removed project", project["name"]) -@pytest.fixture(autouse=True) -async def director_v2_mock(director_v2_service_mock) -> aioresponses: - return director_v2_service_mock - - # TESTS ----------------------------------------------------------------------------- @@ -336,7 +331,8 @@ async def test_websocket_disconnected_after_logout( expected, mocker: MockerFixture, ): - app = client.server.app + assert client.app + app = client.app socket_registry = get_registry(app) # connect first socket @@ -396,13 +392,14 @@ async def 
test_interactive_services_removed_after_logout( logged_user: Dict[str, Any], empty_user_project: Dict[str, Any], mocked_director_v2_api: Dict[str, mock.MagicMock], - create_dynamic_service_mock, + create_dynamic_service_mock: Callable, client_session_id_factory: Callable[[], str], socketio_client_factory: Callable, storage_subsystem_mock: MockedStorageSubsystem, # when guest user logs out garbage is collected - director_v2_service_mock: aioresponses, + director_v2_service_responses_mock: AioResponsesMock, expected_save_state: bool, ): + assert client.app # login - logged_user fixture # create empty study - empty_user_project fixture # create dynamic service - create_dynamic_service_mock fixture @@ -436,8 +433,10 @@ async def test_interactive_services_removed_after_logout( service["service_uuid"], expected_save_state, ) - mocked_director_v2_api["director_v2_core.stop_service"].assert_awaited_with( - app=client.server.app, + mocked_director_v2_api[ + "director_v2_core.stop_dynamic_service" + ].assert_awaited_with( + app=client.app, service_uuid=service["service_uuid"], save_state=expected_save_state, ) @@ -459,6 +458,7 @@ async def test_interactive_services_remain_after_websocket_reconnection_from_2_t create_dynamic_service_mock, socketio_client_factory: Callable, client_session_id_factory: Callable[[], str], + director_v2_service_responses_mock: AioResponsesMock, storage_subsystem_mock, # when guest user logs out garbage is collected expected_save_state: bool, mocker: MockerFixture, @@ -517,19 +517,19 @@ async def test_interactive_services_remain_after_websocket_reconnection_from_2_t await garbage_collector_core.collect_garbage(client.app) # assert dynamic service is still around - mocked_director_v2_api["director_v2_api.stop_service"].assert_not_called() + mocked_director_v2_api["director_v2_api.stop_dynamic_service"].assert_not_called() # disconnect second websocket await sio2.disconnect() assert not sio2.sid # assert dynamic service is still around for now - 
mocked_director_v2_api["director_v2_api.stop_service"].assert_not_called() + mocked_director_v2_api["director_v2_api.stop_dynamic_service"].assert_not_called() # reconnect websocket sio2 = await socketio_client_factory(client_session_id2) # it should still be there even after waiting for auto deletion from garbage collector await asyncio.sleep(SERVICE_DELETION_DELAY + 1) await garbage_collector_core.collect_garbage(client.app) - mocked_director_v2_api["director_v2_api.stop_service"].assert_not_called() + mocked_director_v2_api["director_v2_api.stop_dynamic_service"].assert_not_called() # now really disconnect await sio2.disconnect() assert not sio2.sid @@ -547,7 +547,9 @@ async def test_interactive_services_remain_after_websocket_reconnection_from_2_t service_uuid=service["service_uuid"], ) ] - mocked_director_v2_api["director_v2_core.stop_service"].assert_has_calls(calls) + mocked_director_v2_api["director_v2_core.stop_dynamic_service"].assert_has_calls( + calls + ) @pytest.fixture @@ -581,75 +583,87 @@ async def test_interactive_services_removed_per_project( socketio_client_factory: Callable, client_session_id_factory: Callable[[], str], asyncpg_storage_system_mock, + director_v2_service_responses_mock: AioResponsesMock, storage_subsystem_mock, # when guest user logs out garbage is collected expected_save_state: bool, ): - # create server with delay set to DELAY - # login - logged_user fixture - # create empty study1 in project1 - empty_user_project fixture - # create empty study2 in project2- empty_user_project2 fixture - # service1 in project1 = await create_dynamic_service_mock(logged_user["id"], empty_user_project["uuid"]) - # service2 in project2 = await create_dynamic_service_mock(logged_user["id"], empty_user_project["uuid"]) - # service3 in project2 = await create_dynamic_service_mock(logged_user["id"], empty_user_project["uuid"]) - service1 = await create_dynamic_service_mock( + # logged_user fixture and two projects: + # service1 in project1 + service_1 
= await create_dynamic_service_mock( logged_user["id"], empty_user_project["uuid"] ) - service2 = await create_dynamic_service_mock( + # service2 and service3 in project2 + service_2 = await create_dynamic_service_mock( logged_user["id"], empty_user_project2["uuid"] ) - service3 = await create_dynamic_service_mock( + service_3 = await create_dynamic_service_mock( logged_user["id"], empty_user_project2["uuid"] ) - # create websocket1 from tab1 + + # create websocket1 to emulate tab1 with project1 open session client_session_id1 = client_session_id_factory() sio1 = await socketio_client_factory(client_session_id1) await open_project(client, empty_user_project["uuid"], client_session_id1) - # create websocket2 from tab2 + + # create websocket2 to emulate tab2 with project2 open session client_session_id2 = client_session_id_factory() sio2 = await socketio_client_factory(client_session_id2) await open_project(client, empty_user_project2["uuid"], client_session_id2) - # disconnect websocket1 + + # ----- + # (1) disconnect websocket1 (session w/ project1) await sio1.disconnect() assert not sio1.sid + # assert dynamic service is still around - mocked_director_v2_api["director_v2_api.stop_service"].assert_not_called() - # wait the defined delay + mocked_director_v2_api["director_v2_api.stop_dynamic_service"].assert_not_called() + + # wait the defined delay so the TTL expires (not updated by socket) and then run GC await asyncio.sleep(SERVICE_DELETION_DELAY + 1) await garbage_collector_core.collect_garbage(client.app) + # assert dynamic service 1 is removed calls = [ call( app=client.server.app, - service_uuid=service1["service_uuid"], + service_uuid=service_1["service_uuid"], save_state=expected_save_state, ) ] - mocked_director_v2_api["director_v2_core.stop_service"].assert_has_calls(calls) - mocked_director_v2_api["director_v2_core.stop_service"].reset_mock() + mocked_director_v2_api["director_v2_core.stop_dynamic_service"].assert_has_calls( + calls + ) + 
mocked_director_v2_api["director_v2_core.stop_dynamic_service"].reset_mock() - # disconnect websocket2 + # ---- + # (2) disconnect websocket2 (session w/ project2) await sio2.disconnect() assert not sio2.sid + # assert dynamic services are still around - mocked_director_v2_api["director_v2_core.stop_service"].assert_not_called() - # wait the defined delay + mocked_director_v2_api["director_v2_core.stop_dynamic_service"].assert_not_called() + + # wait the defined delay so the TTL expires (not updated by socket) and then run GC await asyncio.sleep(SERVICE_DELETION_DELAY + 1) await garbage_collector_core.collect_garbage(client.app) + # assert dynamic service 2,3 is removed calls = [ call( app=client.server.app, - service_uuid=service2["service_uuid"], + service_uuid=service_2["service_uuid"], save_state=expected_save_state, ), call( app=client.server.app, - service_uuid=service3["service_uuid"], + service_uuid=service_3["service_uuid"], save_state=expected_save_state, ), ] - mocked_director_v2_api["director_v2_core.stop_service"].assert_has_calls(calls) - mocked_director_v2_api["director_v2_core.stop_service"].reset_mock() + mocked_director_v2_api["director_v2_core.stop_dynamic_service"].assert_has_calls( + calls + ) + mocked_director_v2_api["director_v2_core.stop_dynamic_service"].reset_mock() @pytest.mark.xfail( @@ -696,13 +710,13 @@ async def test_services_remain_after_closing_one_out_of_two_tabs( await asyncio.sleep(SERVICE_DELETION_DELAY + 1) await garbage_collector_core.collect_garbage(client.app) # assert dynamic service is still around - mocked_director_v2_api["director_v2_api.stop_service"].assert_not_called() + mocked_director_v2_api["director_v2_api.stop_dynamic_service"].assert_not_called() # close project in tab2 await close_project(client, empty_user_project["uuid"], client_session_id2) # wait the defined delay await asyncio.sleep(SERVICE_DELETION_DELAY + 1) await garbage_collector_core.collect_garbage(client.app) - 
mocked_director_v2_api["director_v2_api.stop_service"].assert_has_calls( + mocked_director_v2_api["director_v2_api.stop_dynamic_service"].assert_has_calls( [call(client.server.app, service["service_uuid"], expected_save_state)] ) @@ -717,14 +731,15 @@ async def test_services_remain_after_closing_one_out_of_two_tabs( ) async def test_websocket_disconnected_remove_or_maintain_files_based_on_role( client, + user_role, logged_user, empty_user_project, mocked_director_v2_api, - create_dynamic_service_mock, + create_dynamic_service_mock: Callable, client_session_id_factory: Callable[[], str], socketio_client_factory: Callable, - # asyncpg_storage_system_mock, - storage_subsystem_mock, # when guest user logs out garbage is collected + director_v2_service_responses_mock: AioResponsesMock, + storage_subsystem_mock: MockedStorageSubsystem, # when guest user logs out garbage is collected expect_call: bool, expected_save_state: bool, ): @@ -734,19 +749,20 @@ async def test_websocket_disconnected_remove_or_maintain_files_based_on_role( service = await create_dynamic_service_mock( logged_user["id"], empty_user_project["uuid"] ) + # create websocket client_session_id1 = client_session_id_factory() sio: socketio.AsyncClient = await socketio_client_factory(client_session_id1) + # open project in client 1 await open_project(client, empty_user_project["uuid"], client_session_id1) + # logout logout_url = client.app.router["auth_logout"].url_for() r = await client.post(logout_url, json={"client_session_id": client_session_id1}) assert r.url_obj.path == logout_url.path await assert_status(r, web.HTTPOk) - # ensure sufficient time is wasted here - await asyncio.sleep(SERVICE_DELETION_DELAY + 1) await garbage_collector_core.collect_garbage(client.app) # assert dynamic service is removed @@ -757,52 +773,63 @@ async def test_websocket_disconnected_remove_or_maintain_files_based_on_role( service_uuid=service["service_uuid"], ) ] - 
mocked_director_v2_api["director_v2_core.stop_service"].assert_has_calls(calls) + mocked_director_v2_api["director_v2_core.stop_dynamic_service"].assert_has_calls( + calls + ) - # this call is done async, so wait a bit here to ensure it is correctly done + # Deleting a project is fire&forget, we retry to ensure it is done! async for attempt in AsyncRetrying(reraise=True, stop=stop_after_delay(10)): with attempt: if expect_call: # make sure `delete_project` is called - storage_subsystem_mock[1].assert_called_once() + storage_subsystem_mock.delete_project.assert_called_once() # make sure `delete_user` is called # asyncpg_storage_system_mock.assert_called_once() else: # make sure `delete_project` not called - storage_subsystem_mock[1].assert_not_called() + storage_subsystem_mock.delete_project.assert_not_called() # make sure `delete_user` not called # asyncpg_storage_system_mock.assert_not_called() @pytest.mark.parametrize("user_role", [UserRole.USER, UserRole.TESTER, UserRole.GUEST]) async def test_regression_removing_unexisting_user( + loop, client, logged_user, empty_user_project, + mocked_director_v2_api, # mocks calls to director_v2 client user_role, mock_delete_data_folders_for_project, + director_v2_service_responses_mock: AioResponsesMock, ) -> None: # regression test for https://github.com/ITISFoundation/osparc-simcore/issues/2504 - # remove project - await delete_project( - app=client.server.app, - project_uuid=empty_user_project["uuid"], - user_id=logged_user["id"], + spawned_tail_task = asyncio.create_task( + delete_project( + app=client.server.app, + project_uuid=empty_user_project["uuid"], + user_id=logged_user["id"], + ) ) - # remove user - await delete_user(app=client.server.app, user_id=logged_user["id"]) - with pytest.raises(UserNotFoundError): - await remove_project_interactive_services( + await spawned_tail_task + assert spawned_tail_task.done() + assert not spawned_tail_task.exception() + + with pytest.raises(ProjectNotFoundError): + await 
remove_project_dynamic_services( user_id=logged_user["id"], project_uuid=empty_user_project["uuid"], app=client.server.app, ) - with pytest.raises(ProjectNotFoundError): - await remove_project_interactive_services( + + # remove user + await delete_user(app=client.server.app, user_id=logged_user["id"]) + + with pytest.raises(UserNotFoundError): + await remove_project_dynamic_services( user_id=logged_user["id"], project_uuid=empty_user_project["uuid"], app=client.server.app, - user_name={"first_name": "my name is", "last_name": "pytest"}, ) diff --git a/services/web/server/tests/unit/with_dbs/10/version_control/conftest.py b/services/web/server/tests/unit/with_dbs/10/version_control/conftest.py index 876c90ac1eb..4e2776f1d6d 100644 --- a/services/web/server/tests/unit/with_dbs/10/version_control/conftest.py +++ b/services/web/server/tests/unit/with_dbs/10/version_control/conftest.py @@ -15,7 +15,7 @@ from models_library.projects import ProjectID from models_library.users import UserID from pytest_simcore.helpers.rawdata_fakers import random_project -from pytest_simcore.helpers.utils_login import AUserDict +from pytest_simcore.helpers.utils_login import UserInfoDict from pytest_simcore.helpers.utils_projects import NewProject from simcore_postgres_database.models.projects_version_control import ( projects_vc_repos, @@ -132,7 +132,7 @@ def app_cfg( @pytest.fixture -async def user_id(logged_user: AUserDict) -> UserID: +async def user_id(logged_user: UserInfoDict) -> UserID: return logged_user["id"] @@ -164,7 +164,7 @@ async def user_project( @pytest.fixture def do_update_user_project( - logged_user: AUserDict, client: TestClient, faker: Faker + logged_user: UserInfoDict, client: TestClient, faker: Faker ) -> Callable[[UUID], Awaitable]: async def _doit(project_uuid: UUID) -> None: resp: aiohttp.ClientResponse = await client.get(f"{VX}/projects/{project_uuid}") @@ -191,13 +191,15 @@ async def _doit(project_uuid: UUID) -> None: @pytest.fixture def do_delete_user_project( 
- logged_user: AUserDict, client: TestClient, mocker + logged_user: UserInfoDict, client: TestClient, mocker ) -> Callable[[UUID], Awaitable]: mocker.patch( - "simcore_service_webserver.projects.projects_api.director_v2_api.delete_pipeline", + "simcore_service_webserver.projects._core_delete.director_v2_api.delete_pipeline", + spec=True, ) mocker.patch( - "simcore_service_webserver.projects.projects_api.delete_data_folders_of_project", + "simcore_service_webserver.projects._core_delete.delete_data_folders_of_project", + spec=True, ) async def _doit(project_uuid: UUID) -> None: diff --git a/services/web/server/tests/unit/with_dbs/conftest.py b/services/web/server/tests/unit/with_dbs/conftest.py index 8071262e5ae..4437c02843c 100644 --- a/services/web/server/tests/unit/with_dbs/conftest.py +++ b/services/web/server/tests/unit/with_dbs/conftest.py @@ -12,6 +12,7 @@ import asyncio import sys import textwrap +from collections import defaultdict from copy import deepcopy from pathlib import Path from typing import AsyncIterator, Callable, Dict, Iterator, List @@ -25,12 +26,12 @@ import simcore_service_webserver.db_models as orm import simcore_service_webserver.utils import sqlalchemy as sa -from _helpers import MockedStorageSubsystem # type: ignore from _pytest.monkeypatch import MonkeyPatch from aiohttp import web from aiohttp.test_utils import TestClient, TestServer from pytest_simcore.helpers.utils_dict import ConfigDict from pytest_simcore.helpers.utils_login import NewUser +from pytest_simcore.helpers.utils_webserver_projects import MockedStorageSubsystem from servicelib.aiohttp.application_keys import APP_DB_ENGINE_KEY from servicelib.common_aiopg_utils import DSN from simcore_service_webserver._constants import INDEX_RESOURCE_NAME @@ -184,15 +185,6 @@ def add_index_route(app: web.Application) -> None: return add_index_route -@pytest.fixture -def computational_system_mock(mocker): - mock_fun = mocker.patch( - 
"simcore_service_webserver.projects.projects_handlers.update_pipeline_db", - return_value="", - ) - return mock_fun - - @pytest.fixture async def storage_subsystem_mock(mocker) -> MockedStorageSubsystem: """ @@ -204,18 +196,18 @@ async def storage_subsystem_mock(mocker) -> MockedStorageSubsystem: async def _mock_copy_data_from_project(*args): return args[2] - mock = mocker.patch( - "simcore_service_webserver.projects.projects_handlers.copy_data_folders_from_project", - autospec=True, - side_effect=_mock_copy_data_from_project, - ) - - async_mock = mocker.AsyncMock(return_value="") - mock1 = mocker.patch( - "simcore_service_webserver.projects.projects_handlers.projects_api.delete_data_folders_of_project", - side_effect=async_mock, + return MockedStorageSubsystem( + copy_data_folders_from_project=mocker.patch( + "simcore_service_webserver.projects.projects_handlers_crud.copy_data_folders_from_project", + autospec=True, + side_effect=_mock_copy_data_from_project, + ), + delete_project=mocker.patch( + "simcore_service_webserver.projects._core_delete.delete_data_folders_of_project", + autospec=True, + return_value="", + ), ) - return MockedStorageSubsystem(mock, mock1) @pytest.fixture @@ -236,19 +228,23 @@ async def mocked_director_v2_api(mocker) -> Dict[str, MagicMock]: # via the director_v2_api or director_v2_core modules # for func_name in ( - "get_service_state", - "get_services", - "start_service", - "stop_service", + "get_dynamic_service_state", + "get_dynamic_services", + "start_dynamic_service", + "stop_dynamic_service", ): - for mod_name in ("director_v2_api", "director_v2_core"): + for mod_name in ( + "director_v2_api", + "director_v2_core", + # "projects._core_nodes.director_v2_api", + # "garbage_collector_core.director_v2_api", + ): name = f"{mod_name}.{func_name}" mock[name] = mocker.patch( f"simcore_service_webserver.{name}", - autospec=True, + autospec=True, # errors if functions renamed or moved to another module while refactoring return_value={}, ) - 
return mock @@ -256,20 +252,27 @@ async def mocked_director_v2_api(mocker) -> Dict[str, MagicMock]: def create_dynamic_service_mock( client: TestClient, mocked_director_v2_api: Dict ) -> Callable: - services = [] + """overrides director_v2_api.get_dynamic_services and returns created services""" + + returned_dynamic_services = defaultdict(list) - async def create(user_id, project_id) -> Dict: + async def _get(app, user_id, project_id): + assert app + return returned_dynamic_services[(user_id, project_id)] + + async def _create(user_id, project_id) -> Dict: SERVICE_UUID = str(uuid4()) SERVICE_KEY = "simcore/services/dynamic/3d-viewer" SERVICE_VERSION = "1.4.2" + + # TODO: not sure what is url and create_node_data for?? url = client.app.router["create_node"].url_for(project_id=project_id) create_node_data = { "service_key": SERVICE_KEY, "service_version": SERVICE_VERSION, "service_uuid": SERVICE_UUID, } - - running_service_dict = { + running_service = { "published_port": "23423", "service_uuid": SERVICE_UUID, "service_key": SERVICE_KEY, @@ -279,13 +282,17 @@ async def create(user_id, project_id) -> Dict: "service_state": "some_service_state", } - services.append(running_service_dict) + returned_dynamic_services[(user_id, project_id)].append(running_service) + # reset the future or an invalidStateError will appear as set_result sets the future to done - mocked_director_v2_api["director_v2_api.get_services"].return_value = services - mocked_director_v2_api["director_v2_core.get_services"].return_value = services - return running_service_dict + for mod_name in ("director_v2_api", "director_v2_core"): + mocked_director_v2_api[ + f"{mod_name}.get_dynamic_services" + ].side_effect = _get + + return running_service - return create + return _create # POSTGRES CORE SERVICE ---------------------------------------------------