diff --git a/services/api-server/requirements/_test.txt b/services/api-server/requirements/_test.txt index da0692f1276..66aab598d61 100644 --- a/services/api-server/requirements/_test.txt +++ b/services/api-server/requirements/_test.txt @@ -235,6 +235,7 @@ pyyaml==5.4.1 # responses requests==2.31.0 # via + # -c requirements/_base.txt # docker # moto # responses diff --git a/services/catalog/requirements/_test.txt b/services/catalog/requirements/_test.txt index a15d6803fd8..e30e57b8aeb 100644 --- a/services/catalog/requirements/_test.txt +++ b/services/catalog/requirements/_test.txt @@ -142,7 +142,9 @@ python-dateutil==2.8.2 # -c requirements/_base.txt # faker requests==2.31.0 - # via docker + # via + # -c requirements/_base.txt + # docker respx==0.20.1 # via -r requirements/_test.in six==1.15.0 diff --git a/services/dask-sidecar/requirements/_base.txt b/services/dask-sidecar/requirements/_base.txt index 8c9a9b18108..2ca018c3d71 100644 --- a/services/dask-sidecar/requirements/_base.txt +++ b/services/dask-sidecar/requirements/_base.txt @@ -4,9 +4,9 @@ # # pip-compile --output-file=requirements/_base.txt --resolver=backtracking --strip-extras requirements/_base.in # -aio-pika==9.0.4 +aio-pika==9.0.7 # via -r requirements/../../../packages/service-library/requirements/_base.in -aiobotocore==2.4.2 +aiobotocore==2.5.0 # via s3fs aiodebug==2.3.0 # via -r requirements/../../../packages/service-library/requirements/_base.in @@ -26,7 +26,7 @@ aiohttp==3.8.4 # s3fs aioitertools==0.11.0 # via aiobotocore -aiormq==6.7.3 +aiormq==6.7.6 # via aio-pika aiosignal==1.3.1 # via aiohttp @@ -47,9 +47,9 @@ blosc==1.11.1 # via -r requirements/_base.in bokeh==2.4.3 # via dask -botocore==1.27.59 +botocore==1.29.76 # via aiobotocore -certifi==2022.12.7 +certifi==2023.5.7 # via requests charset-normalizer==3.1.0 # via @@ -65,7 +65,7 @@ cloudpickle==2.2.1 # via # dask # distributed -dask==2023.3.0 +dask==2023.3.2 # via # -r requirements/../../../packages/dask-task-models-library/requirements/_base.in # -r requirements/_base.in @@ -73,30 +73,30 @@ dask==2023.3.0 # distributed dask-gateway==2023.1.1 # via -r requirements/_base.in -distributed==2023.3.0 +distributed==2023.3.2 # via # dask # dask-gateway dnspython==2.3.0 # via email-validator -email-validator==1.3.1 +email-validator==2.0.0.post2 # via pydantic frozenlist==1.3.3 # via # aiohttp # aiosignal -fsspec==2023.3.0 +fsspec==2023.5.0 # via # -r requirements/_base.in # dask # s3fs -heapdict==1.0.1 - # via zict idna==3.4 # via # email-validator # requests # yarl +importlib-metadata==6.6.0 + # via dask jinja2==3.1.2 # via # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt @@ -123,22 +123,22 @@ multidict==6.0.4 # via # aiohttp # yarl -numpy==1.24.2 +numpy==1.24.3 # via bokeh -packaging==23.0 +packaging==23.1 # via # bokeh # dask # distributed pamqp==3.2.1 # via aiormq -partd==1.3.0 +partd==1.4.0 # via dask -pillow==9.4.0 +pillow==9.5.0 # via bokeh -psutil==5.9.4 +psutil==5.9.5 # via distributed -pydantic==1.10.2 +pydantic==1.10.8 # via # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/_base.in @@ -160,13 +160,13 @@ pyyaml==5.4.1 # dask # dask-gateway # distributed -redis==4.5.4 +redis==4.5.5 # via # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_base.in requests==2.31.0 # via fsspec -s3fs==2023.3.0 
+s3fs==2023.5.0 # via fsspec six==1.16.0 # via @@ -183,22 +183,23 @@ toolz==0.12.0 # dask # distributed # partd -tornado==6.2 +tornado==6.3.2 # via # bokeh # dask-gateway # distributed tqdm==4.65.0 # via -r requirements/../../../packages/service-library/requirements/_base.in -typer==0.7.0 +typer==0.9.0 # via -r requirements/../../../packages/settings-library/requirements/_base.in -typing-extensions==4.5.0 +typing-extensions==4.6.1 # via # aiodebug # aiodocker # bokeh # pydantic -urllib3==1.26.14 + # typer +urllib3==1.26.16 # via # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # botocore @@ -206,13 +207,15 @@ urllib3==1.26.14 # requests wrapt==1.15.0 # via aiobotocore -yarl==1.8.2 +yarl==1.9.2 # via # aio-pika # aiohttp # aiormq -zict==2.2.0 +zict==3.0.0 # via distributed +zipp==3.15.0 + # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/services/dask-sidecar/requirements/_dask-distributed.txt b/services/dask-sidecar/requirements/_dask-distributed.txt index 3b0ff143e02..08e1a262cbd 100644 --- a/services/dask-sidecar/requirements/_dask-distributed.txt +++ b/services/dask-sidecar/requirements/_dask-distributed.txt @@ -16,22 +16,22 @@ cloudpickle==2.2.1 # -c requirements/./_base.txt # dask # distributed -dask==2023.3.0 +dask==2023.3.2 # via # -r requirements/_dask-distributed.in # distributed -distributed==2023.3.0 +distributed==2023.3.2 # via # -c requirements/./_base.txt # dask -fsspec==2023.3.0 +fsspec==2023.5.0 # via # -c requirements/./_base.txt # dask -heapdict==1.0.1 +importlib-metadata==6.6.0 # via # -c requirements/./_base.txt - # zict + # dask jinja2==3.1.2 # via # -c requirements/./_base.txt @@ -51,16 +51,16 @@ msgpack==1.0.5 # via # -c requirements/./_base.txt # distributed -packaging==23.0 +packaging==23.1 # via # -c requirements/./_base.txt # dask # distributed -partd==1.3.0 +partd==1.4.0 # via # -c requirements/./_base.txt # dask -psutil==5.9.4 +psutil==5.9.5 # via # -c requirements/./_base.txt # distributed @@ -83,15 +83,19 @@ toolz==0.12.0 # dask # distributed # partd -tornado==6.2 +tornado==6.3.2 # via # -c requirements/./_base.txt # distributed -urllib3==1.26.14 +urllib3==1.26.16 # via # -c requirements/./_base.txt # distributed -zict==2.2.0 +zict==3.0.0 # via # -c requirements/./_base.txt # distributed +zipp==3.15.0 + # via + # -c requirements/./_base.txt + # importlib-metadata diff --git a/services/dask-sidecar/requirements/_test.txt b/services/dask-sidecar/requirements/_test.txt index b4954989351..16bfa0e100f 100644 --- a/services/dask-sidecar/requirements/_test.txt +++ b/services/dask-sidecar/requirements/_test.txt @@ -23,24 +23,24 @@ attrs==21.4.0 # jschema-to-python # jsonschema # sarif-om -aws-sam-translator==1.66.0 +aws-sam-translator==1.67.0 # via cfn-lint aws-xray-sdk==2.12.0 # via moto blinker==1.6.2 # via flask -boto3==1.24.59 +boto3==1.26.76 # via # aws-sam-translator # moto -botocore==1.27.59 +botocore==1.29.76 # via # -c requirements/_base.txt # aws-xray-sdk # boto3 # moto # s3transfer -certifi==2022.12.7 +certifi==2023.5.7 # via # -c requirements/_base.txt # requests @@ -57,7 +57,7 @@ click==8.1.3 # via # -c requirements/_base.txt # flask -coverage==7.2.5 +coverage==7.2.6 # via # -r requirements/_test.in # pytest-cov @@ -79,7 +79,7 @@ ecdsa==0.18.0 # sshpubkeys exceptiongroup==1.1.1 # via pytest -faker==18.7.0 +faker==18.9.0 # via -r requirements/_test.in flask==2.3.2 # via @@ -139,7 +139,7 @@ markupsafe==2.1.2 # -c 
requirements/_base.txt
     #   jinja2
     #   werkzeug
-moto==4.1.9
+moto==4.1.10
     # via -r requirements/_test.in
 mpmath==1.3.0
     # via sympy
@@ -154,7 +154,7 @@ openapi-schema-validator==0.2.3
     # via openapi-spec-validator
 openapi-spec-validator==0.4.0
     # via moto
-packaging==23.0
+packaging==23.1
     # via
     #   -c requirements/_base.txt
     #   docker
@@ -176,7 +176,7 @@ pyasn1==0.5.0
     #   rsa
 pycparser==2.21
     # via cffi
-pydantic==1.10.2
+pydantic==1.10.8
     # via
     #   -c requirements/_base.txt
     #   aws-sam-translator
@@ -205,7 +205,7 @@ pytest-aiohttp==1.0.4
     # via -r requirements/_test.in
 pytest-asyncio==0.21.0
     # via pytest-aiohttp
-pytest-cov==4.0.0
+pytest-cov==4.1.0
     # via -r requirements/_test.in
 pytest-icdiff==0.6
     # via -r requirements/_test.in
@@ -238,6 +238,7 @@ regex==2023.5.5
     # via cfn-lint
 requests==2.31.0
     # via
+    #   -c requirements/_base.txt
     #   docker
     #   moto
     #   responses
@@ -269,21 +270,21 @@ tomli==2.0.1
     # via
     #   coverage
     #   pytest
-types-pyyaml==6.0.12.9
+types-pyyaml==6.0.12.10
     # via responses
-typing-extensions==4.5.0
+typing-extensions==4.6.1
     # via
     #   -c requirements/_base.txt
     #   aws-sam-translator
     #   pydantic
-urllib3==1.26.14
+urllib3==1.26.16
     # via
     #   -c requirements/_base.txt
     #   botocore
     #   docker
     #   requests
     #   responses
-websocket-client==1.5.1
+websocket-client==1.5.2
     # via docker
 werkzeug==2.3.4
     # via
@@ -295,7 +296,7 @@ wrapt==1.15.0
     #   aws-xray-sdk
 xmltodict==0.13.0
     # via moto
-yarl==1.8.2
+yarl==1.9.2
     # via
     #   -c requirements/_base.txt
     #   aiohttp
diff --git a/services/dask-sidecar/requirements/_tools.txt b/services/dask-sidecar/requirements/_tools.txt
index 8784491cefa..4680706b624 100644
--- a/services/dask-sidecar/requirements/_tools.txt
+++ b/services/dask-sidecar/requirements/_tools.txt
@@ -39,7 +39,7 @@ mypy-extensions==1.0.0
     # via black
 nodeenv==1.8.0
     # via pre-commit
-packaging==23.0
+packaging==23.1
     # via
     #   -c requirements/_test.txt
     #   black
@@ -53,7 +53,7 @@ platformdirs==3.5.1
     #   black
     #   pylint
     #   virtualenv
-pre-commit==3.3.1
+pre-commit==3.3.2
     # via -r requirements/../../../requirements/devenv.txt
 pylint==2.17.4
     # via -r requirements/../../../requirements/devenv.txt
@@ -73,7 +73,7 @@ tomli==2.0.1
     #   pyproject-hooks
 tomlkit==0.11.8
     # via pylint
-typing-extensions==4.5.0
+typing-extensions==4.6.1
     # via
     #   -c requirements/_test.txt
     #   astroid
diff --git a/services/dask-sidecar/requirements/constraints.txt b/services/dask-sidecar/requirements/constraints.txt
index b8767f58a98..9df4d9f629e 100644
--- a/services/dask-sidecar/requirements/constraints.txt
+++ b/services/dask-sidecar/requirements/constraints.txt
@@ -5,7 +5,7 @@
 #
 # Breaking changes
 #
-
+dask[distributed]<2023.4 # issue with publish_dataset: https://github.com/dask/distributed/issues/7859
 #
 # Bugs
 #
diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/constants.py b/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/constants.py
index fa2a269e1c7..76c80410960 100644
--- a/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/constants.py
+++ b/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/constants.py
@@ -4,7 +4,7 @@
 LEGACY_SERVICE_LOG_FILE_NAME: Final[str] = "log.dat"
 PARSE_LOG_INTERVAL_S: Final[float] = 0.5
 
-DOCKER_LOG_REGEXP: re.Pattern[str] = re.compile(
+DOCKER_LOG_REGEXP_WITH_TIMESTAMP: re.Pattern[str] = re.compile(
     r"^(?P<timestamp>(?:(\d{4}-\d{2}-\d{2})T(\d{2}:\d{2}:\d{2}(?:\.\d+)?))(Z|[\+-]\d{2}:\d{2})?)"
     r"\s(?P<log>.*)$"
 )
diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/core.py
b/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/core.py index 785519be79f..57aa5e6f05a 100644 --- a/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/core.py +++ b/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/core.py @@ -167,16 +167,17 @@ async def _publish_sidecar_log( logger.log(log_level, log) async def run(self, command: list[str]) -> TaskOutputData: + # ensure we pass the initial logs and progress await self._publish_sidecar_log( f"Starting task for {self.service_key}:{self.service_version} on {socket.gethostname()}..." ) + self.task_publishers.publish_progress(0) settings = Settings.create_from_envs() run_id = f"{uuid4()}" async with Docker() as docker_client, TaskSharedVolumes( Path(f"{settings.SIDECAR_COMP_SERVICES_SHARED_FOLDER}/{run_id}") ) as task_volumes: - # PRE-PROCESSING await pull_image( docker_client, self.docker_auth, @@ -208,8 +209,7 @@ async def run(self, command: list[str]) -> TaskOutputData: container=container, service_key=self.service_key, service_version=self.service_version, - progress_pub=self.task_publishers.progress, - logs_pub=self.task_publishers.logs, + task_publishers=self.task_publishers, integration_version=integration_version, task_volumes=task_volumes, log_file_url=self.log_file_url, @@ -263,3 +263,5 @@ async def __aexit__( await self._publish_sidecar_log( "TIP: There might be more information in the service log file in the service outputs", ) + # ensure we pass the final progress + self.task_publishers.publish_progress(1) diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/docker_utils.py b/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/docker_utils.py index c9f50217ac9..3dd9c6475d2 100644 --- a/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/docker_utils.py +++ b/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/docker_utils.py @@ -1,6 +1,5 @@ import asyncio import contextlib -import datetime import json import logging import re @@ -16,12 +15,10 @@ from aiodocker.containers import DockerContainer from aiodocker.volumes import DockerVolume from dask_task_models_library.container_tasks.docker import DockerBasicAuth -from distributed.pubsub import Pub from models_library.services_resources import BootMode from packaging import version from pydantic import ByteSize from pydantic.networks import AnyUrl -from servicelib.docker_utils import to_datetime from servicelib.logging_utils import ( LogLevelInt, LogMessageStr, @@ -31,10 +28,10 @@ ) from settings_library.s3 import S3Settings -from ..dask_utils import LogType, publish_task_logs +from ..dask_utils import TaskPublisher from ..file_utils import push_file_to_remote from ..settings import Settings -from .constants import DOCKER_LOG_REGEXP, LEGACY_SERVICE_LOG_FILE_NAME, PROGRESS_REGEXP +from .constants import LEGACY_SERVICE_LOG_FILE_NAME, PROGRESS_REGEXP from .models import ( LEGACY_INTEGRATION_VERSION, ContainerHostConfig, @@ -127,78 +124,50 @@ async def managed_container( def _guess_progress_value(progress_match: re.Match[str]) -> float: - value: float = 0.0 - try: - # can be anything from "23 percent", 23%, 23/234, 0.0-1.0 - progress_str = progress_match.group("value") - if progress_match.group("percent_sign"): - # this is of the 23% kind - value = float(progress_str.split("%")[0].strip()) / 100.0 - elif progress_match.group("percent_explicit"): - # this is of the 23 percent kind - value = 
float(progress_str.split("percent")[0].strip()) / 100.0 - elif progress_match.group("fraction"): - # this is of the 23/123 kind - nums = progress_match.group("fraction").strip().split("/") - value = float(nums[0].strip()) / float(nums[1].strip()) - else: - # this is of the 0.0-1.0 kind - value = float(progress_str.strip()) - except ValueError: - logger.exception("Could not extract progress from log line %s", progress_match) - return value - - -async def _parse_line( + # can be anything from "23 percent", 23%, 23/234, 0.0-1.0 + value_str = progress_match.group("value") + if progress_match.group("percent_sign"): + # this is of the 23% kind + return float(value_str.split("%")[0].strip()) / 100.0 + if progress_match.group("percent_explicit"): + # this is of the 23 percent kind + return float(value_str.split("percent")[0].strip()) / 100.0 + if progress_match.group("fraction"): + # this is of the 23/123 kind + nums = progress_match.group("fraction").strip().split("/") + return float(nums[0].strip()) / float(nums[1].strip()) + # this is of the 0.0-1.0 kind + return float(value_str.strip()) + + +async def _try_parse_progress( line: str, -) -> tuple[LogType, datetime.datetime, LogMessageStr, LogLevelInt]: - match = re.search(DOCKER_LOG_REGEXP, line) - if not match: - # try to correct the log, it might be coming from an old comp service that does not put timestamps - corrected_line = f"{arrow.utcnow().datetime.isoformat()} {line}" - match = re.search(DOCKER_LOG_REGEXP, corrected_line) - if not match: - # default return as log - return ( - LogType.LOG, - arrow.utcnow().datetime, - f"{line}", - guess_message_log_level(line), - ) - - timestamp = to_datetime(match.group("timestamp")) - log = f"{match.group('log')}" - # now look for progress - if match := re.search(PROGRESS_REGEXP, log.lower()): - return ( - LogType.PROGRESS, - timestamp, - f"{_guess_progress_value(match):.2f}", - logging.INFO, - ) - - return (LogType.LOG, timestamp, log, guess_message_log_level(log)) - - -async def _publish_container_logs( - service_key: str, - service_version: str, - container: DockerContainer, - container_name: str, - progress_pub: Pub, - logs_pub: Pub, - log_type: LogType, - message: LogMessageStr, - log_level: LogLevelInt, +) -> float | None: + with log_catch(logger, reraise=False): + # pattern might be like "timestamp log" + log = line.strip("\n") + splitted_log = log.split(" ", maxsplit=1) + with contextlib.suppress(arrow.ParserError): + if len(splitted_log) == 2 and arrow.get(splitted_log[0]): + log = splitted_log[1] + if match := re.search(PROGRESS_REGEXP, log.lower()): + return _guess_progress_value(match) + + return None + + +async def _parse_and_publish_logs( + log_line: str, + *, + task_publishers: TaskPublisher, ) -> None: - return publish_task_logs( - progress_pub, - logs_pub, - log_type, - message_prefix=f"{service_key}:{service_version} - {container.id}{container_name}", - message=message, - log_level=log_level, - ) + progress_value = await _try_parse_progress(log_line) + if progress_value is not None: + task_publishers.publish_progress(progress_value) + else: + task_publishers.publish_logs( + message=log_line, log_level=guess_message_log_level(log_line) + ) async def _parse_container_log_file( @@ -206,8 +175,7 @@ async def _parse_container_log_file( service_key: str, service_version: str, container_name: str, - progress_pub: Pub, - logs_pub: Pub, + task_publishers: TaskPublisher, task_volumes: TaskSharedVolumes, log_file_url: AnyUrl, log_publishing_cb: LogPublishingCB, @@ -219,35 +187,29 @@ async def 
_parse_container_log_file(
         logging.DEBUG,
         "started monitoring of pre-1.0 service - using log file in /logs folder",
     ):
-        async with aiofiles.open(log_file, mode="r") as file_pointer:
+        async with aiofiles.open(log_file, mode="rt") as file_pointer:
             while (await container.show())["State"]["Running"]:
                 if line := await file_pointer.readline():
-                    log_type, _, message, log_level = await _parse_line(line)
-                    await _publish_container_logs(
-                        service_key=service_key,
-                        service_version=service_version,
-                        container=container,
-                        container_name=container_name,
-                        progress_pub=progress_pub,
-                        logs_pub=logs_pub,
-                        log_type=log_type,
-                        message=message,
-                        log_level=log_level,
+                    logger.info(
+                        "[%s]: %s",
+                        f"{service_key}:{service_version} - {container.id}{container_name}",
+                        line,
+                    )
+                    await _parse_and_publish_logs(
+                        line,
+                        task_publishers=task_publishers,
                     )
 
             # finish reading the logs if possible
             async for line in file_pointer:
-                log_type, _, message, log_level = await _parse_line(line)
-                await _publish_container_logs(
-                    service_key=service_key,
-                    service_version=service_version,
-                    container=container,
-                    container_name=container_name,
-                    progress_pub=progress_pub,
-                    logs_pub=logs_pub,
-                    log_type=log_type,
-                    message=message,
-                    log_level=log_level,
+                logger.info(
+                    "[%s]: %s",
+                    f"{service_key}:{service_version} - {container.id}{container_name}",
+                    line,
+                )
+                await _parse_and_publish_logs(
+                    line,
+                    task_publishers=task_publishers,
                 )
 
     # copy the log file to the log_file_url
@@ -261,8 +223,7 @@ async def _parse_container_docker_logs(
     service_key: str,
     service_version: str,
     container_name: str,
-    progress_pub: Pub,
-    logs_pub: Pub,
+    task_publishers: TaskPublisher,
     log_file_url: AnyUrl,
     log_publishing_cb: LogPublishingCB,
     s3_settings: S3Settings | None,
@@ -280,26 +241,18 @@
             async for log_line in cast(
                 AsyncGenerator[str, None],
                 container.log(
-                    stdout=True, stderr=True, follow=True, timestamp=True
+                    stdout=True, stderr=True, follow=True, timestamps=True
                 ),
             ):
+                logger.info(
+                    "[%s]: %s",
+                    f"{service_key}:{service_version} - {container.id}{container_name}",
+                    log_line,
+                )
                 await log_fp.write(log_line.encode("utf-8"))
-                (
-                    log_type,
-                    _latest_log_timestamp,
-                    message,
-                    log_level,
-                ) = await _parse_line(log_line)
-                await _publish_container_logs(
-                    service_key=service_key,
-                    service_version=service_version,
-                    container=container,
-                    container_name=container_name,
-                    progress_pub=progress_pub,
-                    logs_pub=logs_pub,
-                    log_type=log_type,
-                    message=message,
-                    log_level=log_level,
+                await _parse_and_publish_logs(
+                    log_line,
+                    task_publishers=task_publishers,
                 )
 
     # copy the log file to the log_file_url
@@ -308,12 +261,11 @@
 
 
-async def monitor_container_logs(
+async def _monitor_container_logs(
     container: DockerContainer,
     service_key: str,
     service_version: str,
-    progress_pub: Pub,
-    logs_pub: Pub,
+    task_publishers: TaskPublisher,
     integration_version: version.Version,
     task_volumes: TaskSharedVolumes,
     log_file_url: AnyUrl,
@@ -339,8 +291,7 @@
             service_key,
             service_version,
             container_name,
-            progress_pub,
-            logs_pub,
+            task_publishers,
             log_file_url,
             log_publishing_cb,
             s3_settings,
@@ -351,8 +302,7 @@
             service_key,
             service_version,
             container_name,
-            progress_pub,
-            logs_pub,
+            task_publishers,
             task_volumes,
             log_file_url,
             log_publishing_cb,
@@ -365,8 +315,7 @@ async def managed_monitor_container_log_task(
     container: DockerContainer,
     service_key: str,
     service_version: str,
-
progress_pub: Pub, - logs_pub: Pub, + task_publishers: TaskPublisher, integration_version: version.Version, task_volumes: TaskSharedVolumes, log_file_url: AnyUrl, @@ -381,12 +330,11 @@ async def managed_monitor_container_log_task( log_file.touch() monitoring_task = asyncio.shield( asyncio.create_task( - monitor_container_logs( + _monitor_container_logs( container, service_key, service_version, - progress_pub, - logs_pub, + task_publishers, integration_version, task_volumes, log_file_url, diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/dask_utils.py b/services/dask-sidecar/src/simcore_service_dask_sidecar/dask_utils.py index 1d769c91c16..1bf14129833 100644 --- a/services/dask-sidecar/src/simcore_service_dask_sidecar/dask_utils.py +++ b/services/dask-sidecar/src/simcore_service_dask_sidecar/dask_utils.py @@ -2,7 +2,6 @@ import contextlib import logging from dataclasses import dataclass, field -from enum import Enum from typing import AsyncIterator, Final import distributed @@ -15,8 +14,7 @@ from dask_task_models_library.container_tasks.io import TaskCancelEventName from distributed.worker import get_worker from distributed.worker_state_machine import TaskState -from servicelib.logging_utils import LogLevelInt, LogMessageStr -from servicelib.logging_utils import log_catch +from servicelib.logging_utils import LogLevelInt, LogMessageStr, log_catch logger = logging.getLogger(__name__) @@ -60,12 +58,32 @@ def get_current_task_resources() -> dict[str, float]: @dataclass() class TaskPublisher: progress: distributed.Pub = field(init=False) + _last_published_progress_value: float = -1 logs: distributed.Pub = field(init=False) - def __post_init__(self): + def __post_init__(self) -> None: self.progress = distributed.Pub(TaskProgressEvent.topic_name()) self.logs = distributed.Pub(TaskLogEvent.topic_name()) + def publish_progress(self, value: float) -> None: + rounded_value = round(value, ndigits=2) + if rounded_value > self._last_published_progress_value: + publish_event( + self.progress, + TaskProgressEvent.from_dask_worker(progress=rounded_value), + ) + self._last_published_progress_value = rounded_value + + def publish_logs( + self, + *, + message: LogMessageStr, + log_level: LogLevelInt, + ) -> None: + publish_event( + self.logs, TaskLogEvent.from_dask_worker(log=message, log_level=log_level) + ) + _TASK_ABORTION_INTERVAL_CHECK_S: int = 2 @@ -126,32 +144,6 @@ async def periodicaly_check_if_aborted(task_name: str) -> None: def publish_event(dask_pub: distributed.Pub, event: BaseTaskEvent) -> None: - dask_pub.put(event.json()) - - -class LogType(Enum): - LOG = 1 - PROGRESS = 2 - INSTRUMENTATION = 3 - - -def publish_task_logs( - progress_pub: distributed.Pub, - logs_pub: distributed.Pub, - log_type: LogType, - message_prefix: str, - message: LogMessageStr, - log_level: LogLevelInt, -) -> None: - logger.info("[%s - %s]: %s", message_prefix, log_type.name, message) + """never reraises, only CancellationError""" with log_catch(logger, reraise=False): - if log_type == LogType.PROGRESS: - publish_event( - progress_pub, - TaskProgressEvent.from_dask_worker(progress=float(message)), - ) - else: - publish_event( - logs_pub, - TaskLogEvent.from_dask_worker(log=message, log_level=log_level), - ) + dask_pub.put(event.json()) diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/file_utils.py b/services/dask-sidecar/src/simcore_service_dask_sidecar/file_utils.py index 0df353502d7..b111829e53d 100644 --- a/services/dask-sidecar/src/simcore_service_dask_sidecar/file_utils.py +++ 
b/services/dask-sidecar/src/simcore_service_dask_sidecar/file_utils.py @@ -127,7 +127,7 @@ async def pull_file_from_remote( ) -> None: assert src_url.path # nosec await log_publishing_cb( - f"Downloading '{src_url.path.strip('/')}' into local file '{dst_path.name}'...", + f"Downloading '{src_url}' into local file '{dst_path}'...", logging.INFO, ) if not dst_path.parent.exists(): @@ -151,7 +151,7 @@ async def pull_file_from_remote( ) await log_publishing_cb( - f"Download of '{src_url.path.strip('/')}' into local file '{dst_path.name}' complete.", + f"Download of '{src_url}' into local file '{dst_path}' complete.", logging.INFO, ) diff --git a/services/dask-sidecar/tests/unit/conftest.py b/services/dask-sidecar/tests/unit/conftest.py index 043f5d06531..d9cf2bbf9e5 100644 --- a/services/dask-sidecar/tests/unit/conftest.py +++ b/services/dask-sidecar/tests/unit/conftest.py @@ -5,7 +5,7 @@ from pathlib import Path from pprint import pformat -from typing import AsyncIterator, Callable, Iterable, Iterator +from typing import AsyncIterator, Callable, Iterator import dask import distributed @@ -76,7 +76,7 @@ def mock_service_envs( @pytest.fixture -def dask_client(mock_service_envs: None) -> Iterable[distributed.Client]: +def local_cluster(mock_service_envs: None) -> Iterator[distributed.LocalCluster]: print(pformat(dask.config.get("distributed"))) with distributed.LocalCluster( worker_class=distributed.Worker, @@ -85,8 +85,43 @@ def dask_client(mock_service_envs: None) -> Iterable[distributed.Client]: "preload": "simcore_service_dask_sidecar.tasks", }, ) as cluster: - with distributed.Client(cluster) as client: - yield client + assert cluster + assert isinstance(cluster, distributed.LocalCluster) + yield cluster + + +@pytest.fixture +def dask_client( + local_cluster: distributed.LocalCluster, +) -> Iterator[distributed.Client]: + with distributed.Client(local_cluster) as client: + yield client + + +@pytest.fixture +async def async_local_cluster( + mock_service_envs: None, +) -> AsyncIterator[distributed.LocalCluster]: + print(pformat(dask.config.get("distributed"))) + async with distributed.LocalCluster( + worker_class=distributed.Worker, + **{ + "resources": {"CPU": 10, "GPU": 10}, + "preload": "simcore_service_dask_sidecar.tasks", + }, + asynchronous=True, + ) as cluster: + assert cluster + assert isinstance(cluster, distributed.LocalCluster) + yield cluster + + +@pytest.fixture +async def async_dask_client( + async_local_cluster: distributed.LocalCluster, +) -> AsyncIterator[distributed.Client]: + async with distributed.Client(async_local_cluster, asynchronous=True) as client: + yield client @pytest.fixture(scope="module") diff --git a/services/dask-sidecar/tests/unit/test_dask_utils.py b/services/dask-sidecar/tests/unit/test_dask_utils.py index 0a52a650d34..544597a20da 100644 --- a/services/dask-sidecar/tests/unit/test_dask_utils.py +++ b/services/dask-sidecar/tests/unit/test_dask_utils.py @@ -8,7 +8,7 @@ import concurrent.futures import logging import time -from typing import Any +from typing import Any, AsyncIterator, Callable, Coroutine import distributed import pytest @@ -31,47 +31,115 @@ DASK_TESTING_TIMEOUT_S = 25 -async def test_publish_event(dask_client: distributed.Client): - dask_pub = distributed.Pub("some_topic") - dask_sub = distributed.Sub("some_topic") - async for attempt in AsyncRetrying( - reraise=True, - retry=retry_if_exception_type(AssertionError), - wait=wait_fixed(0.01), - stop=stop_after_delay(60), - ): - with attempt: - print( - f"waiting for subscribers... 
attempt={attempt.retry_state.attempt_number}"
-            )
-            assert dask_pub.subscribers
-    print("we do have subscribers!")
+def test_publish_event(dask_client: distributed.Client):
+    dask_pub = distributed.Pub("some_topic", client=dask_client)
+    dask_sub = distributed.Sub("some_topic", client=dask_client)
+    event_to_publish = TaskLogEvent(
+        job_id="some_fake_job_id", log="the log", log_level=logging.INFO
+    )
+    publish_event(dask_pub=dask_pub, event=event_to_publish)
+
+    # NOTE: this test runs a sync dask client,
+    # and the CI seems to have sometimes difficulties having this run in a reasonable time
+    # hence the long time out
+    message = dask_sub.get(timeout=DASK_TESTING_TIMEOUT_S)
+    assert message is not None
+    received_task_log_event = TaskLogEvent.parse_raw(message)  # type: ignore
+    assert received_task_log_event == event_to_publish
+
+async def test_publish_event_async(async_dask_client: distributed.Client):
+    dask_pub = distributed.Pub("some_topic", client=async_dask_client)
+    dask_sub = distributed.Sub("some_topic", client=async_dask_client)
     event_to_publish = TaskLogEvent(
         job_id="some_fake_job_id", log="the log", log_level=logging.INFO
     )
     publish_event(dask_pub=dask_pub, event=event_to_publish)
+
     # NOTE: this test runs a sync dask client,
     # and the CI seems to have sometimes difficulties having this run in a reasonable time
     # hence the long time out
     message = dask_sub.get(timeout=DASK_TESTING_TIMEOUT_S)
+    assert isinstance(message, Coroutine)
+    message = await message
     assert message is not None
     received_task_log_event = TaskLogEvent.parse_raw(message)  # type: ignore
     assert received_task_log_event == event_to_publish
 
 
-def _wait_for_task_to_start():
+@pytest.fixture
+async def asyncio_task() -> AsyncIterator[Callable[[Coroutine], asyncio.Task]]:
+    created_tasks = []
+
+    def _creator(coro: Coroutine) -> asyncio.Task:
+        task = asyncio.create_task(coro, name="pytest_asyncio_task")
+        created_tasks.append(task)
+        return task
+
+    yield _creator
+    for task in created_tasks:
+        task.cancel()
+
+    await asyncio.gather(*created_tasks, return_exceptions=True)
+
+
+async def test_publish_event_async_using_task(
+    async_dask_client: distributed.Client,
+    asyncio_task: Callable[[Coroutine], asyncio.Task],
+):
+    dask_pub = distributed.Pub("some_topic", client=async_dask_client)
+    dask_sub = distributed.Sub("some_topic", client=async_dask_client)
+    NUMBER_OF_MESSAGES = 1000
+    received_messages = []
+
+    async def _dask_sub_consumer_task(sub: distributed.Sub) -> None:
+        print("--> starting consumer task")
+        async for dask_event in sub:
+            print(f"received {dask_event}")
+            received_messages.append(dask_event)
+        print("<-- finished consumer task")
+
+    consumer_task = asyncio_task(_dask_sub_consumer_task(dask_sub))
+    assert consumer_task
+
+    async def _dask_publisher_task(pub: distributed.Pub) -> None:
+        print("--> starting publisher task")
+        for n in range(NUMBER_OF_MESSAGES):
+            event_to_publish = TaskLogEvent(
+                job_id="some_fake_job_id", log=f"the log {n}", log_level=logging.INFO
+            )
+            publish_event(dask_pub=pub, event=event_to_publish)
+        print("<-- finished publisher task")
+
+    publisher_task = asyncio_task(_dask_publisher_task(dask_pub))
+    assert publisher_task
+
+    async for attempt in AsyncRetrying(
+        retry=retry_if_exception_type(AssertionError),
+        stop=stop_after_delay(DASK_TESTING_TIMEOUT_S),
+        wait=wait_fixed(0.01),
+        reraise=True,
+    ):
+        with attempt:
+            print(
+                f"checking number of received messages...currently {len(received_messages)}"
+            )
+            assert len(received_messages) == NUMBER_OF_MESSAGES
+
print("all expected messages received") + + +def _wait_for_task_to_start() -> None: start_event = distributed.Event(DASK_TASK_STARTED_EVENT) start_event.wait(timeout=DASK_TESTING_TIMEOUT_S) -def _notify_task_is_started_and_ready(): +def _notify_task_is_started_and_ready() -> None: start_event = distributed.Event(DASK_TASK_STARTED_EVENT) start_event.set() def _some_long_running_task() -> int: - assert is_current_task_aborted() == False + assert is_current_task_aborted() is False _notify_task_is_started_and_ready() for i in range(300): @@ -110,7 +178,7 @@ def test_task_is_aborted_using_event(dask_client: distributed.Client): def _some_long_running_task_with_monitoring() -> int: - assert is_current_task_aborted() == False + assert is_current_task_aborted() is False # we are started now start_event = distributed.Event(DASK_TASK_STARTED_EVENT) start_event.set() diff --git a/services/dask-sidecar/tests/unit/test_docker_utils.py b/services/dask-sidecar/tests/unit/test_docker_utils.py index 6732c6a0c78..0882d05a82c 100644 --- a/services/dask-sidecar/tests/unit/test_docker_utils.py +++ b/services/dask-sidecar/tests/unit/test_docker_utils.py @@ -4,7 +4,6 @@ # pylint: disable=no-member import asyncio -import logging from typing import Any from unittest.mock import call @@ -13,10 +12,8 @@ import pytest from models_library.services_resources import BootMode from pytest_mock.plugin import MockerFixture -from servicelib.logging_utils import LogLevelInt, LogMessageStr from simcore_service_dask_sidecar.computational_sidecar.docker_utils import ( - LogType, - _parse_line, + _try_parse_progress, create_container_config, managed_container, ) @@ -97,100 +94,52 @@ async def test_create_container_config( ) +@pytest.mark.parametrize("with_timestamp", [True, False], ids=str) @pytest.mark.parametrize( - "version1_logs", - [True, False], - ids=lambda id: f"version{'>=1' if id is True else '0'}-logs", -) -@pytest.mark.parametrize( - "log_line, expected_log_type, expected_message, expected_log_level", + "log_line, expected_progress_value", [ - ("hello from the logs", LogType.LOG, "hello from the logs", logging.INFO), - ( - "[progress] this is some whatever progress without number", - LogType.LOG, - "[progress] this is some whatever progress without number", - logging.INFO, - ), - ("[Progress] 34%", LogType.PROGRESS, "0.34", logging.INFO), - ("[PROGRESS] .34", LogType.PROGRESS, "0.34", logging.INFO), - ("[progress] 0.44", LogType.PROGRESS, "0.44", logging.INFO), - ("[progress] 44 percent done", LogType.PROGRESS, "0.44", logging.INFO), - ("[progress] 44/150", LogType.PROGRESS, f"{(44.0 / 150.0):.2f}", logging.INFO), - ( - "Progress: this is some progress", - LogType.LOG, - "Progress: this is some progress", - logging.INFO, - ), - ( - "progress: 34%", - LogType.PROGRESS, - "0.34", - logging.INFO, - ), - ("PROGRESS: .34", LogType.PROGRESS, "0.34", logging.INFO), - ("progress: 0.44", LogType.PROGRESS, "0.44", logging.INFO), - ("progress: 44 percent done", LogType.PROGRESS, "0.44", logging.INFO), - ("44 percent done", LogType.PROGRESS, "0.44", logging.INFO), - ("progress: 44/150", LogType.PROGRESS, f"{(44.0/150.0):.2f}", logging.INFO), - ("progress: 44/150...", LogType.PROGRESS, f"{(44.0/150.0):.2f}", logging.INFO), - ( - "any kind of message even with progress inside", - LogType.LOG, - "any kind of message even with progress inside", - logging.INFO, - ), - ("[PROGRESS]1.000000\n", LogType.PROGRESS, "1.00", logging.INFO), - ("[PROGRESS] 1\n", LogType.PROGRESS, "1.00", logging.INFO), - ("[PROGRESS] 0\n", LogType.PROGRESS, 
"0.00", logging.INFO), + ("hello from the logs", None), + ("[progress] this is some whatever progress without number", None), + ("[Progress] 34%", 0.34), + ("[PROGRESS] .34", 0.34), + ("[progress] 0.44", 0.44), + ("[progress] 44 percent done", 0.44), + ("[progress] 44/150", 44.0 / 150.0), + ("Progress: this is some progress", None), + ("progress: 34%", 0.34), + ("PROGRESS: .34", 0.34), + ("progress: 0.44", 0.44), + ("progress: 44 percent done", 0.44), + ("44 percent done", 0.44), + ("progress: 44/150", 44.0 / 150.0), + ("progress: 44/150...", 44.0 / 150.0), + ("any kind of message even with progress inside", None), + ("[PROGRESS]1.000000\n", 1.00), + ("[PROGRESS] 1\n", 1.00), + ("[PROGRESS] 0\n", 0.00), ( "[PROGRESS]: 1% [ 10 / 624 ] Time Update, estimated remaining time 1 seconds @ 26.43 MCells/s", - LogType.PROGRESS, - "0.01", - logging.INFO, - ), - ( - "[warn]: this is some warning", - LogType.LOG, - "[warn]: this is some warning", - logging.WARNING, + 0.01, ), + ("[warn]: this is some warning", None), + ("err: this is some error", None), ( - "err: this is some error", - LogType.LOG, - "err: this is some error", - logging.ERROR, + "progress: 10/0 asd this is a 15% 10/asdf progress without progress it will not break the system", + None, ), ], ) -async def test_parse_line( - version1_logs: bool, +async def test__try_parse_progress( + with_timestamp: bool, log_line: str, - expected_log_type: LogType, - expected_message: LogMessageStr, - expected_log_level: LogLevelInt, + expected_progress_value: float, ): expected_time_stamp = arrow.utcnow().datetime - if version1_logs: - # from version 1 the logs come directly from ```docker logs -t -f``` and contain the timestamp - # version 0 does not contain a timestamp and is added at parsing time + if with_timestamp: log_line = f"{expected_time_stamp.isoformat()} {log_line}" - ( - received_log_type, - received_time_stamp, - received_message, - received_log_level, - ) = await _parse_line(log_line) - assert received_log_type == expected_log_type - assert received_message == expected_message - assert received_log_level == expected_log_level - if version1_logs: - assert received_time_stamp == expected_time_stamp - else: - # in version 0 the time stamps are expected to increase slowly - assert received_time_stamp >= expected_time_stamp + received_progress = await _try_parse_progress(log_line) + assert received_progress == expected_progress_value @pytest.mark.parametrize( diff --git a/services/dask-sidecar/tests/unit/test_tasks.py b/services/dask-sidecar/tests/unit/test_tasks.py index 6b8d56d83a3..a4b047dabd3 100644 --- a/services/dask-sidecar/tests/unit/test_tasks.py +++ b/services/dask-sidecar/tests/unit/test_tasks.py @@ -13,7 +13,7 @@ from dataclasses import dataclass from pprint import pformat from random import randint -from typing import Callable, Coroutine, Iterable +from typing import Any, Callable, Coroutine, Iterable from unittest import mock from uuid import uuid4 @@ -140,6 +140,17 @@ class ServiceExampleParam: expected_logs: list[str] integration_version: version.Version + def sidecar_params(self) -> dict[str, Any]: + return { + "docker_auth": self.docker_basic_auth, + "service_key": self.service_key, + "service_version": self.service_version, + "input_data": self.input_data, + "output_data_keys": self.output_data_keys, + "log_file_url": self.log_file_url, + "command": self.command, + } + pytest_simcore_core_services_selection = ["postgres"] pytest_simcore_ops_services_selection = [] @@ -166,12 +177,12 @@ def boot_mode(request: FixtureRequest) 
-> BootMode: ids=lambda v: f"integration.version.{v}", ) def integration_version(request: FixtureRequest) -> version.Version: - print("Using service integration:", request.param) + print("--> Using service integration:", request.param) return version.Version(request.param) @pytest.fixture -def ubuntu_task( +def sleeper_task( integration_version: version.Version, file_on_s3_server: Callable[..., AnyUrl], s3_remote_file_url: Callable[..., AnyUrl], @@ -205,7 +216,7 @@ def ubuntu_task( ) # check in the console that the expected files are present in the expected INPUT folder (set as ${INPUT_FOLDER} in the service) file_names = [file.path for file in list_of_files] - list_of_commands = [ + list_of_bash_commands = [ "echo User: $(id $(whoami))", "echo Inputs:", "ls -tlah -R ${INPUT_FOLDER}", @@ -218,20 +229,21 @@ def ubuntu_task( ] # check expected ENVS are set - list_of_commands += _bash_check_env_exist( - variable_name="SC_COMP_SERVICES_SCHEDULED_AS", variable_value=boot_mode.value + list_of_bash_commands += _bash_check_env_exist( + variable_name="SC_COMP_SERVICES_SCHEDULED_AS", + variable_value=f"{boot_mode.value}", ) - list_of_commands += _bash_check_env_exist( + list_of_bash_commands += _bash_check_env_exist( variable_name="SIMCORE_NANO_CPUS_LIMIT", variable_value=f"{int(_DEFAULT_MAX_RESOURCES['CPU']*1e9)}", ) - list_of_commands += _bash_check_env_exist( + list_of_bash_commands += _bash_check_env_exist( variable_name="SIMCORE_MEMORY_BYTES_LIMIT", variable_value=f"{_DEFAULT_MAX_RESOURCES['RAM']}", ) # check input files - list_of_commands += [ + list_of_bash_commands += [ f"(test -f ${{INPUT_FOLDER}}/{file} || (echo ${{INPUT_FOLDER}}/{file} does not exist && exit 1))" for file in file_names ] + [f"echo $(cat ${{INPUT_FOLDER}}/{file})" for file in file_names] @@ -242,7 +254,7 @@ def ubuntu_task( else "input.json" ) - list_of_commands += [ + list_of_bash_commands += [ f"echo '{faker.text(max_nb_chars=17216)}'", f"(test -f ${{INPUT_FOLDER}}/{input_json_file_name} || (echo ${{INPUT_FOLDER}}/{input_json_file_name} file does not exists && exit 1))", f"echo $(cat ${{INPUT_FOLDER}}/{input_json_file_name})", @@ -297,17 +309,17 @@ def ubuntu_task( ) # check for the log file if legacy version - list_of_commands += [ + list_of_bash_commands += [ "echo $(ls -tlah ${LOG_FOLDER})", f"(test {'!' 
if integration_version > LEGACY_INTEGRATION_VERSION else ''} -f ${{LOG_FOLDER}}/{LEGACY_SERVICE_LOG_FILE_NAME} || (echo ${{LOG_FOLDER}}/{LEGACY_SERVICE_LOG_FILE_NAME} file does {'' if integration_version > LEGACY_INTEGRATION_VERSION else 'not'} exists && exit 1))", ] if integration_version == LEGACY_INTEGRATION_VERSION: - list_of_commands = [ + list_of_bash_commands = [ f"{c} >> ${{LOG_FOLDER}}/{LEGACY_SERVICE_LOG_FILE_NAME}" - for c in list_of_commands + for c in list_of_bash_commands ] # set the final command to generate the output file(s) (files and json output) - list_of_commands += [ + list_of_bash_commands += [ f"echo {jsonized_outputs} > ${{OUTPUT_FOLDER}}/{output_json_file_name}", "echo 'some data for the output file' > ${OUTPUT_FOLDER}/a_outputfile", "mkdir -p ${OUTPUT_FOLDER}/subfolder", @@ -331,7 +343,7 @@ def ubuntu_task( command=[ "/bin/bash", "-c", - " && ".join(list_of_commands), + " && ".join(list_of_bash_commands), ], input_data=input_data, output_data_keys=expected_output_keys, @@ -350,58 +362,86 @@ def ubuntu_task( @pytest.fixture() -def ubuntu_task_fail(ubuntu_task: ServiceExampleParam) -> ServiceExampleParam: - ubuntu_task.command = ["/bin/bash", "-c", "some stupid failing command"] - return ubuntu_task +def sidecar_task( + integration_version: version.Version, + file_on_s3_server: Callable[..., AnyUrl], + s3_remote_file_url: Callable[..., AnyUrl], + boot_mode: BootMode, + faker: Faker, +) -> Callable[..., ServiceExampleParam]: + def _creator(command: list[str] | None = None) -> ServiceExampleParam: + return ServiceExampleParam( + docker_basic_auth=DockerBasicAuth( + server_address="docker.io", username="pytest", password=SecretStr("") + ), + service_key="ubuntu", + service_version="latest", + command=command + or ["/bin/bash", "-c", "echo 'hello I'm an empty ubuntu task!"], + input_data=TaskInputData.parse_obj({}), + output_data_keys=TaskOutputDataSchema.parse_obj({}), + log_file_url=s3_remote_file_url(file_path="log.dat"), + expected_output_data=TaskOutputData.parse_obj({}), + expected_logs=[], + integration_version=integration_version, + ) + + return _creator @pytest.fixture() -def ubuntu_task_unexpected_output( - ubuntu_task: ServiceExampleParam, +def failing_ubuntu_task( + sidecar_task: Callable[..., ServiceExampleParam] ) -> ServiceExampleParam: - ubuntu_task.command = ["/bin/bash", "-c", "echo we create nothingness"] - return ubuntu_task + return sidecar_task(command=["/bin/bash", "-c", "some stupid failing command"]) + + +@pytest.fixture() +def sleeper_task_unexpected_output( + sleeper_task: ServiceExampleParam, +) -> ServiceExampleParam: + sleeper_task.command = ["/bin/bash", "-c", "echo we create nothingness"] + return sleeper_task @pytest.fixture() def caplog_info_level(caplog: LogCaptureFixture) -> Iterable[LogCaptureFixture]: - with caplog.at_level( - logging.INFO, - ): + with caplog.at_level(logging.INFO, logger="simcore_service_dask_sidecar"): yield caplog +@pytest.fixture +def mocked_get_integration_version( + integration_version: version.Version, mocker: MockerFixture +) -> mock.Mock: + mocked_get_integration_version = mocker.patch( + "simcore_service_dask_sidecar.computational_sidecar.core.get_integration_version", + autospec=True, + return_value=integration_version, + ) + return mocked_get_integration_version + + def test_run_computational_sidecar_real_fct( caplog_info_level: LogCaptureFixture, event_loop: asyncio.AbstractEventLoop, mock_service_envs: None, dask_subsystem_mock: dict[str, mock.Mock], - ubuntu_task: ServiceExampleParam, - mocker: 
MockerFixture, + sleeper_task: ServiceExampleParam, s3_settings: S3Settings, boot_mode: BootMode, + mocked_get_integration_version: mock.Mock, ): - mocked_get_integration_version = mocker.patch( - "simcore_service_dask_sidecar.computational_sidecar.core.get_integration_version", - autospec=True, - return_value=ubuntu_task.integration_version, - ) output_data = run_computational_sidecar( - ubuntu_task.docker_basic_auth, - ubuntu_task.service_key, - ubuntu_task.service_version, - ubuntu_task.input_data, - ubuntu_task.output_data_keys, - ubuntu_task.log_file_url, - ubuntu_task.command, - s3_settings, - boot_mode, + **sleeper_task.sidecar_params(), + s3_settings=s3_settings, + boot_mode=boot_mode, ) mocked_get_integration_version.assert_called_once_with( mock.ANY, - ubuntu_task.docker_basic_auth, - ubuntu_task.service_key, - ubuntu_task.service_version, + sleeper_task.docker_basic_auth, + sleeper_task.service_key, + sleeper_task.service_version, ) for event in [TaskProgressEvent, TaskLogEvent]: dask_subsystem_mock["dask_event_publish"].assert_any_call( @@ -409,29 +449,29 @@ def test_run_computational_sidecar_real_fct( ) # check that the task produces expected logs - for log in ubuntu_task.expected_logs: + for log in sleeper_task.expected_logs: r = re.compile( - rf"\[{ubuntu_task.service_key}:{ubuntu_task.service_version} - .+\/.+ - .+\]: ({log})" + rf"\[{sleeper_task.service_key}:{sleeper_task.service_version} - .+\/.+\]: ({log})" ) search_results = list(filter(r.search, caplog_info_level.messages)) assert ( len(search_results) > 0 ), f"Could not find '{log}' in worker_logs:\n {pformat(caplog_info_level.messages, width=240)}" - for log in ubuntu_task.expected_logs: + for log in sleeper_task.expected_logs: assert re.search( - rf"\[{ubuntu_task.service_key}:{ubuntu_task.service_version} - .+\/.+ - .+\]: ({log})", + rf"\[{sleeper_task.service_key}:{sleeper_task.service_version} - .+\/.+\]: ({log})", caplog_info_level.text, ) # check that the task produce the expected data, not less not more - for k, v in ubuntu_task.expected_output_data.items(): + for k, v in sleeper_task.expected_output_data.items(): assert k in output_data assert output_data[k] == v s3_storage_kwargs = _s3fs_settings_from_s3_settings(s3_settings) for k, v in output_data.items(): - assert k in ubuntu_task.expected_output_data - assert v == ubuntu_task.expected_output_data[k] + assert k in sleeper_task.expected_output_data + assert v == sleeper_task.expected_output_data[k] # if there are file urls in the output, check they exist if isinstance(v, FileUrl): @@ -440,11 +480,11 @@ def test_run_computational_sidecar_real_fct( # check the task has created a log file with fsspec.open( - f"{ubuntu_task.log_file_url}", mode="rt", **s3_storage_kwargs + f"{sleeper_task.log_file_url}", mode="rt", **s3_storage_kwargs ) as fp: saved_logs = fp.read() # type: ignore assert saved_logs - for log in ubuntu_task.expected_logs: + for log in sleeper_task.expected_logs: assert log in saved_logs @@ -452,31 +492,19 @@ def test_run_computational_sidecar_real_fct( "integration_version, boot_mode", [("1.0.0", BootMode.CPU)], indirect=True ) def test_run_multiple_computational_sidecar_dask( - event_loop: asyncio.AbstractEventLoop, dask_client: distributed.Client, - ubuntu_task: ServiceExampleParam, - mocker: MockerFixture, + sleeper_task: ServiceExampleParam, s3_settings: S3Settings, boot_mode: BootMode, + mocked_get_integration_version: mock.Mock, ): NUMBER_OF_TASKS = 50 - mocker.patch( - 
"simcore_service_dask_sidecar.computational_sidecar.core.get_integration_version", - autospec=True, - return_value=ubuntu_task.integration_version, - ) futures = [ dask_client.submit( run_computational_sidecar, - ubuntu_task.docker_basic_auth, - ubuntu_task.service_key, - ubuntu_task.service_version, - ubuntu_task.input_data, - ubuntu_task.output_data_keys, - ubuntu_task.log_file_url, - ubuntu_task.command, - s3_settings, + **sleeper_task.sidecar_params(), + s3_settings=s3_settings, resources={}, boot_mode=boot_mode, ) @@ -489,10 +517,12 @@ def test_run_multiple_computational_sidecar_dask( # for result in results: # check that the task produce the expected data, not less not more for output_data in results: - for k, v in ubuntu_task.expected_output_data.items(): + for k, v in sleeper_task.expected_output_data.items(): assert k in output_data assert output_data[k] == v + mocked_get_integration_version.assert_called() + @pytest.fixture def log_sub( @@ -501,32 +531,27 @@ def log_sub( return distributed.Sub(TaskLogEvent.topic_name(), client=dask_client) +@pytest.fixture +def progress_sub(dask_client: distributed.Client) -> distributed.Sub: + return distributed.Sub(TaskProgressEvent.topic_name(), client=dask_client) + + @pytest.mark.parametrize( "integration_version, boot_mode", [("1.0.0", BootMode.CPU)], indirect=True ) async def test_run_computational_sidecar_dask( dask_client: distributed.Client, - ubuntu_task: ServiceExampleParam, - mocker: MockerFixture, + sleeper_task: ServiceExampleParam, s3_settings: S3Settings, boot_mode: BootMode, log_sub: distributed.Sub, + progress_sub: distributed.Sub, + mocked_get_integration_version: mock.Mock, ): - mocker.patch( - "simcore_service_dask_sidecar.computational_sidecar.core.get_integration_version", - autospec=True, - return_value=ubuntu_task.integration_version, - ) future = dask_client.submit( run_computational_sidecar, - ubuntu_task.docker_basic_auth, - ubuntu_task.service_key, - ubuntu_task.service_version, - ubuntu_task.input_data, - ubuntu_task.output_data_keys, - ubuntu_task.log_file_url, - ubuntu_task.command, - s3_settings, + **sleeper_task.sidecar_params(), + s3_settings=s3_settings, resources={}, boot_mode=boot_mode, ) @@ -534,12 +559,24 @@ async def test_run_computational_sidecar_dask( worker_name = next(iter(dask_client.scheduler_info()["workers"])) assert worker_name output_data = future.result() + assert output_data assert isinstance(output_data, TaskOutputData) # check that the task produces expected logs + worker_progresses = [ + TaskProgressEvent.parse_raw(msg).progress for msg in progress_sub.buffer + ] + # check ordering + assert worker_progresses == list( + set(worker_progresses) + ), "ordering of progress values incorrectly sorted!" 
+ assert worker_progresses[0] == 0, "missing/incorrect initial progress value" + assert worker_progresses[-1] == 1, "missing/incorrect final progress value" + worker_logs = [TaskLogEvent.parse_raw(msg).log for msg in log_sub.buffer] + print(f"<-- we got {len(worker_logs)} lines of logs") - for log in ubuntu_task.expected_logs: + for log in sleeper_task.expected_logs: r = re.compile(rf"^({log}).*") search_results = list(filter(r.search, worker_logs)) assert ( @@ -547,19 +584,73 @@ async def test_run_computational_sidecar_dask( ), f"Could not find {log} in worker_logs:\n {pformat(worker_logs, width=240)}" # check that the task produce the expected data, not less not more - for k, v in ubuntu_task.expected_output_data.items(): + for k, v in sleeper_task.expected_output_data.items(): assert k in output_data assert output_data[k] == v s3_storage_kwargs = _s3fs_settings_from_s3_settings(s3_settings) for k, v in output_data.items(): - assert k in ubuntu_task.expected_output_data - assert v == ubuntu_task.expected_output_data[k] + assert k in sleeper_task.expected_output_data + assert v == sleeper_task.expected_output_data[k] # if there are file urls in the output, check they exist if isinstance(v, FileUrl): with fsspec.open(f"{v.url}", **s3_storage_kwargs) as fp: assert fp.details.get("size") > 0 # type: ignore + mocked_get_integration_version.assert_called() + + +@pytest.mark.parametrize( + "integration_version, boot_mode", [("1.0.0", BootMode.CPU)], indirect=True +) +async def test_run_computational_sidecar_dask_does_not_lose_messages_with_pubsub( + dask_client: distributed.Client, + sidecar_task: Callable[..., ServiceExampleParam], + s3_settings: S3Settings, + boot_mode: BootMode, + log_sub: distributed.Sub, + progress_sub: distributed.Sub, + mocked_get_integration_version: mock.Mock, + faker: Faker, +): + mocked_get_integration_version.assert_not_called() + NUMBER_OF_LOGS = 20000 + future = dask_client.submit( + run_computational_sidecar, + **sidecar_task( + command=[ + "/bin/bash", + "-c", + " && ".join( + [ + f'N={NUMBER_OF_LOGS}; for ((i=1; i<=N; i++));do echo "This is iteration $i"; echo "progress: $i/{NUMBER_OF_LOGS}"; done ' + ] + ), + ], + ).sidecar_params(), + s3_settings=s3_settings, + resources={}, + boot_mode=boot_mode, + ) + output_data = future.result() + assert output_data is not None + assert isinstance(output_data, TaskOutputData) + + # check that the task produces expected logs + worker_progresses = [ + TaskProgressEvent.parse_raw(msg).progress for msg in progress_sub.buffer + ] + # check length + assert len(worker_progresses) == len( + set(worker_progresses) + ), "there are duplicate progresses!" 
+ assert sorted(worker_progresses) == worker_progresses, "ordering issue" + assert worker_progresses[0] == 0, "missing/incorrect initial progress value" + assert worker_progresses[-1] == 1, "missing/incorrect final progress value" + + worker_logs = [TaskLogEvent.parse_raw(msg).log for msg in log_sub.buffer] + assert len(worker_logs) == NUMBER_OF_LOGS + 15 + mocked_get_integration_version.assert_called() @pytest.mark.parametrize( @@ -567,22 +658,15 @@ async def test_run_computational_sidecar_dask( ) def test_failing_service_raises_exception( caplog_info_level: LogCaptureFixture, - event_loop: asyncio.AbstractEventLoop, mock_service_envs: None, dask_subsystem_mock: dict[str, mock.Mock], - ubuntu_task_fail: ServiceExampleParam, + failing_ubuntu_task: ServiceExampleParam, s3_settings: S3Settings, ): with pytest.raises(ServiceRuntimeError): run_computational_sidecar( - ubuntu_task_fail.docker_basic_auth, - ubuntu_task_fail.service_key, - ubuntu_task_fail.service_version, - ubuntu_task_fail.input_data, - ubuntu_task_fail.output_data_keys, - ubuntu_task_fail.log_file_url, - ubuntu_task_fail.command, - s3_settings, + **failing_ubuntu_task.sidecar_params(), + s3_settings=s3_settings, ) @@ -591,20 +675,13 @@ def test_failing_service_raises_exception( ) def test_running_service_that_generates_unexpected_data_raises_exception( caplog_info_level: LogCaptureFixture, - event_loop: asyncio.AbstractEventLoop, mock_service_envs: None, dask_subsystem_mock: dict[str, mock.Mock], - ubuntu_task_unexpected_output: ServiceExampleParam, + sleeper_task_unexpected_output: ServiceExampleParam, s3_settings: S3Settings, ): with pytest.raises(ServiceBadFormattedOutputError): run_computational_sidecar( - ubuntu_task_unexpected_output.docker_basic_auth, - ubuntu_task_unexpected_output.service_key, - ubuntu_task_unexpected_output.service_version, - ubuntu_task_unexpected_output.input_data, - ubuntu_task_unexpected_output.output_data_keys, - ubuntu_task_unexpected_output.log_file_url, - ubuntu_task_unexpected_output.command, - s3_settings, + **sleeper_task_unexpected_output.sidecar_params(), + s3_settings=s3_settings, ) diff --git a/services/director-v2/requirements/_base.txt b/services/director-v2/requirements/_base.txt index d0f8098b0bc..45c88926a19 100644 --- a/services/director-v2/requirements/_base.txt +++ b/services/director-v2/requirements/_base.txt @@ -89,9 +89,8 @@ cloudpickle==2.2.1 # distributed commonmark==0.9.1 # via rich -dask==2023.3.0 +dask==2023.3.2 # via - # -r requirements/../../../packages/dask-task-models-library/requirements/_base.in # -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt # dask-gateway # distributed @@ -99,7 +98,7 @@ dask-gateway==2023.1.1 # via -r requirements/_base.in decorator==4.4.2 # via networkx -distributed==2023.3.0 +distributed==2023.3.2 # via # -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt # dask @@ -118,7 +117,7 @@ frozenlist==1.3.0 # via # aiohttp # aiosignal -fsspec==2023.3.0 +fsspec==2023.5.0 # via # -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt # dask @@ -128,10 +127,6 @@ h11==0.12.0 # via # httpcore # uvicorn -heapdict==1.0.1 - # via - # -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt - # zict httpcore==0.15.0 # via httpx httptools==0.2.0 @@ -147,6 +142,10 @@ idna==2.10 # httpx # requests # yarl +importlib-metadata==6.6.0 + # via + # -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt + # dask 
 itsdangerous==1.1.0
     # via fastapi
 jinja2==3.1.2
@@ -191,7 +190,7 @@ orjson==3.7.2
     # via
     #   -r requirements/_base.in
     #   fastapi
-packaging==23.0
+packaging==23.1
     # via
     #   -r requirements/../../../packages/simcore-sdk/requirements/_base.in
     #   -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt
@@ -199,13 +198,13 @@
     #   distributed
 pamqp==3.2.1
     # via aiormq
-partd==1.3.0
+partd==1.4.0
     # via
     #   -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt
     #   dask
 pint==0.19.2
     # via -r requirements/../../../packages/simcore-sdk/requirements/_base.in
-psutil==5.9.4
+psutil==5.9.5
     # via
     #   -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt
     #   distributed
@@ -287,7 +286,7 @@ toolz==0.12.0
     #   dask
     #   distributed
     #   partd
-tornado==6.2
+tornado==6.3.2
     # via
     #   -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt
     #   dask-gateway
@@ -310,7 +309,7 @@ ujson==5.5.0
     # via
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
     #   fastapi
-urllib3==1.26.14
+urllib3==1.26.16
     # via
     #   -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt
     #   distributed
@@ -332,10 +331,14 @@ yarl==1.7.2
     #   aio-pika
     #   aiohttp
     #   aiormq
-zict==2.2.0
+zict==3.0.0
     # via
     #   -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt
     #   distributed
+zipp==3.15.0
+    # via
+    #   -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt
+    #   importlib-metadata

 # The following packages are considered to be unsafe in a requirements file:
 # setuptools
diff --git a/services/director-v2/requirements/_test.txt b/services/director-v2/requirements/_test.txt
index 5fa3f36dc3b..8096e5e8929 100644
--- a/services/director-v2/requirements/_test.txt
+++ b/services/director-v2/requirements/_test.txt
@@ -88,7 +88,7 @@ cryptography==40.0.2
     # via
     #   -c requirements/../../../requirements/constraints.txt
     #   dask-gateway-server
-dask==2023.3.0
+dask==2023.3.2
     # via
     #   -r requirements/_test.in
     #   distributed
@@ -96,7 +96,7 @@ dask-gateway-server==2023.1.1
     # via -r requirements/_test.in
 dill==0.3.6
     # via pylint
-distributed==2023.3.0
+distributed==2023.3.2
     # via
     #   -c requirements/_base.txt
     #   dask
@@ -115,7 +115,7 @@ frozenlist==1.3.0
     # via
     #   -c requirements/_base.txt
     #   aiohttp
     #   aiosignal
-fsspec==2023.3.0
+fsspec==2023.5.0
     # via
     #   -c requirements/_base.txt
     #   dask
@@ -127,10 +127,6 @@ h11==0.12.0
     # via
     #   -c requirements/_base.txt
     #   httpcore
-heapdict==1.0.1
-    # via
-    #   -c requirements/_base.txt
-    #   zict
 httpcore==0.15.0
     # via
     #   -c requirements/_base.txt
@@ -148,6 +144,10 @@ idna==2.10
     #   httpx
     #   requests
     #   yarl
+importlib-metadata==6.6.0
+    # via
+    #   -c requirements/_base.txt
+    #   dask
 iniconfig==2.0.0
     # via pytest
 isort==5.12.0
@@ -198,7 +198,7 @@ mypy-extensions==1.0.0
     # via mypy
 numpy==1.24.3
     # via bokeh
-packaging==23.0
+packaging==23.1
     # via
     #   -c requirements/_base.txt
     #   bokeh
@@ -210,7 +210,7 @@ pamqp==3.2.1
     # via
     #   -c requirements/_base.txt
     #   aiormq
-partd==1.3.0
+partd==1.4.0
     # via
     #   -c requirements/_base.txt
     #   dask
@@ -222,7 +222,7 @@ pluggy==1.0.0
     # via pytest
 pprintpp==0.4.0
     # via pytest-icdiff
-psutil==5.9.4
+psutil==5.9.5
     # via
     #   -c requirements/_base.txt
     #   distributed
@@ -269,6 +269,7 @@ pyyaml==5.4.1
     #   distributed
 requests==2.31.0
     # via
+    #   -c requirements/_base.txt
     #   async-asgi-testclient
     #   docker
 respx==0.20.1
@@ -315,7 +316,7 @@ toolz==0.12.0
     #   dask
     #   distributed
     #   partd
-tornado==6.2
+tornado==6.3.2
     # via
     #   -c requirements/_base.txt
     #   bokeh
@@ -329,7 +330,7 @@ typing-extensions==4.3.0
     #   bokeh
     #   mypy
     #   sqlalchemy2-stubs
-urllib3==1.26.14
+urllib3==1.26.16
     # via
     #   -c requirements/_base.txt
     #   botocore
@@ -349,7 +350,11 @@ yarl==1.7.2
     #   aio-pika
     #   aiohttp
     #   aiormq
-zict==2.2.0
+zict==3.0.0
     # via
     #   -c requirements/_base.txt
     #   distributed
+zipp==3.15.0
+    # via
+    #   -c requirements/_base.txt
+    #   importlib-metadata
diff --git a/services/director-v2/requirements/_tools.txt b/services/director-v2/requirements/_tools.txt
index 2aa25cac0fc..bbee86f57f4 100644
--- a/services/director-v2/requirements/_tools.txt
+++ b/services/director-v2/requirements/_tools.txt
@@ -49,7 +49,7 @@ mypy-extensions==1.0.0
     #   black
 nodeenv==1.8.0
     # via pre-commit
-packaging==23.0
+packaging==23.1
     # via
     #   -c requirements/_test.txt
     #   black
diff --git a/services/director-v2/tests/unit/test_modules_dask_client.py b/services/director-v2/tests/unit/test_modules_dask_client.py
index f16eae3904f..6db862cef99 100644
--- a/services/director-v2/tests/unit/test_modules_dask_client.py
+++ b/services/director-v2/tests/unit/test_modules_dask_client.py
@@ -9,7 +9,7 @@ import functools
 import traceback
 from dataclasses import dataclass
-from typing import Any, AsyncIterator, Awaitable, Callable, NoReturn
+from typing import Any, AsyncIterator, Awaitable, Callable, Coroutine, NoReturn
 from unittest import mock
 from uuid import uuid4

@@ -233,6 +233,22 @@ async def dask_client(
     try:
         assert client.app.state.engine is not None
+
+        # check we can run some simple python code
+        def _square(x):
+            return x**2
+
+        def neg(x):
+            return -x
+
+        a = client.backend.client.map(_square, range(10))
+        b = client.backend.client.map(neg, a)
+        total = client.backend.client.submit(sum, b)
+        future = total.result()
+        assert future
+        # the underlying dask client is asynchronous, so .result() hands back a coroutine
+        assert isinstance(future, Coroutine)
+        result = await future
+        assert result == -285
     except AttributeError:
         # enforces existence of 'app.state.engine' and sets to None
         client.app.state.engine = None
diff --git a/services/dynamic-sidecar/requirements/_test.txt b/services/dynamic-sidecar/requirements/_test.txt
index af3445fa79a..a7f0cc8f058 100644
--- a/services/dynamic-sidecar/requirements/_test.txt
+++ b/services/dynamic-sidecar/requirements/_test.txt
@@ -67,7 +67,9 @@ python-dateutil==2.8.2
     #   -c requirements/_base.txt
     #   faker
 requests==2.31.0
-    # via async-asgi-testclient
+    # via
+    #   -c requirements/_base.txt
+    #   async-asgi-testclient
 six==1.16.0
     # via
     #   -c requirements/_base.txt
diff --git a/services/osparc-gateway-server/requirements/_base.txt b/services/osparc-gateway-server/requirements/_base.txt
index 925df6e8b35..b7c119e67a1 100644
--- a/services/osparc-gateway-server/requirements/_base.txt
+++ b/services/osparc-gateway-server/requirements/_base.txt
@@ -15,7 +15,7 @@ aiosignal==1.3.1
     # via aiohttp
 async-timeout==4.0.2
     # via aiohttp
-attrs==22.2.0
+attrs==23.1.0
     # via aiohttp
 cffi==1.15.1
     # via cryptography
@@ -23,7 +23,7 @@ charset-normalizer==3.1.0
     # via aiohttp
 colorlog==6.7.0
     # via dask-gateway-server
-cryptography==39.0.2
+cryptography==40.0.2
     # via
     #   -c requirements/../../../requirements/constraints.txt
     #   dask-gateway-server
@@ -31,7 +31,7 @@ dask-gateway-server==2023.1.1
     # via -r requirements/_base.in
 dnspython==2.3.0
     # via email-validator
-email-validator==1.3.1
+email-validator==2.0.0.post2
     # via pydantic
 frozenlist==1.3.3
     # via
@@ -49,19 +49,19 @@ multidict==6.0.4
     #   yarl
 pycparser==2.21
     # via cffi
-pydantic==1.10.2
+pydantic==1.10.8
     # via -r requirements/_base.in
 python-dotenv==1.0.0
     # via pydantic
-sqlalchemy==1.4.47
+sqlalchemy==1.4.48
     # via
     #   -c requirements/../../../requirements/constraints.txt
     #   dask-gateway-server
 traitlets==5.9.0
     # via dask-gateway-server
-typing-extensions==4.5.0
+typing-extensions==4.6.0
     # via
     #   aiodocker
     #   pydantic
-yarl==1.8.2
+yarl==1.9.2
     # via aiohttp
diff --git a/services/osparc-gateway-server/requirements/_test.txt b/services/osparc-gateway-server/requirements/_test.txt
index 87d2fc8e085..19987d7df83 100644
--- a/services/osparc-gateway-server/requirements/_test.txt
+++ b/services/osparc-gateway-server/requirements/_test.txt
@@ -16,7 +16,7 @@ async-timeout==4.0.2
     # via
     #   -c requirements/_base.txt
     #   aiohttp
-attrs==22.2.0
+attrs==23.1.0
     # via
     #   -c requirements/_base.txt
     #   aiohttp
@@ -42,16 +42,15 @@ coverage==7.2.5
     # via
     #   -r requirements/_test.in
     #   pytest-cov
-dask==2023.3.0
+dask==2023.3.2
     # via
-    #   -c requirements/../../dask-sidecar/requirements/_dask-distributed.txt
     #   dask-gateway
     #   distributed
 dask-gateway==2023.1.1
     # via -r requirements/_test.in
 debugpy==1.6.7
     # via -r requirements/_test.in
-distributed==2023.3.0
+distributed==2023.3.2
     # via
     #   -c requirements/../../dask-sidecar/requirements/_dask-distributed.txt
     #   dask-gateway
@@ -59,14 +58,14 @@ docker==6.1.2
     # via -r requirements/_test.in
 exceptiongroup==1.1.1
     # via pytest
-faker==18.7.0
+faker==18.9.0
     # via -r requirements/_test.in
 frozenlist==1.3.3
     # via
     #   -c requirements/_base.txt
     #   aiohttp
     #   aiosignal
-fsspec==2023.3.0
+fsspec==2023.5.0
     # via
     #   -c requirements/../../dask-sidecar/requirements/_dask-distributed.txt
     #   dask
@@ -74,10 +73,6 @@ greenlet==2.0.2
     # via
     #   -c requirements/_base.txt
     #   sqlalchemy
-heapdict==1.0.1
-    # via
-    #   -c requirements/../../dask-sidecar/requirements/_dask-distributed.txt
-    #   zict
 icdiff==2.0.6
     # via pytest-icdiff
 idna==3.4
@@ -85,6 +80,10 @@ idna==3.4
     #   -c requirements/_base.txt
     #   requests
     #   yarl
+importlib-metadata==6.6.0
+    # via
+    #   -c requirements/../../dask-sidecar/requirements/_dask-distributed.txt
+    #   dask
 iniconfig==2.0.0
     # via pytest
 jinja2==3.1.2
@@ -113,7 +112,7 @@ mypy==1.3.0
     # via sqlalchemy
 mypy-extensions==1.0.0
     # via mypy
-packaging==23.0
+packaging==23.1
     # via
     #   -c requirements/../../dask-sidecar/requirements/_dask-distributed.txt
     #   dask
@@ -121,7 +120,7 @@
     #   docker
     #   pytest
     #   pytest-sugar
-partd==1.3.0
+partd==1.4.0
     # via
     #   -c requirements/../../dask-sidecar/requirements/_dask-distributed.txt
     #   dask
@@ -129,7 +128,7 @@ pluggy==1.0.0
     # via pytest
 pprintpp==0.4.0
     # via pytest-icdiff
-psutil==5.9.4
+psutil==5.9.5
     # via
     #   -c requirements/../../dask-sidecar/requirements/_dask-distributed.txt
     #   distributed
@@ -170,7 +169,7 @@ sortedcontainers==2.4.0
     # via
     #   -c requirements/../../dask-sidecar/requirements/_dask-distributed.txt
     #   distributed
-sqlalchemy==1.4.47
+sqlalchemy==1.4.48
     # via -r requirements/_test.in
 sqlalchemy2-stubs==0.0.2a34
     # via sqlalchemy
@@ -193,29 +192,33 @@ toolz==0.12.0
     #   dask
     #   distributed
     #   partd
-tornado==6.2
+tornado==6.3.2
     # via
     #   -c requirements/../../dask-sidecar/requirements/_dask-distributed.txt
     #   dask-gateway
     #   distributed
-typing-extensions==4.5.0
+typing-extensions==4.6.0
     # via
     #   -c requirements/_base.txt
     #   mypy
     #   sqlalchemy2-stubs
-urllib3==1.26.14
+urllib3==1.26.16
     # via
     #   -c requirements/../../dask-sidecar/requirements/_dask-distributed.txt
     #   distributed
     #   docker
     #   requests
-websocket-client==1.5.1
+websocket-client==1.5.2
     # via docker
-yarl==1.8.2
+yarl==1.9.2
     # via
     #   -c requirements/_base.txt
     #   aiohttp
-zict==2.2.0
+zict==3.0.0
     # via
     #   -c requirements/../../dask-sidecar/requirements/_dask-distributed.txt
     #   distributed
+zipp==3.15.0
+    # via
+    #   -c requirements/../../dask-sidecar/requirements/_dask-distributed.txt
+    #   importlib-metadata
diff --git a/services/osparc-gateway-server/requirements/_tools.txt b/services/osparc-gateway-server/requirements/_tools.txt
index 16138045d99..6a14c031481 100644
--- a/services/osparc-gateway-server/requirements/_tools.txt
+++ b/services/osparc-gateway-server/requirements/_tools.txt
@@ -41,7 +41,7 @@ mypy-extensions==1.0.0
     #   black
 nodeenv==1.8.0
     # via pre-commit
-packaging==23.0
+packaging==23.1
     # via
     #   -c requirements/_test.txt
     #   black
@@ -75,7 +75,7 @@ tomli==2.0.1
     #   pyproject-hooks
 tomlkit==0.11.8
     # via pylint
-typing-extensions==4.5.0
+typing-extensions==4.6.0
     # via
     #   -c requirements/_test.txt
     #   astroid
diff --git a/services/osparc-gateway-server/tests/system/requirements/_test.txt b/services/osparc-gateway-server/tests/system/requirements/_test.txt
index 8cd4eef97ed..c9f52f05564 100644
--- a/services/osparc-gateway-server/tests/system/requirements/_test.txt
+++ b/services/osparc-gateway-server/tests/system/requirements/_test.txt
@@ -38,16 +38,15 @@ cloudpickle==2.2.1
     #   distributed
 coverage==7.2.5
     # via pytest-cov
-dask==2023.3.0
+dask==2023.3.2
     # via
-    #   -c requirements/../../../../dask-sidecar/requirements/_dask-distributed.txt
     #   dask-gateway
     #   distributed
 dask-gateway==2023.1.1
     # via -r requirements/_test.in
 dill==0.3.6
     # via pylint
-distributed==2023.3.0
+distributed==2023.3.2
     # via
     #   -c requirements/../../../../dask-sidecar/requirements/_dask-distributed.txt
     #   dask-gateway
@@ -61,20 +60,20 @@ frozenlist==1.3.3
     # via
     #   aiohttp
     #   aiosignal
-fsspec==2023.3.0
+fsspec==2023.5.0
     # via
     #   -c requirements/../../../../dask-sidecar/requirements/_dask-distributed.txt
     #   dask
-heapdict==1.0.1
-    # via
-    #   -c requirements/../../../../dask-sidecar/requirements/_dask-distributed.txt
-    #   zict
 icdiff==2.0.6
     # via pytest-icdiff
 idna==3.4
     # via
     #   requests
     #   yarl
+importlib-metadata==6.6.0
+    # via
+    #   -c requirements/../../../../dask-sidecar/requirements/_dask-distributed.txt
+    #   dask
 iniconfig==2.0.0
     # via pytest
 isort==5.12.0
@@ -108,7 +107,7 @@ multidict==6.0.4
     #   yarl
 numpy==1.24.3
     # via -r requirements/_test.in
-packaging==23.0
+packaging==23.1
     # via
     #   -c requirements/../../../../dask-sidecar/requirements/_dask-distributed.txt
     #   dask
@@ -116,7 +115,7 @@
     #   docker
     #   pytest
     #   pytest-sugar
-partd==1.3.0
+partd==1.4.0
     # via
     #   -c requirements/../../../../dask-sidecar/requirements/_dask-distributed.txt
     #   dask
@@ -126,7 +125,7 @@ pluggy==1.0.0
     # via pytest
 pprintpp==0.4.0
     # via pytest-icdiff
-psutil==5.9.4
+psutil==5.9.5
     # via
     #   -c requirements/../../../../dask-sidecar/requirements/_dask-distributed.txt
     #   distributed
@@ -192,7 +191,7 @@ toolz==0.12.0
     #   dask
     #   distributed
     #   partd
-tornado==6.2
+tornado==6.3.2
     # via
     #   -c requirements/../../../../dask-sidecar/requirements/_dask-distributed.txt
     #   dask-gateway
@@ -201,7 +200,7 @@ typing-extensions==4.5.0
     # via
     #   aiodocker
     #   astroid
-urllib3==1.26.14
+urllib3==1.26.16
     # via
     #   -c requirements/../../../../dask-sidecar/requirements/_dask-distributed.txt
     #   distributed
@@ -213,7 +212,11 @@ wrapt==1.15.0
     # via astroid
 yarl==1.9.2
     # via aiohttp
-zict==2.2.0
+zict==3.0.0
     # via
     #   -c requirements/../../../../dask-sidecar/requirements/_dask-distributed.txt
     #   distributed
+zipp==3.15.0
+    # via
+    #   -c requirements/../../../../dask-sidecar/requirements/_dask-distributed.txt
+    #   importlib-metadata
diff --git a/services/osparc-gateway-server/tests/system/requirements/_tools.txt b/services/osparc-gateway-server/tests/system/requirements/_tools.txt
index 1e8e51a640f..8944af115ec 100644
--- a/services/osparc-gateway-server/tests/system/requirements/_tools.txt
+++ b/services/osparc-gateway-server/tests/system/requirements/_tools.txt
@@ -47,7 +47,7 @@ mypy-extensions==1.0.0
     # via black
 nodeenv==1.8.0
     # via pre-commit
-packaging==23.0
+packaging==23.1
     # via
     #   -c requirements/_test.txt
     #   black
diff --git a/services/web/server/requirements/_test.txt b/services/web/server/requirements/_test.txt
index 55cbae3abc7..ca713afa16f 100644
--- a/services/web/server/requirements/_test.txt
+++ b/services/web/server/requirements/_test.txt
@@ -167,7 +167,9 @@ pyyaml==5.4.1
 redis==4.5.4
     # via -r requirements/_test.in
 requests==2.31.0
-    # via docker
+    # via
+    #   -c requirements/_base.txt
+    #   docker
 six==1.16.0
     # via
     #   -c requirements/_base.txt
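
Note: the new test_run_computational_sidecar_dask_does_not_lose_messages_with_pubsub test above relies on dask.distributed's Pub/Sub channels (the log_sub.buffer / progress_sub.buffer fixtures). The following is a minimal, self-contained sketch of that pattern for orientation only; the topic name "logs", the in-process cluster setup, and the message count are illustrative assumptions, not part of this changeset.

# Illustrative sketch only -- not part of the diff above.
import time

import distributed


def produce(n: int) -> int:
    # runs inside a worker task: publish n messages on the "logs" topic
    pub = distributed.Pub("logs")
    for i in range(1, n + 1):
        pub.put(f"This is iteration {i}")
    return n


if __name__ == "__main__":
    client = distributed.Client(processes=False)  # threads-only cluster, for the sketch
    sub = distributed.Sub("logs")  # subscribe before submitting so no message is missed
    assert client.submit(produce, 100).result() == 100
    time.sleep(1)  # the subscription drains in the background; give it a moment
    # Sub.buffer accumulates every message received so far; the tests above
    # assert on it (count, ordering, first/last values) to prove nothing was lost
    assert len(sub.buffer) == 100
    client.close()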