From 85acdac538c4df7e529b0d672b61b9bca0dee599 Mon Sep 17 00:00:00 2001 From: Mohammad Alisafaee Date: Wed, 4 Oct 2023 17:18:59 +0200 Subject: [PATCH] add v2.2 to service api --- .../service/controllers/datasets_add_file.py | 4 +- .../ui/service/controllers/datasets_create.py | 4 +- renku/ui/service/controllers/datasets_edit.py | 4 +- .../controllers/datasets_files_list.py | 4 +- .../ui/service/controllers/datasets_import.py | 4 +- renku/ui/service/controllers/datasets_list.py | 4 +- .../ui/service/controllers/datasets_remove.py | 4 +- .../ui/service/controllers/datasets_unlink.py | 4 +- .../controllers/v1/datasets_add_file.py | 26 ++ .../service/controllers/v1/datasets_create.py | 26 ++ .../service/controllers/v1/datasets_edit.py | 27 ++ .../controllers/v1/datasets_files_list.py | 26 ++ .../service/controllers/v1/datasets_import.py | 26 ++ .../service/controllers/v1/datasets_list.py | 27 ++ .../service/controllers/v1/datasets_remove.py | 26 ++ .../service/controllers/v1/datasets_unlink.py | 27 ++ renku/ui/service/serializers/v1/datasets.py | 227 ++++++++++++++ renku/ui/service/views/api_versions.py | 14 +- renku/ui/service/views/cache.py | 16 +- renku/ui/service/views/datasets.py | 36 ++- renku/ui/service/views/templates.py | 8 +- renku/ui/service/views/v1/cache.py | 15 +- renku/ui/service/views/v1/datasets.py | 285 ++++++++++++++++++ renku/ui/service/views/v1/templates.py | 9 +- 24 files changed, 801 insertions(+), 52 deletions(-) create mode 100644 renku/ui/service/controllers/v1/datasets_add_file.py create mode 100644 renku/ui/service/controllers/v1/datasets_create.py create mode 100644 renku/ui/service/controllers/v1/datasets_edit.py create mode 100644 renku/ui/service/controllers/v1/datasets_files_list.py create mode 100644 renku/ui/service/controllers/v1/datasets_import.py create mode 100644 renku/ui/service/controllers/v1/datasets_list.py create mode 100644 renku/ui/service/controllers/v1/datasets_remove.py create mode 100644 
renku/ui/service/controllers/v1/datasets_unlink.py create mode 100644 renku/ui/service/serializers/v1/datasets.py create mode 100644 renku/ui/service/views/v1/datasets.py diff --git a/renku/ui/service/controllers/datasets_add_file.py b/renku/ui/service/controllers/datasets_add_file.py index 1af7c81028..6fbed71ebc 100644 --- a/renku/ui/service/controllers/datasets_add_file.py +++ b/renku/ui/service/controllers/datasets_add_file.py @@ -39,7 +39,7 @@ class DatasetsAddFileCtrl(ServiceCtrl, RenkuOpSyncMixin): def __init__(self, cache, user_data, request_data, migrate_project=False): """Construct a datasets add controller.""" - self.ctx = DatasetsAddFileCtrl.REQUEST_SERIALIZER.load(request_data) + self.ctx = self.REQUEST_SERIALIZER.load(request_data) self.ctx["commit_message"] = f"{MESSAGE_PREFIX} dataset add {self.ctx['slug']}" super().__init__(cache, user_data, request_data, migrate_project=migrate_project) @@ -132,4 +132,4 @@ def to_response(self): **{"local_paths": local_paths, "enqueued_paths": enqueued_paths, "remote_branch": remote_branch}, } - return result_response(DatasetsAddFileCtrl.RESPONSE_SERIALIZER, response) + return result_response(self.RESPONSE_SERIALIZER, response) diff --git a/renku/ui/service/controllers/datasets_create.py b/renku/ui/service/controllers/datasets_create.py index a51c20f4e1..51cf4c5cde 100644 --- a/renku/ui/service/controllers/datasets_create.py +++ b/renku/ui/service/controllers/datasets_create.py @@ -34,7 +34,7 @@ class DatasetsCreateCtrl(ServiceCtrl, RenkuOpSyncMixin): def __init__(self, cache, user_data, request_data, migrate_project=False): """Construct a datasets create controller.""" - self.ctx = DatasetsCreateCtrl.REQUEST_SERIALIZER.load(request_data) + self.ctx = self.REQUEST_SERIALIZER.load(request_data) self.ctx["commit_message"] = f"{MESSAGE_PREFIX} dataset create {self.ctx['slug']}" super().__init__(cache, user_data, request_data, migrate_project) @@ -91,4 +91,4 @@ def to_response(self): op_result = self.ctx 
op_result["remote_branch"] = remote_branch - return result_response(DatasetsCreateCtrl.RESPONSE_SERIALIZER, op_result) + return result_response(self.RESPONSE_SERIALIZER, op_result) diff --git a/renku/ui/service/controllers/datasets_edit.py b/renku/ui/service/controllers/datasets_edit.py index b2ce9cedb3..e86569e4e3 100644 --- a/renku/ui/service/controllers/datasets_edit.py +++ b/renku/ui/service/controllers/datasets_edit.py @@ -38,7 +38,7 @@ class DatasetsEditCtrl(ServiceCtrl, RenkuOpSyncMixin): def __init__(self, cache, user_data, request_data, migrate_project=False): """Construct a datasets edit list controller.""" - self.ctx = cast(Dict, DatasetsEditCtrl.REQUEST_SERIALIZER.load(request_data)) + self.ctx = cast(Dict, self.REQUEST_SERIALIZER.load(request_data)) self.ctx["commit_message"] = f"{MESSAGE_PREFIX} dataset edit {self.ctx['slug']}" super().__init__(cache, user_data, request_data, migrate_project=migrate_project) @@ -142,4 +142,4 @@ def to_response(self): "remote_branch": remote_branch, } - return result_response(DatasetsEditCtrl.RESPONSE_SERIALIZER, response) + return result_response(self.RESPONSE_SERIALIZER, response) diff --git a/renku/ui/service/controllers/datasets_files_list.py b/renku/ui/service/controllers/datasets_files_list.py index 230a4eb2a1..37806ea3cd 100644 --- a/renku/ui/service/controllers/datasets_files_list.py +++ b/renku/ui/service/controllers/datasets_files_list.py @@ -30,7 +30,7 @@ class DatasetsFilesListCtrl(ServiceCtrl, RenkuOperationMixin): def __init__(self, cache, user_data, request_data): """Construct a datasets files list controller.""" - self.ctx = DatasetsFilesListCtrl.REQUEST_SERIALIZER.load(request_data) + self.ctx = self.REQUEST_SERIALIZER.load(request_data) super().__init__(cache, user_data, request_data) @@ -47,4 +47,4 @@ def renku_op(self): def to_response(self): """Execute controller flow and serialize to service response.""" self.ctx["files"] = self.execute_op() - return 
result_response(DatasetsFilesListCtrl.RESPONSE_SERIALIZER, self.ctx) + return result_response(self.RESPONSE_SERIALIZER, self.ctx) diff --git a/renku/ui/service/controllers/datasets_import.py b/renku/ui/service/controllers/datasets_import.py index 8024dcd36f..0eb8675b9b 100644 --- a/renku/ui/service/controllers/datasets_import.py +++ b/renku/ui/service/controllers/datasets_import.py @@ -34,7 +34,7 @@ class DatasetsImportCtrl(ServiceCtrl, RenkuOpSyncMixin): def __init__(self, cache, user_data, request_data, migrate_project=False): """Construct a datasets import controller.""" - self.ctx = DatasetsImportCtrl.REQUEST_SERIALIZER.load(request_data) + self.ctx = self.REQUEST_SERIALIZER.load(request_data) self.ctx["commit_message"] = f"{MESSAGE_PREFIX} dataset import of {self.ctx['dataset_uri']}" super().__init__(cache, user_data, request_data, migrate_project=migrate_project) @@ -73,4 +73,4 @@ def renku_op(self): def to_response(self): """Execute controller flow and serialize to service response.""" - return result_response(DatasetsImportCtrl.RESPONSE_SERIALIZER, self.execute_op()) + return result_response(self.RESPONSE_SERIALIZER, self.execute_op()) diff --git a/renku/ui/service/controllers/datasets_list.py b/renku/ui/service/controllers/datasets_list.py index 9bcccaa3e3..6e6a239806 100644 --- a/renku/ui/service/controllers/datasets_list.py +++ b/renku/ui/service/controllers/datasets_list.py @@ -30,7 +30,7 @@ class DatasetsListCtrl(ServiceCtrl, RenkuOperationMixin): def __init__(self, cache, user_data, request_data): """Construct a datasets list controller.""" - self.ctx = DatasetsListCtrl.REQUEST_SERIALIZER.load(request_data) + self.ctx = self.REQUEST_SERIALIZER.load(request_data) super().__init__(cache, user_data, request_data) @property @@ -46,4 +46,4 @@ def renku_op(self): def to_response(self): """Execute controller flow and serialize to service response.""" self.ctx["datasets"] = self.execute_op() - return result_response(DatasetsListCtrl.RESPONSE_SERIALIZER, 
self.ctx) + return result_response(self.RESPONSE_SERIALIZER, self.ctx) diff --git a/renku/ui/service/controllers/datasets_remove.py b/renku/ui/service/controllers/datasets_remove.py index 1460264577..b67ac08d4f 100644 --- a/renku/ui/service/controllers/datasets_remove.py +++ b/renku/ui/service/controllers/datasets_remove.py @@ -31,7 +31,7 @@ class DatasetsRemoveCtrl(ServiceCtrl, RenkuOpSyncMixin): def __init__(self, cache, user_data, request_data, migrate_project=False): """Construct a datasets remove controller.""" - self.ctx = DatasetsRemoveCtrl.REQUEST_SERIALIZER.load(request_data) + self.ctx = self.REQUEST_SERIALIZER.load(request_data) self.ctx["commit_message"] = f"{MESSAGE_PREFIX} dataset remove {self.ctx['slug']}" super().__init__(cache, user_data, request_data, migrate_project=migrate_project) @@ -58,4 +58,4 @@ def to_response(self): response = self.ctx response["remote_branch"] = remote_branch - return result_response(DatasetsRemoveCtrl.RESPONSE_SERIALIZER, response) + return result_response(self.RESPONSE_SERIALIZER, response) diff --git a/renku/ui/service/controllers/datasets_unlink.py b/renku/ui/service/controllers/datasets_unlink.py index 84b5107af7..b2bfcd2db1 100644 --- a/renku/ui/service/controllers/datasets_unlink.py +++ b/renku/ui/service/controllers/datasets_unlink.py @@ -31,7 +31,7 @@ class DatasetsUnlinkCtrl(ServiceCtrl, RenkuOpSyncMixin): def __init__(self, cache, user_data, request_data, migrate_project=False): """Construct a datasets unlink list controller.""" - self.ctx = DatasetsUnlinkCtrl.REQUEST_SERIALIZER.load(request_data) + self.ctx = self.REQUEST_SERIALIZER.load(request_data) self.include = self.ctx.get("include_filter") self.exclude = self.ctx.get("exclude_filter") @@ -79,4 +79,4 @@ def to_response(self): "remote_branch": remote_branch, } - return result_response(DatasetsUnlinkCtrl.RESPONSE_SERIALIZER, response) + return result_response(self.RESPONSE_SERIALIZER, response) diff --git 
a/renku/ui/service/controllers/v1/datasets_add_file.py b/renku/ui/service/controllers/v1/datasets_add_file.py new file mode 100644 index 0000000000..5b5d6ae938 --- /dev/null +++ b/renku/ui/service/controllers/v1/datasets_add_file.py @@ -0,0 +1,26 @@ +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and +# Eidgenössische Technische Hochschule Zürich (ETHZ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Renku service datasets add controller.""" + +from renku.ui.service.controllers.datasets_add_file import DatasetsAddFileCtrl +from renku.ui.service.serializers.v1.datasets import DatasetAddRequest_2_1, DatasetAddResponseRPC_2_1 + + +class DatasetsAddFileCtrl_2_1(DatasetsAddFileCtrl): + """Controller for datasets add endpoint.""" + + REQUEST_SERIALIZER = DatasetAddRequest_2_1() # type: ignore + RESPONSE_SERIALIZER = DatasetAddResponseRPC_2_1() # type: ignore diff --git a/renku/ui/service/controllers/v1/datasets_create.py b/renku/ui/service/controllers/v1/datasets_create.py new file mode 100644 index 0000000000..9701fda2f2 --- /dev/null +++ b/renku/ui/service/controllers/v1/datasets_create.py @@ -0,0 +1,26 @@ +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and +# Eidgenössische Technische Hochschule Zürich (ETHZ). 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Renku service datasets create controller.""" + +from renku.ui.service.controllers.datasets_create import DatasetsCreateCtrl +from renku.ui.service.serializers.v1.datasets import DatasetCreateRequest_2_1, DatasetCreateResponseRPC_2_1 + + +class DatasetsCreateCtrl_2_1(DatasetsCreateCtrl): + """Controller for datasets create endpoint.""" + + REQUEST_SERIALIZER = DatasetCreateRequest_2_1() # type: ignore + RESPONSE_SERIALIZER = DatasetCreateResponseRPC_2_1() # type: ignore diff --git a/renku/ui/service/controllers/v1/datasets_edit.py b/renku/ui/service/controllers/v1/datasets_edit.py new file mode 100644 index 0000000000..6835a4081a --- /dev/null +++ b/renku/ui/service/controllers/v1/datasets_edit.py @@ -0,0 +1,27 @@ +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and +# Eidgenössische Technische Hochschule Zürich (ETHZ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +"""Renku service datasets edit controller.""" + +from renku.ui.service.controllers.datasets_edit import DatasetsEditCtrl +from renku.ui.service.serializers.datasets import DatasetEditResponseRPC +from renku.ui.service.serializers.v1.datasets import DatasetEditRequest_2_1 + + +class DatasetsEditCtrl_2_1(DatasetsEditCtrl): + """Controller for datasets edit endpoint.""" + + REQUEST_SERIALIZER = DatasetEditRequest_2_1() # type: ignore + RESPONSE_SERIALIZER = DatasetEditResponseRPC() diff --git a/renku/ui/service/controllers/v1/datasets_files_list.py b/renku/ui/service/controllers/v1/datasets_files_list.py new file mode 100644 index 0000000000..f09ad202f1 --- /dev/null +++ b/renku/ui/service/controllers/v1/datasets_files_list.py @@ -0,0 +1,26 @@ +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and +# Eidgenössische Technische Hochschule Zürich (ETHZ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Renku service datasets files controller.""" + +from renku.ui.service.controllers.datasets_files_list import DatasetsFilesListCtrl +from renku.ui.service.serializers.v1.datasets import DatasetFilesListRequest_2_1, DatasetFilesListResponseRPC_2_1 + + +class DatasetsFilesListCtrl_2_1(DatasetsFilesListCtrl): + """Controller for datasets files list endpoint.""" + + REQUEST_SERIALIZER = DatasetFilesListRequest_2_1() # type: ignore + RESPONSE_SERIALIZER = DatasetFilesListResponseRPC_2_1() # type: ignore diff --git a/renku/ui/service/controllers/v1/datasets_import.py b/renku/ui/service/controllers/v1/datasets_import.py new file mode 100644 index 0000000000..dbd3c3892f --- /dev/null +++ b/renku/ui/service/controllers/v1/datasets_import.py @@ -0,0 +1,26 @@ +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and +# Eidgenössische Technische Hochschule Zürich (ETHZ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Renku service datasets import controller.""" +from renku.ui.service.controllers.datasets_import import DatasetsImportCtrl +from renku.ui.service.serializers.datasets import DatasetImportResponseRPC +from renku.ui.service.serializers.v1.datasets import DatasetImportRequest_2_1 + + +class DatasetsImportCtrl_2_1(DatasetsImportCtrl): + """Controller for datasets import endpoint.""" + + REQUEST_SERIALIZER = DatasetImportRequest_2_1() # type: ignore + RESPONSE_SERIALIZER = DatasetImportResponseRPC() diff --git a/renku/ui/service/controllers/v1/datasets_list.py b/renku/ui/service/controllers/v1/datasets_list.py new file mode 100644 index 0000000000..49efc3391f --- /dev/null +++ b/renku/ui/service/controllers/v1/datasets_list.py @@ -0,0 +1,27 @@ +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and +# Eidgenössische Technische Hochschule Zürich (ETHZ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Renku service datasets list controller.""" + +from renku.ui.service.controllers.datasets_list import DatasetsListCtrl +from renku.ui.service.serializers.datasets import DatasetListRequest +from renku.ui.service.serializers.v1.datasets import DatasetListResponseRPC_2_1 + + +class DatasetsListCtrl_2_1(DatasetsListCtrl): + """Controller for datasets list endpoint.""" + + REQUEST_SERIALIZER = DatasetListRequest() + RESPONSE_SERIALIZER = DatasetListResponseRPC_2_1() # type: ignore diff --git a/renku/ui/service/controllers/v1/datasets_remove.py b/renku/ui/service/controllers/v1/datasets_remove.py new file mode 100644 index 0000000000..ffa7c1b4d4 --- /dev/null +++ b/renku/ui/service/controllers/v1/datasets_remove.py @@ -0,0 +1,26 @@ +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and +# Eidgenössische Technische Hochschule Zürich (ETHZ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Renku service datasets remove controller.""" + +from renku.ui.service.controllers.datasets_remove import DatasetsRemoveCtrl +from renku.ui.service.serializers.v1.datasets import DatasetRemoveRequest_2_1, DatasetRemoveResponseRPC_2_1 + + +class DatasetsRemoveCtrl_2_1(DatasetsRemoveCtrl): + """Controller for datasets remove endpoint.""" + + REQUEST_SERIALIZER = DatasetRemoveRequest_2_1() # type: ignore + RESPONSE_SERIALIZER = DatasetRemoveResponseRPC_2_1() # type: ignore diff --git a/renku/ui/service/controllers/v1/datasets_unlink.py b/renku/ui/service/controllers/v1/datasets_unlink.py new file mode 100644 index 0000000000..7ed3ad878c --- /dev/null +++ b/renku/ui/service/controllers/v1/datasets_unlink.py @@ -0,0 +1,27 @@ +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and +# Eidgenössische Technische Hochschule Zürich (ETHZ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Renku service datasets unlink controller.""" + +from renku.ui.service.controllers.datasets_unlink import DatasetsUnlinkCtrl +from renku.ui.service.serializers.datasets import DatasetUnlinkResponseRPC +from renku.ui.service.serializers.v1.datasets import DatasetUnlinkRequest_2_1 + + +class DatasetsUnlinkCtrl_2_1(DatasetsUnlinkCtrl): + """Controller for datasets unlink endpoint.""" + + REQUEST_SERIALIZER = DatasetUnlinkRequest_2_1() # type: ignore + RESPONSE_SERIALIZER = DatasetUnlinkResponseRPC() diff --git a/renku/ui/service/serializers/v1/datasets.py b/renku/ui/service/serializers/v1/datasets.py new file mode 100644 index 0000000000..c87f856ee4 --- /dev/null +++ b/renku/ui/service/serializers/v1/datasets.py @@ -0,0 +1,227 @@ +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and +# Eidgenössische Technische Hochschule Zürich (ETHZ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Renku service datasets serializers for API before 2.2.""" + +from marshmallow import Schema, ValidationError, fields, post_load + +from renku.domain_model.dataset import AnnotationJson +from renku.domain_model.dataset import DatasetCreatorsJson +from renku.domain_model.dataset import DatasetCreatorsJson as DatasetCreators +from renku.domain_model.dataset import ImageObjectJson as ImageObject +from renku.domain_model.dataset import ImageObjectRequestJson as ImageObjectRequest +from renku.ui.service.serializers.common import AsyncSchema, MigrateSchema, RemoteRepositorySchema, RenkuSyncSchema +from renku.ui.service.serializers.datasets import DatasetAddFile, DatasetFileDetails +from renku.ui.service.serializers.rpc import JsonRPCResponse + + +class DatasetDetails_2_1(Schema): + """Schema for dataset details.""" + + name = fields.String(required=True) + version = fields.String(allow_none=True) + created_at = fields.String(allow_none=True, attribute="date_created") + + title = fields.String() + creators = fields.List(fields.Nested(DatasetCreatorsJson)) + description = fields.String() + keywords = fields.List(fields.String()) + identifier = fields.String() + storage = fields.String() + + annotations = fields.List(fields.Nested(AnnotationJson)) + + data_directory = fields.Method("get_datadir") + + @staticmethod + def get_datadir(obj): + """Get data directory.""" + if isinstance(obj, dict): + return str(obj.get("datadir_path", obj.get("datadir", ""))) + if hasattr(obj, "datadir_path"): + return obj.datadir_path + + return str(obj.get_datadir()) + + +class DatasetSlugSchema_2_1(Schema): + """Schema for dataset slug.""" + + slug = fields.String(metadata={"description": "Mandatory dataset slug."}, required=True) + + +class DatasetDetailsRequest_2_1(DatasetDetails_2_1): + """Request schema with dataset image information.""" + + images = fields.List(fields.Nested(ImageObjectRequest)) + + custom_metadata: fields.Field = fields.Dict() + + +class 
DatasetCreateRequest_2_1(AsyncSchema, DatasetDetailsRequest_2_1, RemoteRepositorySchema, MigrateSchema): + """Request schema for a dataset create view.""" + + # NOTE: Override field in DatasetDetails + data_directory = fields.String( # type: ignore + load_default=None, + metadata={"description": "Base dataset data directory. '/' by default"}, + ) + + +class DatasetCreateResponse_2_1(DatasetSlugSchema_2_1, RenkuSyncSchema): + """Response schema for a dataset create view.""" + + +class DatasetCreateResponseRPC_2_1(JsonRPCResponse): + """RPC response schema for dataset create view.""" + + result = fields.Nested(DatasetCreateResponse_2_1) + + +class DatasetRemoveRequest_2_1(AsyncSchema, DatasetSlugSchema_2_1, RemoteRepositorySchema, MigrateSchema): + """Request schema for a dataset remove.""" + + +class DatasetRemoveResponse_2_1(DatasetSlugSchema_2_1, RenkuSyncSchema): + """Response schema for a dataset remove view.""" + + +class DatasetRemoveResponseRPC_2_1(JsonRPCResponse): + """RPC response schema for dataset remove view.""" + + result = fields.Nested(DatasetRemoveResponse_2_1) + + +class DatasetAddRequest_2_1(AsyncSchema, DatasetSlugSchema_2_1, RemoteRepositorySchema, MigrateSchema): + """Request schema for a dataset add file view.""" + + files = fields.List(fields.Nested(DatasetAddFile), required=True) + + create_dataset = fields.Boolean(load_default=False) + force = fields.Boolean(load_default=False) + + client_extras = fields.String() + + @post_load() + def check_files(self, data, **kwargs): + """Check serialized file list.""" + for _file in data["files"]: + if "file_id" in _file and "file_path" in _file: + raise ValidationError("invalid reference found: use either `file_id` or `file_path`") + + return data + + +class DatasetAddResponse_2_1(DatasetSlugSchema_2_1, RenkuSyncSchema): + """Response schema for a dataset add file view.""" + + project_id = fields.String(required=True) + files = fields.List(fields.Nested(DatasetAddFile), required=True) + + +class 
DatasetAddResponseRPC_2_1(JsonRPCResponse): + """RPC schema for a dataset add.""" + + result = fields.Nested(DatasetAddResponse_2_1) + + +class DatasetDetailsResponse_2_1(DatasetDetails_2_1): + """Response schema with dataset image information.""" + + images = fields.List(fields.Nested(ImageObject)) + + +class DatasetListResponse_2_1(Schema): + """Response schema for dataset list view.""" + + datasets = fields.List(fields.Nested(DatasetDetailsResponse_2_1), required=True) + + +class DatasetListResponseRPC_2_1(JsonRPCResponse): + """RPC response schema for dataset list view.""" + + result = fields.Nested(DatasetListResponse_2_1) + + +class DatasetFilesListRequest_2_1(DatasetSlugSchema_2_1, RemoteRepositorySchema): + """Request schema for dataset files list view.""" + + +class DatasetFilesListResponse_2_1(DatasetSlugSchema_2_1): + """Response schema for dataset files list view.""" + + files = fields.List(fields.Nested(DatasetFileDetails), required=True) + + +class DatasetFilesListResponseRPC_2_1(JsonRPCResponse): + """RPC schema for dataset files list view.""" + + result = fields.Nested(DatasetFilesListResponse_2_1) + + +class DatasetImportRequest_2_1(AsyncSchema, RemoteRepositorySchema, MigrateSchema): + """Dataset import request.""" + + dataset_uri = fields.String(required=True) + slug = fields.String(metadata={"description": "Optional dataset slug."}) + extract = fields.Boolean() + tag = fields.String(metadata={"description": "Dataset version to import."}) + data_directory = fields.String( + load_default=None, + metadata={"description": "Base dataset data directory. 
'/' by default"}, + ) + + +class DatasetEditRequest_2_1( + AsyncSchema, + DatasetDetailsRequest_2_1, + DatasetSlugSchema_2_1, + RemoteRepositorySchema, + MigrateSchema, +): + """Dataset edit metadata request.""" + + name = fields.String(metadata={"description": "New name of the dataset"}) + description = fields.String(metadata={"description": "New description of the dataset"}) + creators = fields.List(fields.Nested(DatasetCreators), metadata={"description": "New creators of the dataset"}) + keywords = fields.List(fields.String(), allow_none=True, metadata={"description": "New keywords for the dataset"}) + images = fields.List( + fields.Nested(ImageObjectRequest), allow_none=True, metadata={"description": "New dataset images"} + ) + custom_metadata = fields.List( + fields.Dict(), metadata={"description": "New custom metadata for the dataset"}, allow_none=True + ) + custom_metadata_source = fields.String( + allow_none=True, + metadata={"description": "Source for the custom metadata for the dataset"}, + ) + + +class DatasetUnlinkRequest_2_1(AsyncSchema, DatasetSlugSchema_2_1, RemoteRepositorySchema, MigrateSchema): + """Dataset unlink file request.""" + + include_filters = fields.List(fields.String()) + exclude_filters = fields.List(fields.String()) + + @post_load() + def check_filters(self, data, **kwargs): + """Check filters.""" + include_filter = data.get("include_filters") + exclude_filter = data.get("exclude_filters") + + if not include_filter and not exclude_filter: + raise ValidationError("one of the filters must be specified") + + return data diff --git a/renku/ui/service/views/api_versions.py b/renku/ui/service/views/api_versions.py index af7376d408..d4074bbbd9 100644 --- a/renku/ui/service/views/api_versions.py +++ b/renku/ui/service/views/api_versions.py @@ -62,12 +62,20 @@ def add_url_rule( V1_4 = ApiVersion("1.4") V1_5 = ApiVersion("1.5") V2_0 = ApiVersion("2.0") -V2_1 = ApiVersion("2.1", is_base_version=True) +V2_1 = ApiVersion("2.1") +V2_2 = 
ApiVersion("2.2", is_base_version=True) -VERSIONS_FROM_V1_5 = [V1_5, V2_0, V2_1] +VERSIONS_FROM_V2_2 = [V2_2] +VERSIONS_FROM_V2_1 = [V2_1] + VERSIONS_FROM_V2_2 +VERSIONS_FROM_V2_0 = [V2_0] + VERSIONS_FROM_V2_1 +VERSIONS_FROM_V1_5 = [V1_5] + VERSIONS_FROM_V2_0 VERSIONS_FROM_V1_4 = [V1_4] + VERSIONS_FROM_V1_5 VERSIONS_FROM_V1_1 = [V1_1, V1_2, V1_3] + VERSIONS_FROM_V1_4 ALL_VERSIONS = [V1_0] + VERSIONS_FROM_V1_1 +VERSIONS_BEFORE_1_1 = [V1_0] +VERSIONS_BEFORE_2_0 = [V1_1, V1_2, V1_3, V1_4, V1_5] + VERSIONS_BEFORE_1_1 +VERSIONS_BEFORE_2_2 = [V2_0, V2_1] + VERSIONS_BEFORE_2_0 + MINIMUM_VERSION = V1_0 -MAXIMUM_VERSION = V2_1 +MAXIMUM_VERSION = V2_2 diff --git a/renku/ui/service/views/cache.py b/renku/ui/service/views/cache.py index 2c49e50eb5..aa5df0bade 100644 --- a/renku/ui/service/views/cache.py +++ b/renku/ui/service/views/cache.py @@ -25,14 +25,20 @@ from renku.ui.service.gateways.gitlab_api_provider import GitlabAPIProvider from renku.ui.service.gateways.repository_cache import LocalRepositoryCache from renku.ui.service.jobs.cleanup import cache_files_cleanup -from renku.ui.service.views.api_versions import ALL_VERSIONS, V2_0, V2_1, VERSIONS_FROM_V1_1, VersionedBlueprint +from renku.ui.service.views.api_versions import ( + ALL_VERSIONS, + VERSIONS_FROM_V1_1, + VERSIONS_FROM_V2_0, + VERSIONS_FROM_V2_1, + VersionedBlueprint, +) from renku.ui.service.views.decorators import accepts_json, optional_identity, requires_cache, requires_identity from renku.ui.service.views.error_handlers import ( handle_common_except, handle_migration_read_errors, handle_migration_write_errors, ) -from renku.ui.service.views.v1.cache import add_v1_specific_endpoints +from renku.ui.service.views.v1.cache import add_v1_specific_cache_endpoints CACHE_BLUEPRINT_TAG = "cache" cache_blueprint = VersionedBlueprint("cache", __name__, url_prefix=SERVICE_PREFIX) @@ -155,7 +161,7 @@ def migrate_project_view(user_data, cache): @cache_blueprint.route( - "/cache.migrations_check", methods=["GET"], 
provide_automatic_options=False, versions=[V2_0, V2_1] + "/cache.migrations_check", methods=["GET"], provide_automatic_options=False, versions=VERSIONS_FROM_V2_0 ) @handle_common_except @handle_migration_read_errors @@ -183,7 +189,7 @@ def migration_check_project_view(user_data, cache): return MigrationsCheckCtrl(cache, user_data, dict(request.args), GitlabAPIProvider()).to_response() -@cache_blueprint.route("/cache.cleanup", methods=["GET"], provide_automatic_options=False, versions=[V2_1]) +@cache_blueprint.route("/cache.cleanup", methods=["GET"], provide_automatic_options=False, versions=VERSIONS_FROM_V2_1) @handle_common_except @handle_migration_read_errors @requires_cache @@ -209,4 +215,4 @@ def cache_cleanup(user_data, cache): return jsonify({"result": "ok"}) -cache_blueprint = add_v1_specific_endpoints(cache_blueprint) +cache_blueprint = add_v1_specific_cache_endpoints(cache_blueprint) diff --git a/renku/ui/service/views/datasets.py b/renku/ui/service/views/datasets.py index 40bb1a7d36..3b2765dc82 100644 --- a/renku/ui/service/views/datasets.py +++ b/renku/ui/service/views/datasets.py @@ -25,19 +25,22 @@ from renku.ui.service.controllers.datasets_list import DatasetsListCtrl from renku.ui.service.controllers.datasets_remove import DatasetsRemoveCtrl from renku.ui.service.controllers.datasets_unlink import DatasetsUnlinkCtrl -from renku.ui.service.views.api_versions import ALL_VERSIONS, VersionedBlueprint +from renku.ui.service.views.api_versions import VERSIONS_FROM_V2_2, VersionedBlueprint from renku.ui.service.views.decorators import accepts_json, optional_identity, requires_cache, requires_identity from renku.ui.service.views.error_handlers import ( handle_common_except, handle_datasets_unlink_errors, handle_datasets_write_errors, ) +from renku.ui.service.views.v1.datasets import add_v1_specific_dataset_endpoints DATASET_BLUEPRINT_TAG = "datasets" dataset_blueprint = VersionedBlueprint(DATASET_BLUEPRINT_TAG, __name__, url_prefix=SERVICE_PREFIX) 
-@dataset_blueprint.route("/datasets.list", methods=["GET"], provide_automatic_options=False, versions=ALL_VERSIONS) +@dataset_blueprint.route( + "/datasets.list", methods=["GET"], provide_automatic_options=False, versions=VERSIONS_FROM_V2_2 +) @handle_common_except @requires_cache @optional_identity @@ -64,7 +67,7 @@ def list_datasets_view(user_data, cache): @dataset_blueprint.route( - "/datasets.files_list", methods=["GET"], provide_automatic_options=False, versions=ALL_VERSIONS + "/datasets.files_list", methods=["GET"], provide_automatic_options=False, versions=VERSIONS_FROM_V2_2 ) @handle_common_except @requires_cache @@ -91,7 +94,9 @@ def list_dataset_files_view(user_data, cache): return DatasetsFilesListCtrl(cache, user_data, dict(request.args)).to_response() -@dataset_blueprint.route("/datasets.add", methods=["POST"], provide_automatic_options=False, versions=ALL_VERSIONS) +@dataset_blueprint.route( + "/datasets.add", methods=["POST"], provide_automatic_options=False, versions=VERSIONS_FROM_V2_2 +) @handle_common_except @handle_datasets_write_errors @accepts_json @@ -120,7 +125,9 @@ def add_file_to_dataset_view(user_data, cache): return DatasetsAddFileCtrl(cache, user_data, dict(request.json)).to_response() # type: ignore -@dataset_blueprint.route("/datasets.create", methods=["POST"], provide_automatic_options=False, versions=ALL_VERSIONS) +@dataset_blueprint.route( + "/datasets.create", methods=["POST"], provide_automatic_options=False, versions=VERSIONS_FROM_V2_2 +) @handle_common_except @handle_datasets_write_errors @accepts_json @@ -149,7 +156,9 @@ def create_dataset_view(user_data, cache): return DatasetsCreateCtrl(cache, user_data, dict(request.json)).to_response() # type: ignore -@dataset_blueprint.route("/datasets.remove", methods=["POST"], provide_automatic_options=False, versions=ALL_VERSIONS) +@dataset_blueprint.route( + "/datasets.remove", methods=["POST"], provide_automatic_options=False, versions=VERSIONS_FROM_V2_2 +) @handle_common_except 
@accepts_json @requires_cache @@ -177,7 +186,9 @@ def remove_dataset_view(user_data, cache): return DatasetsRemoveCtrl(cache, user_data, dict(request.json)).to_response() # type: ignore -@dataset_blueprint.route("/datasets.import", methods=["POST"], provide_automatic_options=False, versions=ALL_VERSIONS) +@dataset_blueprint.route( + "/datasets.import", methods=["POST"], provide_automatic_options=False, versions=VERSIONS_FROM_V2_2 +) @handle_common_except @accepts_json @requires_cache @@ -205,7 +216,9 @@ def import_dataset_view(user_data, cache): return DatasetsImportCtrl(cache, user_data, dict(request.json)).to_response() # type: ignore -@dataset_blueprint.route("/datasets.edit", methods=["POST"], provide_automatic_options=False, versions=ALL_VERSIONS) +@dataset_blueprint.route( + "/datasets.edit", methods=["POST"], provide_automatic_options=False, versions=VERSIONS_FROM_V2_2 +) @handle_common_except @handle_datasets_write_errors @accepts_json @@ -236,7 +249,9 @@ def edit_dataset_view(user_data, cache): return DatasetsEditCtrl(cache, user_data, dict(request.json)).to_response() # type: ignore -@dataset_blueprint.route("/datasets.unlink", methods=["POST"], provide_automatic_options=False, versions=ALL_VERSIONS) +@dataset_blueprint.route( + "/datasets.unlink", methods=["POST"], provide_automatic_options=False, versions=VERSIONS_FROM_V2_2 +) @handle_common_except @handle_datasets_unlink_errors @accepts_json @@ -263,3 +278,6 @@ def unlink_file_view(user_data, cache): - datasets """ return DatasetsUnlinkCtrl(cache, user_data, dict(request.json)).to_response() # type: ignore + + +dataset_blueprint = add_v1_specific_dataset_endpoints(dataset_blueprint) diff --git a/renku/ui/service/views/templates.py b/renku/ui/service/views/templates.py index 64482cc404..b43f96f820 100644 --- a/renku/ui/service/views/templates.py +++ b/renku/ui/service/views/templates.py @@ -19,21 +19,21 @@ from renku.ui.service.config import SERVICE_PREFIX from 
renku.ui.service.controllers.templates_create_project import TemplatesCreateProjectCtrl from renku.ui.service.controllers.templates_read_manifest import TemplatesReadManifestCtrl -from renku.ui.service.views.api_versions import ALL_VERSIONS, V2_0, V2_1, VersionedBlueprint +from renku.ui.service.views.api_versions import ALL_VERSIONS, VERSIONS_FROM_V2_0, VersionedBlueprint from renku.ui.service.views.decorators import accepts_json, requires_cache, requires_identity from renku.ui.service.views.error_handlers import ( handle_common_except, handle_templates_create_errors, handle_templates_read_errors, ) -from renku.ui.service.views.v1.templates import add_v1_specific_endpoints +from renku.ui.service.views.v1.templates import add_v1_specific_template_endpoints TEMPLATES_BLUEPRINT_TAG = "templates" templates_blueprint = VersionedBlueprint(TEMPLATES_BLUEPRINT_TAG, __name__, url_prefix=SERVICE_PREFIX) @templates_blueprint.route( - "/templates.read_manifest", methods=["GET"], provide_automatic_options=False, versions=[V2_0, V2_1] + "/templates.read_manifest", methods=["GET"], provide_automatic_options=False, versions=VERSIONS_FROM_V2_0 ) @handle_common_except @handle_templates_read_errors @@ -103,4 +103,4 @@ def create_project_from_template(user_data, cache): return TemplatesCreateProjectCtrl(cache, user_data, dict(request.json)).to_response() # type: ignore -templates_blueprint = add_v1_specific_endpoints(templates_blueprint) +templates_blueprint = add_v1_specific_template_endpoints(templates_blueprint) diff --git a/renku/ui/service/views/v1/cache.py b/renku/ui/service/views/v1/cache.py index 3e10d96f02..f721d60772 100644 --- a/renku/ui/service/views/v1/cache.py +++ b/renku/ui/service/views/v1/cache.py @@ -24,7 +24,7 @@ from renku.ui.service.gateways.gitlab_api_provider import GitlabAPIProvider from renku.ui.service.serializers.v1.cache import ProjectMigrateResponseRPC_1_0, ProjectMigrationCheckResponseRPC_1_5 from renku.ui.service.views import result_response -from 
renku.ui.service.views.api_versions import V1_0, V1_1, V1_2, V1_3, V1_4, V1_5 +from renku.ui.service.views.api_versions import VERSIONS_BEFORE_1_1, VERSIONS_BEFORE_2_0 from renku.ui.service.views.decorators import accepts_json, optional_identity, requires_cache, requires_identity from renku.ui.service.views.error_handlers import ( handle_common_except, @@ -117,15 +117,12 @@ def migration_check_project_view_1_5(user_data, cache): return result_response(ProjectMigrationCheckResponseRPC_1_5(), asdict(result)) -def add_v1_specific_endpoints(cache_blueprint): +def add_v1_specific_cache_endpoints(cache_blueprint): """Add v1 only endpoints to blueprint.""" - cache_blueprint.route("/cache.migrate", methods=["POST"], provide_automatic_options=False, versions=[V1_0])( - migrate_project_view_1_0 - ) cache_blueprint.route( - "/cache.migrations_check", - methods=["GET"], - provide_automatic_options=False, - versions=[V1_0, V1_1, V1_2, V1_3, V1_4, V1_5], + "/cache.migrate", methods=["POST"], provide_automatic_options=False, versions=VERSIONS_BEFORE_1_1 + )(migrate_project_view_1_0) + cache_blueprint.route( + "/cache.migrations_check", methods=["GET"], provide_automatic_options=False, versions=VERSIONS_BEFORE_2_0 )(migration_check_project_view_1_5) return cache_blueprint diff --git a/renku/ui/service/views/v1/datasets.py b/renku/ui/service/views/v1/datasets.py new file mode 100644 index 0000000000..214f8cbbd4 --- /dev/null +++ b/renku/ui/service/views/v1/datasets.py @@ -0,0 +1,285 @@ +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and +# Eidgenössische Technische Hochschule Zürich (ETHZ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Renku service datasets view.""" +from flask import request + +from renku.ui.service.config import SERVICE_PREFIX +from renku.ui.service.controllers.v1.datasets_add_file import DatasetsAddFileCtrl_2_1 +from renku.ui.service.controllers.v1.datasets_create import DatasetsCreateCtrl_2_1 +from renku.ui.service.controllers.v1.datasets_edit import DatasetsEditCtrl_2_1 +from renku.ui.service.controllers.v1.datasets_files_list import DatasetsFilesListCtrl_2_1 +from renku.ui.service.controllers.v1.datasets_import import DatasetsImportCtrl_2_1 +from renku.ui.service.controllers.v1.datasets_list import DatasetsListCtrl_2_1 +from renku.ui.service.controllers.v1.datasets_remove import DatasetsRemoveCtrl_2_1 +from renku.ui.service.controllers.v1.datasets_unlink import DatasetsUnlinkCtrl_2_1 +from renku.ui.service.views.api_versions import VERSIONS_BEFORE_2_2, VersionedBlueprint +from renku.ui.service.views.decorators import accepts_json, optional_identity, requires_cache, requires_identity +from renku.ui.service.views.error_handlers import ( + handle_common_except, + handle_datasets_unlink_errors, + handle_datasets_write_errors, +) + +DATASET_BLUEPRINT_TAG = "datasets" +dataset_blueprint = VersionedBlueprint(DATASET_BLUEPRINT_TAG, __name__, url_prefix=SERVICE_PREFIX) + + +@handle_common_except +@requires_cache +@optional_identity +def list_datasets_view_2_1(user_data, cache): + """ + List all datasets in a project. + + --- + get: + description: List all datasets in a project. 
+ parameters: + - in: query + schema: DatasetListRequest + responses: + 200: + description: Listing of all datasets in a project. + content: + application/json: + schema: DatasetListResponseRPC_2_1 + tags: + - datasets + """ + return DatasetsListCtrl_2_1(cache, user_data, dict(request.args)).to_response() + + +@handle_common_except +@requires_cache +@optional_identity +def list_dataset_files_view_2_1(user_data, cache): + """ + List files in a dataset. + + --- + get: + description: List files in a dataset. + parameters: + - in: query + schema: DatasetFilesListRequest_2_1 + responses: + 200: + description: Listing of all files in a dataset. + content: + application/json: + schema: DatasetFilesListResponseRPC_2_1 + tags: + - datasets + """ + return DatasetsFilesListCtrl_2_1(cache, user_data, dict(request.args)).to_response() + + +@handle_common_except +@handle_datasets_write_errors +@accepts_json +@requires_cache +@requires_identity +def add_file_to_dataset_view_2_1(user_data, cache): + """ + Add the uploaded file to a cloned repository. + + --- + post: + description: Add the uploaded file to a cloned repository. + requestBody: + content: + application/json: + schema: DatasetAddRequest_2_1 + responses: + 200: + description: Details of the added files. + content: + application/json: + schema: DatasetAddResponseRPC_2_1 + tags: + - datasets + """ + return DatasetsAddFileCtrl_2_1(cache, user_data, dict(request.json)).to_response() # type: ignore + + +@handle_common_except +@handle_datasets_write_errors +@accepts_json +@requires_cache +@requires_identity +def create_dataset_view_2_1(user_data, cache): + """ + Create a new dataset in a project. + + --- + post: + description: Create a new dataset in a project. + requestBody: + content: + application/json: + schema: DatasetCreateRequest_2_1 + responses: + 200: + description: Properties of the created dataset. 
+ content: + application/json: + schema: DatasetCreateResponseRPC_2_1 + tags: + - datasets + """ + return DatasetsCreateCtrl_2_1(cache, user_data, dict(request.json)).to_response() # type: ignore + + +@handle_common_except +@accepts_json +@requires_cache +@requires_identity +def remove_dataset_view_2_1(user_data, cache): + """ + Remove a dataset from a project. + + --- + post: + description: Remove a dataset from a project. + requestBody: + content: + application/json: + schema: DatasetRemoveRequest_2_1 + responses: + 200: + description: Details of the removed dataset. + content: + application/json: + schema: DatasetRemoveResponseRPC_2_1 + tags: + - datasets + """ + return DatasetsRemoveCtrl_2_1(cache, user_data, dict(request.json)).to_response() # type: ignore + + +@handle_common_except +@accepts_json +@requires_cache +@requires_identity +def import_dataset_view_2_1(user_data, cache): + """ + Import a dataset view. + + --- + post: + description: Import a dataset into a project. + requestBody: + content: + application/json: + schema: DatasetImportRequest_2_1 + responses: + 200: + description: Details of the dispatched import dataset job. + content: + application/json: + schema: DatasetImportResponseRPC + tags: + - datasets + """ + return DatasetsImportCtrl_2_1(cache, user_data, dict(request.json)).to_response() # type: ignore + + +@handle_common_except +@handle_datasets_write_errors +@accepts_json +@requires_cache +@requires_identity +def edit_dataset_view_2_1(user_data, cache): + """ + Edit dataset metadata view. + + Not passing a field leaves it unchanged. + + --- + post: + description: Edit dataset metadata. + requestBody: + content: + application/json: + schema: DatasetEditRequest_2_1 + responses: + 200: + description: Status of the requested dataset edits. 
+ content: + application/json: + schema: DatasetEditResponseRPC + tags: + - datasets + """ + return DatasetsEditCtrl_2_1(cache, user_data, dict(request.json)).to_response() # type: ignore + + +@handle_common_except +@handle_datasets_unlink_errors +@accepts_json +@requires_cache +@requires_identity +def unlink_file_view_2_1(user_data, cache): + """ + Unlink a file from a dataset view. + + --- + post: + description: Unlink a file from a dataset. + requestBody: + content: + application/json: + schema: DatasetUnlinkRequest_2_1 + responses: + 200: + description: Details of the unlinked files. + content: + application/json: + schema: DatasetUnlinkResponseRPC + tags: + - datasets + """ + return DatasetsUnlinkCtrl_2_1(cache, user_data, dict(request.json)).to_response() # type: ignore + + +def add_v1_specific_dataset_endpoints(dataset_blueprint): + """Add v1 only endpoints to blueprint.""" + dataset_blueprint.route( + "/datasets.list", methods=["GET"], provide_automatic_options=False, versions=VERSIONS_BEFORE_2_2 + )(list_datasets_view_2_1) + dataset_blueprint.route( + "/datasets.files_list", methods=["GET"], provide_automatic_options=False, versions=VERSIONS_BEFORE_2_2 + )(list_dataset_files_view_2_1) + dataset_blueprint.route( + "/datasets.add", methods=["POST"], provide_automatic_options=False, versions=VERSIONS_BEFORE_2_2 + )(add_file_to_dataset_view_2_1) + dataset_blueprint.route( + "/datasets.create", methods=["POST"], provide_automatic_options=False, versions=VERSIONS_BEFORE_2_2 + )(create_dataset_view_2_1) + dataset_blueprint.route( + "/datasets.remove", methods=["POST"], provide_automatic_options=False, versions=VERSIONS_BEFORE_2_2 + )(remove_dataset_view_2_1) + dataset_blueprint.route( + "/datasets.import", methods=["POST"], provide_automatic_options=False, versions=VERSIONS_BEFORE_2_2 + )(import_dataset_view_2_1) + dataset_blueprint.route( + "/datasets.edit", methods=["POST"], provide_automatic_options=False, versions=VERSIONS_BEFORE_2_2 + 
)(edit_dataset_view_2_1) + dataset_blueprint.route( + "/datasets.unlink", methods=["POST"], provide_automatic_options=False, versions=VERSIONS_BEFORE_2_2 + )(unlink_file_view_2_1) + + return dataset_blueprint diff --git a/renku/ui/service/views/v1/templates.py b/renku/ui/service/views/v1/templates.py index 49f628afa7..5da9a4e58b 100644 --- a/renku/ui/service/views/v1/templates.py +++ b/renku/ui/service/views/v1/templates.py @@ -18,7 +18,7 @@ from renku.ui.service.controllers.templates_read_manifest import TemplatesReadManifestCtrl from renku.ui.service.serializers.v1.templates import ManifestTemplatesResponseRPC_1_5 -from renku.ui.service.views.api_versions import V1_0, V1_1, V1_2, V1_3, V1_4, V1_5 +from renku.ui.service.views.api_versions import VERSIONS_BEFORE_2_0 from renku.ui.service.views.decorators import requires_cache, requires_identity from renku.ui.service.views.error_handlers import handle_common_except, handle_templates_read_errors @@ -63,12 +63,9 @@ def read_manifest_from_template_1_5(user_data, cache): return ctrl.to_response() -def add_v1_specific_endpoints(templates_blueprint): +def add_v1_specific_template_endpoints(templates_blueprint): """Add v1 only endpoints to blueprint.""" templates_blueprint.route( - "/templates.read_manifest", - methods=["GET"], - provide_automatic_options=False, - versions=[V1_0, V1_1, V1_2, V1_3, V1_4, V1_5], + "/templates.read_manifest", methods=["GET"], provide_automatic_options=False, versions=VERSIONS_BEFORE_2_0 )(read_manifest_from_template_1_5) return templates_blueprint