diff --git a/CHANGELOG.md b/CHANGELOG.md
index 3bfc362fb..66e724cac 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,7 +7,7 @@
 #### Schema migrations
 #### Data migrations
 ### Changes
-- [#169](https://github.com/LayerManager/layman/issues/169) [POST Workspace Layers](doc/rest.md#post-workspace-layers) accepts also compressed data files in ZIP format (`*.zip`) in `file` parameter. [PATCH Workspace Layer](doc/rest.md#patch-workspace-layer) accepts also data file in ZIP format (`*.zip`) in `file` parameter.
+- [#169](https://github.com/LayerManager/layman/issues/169) [POST Workspace Layers](doc/rest.md#post-workspace-layers) also accepts compressed data files in ZIP format (`*.zip`) in the `file` parameter. [PATCH Workspace Layer](doc/rest.md#patch-workspace-layer) also accepts a data file in ZIP format (`*.zip`) in the `file` parameter. ZIP archives can also be uploaded in chunks.
 - [#503](https://github.com/LayerManager/layman/issues/503) Normalized GeoTIFF for raster files are also compressed.
 - [#169](https://github.com/LayerManager/layman/issues/169) [GET Workspace Layer](doc/rest.md#get-workspace-layer) returns path to main file inside archive if zipped file was sent (key `file.path`).
 - [#465](https://github.com/LayerManager/layman/issues/465) Fix situation, when Layman does not start if *.qgis file of the first layer with QML style does not exist. It was already fixed in v1.14.1.
diff --git a/doc/rest.md b/doc/rest.md
index d4a6a6994..bd443bd5e 100644
--- a/doc/rest.md
+++ b/doc/rest.md
@@ -123,7 +123,7 @@ Body parameters:
      - PNG (.png, with .png.aux.xml or .pgw)
      - JPEG (.jpg, with .jpg.aux.xml or .jgw)
      - any of above types in single ZIP file (.zip)
-   - file names, i.e. array of strings (not supported for ZIP file)
+   - file names, i.e. array of strings
    - if file names are provided, files must be uploaded subsequently using [POST Workspace Layer Chunk](#post-workspace-layer-chunk)
  - in case of raster data input, following input combinations of bands and color interpretations are supported:
    - 1 band: Gray
diff --git a/sample/layman.layer/small_layer.zip b/sample/layman.layer/small_layer.zip
new file mode 100644
index 000000000..39db5d619
Binary files /dev/null and b/sample/layman.layer/small_layer.zip differ
diff --git a/src/layman/celery_test.py b/src/layman/celery_test.py
index 91fc1431e..07654766e 100644
--- a/src/layman/celery_test.py
+++ b/src/layman/celery_test.py
@@ -8,7 +8,7 @@
 del sys.modules['layman']

 from layman import app, celery_app
-from layman.layer.filesystem import input_chunk
+from layman.layer.filesystem import input_chunk, util as fs_util
 from layman import celery as celery_util
 from layman.common import tasks as tasks_util
 from test_tools import flask_client
@@ -45,7 +45,7 @@ def test_single_abortable_task():
         'ensure_user': True,
         'check_crs': check_crs,
     }
-    filenames = ['abc.geojson']
+    filenames = fs_util.InputFiles(sent_paths=['abc.geojson'])
    workspace = 'test_abort_workspace'
    layername = 'test_abort_layer'
    with app.app_context():
@@ -98,7 +98,7 @@ def test_abortable_task_chain():
         'ensure_user': True,
         'check_crs': check_crs,
     }
-    filenames = ['abc.geojson']
+    filenames = fs_util.InputFiles(sent_paths=['abc.geojson'])
    workspace = 'test_abort_workspace'
    layername = 'test_abort_layer2'
    with app.app_context():
diff --git a/src/layman/layer/filesystem/input_chunk.py b/src/layman/layer/filesystem/input_chunk.py
index f3f61d138..11f99352a 100644
--- a/src/layman/layer/filesystem/input_chunk.py
+++ b/src/layman/layer/filesystem/input_chunk.py
@@ -41,14 +41,14 @@ def delete_layer(workspace, layername):
 get_publication_uuid = input_file.get_publication_uuid


-def save_layer_files_str(workspace, layername, files_str, check_crs):
+def save_layer_files_str(workspace, layername, input_files, check_crs):
     input_file_dir = input_file.get_layer_input_file_dir(workspace, layername)
-    if len(files_str) == 1 and input_file.get_compressed_main_file_extension(files_str[0]):
-        main_filename = files_str[0]
+    if input_files.is_one_archive:
+        main_filename = input_files.raw_paths_to_archives[0]
     else:
-        main_filename = input_file.get_main_file_name(files_str)
+        main_filename = input_files.raw_or_archived_main_file_path
     _, filepath_mapping = input_file.get_file_name_mappings(
-        files_str, main_filename, layername, input_file_dir
+        input_files.raw_paths, main_filename, layername, input_file_dir
     )
     filepath_mapping = {
         k: v for k, v in filepath_mapping.items() if v is not None
diff --git a/src/layman/layer/filesystem/input_file.py b/src/layman/layer/filesystem/input_file.py
index afe941d62..fd0d156f6 100644
--- a/src/layman/layer/filesystem/input_file.py
+++ b/src/layman/layer/filesystem/input_file.py
@@ -185,12 +185,23 @@ def check_filenames(workspace, layername, input_files, check_crs, ignore_existin
                               + ', '.join(settings.COMPRESSED_FILE_EXTENSIONS.keys()),
                               'files': [os.path.relpath(fp, input_files.saved_paths_dir) for fp in input_files.raw_paths_to_archives],
                               })
-    raise LaymanError(2, {'parameter': 'file',
-                          'expected': 'At least one file with any of extensions: '
-                          + ', '.join(util.get_all_allowed_main_extensions())
-                          + '; or one of them in single .zip file.',
-                          'files': [os.path.relpath(fp, input_files.saved_paths_dir) for fp in filenames],
-                          })
+    if len(input_files.raw_paths_to_archives) == 0:
+        raise LaymanError(2, {'parameter': 'file',
+                              'message': 'No data file in input.',
+                              'expected': 'At least one file with any of extensions: '
+                              + ', '.join(util.get_all_allowed_main_extensions())
+                              + '; or one of them in single .zip file.',
+                              'files': [os.path.relpath(fp, input_files.saved_paths_dir) for fp in filenames],
+                              })
+    if input_files.is_one_archive_with_available_content:
+        raise LaymanError(2, {'parameter': 'file',
+                              'message': 'Zip file without data file inside.',
+                              'expected': 'At least one file with any of extensions: '
+                              + ', '.join(util.get_all_allowed_main_extensions())
+                              + '; or one of them in single .zip file.',
+                              'files': [os.path.relpath(fp, input_files.saved_paths_dir) for fp in filenames],
+                              })
+    main_files = input_files.raw_paths_to_archives
     main_filename = main_files[0]
     basename, ext = map(
         lambda s: s.lower(),
@@ -211,7 +222,7 @@ def check_filenames(workspace, layername, input_files, check_crs, ignore_existin
     if len(missing_exts) > 0:
         detail = {
             'missing_extensions': missing_exts,
-            'path': main_filename,
+            'path': os.path.relpath(main_filename, input_files.saved_paths_dir),
         }
         if '.prj' in missing_exts:
             detail['suggestion'] = 'Missing .prj file can be fixed also ' \
@@ -230,18 +241,17 @@ def check_filenames(workspace, layername, input_files, check_crs, ignore_existin
         raise LaymanError(3, conflict_paths)


-def save_layer_files(workspace, layername, files, check_crs, *, output_dir=None, zipped=False):
-    filenames = list(map(lambda f: f.filename, files))
-    if zipped:
-        main_filename = files[0].filename
+def save_layer_files(workspace, layername, input_files, check_crs, *, output_dir=None):
+    if input_files.is_one_archive:
+        main_filename = input_files.raw_paths_to_archives[0]
     else:
-        main_filename = get_main_file_name(filenames)
+        main_filename = input_files.raw_or_archived_main_file_path
     output_dir = output_dir or ensure_layer_input_file_dir(workspace, layername)
     _, filepath_mapping = get_file_name_mappings(
-        filenames, main_filename, layername, output_dir
+        input_files.raw_paths, main_filename, layername, output_dir
     )
-    common.save_files(files, filepath_mapping)
+    common.save_files(input_files.sent_streams, filepath_mapping)

     main_filepath = get_gdal_format_file_path(filepath_mapping[main_filename])
     check_main_file(main_filepath, check_crs=check_crs)
diff --git a/src/layman/layer/filesystem/util.py b/src/layman/layer/filesystem/util.py
index bc431e7fe..1df8dd7fc 100644
--- a/src/layman/layer/filesystem/util.py
+++ b/src/layman/layer/filesystem/util.py
@@ -68,7 +68,11 @@ def saved_paths_to_archives(self):

     @property
     def is_one_archive(self):
-        return len(self.raw_paths) == 1 and len(self.raw_paths_to_archives) == 1
+        return len(self.raw_paths_to_archives) == 1 and not self.raw_main_file_paths
+
+    @property
+    def is_one_archive_with_available_content(self):
+        return self.is_one_archive and (self.archive_streams or self.saved_paths_to_archives)

     def archived_paths(self, *, with_zip_in_path=False):
         return [
@@ -80,10 +84,15 @@ def archived_paths(self, *, with_zip_in_path=False):

     @property
     def raw_or_archived_paths(self):
-        if self.is_one_archive:
-            return self.archived_paths(with_zip_in_path=True)
+        if self.is_one_archive_with_available_content:
+            return self.archived_paths(with_zip_in_path=True) or self.raw_paths
         return self.raw_paths

+    @property
+    def raw_main_file_paths(self):
+        return [fn for fn in self.raw_paths
+                if os.path.splitext(fn)[1] in get_all_allowed_main_extensions()]
+
     @property
     def raw_or_archived_main_file_paths(self):
         return [fn for fn in self.raw_or_archived_paths
@@ -98,7 +107,7 @@ def raw_or_archived_main_file_path(self):

     @property
     def archive_type(self):
-        return os.path.splitext(self.raw_paths[0])[1] if self.is_one_archive else None
+        return os.path.splitext(self.saved_paths_to_archives[0])[1] if self.is_one_archive else None

     @property
     def main_file_path_for_gdal(self):
diff --git a/src/layman/layer/rest_workspace_layer.py b/src/layman/layer/rest_workspace_layer.py
index 329e60ab7..7f1b7e3ae 100644
--- a/src/layman/layer/rest_workspace_layer.py
+++ b/src/layman/layer/rest_workspace_layer.py
@@ -108,8 +108,7 @@ def patch(workspace, layername):
     # file checks
     if not use_chunk_upload:
         temp_dir = tempfile.mkdtemp(prefix="layman_")
-        input_file.save_layer_files(workspace, layername, input_files.sent_streams, check_crs,
-                                    output_dir=temp_dir, zipped=input_files.is_one_archive)
+        input_file.save_layer_files(workspace, layername, input_files, check_crs, output_dir=temp_dir)

     if input_files.raw_paths:
         file_type = input_file.get_file_type(input_files.raw_or_archived_main_file_path)
@@ -152,7 +151,7 @@ def patch(workspace, layername):

     if use_chunk_upload:
         files_to_upload = input_chunk.save_layer_files_str(
-            workspace, layername, input_files.sent_paths, check_crs)
+            workspace, layername, input_files, check_crs)
         layer_result.update({
             'files_to_upload': files_to_upload,
         })
diff --git a/src/layman/layer/rest_workspace_layers.py b/src/layman/layer/rest_workspace_layers.py
index 167f13d98..0852a6b43 100644
--- a/src/layman/layer/rest_workspace_layers.py
+++ b/src/layman/layer/rest_workspace_layers.py
@@ -141,7 +141,7 @@ def post(workspace):
         input_style.save_layer_file(workspace, layername, style_file, style_type)
     if use_chunk_upload:
         files_to_upload = input_chunk.save_layer_files_str(
-            workspace, layername, input_files.sent_paths, check_crs)
+            workspace, layername, input_files, check_crs)
         layer_result.update({
             'files_to_upload': files_to_upload,
         })
@@ -150,8 +150,7 @@ def post(workspace):
         })
     else:
         try:
-            input_file.save_layer_files(workspace, layername, input_files.sent_streams, check_crs,
-                                        zipped=input_files.is_one_archive)
+            input_file.save_layer_files(workspace, layername, input_files, check_crs)
         except BaseException as exc:
             uuid.delete_layer(workspace, layername)
             input_file.delete_layer(workspace, layername)
diff --git a/src/layman/upgrade/upgrade_v1_10_test.py b/src/layman/upgrade/upgrade_v1_10_test.py
index 7249dc314..542d51c29 100644
--- a/src/layman/upgrade/upgrade_v1_10_test.py
+++ b/src/layman/upgrade/upgrade_v1_10_test.py
@@ -92,7 +92,7 @@ def ensure_layer_internal(workspace, layer):
     db.ensure_workspace(workspace)
     with open(file_path, 'rb') as file:
         file = FileStorage(file)
-        layer_in_file.save_layer_files(workspace, layer, [file], False)
+        layer_in_file.save_layer_files(workspace, layer, layer_fs_util.InputFiles(sent_streams=[file]), False)
     db.import_layer_vector_file(workspace, layer, file_path, None)
     created = gs_util.ensure_workspace(workspace, settings.LAYMAN_GS_AUTH)
     if created:
diff --git a/src/layman/upgrade/upgrade_v1_12_test.py b/src/layman/upgrade/upgrade_v1_12_test.py
index 77ad1be8d..90a2221ec 100644
--- a/src/layman/upgrade/upgrade_v1_12_test.py
+++ b/src/layman/upgrade/upgrade_v1_12_test.py
@@ -8,7 +8,7 @@
 from layman.common.filesystem import uuid as uuid_common
 from layman.common.micka import util as micka_util
 from layman.layer import geoserver as gs_layer, NO_STYLE_DEF, db
-from layman.layer.filesystem import input_file as layer_in_file
+from layman.layer.filesystem import input_file as layer_in_file, util as layer_fs_util
 from layman.layer.geoserver import wms
 from layman.layer.prime_db_schema import table as prime_db_schema_table
 from layman.uuid import generate_uuid
@@ -37,7 +37,7 @@ def ensure_layer_internal(workspace, layer):
     db.ensure_workspace(workspace)
     with open(file_path, 'rb') as file:
         file = FileStorage(file)
-        layer_in_file.save_layer_files(workspace, layer, [file], False)
+        layer_in_file.save_layer_files(workspace, layer, layer_fs_util.InputFiles(sent_streams=[file]), False)
     db.import_layer_vector_file(workspace, layer, file_path, None)
     # wfs
     created = gs_util.ensure_workspace(workspace, settings.LAYMAN_GS_AUTH)
diff --git a/test_tools/process_client.py b/test_tools/process_client.py
index 93138c680..aace0404a 100644
--- a/test_tools/process_client.py
+++ b/test_tools/process_client.py
@@ -285,7 +285,7 @@ def publish_workspace_publication(publication_type,
     title = title or name
     headers = headers or {}
     publication_type_def = PUBLICATION_TYPES_DEF[publication_type]
-    file_paths = file_paths or [publication_type_def.source_path, ]
+    file_paths = [publication_type_def.source_path] if file_paths is None else file_paths

     if style_file:
         assert publication_type == LAYER_TYPE
diff --git a/tests/dynamic_data/publications/__init__.py b/tests/dynamic_data/publications/__init__.py
index 0627ceb31..cca892abb 100644
--- a/tests/dynamic_data/publications/__init__.py
+++ b/tests/dynamic_data/publications/__init__.py
@@ -2,7 +2,7 @@
 import tests.asserts.final.publication as publication
 import tests.asserts.processing as processing
 from test_tools import process_client
-from . import wrong_input
+from . import wrong_input, file_input
 from .. import predefined_actions, predefined_zip_files
 from ... import Action, Publication, dynamic_data as consts

@@ -972,6 +972,7 @@
                          'expected': {'http_code': 400,
                                       'code': 2,
                                       'detail': {'parameter': 'file',
+                                                 'message': 'Zip file without data file inside.',
                                                  'expected': 'At least one file with any of extensions: .geojson, .shp, .tiff, .tif, .jp2, .png, .jpg; or one of them in single .zip file.',
                                                  'files': [
                                                      'temporary_zip_file.zip/sm5.zip',
@@ -1045,4 +1046,5 @@
         },
     ],
     **wrong_input.generate(consts.COMMON_WORKSPACE + '_generated_wrong_input'),
+    **file_input.generate(consts.COMMON_WORKSPACE + '_generated_file_input'),
 }
diff --git a/tests/dynamic_data/publications/file_input.py b/tests/dynamic_data/publications/file_input.py
new file mode 100644
index 000000000..a3e7bc173
--- /dev/null
+++ b/tests/dynamic_data/publications/file_input.py
@@ -0,0 +1,68 @@
+import tests.asserts.processing as processing
+import tests.asserts.final.publication as publication
+from test_tools import process_client
+from ... import Action, Publication, dynamic_data as consts
+
+KEY_PUBLICATION_TYPE = 'publ_type'
+KEY_ACTION_PARAMS = 'action_params'
+
+TESTCASES = {
+    'zip_and_other_than_main_file': {
+        KEY_PUBLICATION_TYPE: process_client.LAYER_TYPE,
+        KEY_ACTION_PARAMS: {
+            'file_paths': [
+                'sample/style/small_layer.qml',
+                'sample/layman.layer/small_layer.zip',
+            ],
+        },
+        consts.KEY_FINAL_ASSERTS: [
+            Action(publication.internal.correct_values_in_detail, {
+                'exp_publication_detail': {
+                    'bounding_box': [1571204.369948366, 6268896.225570714, 1572590.854206196, 6269876.33561699],
+                },
+                'file_extension': 'zip/small_layer.geojson',
+                'gdal_prefix': '/vsizip/',
+                'publ_type_detail': ('vector', 'sld'),
+            }),
+            Action(publication.internal.thumbnail_equals, {
+                'exp_thumbnail': 'sample/style/basic_sld.png',
+            }),
+        ],
+    },
+}
+
+
+def generate(workspace=None):
+    workspace = workspace or consts.COMMON_WORKSPACE
+
+    result = dict()
+    for testcase, tc_params in TESTCASES.items():
+        post = [{
+            consts.KEY_ACTION: {
+                consts.KEY_CALL: Action(process_client.publish_workspace_publication,
+                                        tc_params[KEY_ACTION_PARAMS]),
+                consts.KEY_RESPONSE_ASSERTS: [
+                    Action(processing.response.valid_post, dict()),
+                ], },
+            consts.KEY_FINAL_ASSERTS: [
+                *publication.IS_LAYER_COMPLETE_AND_CONSISTENT,
+                *tc_params[consts.KEY_FINAL_ASSERTS],
+            ],
+        }]
+        post_chunks = [{
+            consts.KEY_ACTION: {
+                consts.KEY_CALL: Action(process_client.publish_workspace_publication,
+                                        {**tc_params[KEY_ACTION_PARAMS],
+                                         'with_chunks': True, }),
+                consts.KEY_RESPONSE_ASSERTS: [
+                    Action(processing.response.valid_post, dict()),
+                ], },
+            consts.KEY_FINAL_ASSERTS: [
+                *publication.IS_LAYER_COMPLETE_AND_CONSISTENT,
+                *tc_params[consts.KEY_FINAL_ASSERTS],
+            ],
+        }]
+        result[Publication(workspace, tc_params[KEY_PUBLICATION_TYPE], testcase + '_post_sync')] = post
+        result[Publication(workspace, tc_params[KEY_PUBLICATION_TYPE], testcase + '_post_chunks')] = post_chunks
+
+    return result
diff --git a/tests/dynamic_data/publications/wrong_input.py b/tests/dynamic_data/publications/wrong_input.py
index 467f189ee..8720f8c6a 100644
--- a/tests/dynamic_data/publications/wrong_input.py
+++ b/tests/dynamic_data/publications/wrong_input.py
@@ -1,4 +1,5 @@
 import copy
+import itertools

 from layman import LaymanError
 from tests.asserts import util as asserts_util
@@ -10,11 +11,15 @@
 KEY_PUBLICATION_TYPE = 'publ_type'
 KEY_ACTION_PARAMS = 'action_params'
 KEY_EXPECTED_EXCEPTION = 'expected_exception'
-KEY_EXPECTED_EXCEPTION_ZIPPED = 'expected_exception_zipped'
-KEY_EXPECTED_EXCEPTION_CHUNKS_ZIPPED = 'expected_exception_chunks_zipped'
+KEY_DEFAULT = 'default'
 KEY_PATCHES = 'patches'
 KEY_PATCH_POST = 'post_params'

+REST_PARAMETRIZATION = {
+    'with_chunks': {False: 'sync', True: 'chunks'},
+    'compress': {False: '', True: 'zipped'},
+}
+
 TESTCASES = {
     'shp_without_dbf': {
         KEY_PUBLICATION_TYPE: process_client.LAYER_TYPE,
@@ -28,181 +33,170 @@
             ],
         },
         consts.KEY_EXCEPTION: LaymanError,
-        KEY_EXPECTED_EXCEPTION: {'http_code': 400,
-                                 'code': 18,
-                                 'message': 'Missing one or more ShapeFile files.',
-                                 'detail': {'missing_extensions': ['.dbf', '.prj'],
-                                            'suggestion': 'Missing .prj file can be fixed also by setting "crs" parameter.',
-                                            'path': 'ne_110m_admin_0_boundary_lines_land.shp',
-                                            },
-                                 },
-        KEY_EXPECTED_EXCEPTION_ZIPPED: {'detail': {'path': 'temporary_zip_file.zip/ne_110m_admin_0_boundary_lines_land.shp'}},
-        KEY_EXPECTED_EXCEPTION_CHUNKS_ZIPPED: {'detail': {'path': '/layman_data_test/workspaces/dynamic_test_workspace_generated_wrong_input/layers/shp_without_dbf_post_chunks_zipped/input_file/shp_without_dbf_post_chunks_zipped.zip/ne_110m_admin_0_boundary_lines_land.shp'}},
+        KEY_EXPECTED_EXCEPTION: {
+            KEY_DEFAULT: {'http_code': 400,
+                          'sync': True,
+                          'code': 18,
+                          'message': 'Missing one or more ShapeFile files.',
+                          'detail': {'missing_extensions': ['.dbf', '.prj'],
+                                     'suggestion': 'Missing .prj file can be fixed also by setting "crs" parameter.',
+                                     'path': 'ne_110m_admin_0_boundary_lines_land.shp',
+                                     },
+                          },
+            frozenset([('compress', True), ('with_chunks', False)]): {
+                'detail': {'path': 'temporary_zip_file.zip/ne_110m_admin_0_boundary_lines_land.shp'}},
+            frozenset([('compress', True), ('with_chunks', True)]): {
+                'sync': False,
+                'detail': {'path': 'shp_without_dbf_post_chunks_zipped.zip/ne_110m_admin_0_boundary_lines_land.shp'}},
+        },
         KEY_PATCHES: {
             'all_files': {
                 KEY_PATCH_POST: dict(),
-                KEY_EXPECTED_EXCEPTION_CHUNKS_ZIPPED: {'detail': {'path': '/layman_data_test/workspaces/dynamic_test_workspace_generated_wrong_input/layers/shp_without_dbf_patch_all_files/input_file/shp_without_dbf_patch_all_files.zip/ne_110m_admin_0_boundary_lines_land.shp'}},
+                KEY_EXPECTED_EXCEPTION: {
+                    frozenset([('compress', True), ('with_chunks', False)]): {
+                        'detail': {'path': 'temporary_zip_file.zip/ne_110m_admin_0_boundary_lines_land.shp'}},
+                    frozenset([('compress', True), ('with_chunks', True)]): {
+                        'sync': False,
+                        'detail': {'path': 'shp_without_dbf_patch_all_files.zip/ne_110m_admin_0_boundary_lines_land.shp'}}
+                },
             },
         },
     },
+    'empty_zip': {
+        KEY_PUBLICATION_TYPE: process_client.LAYER_TYPE,
+        KEY_ACTION_PARAMS: {
+            'file_paths': [],
+            'compress': True,
+        },
+        consts.KEY_EXCEPTION: LaymanError,
+        KEY_EXPECTED_EXCEPTION: {
+            KEY_DEFAULT: {'http_code': 400,
+                          'sync': True,
+                          'code': 2,
+                          'detail': {'parameter': 'file',
+                                     'message': 'Zip file without data file inside.',
+                                     'expected': 'At least one file with any of extensions: .geojson, .shp, .tiff, .tif, .jp2, .png, .jpg; or one of them in single .zip file.',
+                                     'files': [
+                                         'temporary_zip_file.zip',
+                                     ],
+                                     },
+                          },
+            frozenset([('compress', True), ('with_chunks', True)]): {
+                'sync': False,
+                'detail': {'files': ['empty_zip_post_chunks_zipped.zip']}}
+        },
+    },
 }


 def generate(workspace=None):
     workspace = workspace or consts.COMMON_WORKSPACE

+    rest_param_names = list(REST_PARAMETRIZATION.keys())
+    rest_param_all_values = [list(REST_PARAMETRIZATION[p_name].keys()) for p_name in rest_param_names]
+
     result = dict()
     for testcase, tc_params in TESTCASES.items():
-        post = [{
-            consts.KEY_ACTION: {
-                consts.KEY_CALL: Action(process_client.publish_workspace_publication,
-                                        tc_params[KEY_ACTION_PARAMS]),
-                consts.KEY_CALL_EXCEPTION: {
-                    consts.KEY_EXCEPTION: LaymanError,
-                    consts.KEY_EXCEPTION_ASSERTS: [
-                        Action(processing.exception.response_exception, {'expected': tc_params[KEY_EXPECTED_EXCEPTION], }, ),
-                    ],
-                }, },
-            consts.KEY_FINAL_ASSERTS: [
-                Action(publication.internal.does_not_exist, dict())
-            ],
-        }]
-        post_sync_zipped = [{
-            consts.KEY_ACTION: {
-                consts.KEY_CALL: Action(process_client.publish_workspace_publication,
-                                        {**tc_params[KEY_ACTION_PARAMS],
-                                         'compress': True, }),
-                consts.KEY_CALL_EXCEPTION: {
-                    consts.KEY_EXCEPTION: LaymanError,
-                    consts.KEY_EXCEPTION_ASSERTS: [
-                        Action(processing.exception.response_exception,
-                               {'expected': asserts_util.recursive_dict_update(copy.deepcopy(tc_params[KEY_EXPECTED_EXCEPTION]),
-                                                                               tc_params.get(
-                                                                                   KEY_EXPECTED_EXCEPTION_ZIPPED, dict()), )}, ),
-                    ],
-                }, },
-            consts.KEY_FINAL_ASSERTS: [
-                Action(publication.internal.does_not_exist, dict())
-            ],
-        }]
-        post_chunks = [{
-            consts.KEY_ACTION: {
-                consts.KEY_CALL: Action(process_client.publish_workspace_publication,
-                                        {**tc_params[KEY_ACTION_PARAMS],
-                                         'with_chunks': True, }),
-                consts.KEY_CALL_EXCEPTION: {
-                    consts.KEY_EXCEPTION: LaymanError,
-                    consts.KEY_EXCEPTION_ASSERTS: [
-                        Action(processing.exception.response_exception, {'expected': tc_params[KEY_EXPECTED_EXCEPTION], }, ),
-                    ],
-                }, },
-            consts.KEY_FINAL_ASSERTS: [
-                Action(publication.internal.does_not_exist, dict())
-            ],
-        }]
-        post_chunks_zipped = [{
-            consts.KEY_ACTION: {
-                consts.KEY_CALL: Action(process_client.publish_workspace_publication,
-                                        {**tc_params[KEY_ACTION_PARAMS],
-                                         'compress': True,
-                                         'with_chunks': True, }),
-                consts.KEY_RESPONSE_ASSERTS: [
-                    Action(processing.response.valid_post, dict()),
-                ],
-            },
-            consts.KEY_FINAL_ASSERTS: [
-                Action(publication.rest.async_error_in_info_key, {'info_key': 'file',
-                                                                  'expected': asserts_util.recursive_dict_update(
-                                                                      copy.deepcopy(tc_params[KEY_EXPECTED_EXCEPTION]),
-                                                                      tc_params.get(
-                                                                          KEY_EXPECTED_EXCEPTION_CHUNKS_ZIPPED, dict()), ), }, ),
-            ],
-        }]
-        result[Publication(workspace, tc_params[KEY_PUBLICATION_TYPE], testcase + '_post_sync')] = post
-        result[Publication(workspace, tc_params[KEY_PUBLICATION_TYPE], testcase + '_post_sync_zipped')] = post_sync_zipped
-        result[Publication(workspace, tc_params[KEY_PUBLICATION_TYPE], testcase + '_post_chunks')] = post_chunks
-        result[Publication(workspace, tc_params[KEY_PUBLICATION_TYPE], testcase + '_post_chunks_zipped')] = post_chunks_zipped
-
-        for patch_key, patch_params in tc_params.get(KEY_PATCHES, dict()).items():
-            patch = [
-                {
-                    consts.KEY_ACTION: {
-                        consts.KEY_CALL: Action(process_client.publish_workspace_publication,
-                                                patch_params[KEY_PATCH_POST]),
-                        consts.KEY_RESPONSE_ASSERTS: [
-                            Action(processing.response.valid_post, dict()),
-                        ],
-                    },
-                    consts.KEY_FINAL_ASSERTS: [
-                        *publication.IS_LAYER_COMPLETE_AND_CONSISTENT,
-                    ]
-                },
-                {
-                    consts.KEY_ACTION: {
-                        consts.KEY_CALL: Action(process_client.patch_workspace_publication,
-                                                tc_params[KEY_ACTION_PARAMS]),
-                        consts.KEY_CALL_EXCEPTION: {
-                            consts.KEY_EXCEPTION: LaymanError,
-                            consts.KEY_EXCEPTION_ASSERTS: [
-                                Action(processing.exception.response_exception, {'expected': tc_params[KEY_EXPECTED_EXCEPTION], }, ),
-                            ],
-                        }, },
-                    consts.KEY_FINAL_ASSERTS: [
-                        *publication.IS_LAYER_COMPLETE_AND_CONSISTENT,
-                    ]
-                },
-                {
-                    consts.KEY_ACTION: {
-                        consts.KEY_CALL: Action(process_client.patch_workspace_publication,
-                                                {**tc_params[KEY_ACTION_PARAMS],
-                                                 'compress': True, }),
-                        consts.KEY_CALL_EXCEPTION: {
-                            consts.KEY_EXCEPTION: LaymanError,
-                            consts.KEY_EXCEPTION_ASSERTS: [
-                                Action(processing.exception.response_exception,
-                                       {'expected': asserts_util.recursive_dict_update(copy.deepcopy(tc_params[KEY_EXPECTED_EXCEPTION]),
-                                                                                       tc_params.get(
-                                                                                           KEY_EXPECTED_EXCEPTION_ZIPPED, dict()), )}, ),
-                            ],
-                        }, },
-                    consts.KEY_FINAL_ASSERTS: [
-                        *publication.IS_LAYER_COMPLETE_AND_CONSISTENT,
-                    ]
-                },
-                {
-                    consts.KEY_ACTION: {
-                        consts.KEY_CALL: Action(process_client.patch_workspace_publication,
-                                                {**tc_params[KEY_ACTION_PARAMS],
-                                                 'with_chunks': True, }),
-                        consts.KEY_CALL_EXCEPTION: {
-                            consts.KEY_EXCEPTION: LaymanError,
-                            consts.KEY_EXCEPTION_ASSERTS: [
-                                Action(processing.exception.response_exception, {'expected': tc_params[KEY_EXPECTED_EXCEPTION], }, ),
-                            ],
-                        }, },
-                    consts.KEY_FINAL_ASSERTS: [
-                        *publication.IS_LAYER_COMPLETE_AND_CONSISTENT,
-                    ]
-                },
-                {
-                    consts.KEY_ACTION: {
-                        consts.KEY_CALL: Action(process_client.patch_workspace_publication,
-                                                {**tc_params[KEY_ACTION_PARAMS],
-                                                 'compress': True,
-                                                 'with_chunks': True, }),
-                        consts.KEY_RESPONSE_ASSERTS: [
-                            Action(processing.response.valid_post, dict()),
-                        ],
-                    },
-                    consts.KEY_FINAL_ASSERTS: [
-                        Action(publication.rest.async_error_in_info_key, {'info_key': 'file',
-                                                                          'expected': asserts_util.recursive_dict_update(
-                                                                              copy.deepcopy(tc_params[KEY_EXPECTED_EXCEPTION]),
-                                                                              patch_params.get(
-                                                                                  KEY_EXPECTED_EXCEPTION_CHUNKS_ZIPPED, dict()), ), }, ),
-                    ],
-                },
-            ]
+        for rest_param_values in itertools.product(*rest_param_all_values):
+            test_case_postfix = '_'.join([REST_PARAMETRIZATION[rest_param_names[idx]][value]
+                                          for idx, value in enumerate(rest_param_values)
+                                          if REST_PARAMETRIZATION[rest_param_names[idx]][value]])
+            rest_param_dict = {rest_param_names[idx]: value for idx, value in enumerate(rest_param_values)}
+            if any(k in rest_param_dict and rest_param_dict[k] != v for k, v in tc_params[KEY_ACTION_PARAMS].items()):
+                continue
+            rest_param_frozen_set = frozenset(rest_param_dict.items())
+            default_exp_exception = copy.deepcopy(tc_params[KEY_EXPECTED_EXCEPTION][KEY_DEFAULT])
+            exception_diff = tc_params[KEY_EXPECTED_EXCEPTION].get(rest_param_frozen_set, dict())
+            exp_exception = asserts_util.recursive_dict_update(default_exp_exception, exception_diff)
+            is_sync = exp_exception.pop('sync')
+            if is_sync:
+                action_def = {
+                    consts.KEY_ACTION: {
+                        consts.KEY_CALL: Action(process_client.publish_workspace_publication,
+                                                {**tc_params[KEY_ACTION_PARAMS],
+                                                 **rest_param_dict}),
+                        consts.KEY_CALL_EXCEPTION: {
+                            consts.KEY_EXCEPTION: LaymanError,
+                            consts.KEY_EXCEPTION_ASSERTS: [
+                                Action(processing.exception.response_exception, {'expected': exp_exception}, ),
+                            ],
+                        }, },
+                    consts.KEY_FINAL_ASSERTS: [
+                        Action(publication.internal.does_not_exist, dict())
+                    ],
+                }
+            else:
+                action_def = {
+                    consts.KEY_ACTION: {
+                        consts.KEY_CALL: Action(process_client.publish_workspace_publication,
+                                                {**tc_params[KEY_ACTION_PARAMS],
+                                                 **rest_param_dict}),
+                        consts.KEY_RESPONSE_ASSERTS: [
+                            Action(processing.response.valid_post, dict()),
+                        ],
+                    },
+                    consts.KEY_FINAL_ASSERTS: [
+                        Action(publication.rest.async_error_in_info_key, {'info_key': 'file',
+                                                                          'expected': exp_exception, }, ),
+                    ],
+                }
+            publ_name = f"{testcase}_post_{test_case_postfix}"
+            result[Publication(workspace, tc_params[KEY_PUBLICATION_TYPE], publ_name)] = [action_def]
+
+        for patch_key, patch_params in tc_params.get(KEY_PATCHES, dict()).items():
+            patch = [
+                {
+                    consts.KEY_ACTION: {
+                        consts.KEY_CALL: Action(process_client.publish_workspace_publication,
+                                                patch_params[KEY_PATCH_POST]),
+                        consts.KEY_RESPONSE_ASSERTS: [
+                            Action(processing.response.valid_post, dict()),
+                        ],
+                    },
+                    consts.KEY_FINAL_ASSERTS: [
+                        *publication.IS_LAYER_COMPLETE_AND_CONSISTENT,
+                    ]
+                },
+            ]
+            for rest_param_values in itertools.product(*rest_param_all_values):
+                rest_param_dict = {rest_param_names[idx]: value for idx, value in enumerate(rest_param_values)}
+                rest_param_frozen_set = frozenset(rest_param_dict.items())
+                default_exp_exception = copy.deepcopy(tc_params[KEY_EXPECTED_EXCEPTION][KEY_DEFAULT])
+                exception_diff = patch_params[KEY_EXPECTED_EXCEPTION].get(rest_param_frozen_set, dict())
+                exp_exception = asserts_util.recursive_dict_update(default_exp_exception, exception_diff)
+                is_sync = exp_exception.pop('sync')
+                if is_sync:
+                    action_def = {
+                        consts.KEY_ACTION: {
+                            consts.KEY_CALL: Action(process_client.patch_workspace_publication,
+                                                    {**tc_params[KEY_ACTION_PARAMS],
+                                                     **rest_param_dict}),
+                            consts.KEY_CALL_EXCEPTION: {
+                                consts.KEY_EXCEPTION: LaymanError,
+                                consts.KEY_EXCEPTION_ASSERTS: [
+                                    Action(processing.exception.response_exception, {'expected': exp_exception}, ),
+                                ],
+                            }, },
+                        consts.KEY_FINAL_ASSERTS: [
+                            *publication.IS_LAYER_COMPLETE_AND_CONSISTENT,
+                        ]
+                    }
+                else:
+                    action_def = {
+                        consts.KEY_ACTION: {
+                            consts.KEY_CALL: Action(process_client.patch_workspace_publication,
+                                                    {**tc_params[KEY_ACTION_PARAMS],
+                                                     **rest_param_dict}),
+                            consts.KEY_RESPONSE_ASSERTS: [
+                                Action(processing.response.valid_post, dict()),
+                            ],
+                        },
+                        consts.KEY_FINAL_ASSERTS: [
+                            Action(publication.rest.async_error_in_info_key, {'info_key': 'file',
+                                                                              'expected': exp_exception, }, ),
+                        ],
+                    }
+                patch.append(action_def)
             result[Publication(workspace, tc_params[KEY_PUBLICATION_TYPE], testcase + '_patch_' + patch_key)] = patch

     return result
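The change set above replaces loose `files`/`filenames` arguments and the `zipped` flag with a single `InputFiles` object that both the REST endpoints and the Celery test helpers pass around. A minimal sketch of the idea, assuming `.zip` as the only archive extension and a hard-coded list of main-file extensions (the real class in `src/layman/layer/filesystem/util.py` derives these from settings and additionally tracks saved paths and archive streams):

```python
import os

# Illustrative constants; the real values come from Layman settings.
MAIN_EXTENSIONS = ['.geojson', '.shp', '.tiff', '.tif', '.jp2', '.png', '.jpg']
ARCHIVE_EXTENSIONS = ['.zip']


class InputFiles:
    """Single view of uploaded files, whether sent as streams or only as names (chunked upload)."""

    def __init__(self, *, sent_streams=None, sent_paths=None):
        self.sent_streams = sent_streams or []
        self.sent_paths = sent_paths or []

    @property
    def raw_paths(self):
        # File names are known even before any content arrives.
        return self.sent_paths or [stream.filename for stream in self.sent_streams]

    @property
    def raw_paths_to_archives(self):
        return [path for path in self.raw_paths
                if os.path.splitext(path)[1].lower() in ARCHIVE_EXTENSIONS]

    @property
    def raw_main_file_paths(self):
        return [path for path in self.raw_paths
                if os.path.splitext(path)[1].lower() in MAIN_EXTENSIONS]

    @property
    def is_one_archive(self):
        # Mirrors the new definition in the util.py hunk: exactly one archive and
        # no uncompressed main file next to it, so an extra non-data file (e.g. a
        # QML style) sent alongside the ZIP no longer breaks the check.
        return len(self.raw_paths_to_archives) == 1 and not self.raw_main_file_paths


# Construction matches the celery_test.py and upgrade-test hunks:
assert InputFiles(sent_paths=['small_layer.qml', 'small_layer.zip']).is_one_archive
assert not InputFiles(sent_paths=['abc.geojson']).is_one_archive
```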
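In `wrong_input.py`, the four hand-written POST variants per test case give way to a loop over `REST_PARAMETRIZATION`, with expected-exception overrides keyed by a `frozenset` of the parameter combination and merged into the `KEY_DEFAULT` dict. A stripped-down sketch of that expansion, using a simplified stand-in for `asserts_util.recursive_dict_update` and hypothetical expected values:

```python
import copy
import itertools

REST_PARAMETRIZATION = {
    'with_chunks': {False: 'sync', True: 'chunks'},
    'compress': {False: '', True: 'zipped'},
}

EXPECTED_EXCEPTION = {
    'default': {'sync': True, 'http_code': 400, 'code': 18,
                'detail': {'path': 'boundary_lines.shp'}},
    frozenset([('compress', True), ('with_chunks', False)]):
        {'detail': {'path': 'temporary_zip_file.zip/boundary_lines.shp'}},
    frozenset([('compress', True), ('with_chunks', True)]):
        {'sync': False, 'detail': {'path': 'testcase.zip/boundary_lines.shp'}},
}


def recursive_dict_update(base, diff):
    # Simplified stand-in: merge nested dicts, values from `diff` win.
    for key, value in diff.items():
        if isinstance(value, dict) and isinstance(base.get(key), dict):
            recursive_dict_update(base[key], value)
        else:
            base[key] = value
    return base


param_names = list(REST_PARAMETRIZATION.keys())
for values in itertools.product(*[REST_PARAMETRIZATION[name] for name in param_names]):
    param_dict = dict(zip(param_names, values))
    # Readable test-case suffix such as 'chunks_zipped'; empty labels are skipped.
    postfix = '_'.join(label
                       for name, value in param_dict.items()
                       if (label := REST_PARAMETRIZATION[name][value]))
    expected = recursive_dict_update(copy.deepcopy(EXPECTED_EXCEPTION['default']),
                                     EXPECTED_EXCEPTION.get(frozenset(param_dict.items()), {}))
    # 'sync' decides whether the error is asserted on the POST response itself
    # or only later in the asynchronous publication info (info_key 'file').
    is_sync = expected.pop('sync')
    print(postfix, 'sync' if is_sync else 'async', expected['detail']['path'])
```

The `frozenset` keys make the override lookup order-independent, so `{'compress': True, 'with_chunks': True}` matches regardless of how the combination was built.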
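For clients, the new ZIP support means the archive is just another value of the `file` parameter in a multipart POST. A hypothetical call against a local instance; the host, port and workspace name are illustrative and not taken from this change set:

```python
import requests

# Hypothetical local Layman instance; adjust URL and workspace to your deployment.
url = 'http://localhost:8000/rest/workspaces/testuser/layers'
with open('sample/layman.layer/small_layer.zip', 'rb') as zip_file:
    response = requests.post(
        url,
        data={'name': 'small_layer'},
        files={'file': ('small_layer.zip', zip_file)},
    )
response.raise_for_status()
# Per the changelog entry above, GET Workspace Layer then reports the main file
# inside the archive in key `file.path`, e.g. ending with 'small_layer.zip/small_layer.geojson'.
print(response.json())
```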