Make get_complete_layer/map_info more consistent #961

Merged · 6 commits · Nov 28, 2023

Changes from all commits
2 changes: 2 additions & 0 deletions CHANGELOG.md
@@ -13,6 +13,8 @@
 #### Data migrations
 ### Changes
 - All changes from [v1.22.1](#v1221) and [v1.22.2](#v1222).
+- [#960](https://github.com/LayerManager/layman/issues/960) Handle WMS requests with HTTP error more efficiently in timgen.
+- [#962](https://github.com/LayerManager/layman/issues/962) Make values of `layman_metadata.publication_status` and `status` key(s) more consistent in responses of PATCH Workspace [Layer](doc/rest.md#patch-workspace-layer)/[Map](doc/rest.md#patch-workspace-map) and GET Workspace [Layer](doc/rest.md#get-workspace-layer)/[Map](doc/rest.md#get-workspace-map).
 
 ## v1.22.2
 2023-11-10
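For orientation, the publication responses that #962 makes more consistent have roughly the following shape. This is a hedged, abbreviated sketch based only on the keys exercised by the tests in this diff; real responses carry more keys and the values here are illustrative:

```python
# Abbreviated GET Workspace Layer response; illustrative values only.
layer_info = {
    'layman_metadata': {
        'publication_status': 'UPDATING',  # one of 'COMPLETE', 'INCOMPLETE', 'UPDATING'
    },
    # Internal sources; each may expose its own asynchronous-task 'status' key
    # while the publication has not yet reached the COMPLETE state.
    'db': {'status': 'SUCCESS'},
    'wms': {'status': 'PENDING'},
    'wfs': {'status': 'PENDING'},
    'thumbnail': {'status': 'PENDING'},
    'metadata': {'status': 'PENDING'},
}
```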
1 change: 1 addition & 0 deletions src/layman/error_list.py
@@ -55,4 +55,5 @@
     52: (400, 'GeoServer HTTP or connection error'),
     53: (500, 'Error when publishing on GeoServer. It happens for example for raster files with wrong explicit CRS.'),
     54: (400, 'Wrong header value'),
+    55: (400, 'Publication is not complete'),  # raised by process_client only
 }
1 change: 1 addition & 0 deletions src/layman/layer/filesystem/thumbnail_test.py
@@ -31,6 +31,7 @@ def wait_for_thumbnail_error(response):
         file_paths=geojson_file,
         style_file=style_file,
         check_response_fn=wait_for_thumbnail_error,
+        raise_if_not_complete=False,
     )
 
     layer_info = process_client.get_workspace_layer(workspace, layer)
14 changes: 9 additions & 5 deletions src/layman/layer/rest_workspace_test.py
@@ -909,10 +909,13 @@ def test_post_layers_long_and_delete_it(client):
 
     chain_info = util.get_layer_chain(workspace, layername)
     assert chain_info is not None and not celery_util.is_chain_ready(chain_info)
-    layer_info = util.get_layer_info(workspace, layername)
-    keys_to_check = ['db', 'wms', 'wfs', 'thumbnail', 'metadata']
-    for key_to_check in keys_to_check:
-        assert 'status' in layer_info[key_to_check]
+    layer_info = util.get_complete_layer_info(workspace, layername)
+
+    # sometimes, "long" post is not long enough and the layer is already in COMPLETE state
+    if layer_info['layman_metadata']['publication_status'] == 'UPDATING':
+        keys_to_check = ['db', 'wms', 'wfs', 'thumbnail', 'metadata']
+        for key_to_check in keys_to_check:
+            assert 'status' in layer_info[key_to_check]
 
     rest_path = url_for('rest_workspace_layer.delete_layer', workspace=workspace, layername=layername)
     response = client.delete(rest_path)
@@ -958,7 +961,8 @@ def wait_for_db_finish(response):
         info = response.json()
         return info.get('db', {}).get('status', '') == 'FAILURE'
 
-    process_client.publish_workspace_layer(workspace, layername, file_paths=file_paths, check_response_fn=wait_for_db_finish)
+    process_client.publish_workspace_layer(workspace, layername, file_paths=file_paths,
+                                           check_response_fn=wait_for_db_finish, raise_if_not_complete=False)
 
     layer_info = util.get_layer_info(workspace, layername)
    assert layer_info['db']['status'] == 'FAILURE', f'layer_info={layer_info}'
8 changes: 7 additions & 1 deletion src/layman/layer/util.py
@@ -84,7 +84,7 @@ def clear_publication_info(layer_info, file_type):
     return clear_info
 
 
-def get_complete_layer_info(workspace, layername, *, x_forwarded_items=None):
+def _get_complete_layer_info(workspace, layername, *, x_forwarded_items=None):
     partial_info = get_layer_info(workspace, layername, context={'x_forwarded_items': x_forwarded_items})
 
     if not any(partial_info):
@@ -125,6 +125,12 @@ def get_complete_layer_info(workspace, layername, *, x_forwarded_items=None):
     return complete_info
 
 
+def get_complete_layer_info(workspace, layername, *, x_forwarded_items=None):
+    return layman_util.get_complete_publication_info(workspace, LAYER_TYPE, layername,
+                                                     x_forwarded_items=x_forwarded_items,
+                                                     complete_info_method=_get_complete_layer_info)
+
+
 def pre_publication_action_check(workspace, layername, task_options):
     # sync processing
     sources = get_sources()
8 changes: 7 additions & 1 deletion src/layman/map/util.py
@@ -135,7 +135,7 @@ def clear_publication_info(layer_info):
     return clear_info
 
 
-def get_complete_map_info(workspace, mapname, *, x_forwarded_items=None):
+def _get_complete_map_info(workspace, mapname, *, x_forwarded_items=None):
     partial_info = get_map_info(workspace, mapname, context={'x_forwarded_items': x_forwarded_items})
 
     if not any(partial_info):
@@ -163,6 +163,12 @@
     return complete_info
 
 
+def get_complete_map_info(workspace, layername, *, x_forwarded_items=None):
+    return layman_util.get_complete_publication_info(workspace, MAP_TYPE, layername,
+                                                     x_forwarded_items=x_forwarded_items,
+                                                     complete_info_method=_get_complete_map_info)
+
+
 def get_composition_schema(url):
     match = re.compile(SCHEMA_URL_PATTERN).match(url)
     if not match:
3 changes: 2 additions & 1 deletion src/layman/requests_concurrency_test.py
@@ -25,7 +25,8 @@ def test_patch_after_feature_change_concurrency(publication_type):
     assert lock == common_const.PUBLICATION_LOCK_FEATURE_CHANGE
 
     process_client.patch_workspace_publication(publication_type, workspace, publication, title='New title',
-                                               check_response_fn=empty_method_returns_true)
+                                               check_response_fn=empty_method_returns_true,
+                                               raise_if_not_complete=False)
     queue = celery.get_run_after_chain_queue(workspace, publication_type, publication)
     assert len(queue) == 0, queue
     lock = redis.get_publication_lock(workspace, publication_type, publication)
6 changes: 4 additions & 2 deletions src/layman/rest_publication_test.py
@@ -119,7 +119,8 @@ def test_get_publication_layman_status(publ_type, error_params):
     workspace = 'test_get_publication_layman_status_workspace'
     publication = 'test_get_publication_layman_status_publication'
 
-    process_client.publish_workspace_publication(publ_type, workspace, publication, check_response_fn=common.empty_method_returns_true,)
+    process_client.publish_workspace_publication(publ_type, workspace, publication, check_response_fn=common.empty_method_returns_true,
+                                                 raise_if_not_complete=False)
 
     info = process_client.get_workspace_publication(publ_type, workspace, publication,)
     assert 'layman_metadata' in info, f'info={info}'
@@ -134,7 +135,8 @@
     assert info['layman_metadata']['publication_status'] == 'COMPLETE', f'info={info}'
 
     if error_params:
-        process_client.patch_workspace_publication(publ_type, workspace, publication, **error_params, )
+        process_client.patch_workspace_publication(publ_type, workspace, publication, **error_params,
+                                                   raise_if_not_complete=False)
         info = process_client.get_workspace_publication(publ_type, workspace, publication, )
         assert 'layman_metadata' in info, f'info={info}'
         assert 'publication_status' in info['layman_metadata'], f'info={info}'
2 changes: 2 additions & 0 deletions src/layman/upgrade/upgrade_v1_21_test.py
@@ -31,6 +31,7 @@
             'compress': True,
             'with_chunks': True,
             'do_not_upload_chunks': True,
+            'raise_if_not_complete': False,
         },
         settings.EnumWfsWmsStatus.NOT_AVAILABLE,
         id='layer_updating',
@@ -47,6 +48,7 @@
             ],
             'compress': True,
             'with_chunks': True,
+            'raise_if_not_complete': False,
         },
         settings.EnumWfsWmsStatus.NOT_AVAILABLE,
         id='layer_not_available',
40 changes: 38 additions & 2 deletions src/layman/util.py
@@ -551,15 +551,19 @@ def patch_after_feature_change(workspace, publication_type, publication, *, queu
     celery_util.set_publication_chain_info(workspace, publication_type, publication, task_methods, res)
 
 
-def get_publication_status(workspace, publication_type, publication_name, complete_info, item_keys, ):
+def is_publication_updating(workspace, publication_type, publication_name):
     chain_info = celery_util.get_publication_chain_info(workspace, publication_type, publication_name)
     current_lock = redis.get_publication_lock(
         workspace,
         publication_type,
         publication_name,
     )
 
-    if (chain_info and not celery_util.is_chain_ready(chain_info)) or current_lock:
+    return bool((chain_info and not celery_util.is_chain_ready(chain_info)) or current_lock)
+
+
+def get_publication_status(workspace, publication_type, publication_name, complete_info, item_keys, ):
+    if is_publication_updating(workspace, publication_type, publication_name):
         publication_status = 'UPDATING'
     elif any(complete_info.get(v, {}).get('status') for v in item_keys if isinstance(complete_info.get(v, {}), dict)):
         publication_status = 'INCOMPLETE'
@@ -660,3 +664,35 @@ def get_x_forwarded_items(request_headers):
         }
     )
     return XForwardedClass.from_headers(request_headers)
+
+
+def get_complete_publication_info(workspace, publication_type, publication_name, *, x_forwarded_items=None,
+                                  complete_info_method):
+    is_updating_before = is_publication_updating(workspace, publication_type, publication_name)
+    is_updating_after = None
+
+    complete_info = {}
+
+    logger.debug(f"get_complete_publication_info START, publication={workspace, publication_type, publication_name},"
+                 f"is_updating_before={is_updating_before}")
+
+    # In the result of complete_info_method, an inconsistency may occur between the `status` keys of internal
+    # sources (e.g. `db`, `wms`, `thumbnail`, or `metadata`) and the global `layman_metadata.publication_status`
+    # key, because some time passes between the computations of their values.
+    # To ensure a more consistent result, we check that the publication is in the same updating state before and
+    # after the complete_info_method call: either updating at both moments, or not updating at both moments.
+    # If the check fails, we repeat the complete_info_method call.
+    while is_updating_before != is_updating_after:
+        if is_updating_after is not None:
+            is_updating_before = is_updating_after
+
+        complete_info = complete_info_method(workspace, publication_name, x_forwarded_items=x_forwarded_items)
+
+        publication_status = complete_info['layman_metadata']['publication_status']
+        is_updating_after = publication_status == 'UPDATING'
+
+        logger.debug(
+            f"get_complete_publication_info, publication={workspace, publication_type, publication_name},"
+            f"publication_status={publication_status}, is_updating_after={is_updating_after}")
+
+    return complete_info
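The retry condition above is easier to see in isolation. Below is a minimal, self-contained sketch of the same pattern; `read_until_stable`, `is_updating`, and `read_info` are illustrative names, not Layman's API:

```python
def read_until_stable(is_updating, read_info):
    """Re-read `read_info()` until the updating flag is the same before and
    after the read, so the returned snapshot cannot mix two states."""
    updating_before = is_updating()
    updating_after = None
    info = {}
    while updating_before != updating_after:
        if updating_after is not None:
            updating_before = updating_after
        info = read_info()
        updating_after = info['publication_status'] == 'UPDATING'
    return info


# The publication was updating when first checked, but the first read already
# sees it COMPLETE, so the loop re-reads once more to confirm a stable state.
states = iter([
    {'publication_status': 'COMPLETE'},
    {'publication_status': 'COMPLETE'},
])
print(read_until_stable(lambda: True, lambda: next(states)))
```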
34 changes: 30 additions & 4 deletions test_tools/process_client.py
@@ -114,6 +114,7 @@ def wait_for_rest(url, max_attempts, sleeping_time, check_response, headers=None):
        if attempts > max_attempts:
            logger.error(f"r.status_code={response.status_code}\nrltest={response.text}")
            raise Exception('Max attempts reached!')
+    return response
 
 
 def raise_layman_error(response, status_codes_to_skip=None):
@@ -132,6 +133,20 @@ def raise_layman_error(response, status_codes_to_skip=None):
     assert 'Deprecation' not in response.headers, f'This is deprecated URL! Use new one. headers={response.headers}'
 
 
+def raise_if_not_complete_status(response):
+    resp_json = response.json()
+    status = resp_json.get('layman_metadata', {}).get('publication_status')
+    if status != 'COMPLETE':
+        failed_source_key = next((k for k, v in resp_json.items() if isinstance(v, dict) and v.get('status') == 'FAILURE'), None)
+        if failed_source_key and resp_json[failed_source_key].get('error', {}).get('code'):
+            failed_source = resp_json[failed_source_key]
+            error_desc = failed_source['error']
+            raise LaymanError(error_desc['code'],
+                              error_desc.get('detail'),
+                              sub_code=error_desc.get('sub_code'))
+        raise LaymanError(55, data=resp_json)
+
+
 def upload_file_chunks(publication_type,
                        workspace,
                        name,
@@ -177,6 +192,7 @@ def patch_workspace_publication(publication_type,
                                title=None,
                                style_file=None,
                                check_response_fn=None,
+                               raise_if_not_complete=True,
                                compress=False,
                                compress_settings=None,
                                with_chunks=False,
@@ -201,6 +217,8 @@
 
     assert not (not with_chunks and do_not_upload_chunks)
     assert not (check_response_fn and do_not_upload_chunks)  # because check_response_fn is not called when do_not_upload_chunks
+    assert not (raise_if_not_complete and do_not_upload_chunks)
+    assert not (check_response_fn and raise_if_not_complete)
 
     assert not (time_regex and publication_type == MAP_TYPE)
     assert not (publication_type == LAYER_TYPE and crs and not file_paths)
@@ -281,7 +299,8 @@
                           file_paths, )
 
     if not do_not_upload_chunks:
-        wait_for_publication_status(workspace, publication_type, name, check_response_fn=check_response_fn, headers=headers)
+        wait_for_publication_status(workspace, publication_type, name, check_response_fn=check_response_fn,
+                                    headers=headers, raise_if_not_complete=raise_if_not_complete)
     wfs.clear_cache(workspace)
     wms.clear_cache(workspace)
     if temp_dir:
@@ -337,6 +356,7 @@ def publish_workspace_publication(publication_type,
                                  style_file=None,
                                  description=None,
                                  check_response_fn=None,
+                                  raise_if_not_complete=True,
                                  with_chunks=False,
                                  compress=False,
                                  compress_settings=None,
@@ -360,6 +380,8 @@
 
     assert not (not with_chunks and do_not_upload_chunks)
     assert not (check_response_fn and do_not_upload_chunks)  # because check_response_fn is not called when do_not_upload_chunks
+    assert not (raise_if_not_complete and do_not_upload_chunks)
+    assert not (check_response_fn and raise_if_not_complete)
 
     file_paths = [publication_type_def.source_path] if file_paths is None and external_table_uri is None and not map_layers else file_paths
 
@@ -440,7 +462,8 @@
                           file_paths, )
 
     if not do_not_upload_chunks:
-        wait_for_publication_status(workspace, publication_type, name, check_response_fn=check_response_fn, headers=headers)
+        wait_for_publication_status(workspace, publication_type, name, check_response_fn=check_response_fn,
+                                    headers=headers, raise_if_not_complete=raise_if_not_complete)
     if temp_dir:
         shutil.rmtree(temp_dir)
     return response.json()[0]
@@ -661,14 +684,17 @@ def check_publication_status(response):
     return current_status in {'COMPLETE', 'INCOMPLETE'}
 
 
-def wait_for_publication_status(workspace, publication_type, publication, *, check_response_fn=None, headers=None,):
+def wait_for_publication_status(workspace, publication_type, publication, *, check_response_fn=None, headers=None,
+                                raise_if_not_complete=True):
     publication_type_def = PUBLICATION_TYPES_DEF[publication_type]
     with app.app_context():
         url = url_for(publication_type_def.get_workspace_publication_url,
                       workspace=workspace,
                       **{publication_type_def.url_param_name: publication})
     check_response_fn = check_response_fn or check_publication_status
-    wait_for_rest(url, 60, 0.5, check_response=check_response_fn, headers=headers)
+    response = wait_for_rest(url, 60, 0.5, check_response=check_response_fn, headers=headers)
+    if raise_if_not_complete:
+        raise_if_not_complete_status(response)
 
 
 def patch_after_feature_change(workspace, publ_type, name):
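The net effect for test authors: `wait_for_publication_status` now raises `LaymanError` 55 (or the failed source's own error) whenever the publication does not reach the COMPLETE state, so tests that intentionally publish broken data must opt out. A hedged usage sketch; the workspace, layer, and file names are illustrative, not from the PR:

```python
def test_broken_file_reports_failure():
    workspace = 'some_workspace'
    layername = 'broken_layer'
    # Publishing is expected to fail asynchronously; do not raise on non-COMPLETE state.
    process_client.publish_workspace_layer(workspace, layername,
                                           file_paths=['tmp/broken_file.geojson'],
                                           raise_if_not_complete=False)
    info = process_client.get_workspace_layer(workspace, layername)
    assert info['layman_metadata']['publication_status'] == 'INCOMPLETE'
```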
1 change: 1 addition & 0 deletions tests/dynamic_data/publications/celery.py
@@ -15,6 +15,7 @@ def generate(workspace):
                 'tmp/naturalearth/10m/cultural/ne_10m_admin_0_countries.geojson',
             ],
             'check_response_fn': empty_method_returns_true,
+            'raise_if_not_complete': False,
 
         }),
         consts.KEY_RESPONSE_ASSERTS: [
1 change: 1 addition & 0 deletions tests/dynamic_data/publications/updating_layer_test.py
@@ -27,6 +27,7 @@ class TestUpdatingLayer(base_test.TestSingleRestPublication):
         params={'compress': True,
                 'with_chunks': True,
                 'do_not_upload_chunks': True,
+                'raise_if_not_complete': False,
                 }
     )]
 
(file path not captured in this view)
@@ -1399,6 +1399,7 @@ def generate_test_cases():
     for key, test_case_params in TESTCASES.items():
         all_params = deepcopy(test_case_params)
         rest_args = all_params.pop(Key.REST_ARGS)
+        rest_args['raise_if_not_complete'] = False
 
         mandatory_cases = all_params.pop(Key.MANDATORY_CASES)
         specific_types = {mandatory_cases: EnumTestTypes.MANDATORY} if mandatory_cases else {}
(file path not captured in this view)
@@ -36,6 +36,7 @@ class TestPublication(base_test.TestSingleRestPublication):
     def before_class(self):
         self.post_publication(MAP, args={
             'file_paths': [MAP_FILE_PATH],
+            'raise_if_not_complete': False,  # timgen fails, because one URL points to a non-existent service
         }, scope='class')
 
     @pytest.mark.parametrize('headers, exp_url_prefix', [
6 changes: 4 additions & 2 deletions tests/static_data/data.py
@@ -91,14 +91,16 @@ def publish_publications_step(publications_set, step_num):
     for workspace, publ_type, publication in publications_set:
         data_def = data.PUBLICATIONS[(workspace, publ_type, publication)][data.DEFINITION]
         params = data_def[step_num]
-        write_method(publ_type, workspace, publication, **params, check_response_fn=empty_method_returns_true)
+        write_method(publ_type, workspace, publication, **params, check_response_fn=empty_method_returns_true,
+                     raise_if_not_complete=False)
         if len(data_def) == step_num + 1:
             done_publications.add((workspace, publ_type, publication))
     for workspace, publ_type, publication in publications_set:
         params = data.PUBLICATIONS[(workspace, publ_type, publication)][data.DEFINITION][step_num]
         headers = params.get('headers')
         try:
-            process_client.wait_for_publication_status(workspace, publ_type, publication, headers=headers, check_response_fn=check_publication_status)
+            process_client.wait_for_publication_status(workspace, publ_type, publication, headers=headers,
+                                                       check_response_fn=check_publication_status)
         except AssertionError as ex:
             print(f"AssertionError in publication {workspace, publ_type, publication}, step_num={step_num}.")
             raise ex
10 changes: 5 additions & 5 deletions timgen/src/map.js
@@ -91,11 +91,11 @@ const proxify_layer_loader = (layer, tiled, gs_public_url, gs_url, headers) => {
         }).then(res => {
             const headers = [...res.headers];
             log(`load_fn.fetch_retry, res.status=${res.status}, headers=${JSON.stringify(headers, null, 2)}, image_url=${image_url}`)
-            if(res.headers.get('content-type').includes('text/xml')) {
-                return Promise.all([false, res.text()])
-            } else {
-                return Promise.all([true, res.blob()])
-            }
+            const is_xml = (res.headers.get('content-type') || '').includes('text/xml')
+            const ok = res.status < 400 && !is_xml
+            return Promise.all([ok, (ok ? res.blob() : res.text())])
         }).catch(reason => {
             log(`load_fn.fetch_retry, catch reason=${reason}`)
         });
+
+        log(`load_fn.fetch_retry, loaded, ok=${ok}, image_url=${image_url}`)
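The timgen change above ties into #960: an HTTP error status now also marks the fetched image as not-OK, instead of only an XML content type. A rough Python equivalent of the same guard, using the `requests` library (a sketch for illustration with a hypothetical `fetch_wms_image` helper, not part of the PR):

```python
import requests

def fetch_wms_image(image_url, headers=None):
    """Return (ok, payload): image bytes when ok, otherwise the error text."""
    res = requests.get(image_url, headers=headers or {}, timeout=30)
    # WMS servers may report errors as text/xml bodies (sometimes with status 200),
    # and HTTP errors (status >= 400) must not be painted into the thumbnail either.
    is_xml = 'text/xml' in (res.headers.get('content-type') or '')
    ok = res.status_code < 400 and not is_xml
    return ok, (res.content if ok else res.text)
```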