From 490d600a3326857e7b37d11106124ef1e3431e2f Mon Sep 17 00:00:00 2001 From: lcard Date: Mon, 21 Aug 2023 14:21:27 +0100 Subject: [PATCH 01/17] Update UI, API and Infra to V7.0.0 --- .env.example | 4 + .github/workflows/ui-tests.yml | 39 + .gitignore | 3 + api/api/adapter/dynamodb_adapter.py | 3 +- api/api/adapter/s3_adapter.py | 36 +- .../authorisation/dataset_access_evaluator.py | 39 +- api/api/application/services/data_service.py | 61 +- .../application/services/format_service.py | 2 + .../services/schema_infer_service.py | 29 +- .../application/services/schema_service.py | 15 +- api/api/common/config/constants.py | 8 +- api/api/common/custom_exceptions.py | 4 + api/api/common/data_handlers.py | 111 ++ api/api/controller/auth.py | 10 +- api/api/controller/datasets.py | 77 +- api/api/controller/schema.py | 24 +- api/api/domain/data_types.py | 5 +- api/api/domain/metadata_search.py | 4 +- api/api/domain/mime_type.py | 1 + api/api/entry.py | 1 + api/test/api/adapter/test_dynamodb_adapter.py | 6 +- api/test/api/adapter/test_glue_adapter.py | 4 +- api/test/api/adapter/test_s3_adapter.py | 100 +- .../test_dataset_access_evaluator.py | 39 +- .../application/services/test_data_service.py | 192 +- .../services/test_dataset_validation.py | 20 +- .../services/test_partitioning_service.py | 2 +- .../services/test_schema_infer_service.py | 33 +- .../services/test_schema_service.py | 73 +- .../services/test_schema_validation.py | 32 +- api/test/api/common/controller_test_utils.py | 4 + api/test/api/common/test_data_handlers.py | 114 ++ api/test/api/controller/test_datasets.py | 268 ++- api/test/api/controller/test_layers.py | 1 + api/test/api/controller/test_schema.py | 102 +- api/test/api/domain/test_schema.py | 8 +- api/test/e2e/setup_e2e_tests.py | 13 +- .../test_files/schemas/test_e2e-delete.json | 13 +- .../test_files/schemas/test_e2e-query.json | 13 +- .../schemas/test_e2e-update_v1.json | 13 +- .../schemas/test_e2e-update_v2.json | 15 +- .../test_files/schemas/test_e2e-upload.json | 13 +- .../test_e2e_protected-do_not_delete.json | 119 +- api/test/e2e/test_journey.py | 112 +- .../delete_protected_domain_permission.py | 2 +- api/test/test_utils.py | 2 +- infrastructure/blocks/app-cluster/main.tf | 6 +- infrastructure/blocks/data-workflow/main.tf | 13 - infrastructure/blocks/data-workflow/output.tf | 18 +- infrastructure/blocks/pipeline/iam.tf | 8 - infrastructure/blocks/pipeline/main.tf | 3 +- infrastructure/blocks/vpc/main.tf | 25 +- infrastructure/blocks/vpc/output.tf | 6 +- infrastructure/modules/app-cluster/README.md | 4 +- .../modules/app-cluster/cloudtrail.tf | 5 +- .../modules/app-cluster/load_balancer.tf | 4 + infrastructure/modules/app-cluster/main.tf | 90 +- infrastructure/modules/app-cluster/outputs.tf | 4 + infrastructure/modules/app-cluster/routing.tf | 2 - .../modules/app-cluster/variables.tf | 8 +- infrastructure/modules/auth/README.md | 8 +- infrastructure/modules/auth/data.tf | 17 + infrastructure/modules/auth/db.tf | 7 +- infrastructure/modules/auth/variables.tf | 44 +- infrastructure/modules/aws-core/vpc/main.tf | 135 -- .../modules/aws-core/vpc/outputs.tf | 14 - .../modules/aws-core/vpc/variables.tf | 79 - .../modules/aws-core/vpc/versions.tf | 4 - .../modules/data-workflow/README.md | 4 +- .../modules/data-workflow/dynamodb.tf | 23 + .../modules/data-workflow/glue-components.tf | 118 -- infrastructure/modules/data-workflow/glue.tf | 3 + .../modules/data-workflow/output.tf | 18 +- .../modules/data-workflow/variables.tf | 20 - .../modules/data-workflow/vpc-endpoint.tf | 47 - 
infrastructure/modules/rapid/README.md | 5 +- infrastructure/modules/rapid/variables.tf | 2 +- infrastructure/modules/ui/README.md | 13 +- infrastructure/modules/ui/scripts/ui.sh.tpl | 2 +- .../scripts/initialisation-script.sh.tpl | 6 +- ui/jest.config.js | 3 +- ui/package.json | 5 +- ui/playwright.config.ts | 22 + ui/playwright/auth.setup.ts | 48 + ui/playwright/gapminder.csv | 1705 +++++++++++++++++ ui/playwright/test-data-flow.spec.ts | 87 + ui/playwright/test-homepage.spec.ts | 18 + ui/playwright/test-user-flow.spec.ts | 21 + ui/src/__tests__/app.test.tsx | 2 +- ui/src/__tests__/catalog.test.tsx | 2 +- ui/src/__tests__/data/delete.test.tsx | 39 +- ui/src/__tests__/data/download.test.tsx | 43 +- ui/src/__tests__/data/upload.test.tsx | 50 +- ui/src/__tests__/index.test.tsx | 2 +- ui/src/__tests__/login.test.tsx | 2 +- ui/src/__tests__/schema/create.test.tsx | 20 +- ui/src/__tests__/subject/create.test.tsx | 90 +- ui/src/__tests__/subject/modify.test.tsx | 2 +- ui/src/__tests__/tasks.test.tsx | 2 +- .../components/Autocomplete/Autocomplete.tsx | 29 + ui/src/components/Button/Button.test.tsx | 2 +- .../ConditionalWrapper.test.tsx | 2 +- .../DatasetSelector/DatasetSelector.tsx | 166 ++ ui/src/components/FormControl/FormControl.tsx | 14 + ui/src/components/Icon/svg/logo.svg | 2 +- ui/src/components/Icon/svg/search-alt.svg | 2 +- .../PermissionsTable/PermissionsTable.tsx | 281 +++ ui/src/components/SchemaCreate.tsx | 46 +- ui/src/components/Select/Select.tsx | 17 +- .../SimpleTable/SimpleTable.test.tsx | 2 +- .../UploadProgress/UploadProgress.tsx | 2 +- ui/src/pages/_app.tsx | 4 +- ui/src/pages/_document.tsx | 2 +- ui/src/pages/data/delete/index.tsx | 45 +- .../{ => [layer]}/[domain]/[dataset].tsx | 10 +- ui/src/pages/data/download/file.tsx | 2 +- ui/src/pages/data/download/index.tsx | 103 +- ui/src/pages/data/upload/index.tsx | 55 +- ui/src/pages/schema/create/index.tsx | 53 +- ui/src/pages/subject/create/index.tsx | 64 +- ui/src/pages/subject/modify/[subjectId].tsx | 113 +- .../subject/modify/success/[subjectId].tsx | 2 +- ui/src/pages/tasks/[jobId].tsx | 3 +- ui/src/pages/tasks/index.tsx | 2 + ui/src/service/fetch.ts | 27 +- ui/src/service/permissions.ts | 29 + ui/src/service/schema.ts | 39 +- ui/src/service/types.ts | 20 + ui/src/utils/createEmotionCache.ts | 5 - ui/src/utils/data-utils.test.ts | 41 - ui/src/utils/data-utils.ts | 25 - ui/src/utils/index.ts | 13 - ui/src/utils/test-utils.tsx | 92 - ui/src/utils/url-utils.test.ts | 65 - ui/src/utils/url-utils.ts | 21 - 135 files changed, 4152 insertions(+), 1965 deletions(-) create mode 100644 .github/workflows/ui-tests.yml create mode 100644 api/api/common/data_handlers.py create mode 100644 api/test/api/common/test_data_handlers.py create mode 100644 infrastructure/modules/auth/data.tf delete mode 100644 infrastructure/modules/aws-core/vpc/main.tf delete mode 100644 infrastructure/modules/aws-core/vpc/outputs.tf delete mode 100644 infrastructure/modules/aws-core/vpc/variables.tf delete mode 100644 infrastructure/modules/aws-core/vpc/versions.tf create mode 100644 infrastructure/modules/data-workflow/dynamodb.tf delete mode 100644 infrastructure/modules/data-workflow/glue-components.tf create mode 100644 infrastructure/modules/data-workflow/glue.tf delete mode 100644 infrastructure/modules/data-workflow/vpc-endpoint.tf create mode 100644 ui/playwright.config.ts create mode 100644 ui/playwright/auth.setup.ts create mode 100644 ui/playwright/gapminder.csv create mode 100644 ui/playwright/test-data-flow.spec.ts create mode 100644 
ui/playwright/test-homepage.spec.ts create mode 100644 ui/playwright/test-user-flow.spec.ts create mode 100644 ui/src/components/Autocomplete/Autocomplete.tsx create mode 100644 ui/src/components/DatasetSelector/DatasetSelector.tsx create mode 100644 ui/src/components/FormControl/FormControl.tsx create mode 100644 ui/src/components/PermissionsTable/PermissionsTable.tsx rename ui/src/pages/data/download/{ => [layer]}/[domain]/[dataset].tsx (95%) create mode 100644 ui/src/service/permissions.ts delete mode 100644 ui/src/utils/createEmotionCache.ts delete mode 100644 ui/src/utils/data-utils.test.ts delete mode 100644 ui/src/utils/data-utils.ts delete mode 100644 ui/src/utils/index.ts delete mode 100644 ui/src/utils/test-utils.tsx delete mode 100644 ui/src/utils/url-utils.test.ts delete mode 100644 ui/src/utils/url-utils.ts diff --git a/.env.example b/.env.example index ba33224..86dc1c0 100644 --- a/.env.example +++ b/.env.example @@ -16,3 +16,7 @@ RAPID_URL= # UI Specific NEXT_PUBLIC_API_URL= NEXT_PUBLIC_API_URL_PROXY= + +# UI Specific for integration tests +DOMAIN= +RESOURCE_PREFIX= \ No newline at end of file diff --git a/.github/workflows/ui-tests.yml b/.github/workflows/ui-tests.yml new file mode 100644 index 0000000..dd280c9 --- /dev/null +++ b/.github/workflows/ui-tests.yml @@ -0,0 +1,39 @@ +name: rAPId Integration Tests + +on: + push: + branches: + - "**" + + workflow_dispatch: + + pull_request: + types: + - opened + +jobs: + + run-ui-test: + runs-on: self-hosted + + steps: + - uses: actions/checkout@v3 + + - uses: actions/setup-node@v3 + with: + node-version: 19 + + - name: Install packages + run: | + cd ui + npm install + + - name: Install playwright browsers + run: npx playwright install-deps && npx playwright install + + - name: run playwright tests + run: npx playwright test ui/playwright + env: + DOMAIN: ${{ secrets.DOMAIN }} + RESOURCE_PREFIX: ${{ secrets.RESOURCE_PREFIX }} + AWS_REGION: ${{ secrets.AWS_REGION }} diff --git a/.gitignore b/.gitignore index 16024e8..2b0728b 100644 --- a/.gitignore +++ b/.gitignore @@ -181,3 +181,6 @@ docs/_build/ # terraform files .terraform/ .terraform.lock.hcl + +playwright/.auth +playwright/.downloads \ No newline at end of file diff --git a/api/api/adapter/dynamodb_adapter.py b/api/api/adapter/dynamodb_adapter.py index 7e98e52..36648c4 100644 --- a/api/api/adapter/dynamodb_adapter.py +++ b/api/api/adapter/dynamodb_adapter.py @@ -414,7 +414,8 @@ def get_latest_schema(self, dataset: Type[DatasetMetadata]) -> Optional[dict]: KeyConditionExpression=Key("PK").eq( dataset.dataset_identifier(with_version=False) ), - FilterExpression=Attr("IsLatestVersion").eq(True), + # Sort by SK in descending order to get the latest version + ScanIndexForward=False, )["Items"][0] except IndexError: return None diff --git a/api/api/adapter/s3_adapter.py b/api/api/adapter/s3_adapter.py index 51faa60..198cd41 100644 --- a/api/api/adapter/s3_adapter.py +++ b/api/api/adapter/s3_adapter.py @@ -1,6 +1,6 @@ import os from pathlib import Path -from typing import Dict, List, Type, Union +from typing import Dict, List, Optional, Type, Union import boto3 from botocore.exceptions import ClientError @@ -90,19 +90,22 @@ def list_dataset_files(self, dataset: DatasetMetadata) -> List[Dict]: *self.list_files_from_path(dataset.dataset_location(with_version=False)), ] - def get_last_updated_time(self, file_path: str) -> int: + def get_last_updated_time(self, file_path: str) -> Optional[str]: """ :return: Returns the last updated time for the dataset """ paginator = 
self.__s3_client.get_paginator("list_objects_v2") page_iterator = paginator.paginate(Bucket=self.__s3_bucket, Prefix=file_path) - return max( - [ - item["LastModified"] - for page in page_iterator - for item in page["Contents"] - ] - ) + try: + return max( + [ + item["LastModified"] + for page in page_iterator + for item in page["Contents"] + ] + ) + except KeyError: + return None def get_folder_size(self, file_path: str) -> int: """ @@ -148,7 +151,7 @@ def delete_previous_dataset_files( for file in files_to_delete: self._delete_data(file) - def delete_dataset_files_using_key(self, keys: List[Dict], filename: str): + def delete_dataset_files_using_key(self, keys: List[str], filename: str): files_to_delete = [{"Key": key} for key in keys] self._delete_objects(files_to_delete, filename) @@ -187,10 +190,13 @@ def _construct_partitioned_data_path( return os.path.join(dataset.dataset_location(), partition_path, filename) def _delete_objects(self, files_to_delete: List[Dict], filename: str): - response = self.__s3_client.delete_objects( - Bucket=self.__s3_bucket, Delete={"Objects": files_to_delete} - ) - self._handle_deletion_response(filename, response) + if files_to_delete: + response = self.__s3_client.delete_objects( + Bucket=self.__s3_bucket, Delete={"Objects": files_to_delete} + ) + self._handle_deletion_response(filename, response) + else: + AppLogger.info(f"No files to delete for: {filename}") def _handle_deletion_response(self, filename, response): if "Deleted" in response: @@ -206,7 +212,7 @@ def _handle_deletion_response(self, filename, response): def list_files_from_path(self, file_path: str) -> List[Dict]: try: - paginator = self.__s3_client.get_paginator("list_objects") + paginator = self.__s3_client.get_paginator("list_objects_v2") page_iterator = paginator.paginate( Bucket=self.__s3_bucket, Prefix=file_path ) diff --git a/api/api/application/services/authorisation/dataset_access_evaluator.py b/api/api/application/services/authorisation/dataset_access_evaluator.py index c395dda..e23d5c9 100644 --- a/api/api/application/services/authorisation/dataset_access_evaluator.py +++ b/api/api/application/services/authorisation/dataset_access_evaluator.py @@ -34,18 +34,21 @@ def __init__( self.permission_serivice = permission_service def get_authorised_datasets( - self, subject_id: str, action: Action + self, + subject_id: str, + action: Action, + filters: DatasetFilters = DatasetFilters(), ) -> List[DatasetMetadata]: """ This function does the following: 1. Get the permissions of the subject 2. Filters the permission by the relevant action e.g READ/WRITE - 3. Queries the datasets to find those that match these permissions + 3. Queries the datasets to find those that match these permissions and tags 4. 
Returns them """ permissions = self.permission_serivice.get_subject_permissions(subject_id) permissions = self.filter_permissions_by_action(permissions, action) - return self.fetch_datasets(permissions) + return self.fetch_datasets(permissions, filters) def can_access_dataset( self, dataset: DatasetMetadata, subject_id: str, actions: List[Action] @@ -98,21 +101,35 @@ def filter_permissions_by_action( return [permission for permission in permissions if permission.type == action] def fetch_datasets( - self, permissions: List[PermissionItem] - ) -> List[DatasetMetadata]: + self, + permissions: List[PermissionItem], + filters: DatasetFilters = DatasetFilters(), + ) -> List[SchemaMetadata]: authorised_datasets = set() for permission in permissions: authorised_datasets.update( - self.extract_datasets_from_permission(permission) + self.extract_datasets_from_permission(permission, filters) ) return sorted(authorised_datasets) def extract_datasets_from_permission( - self, permission: PermissionItem - ) -> List[DatasetMetadata]: + self, permission: PermissionItem, filters: DatasetFilters = DatasetFilters() + ) -> List[SchemaMetadata]: + """ + Extracts the datasets from the permission, while combining with the filters argument. + The permission filters overwrite the filters argument to stop any injection of permissions via the filters. + """ query = DatasetFilters( - sensitivity=SensitivityPermissionConverter[permission.sensitivity].value, - layer=LayerPermissionConverter[permission.layer].value, - domain=permission.domain, + **( + # If there are overlapping keys, the permission values will overwrite the others + dict(filters) + | { + "sensitivity": SensitivityPermissionConverter[ + permission.sensitivity + ].value, + "layer": LayerPermissionConverter[permission.layer].value, + "domain": permission.domain, + } + ) ) return self.schema_service.get_schema_metadatas(query) diff --git a/api/api/application/services/data_service.py b/api/api/application/services/data_service.py index 03947cc..c7b4670 100644 --- a/api/api/application/services/data_service.py +++ b/api/api/application/services/data_service.py @@ -1,11 +1,9 @@ -import os import uuid from pathlib import Path from threading import Thread from typing import List, Tuple import pandas as pd -from pandas.io.parsers import TextFileReader from api.adapter.athena_adapter import AthenaAdapter from api.adapter.glue_adapter import GlueAdapter @@ -15,7 +13,6 @@ from api.application.services.partitioning_service import generate_partitioned_data from api.application.services.schema_service import SchemaService from api.common.config.constants import ( - CONTENT_ENCODING, DATASET_ROWS_QUERY_LIMIT, DATASET_SIZE_QUERY_LIMIT, ) @@ -26,10 +23,14 @@ UnprocessableDatasetError, UserError, ) +from api.common.data_handlers import ( + construct_chunked_dataframe, + delete_incoming_raw_file, + get_dataframe_from_chunk_type, +) from api.common.logger import AppLogger from api.common.utilities import build_error_message_list from api.domain.data_types import DateType -from api.domain.dataset_filters import DatasetFilters from api.domain.dataset_metadata import DatasetMetadata from api.domain.enriched_schema import ( EnrichedColumn, @@ -42,10 +43,6 @@ from api.domain.sql_query import SQLQuery -def construct_chunked_dataframe(file_path: Path) -> TextFileReader: - return pd.read_csv(file_path, encoding=CONTENT_ENCODING, sep=",", chunksize=200_000) - - class DataService: def __init__( self, @@ -112,14 +109,14 @@ def process_upload( self.job_service.update_step(job, 
UploadStep.LOAD_PARTITIONS) self.load_partitions(schema) self.job_service.update_step(job, UploadStep.CLEAN_UP) - self.delete_incoming_raw_file(schema, file_path, raw_file_identifier) + delete_incoming_raw_file(schema, file_path, raw_file_identifier) self.job_service.update_step(job, UploadStep.NONE) self.job_service.succeed(job) except Exception as error: AppLogger.error( f"Processing upload failed for layer [{schema.get_layer()}], domain [{schema.get_domain()}], dataset [{schema.get_dataset()}], and version [{schema.get_version()}]: {error}" ) - self.delete_incoming_raw_file(schema, file_path, raw_file_identifier) + delete_incoming_raw_file(schema, file_path, raw_file_identifier) self.job_service.fail(job, build_error_message_list(error)) raise error @@ -131,12 +128,13 @@ def validate_incoming_data( ) dataset_errors = set() for chunk in construct_chunked_dataframe(file_path): + dataframe = get_dataframe_from_chunk_type(chunk) try: - build_validated_dataframe(schema, chunk) + build_validated_dataframe(schema, dataframe) except DatasetValidationError as error: dataset_errors.update(error.message) if dataset_errors: - self.delete_incoming_raw_file(schema, file_path, raw_file_identifier) + delete_incoming_raw_file(schema, file_path, raw_file_identifier) raise DatasetValidationError(list(dataset_errors)) def process_chunks( @@ -146,7 +144,8 @@ def process_chunks( f"Processing chunks for {schema.get_layer()}/{schema.get_domain()}/{schema.get_dataset()}/{schema.get_version()}" ) for chunk in construct_chunked_dataframe(file_path): - self.process_chunk(schema, raw_file_identifier, chunk) + dataframe = get_dataframe_from_chunk_type(chunk) + self.process_chunk(schema, raw_file_identifier, dataframe) if schema.has_overwrite_behaviour(): self.remove_existing_data(schema, raw_file_identifier) @@ -162,19 +161,6 @@ def process_chunk( permanent_filename = self.generate_permanent_filename(raw_file_identifier) self.upload_data(schema, validated_dataframe, permanent_filename) - def delete_incoming_raw_file( - self, schema: Schema, file_path: Path, raw_file_identifier: str - ): - try: - os.remove(file_path.name) - AppLogger.info( - f"Temporary upload file for {schema.get_layer()}/{schema.get_domain()}/{schema.get_dataset()}/{schema.get_version()} deleted. Raw file identifier: {raw_file_identifier}" - ) - except (FileNotFoundError, TypeError) as error: - AppLogger.error( - f"Temporary upload file for {schema.get_layer()}/{schema.get_domain()}/{schema.get_dataset()}/{schema.get_version()} not deleted. Raw file identifier: {raw_file_identifier}. Detail: {error}" - ) - def remove_existing_data(self, schema: Schema, raw_file_identifier: str) -> None: AppLogger.info( f"Overwriting existing data for layer [{schema.get_layer()}], domain [{schema.get_domain()}] and dataset [{schema.get_dataset()}]" @@ -196,29 +182,18 @@ def remove_existing_data(self, schema: Schema, raw_file_identifier: str) -> None f"Overriding existing data failed for layer [{schema.get_layer()}], domain [{schema.get_domain()}] and dataset [{schema.get_dataset()}]. 
Raw file identifier: {raw_file_identifier}" ) - def list_datasets(self, query: DatasetFilters, enriched: bool = False): - metadatas = self.schema_service.get_schema_metadatas(query=query) - if metadatas: - if enriched: - return [ - dict(metadata) - | { - "last_updated_date": self.s3_adapter.get_last_updated_time( - metadata.s3_file_location() - ) - } - for metadata in metadatas - ] - else: - return [dict(metadata) for metadata in metadatas] - return [] + def get_last_updated_time(self, metadata: DatasetMetadata) -> str: + last_updated = self.s3_adapter.get_last_updated_time( + metadata.s3_file_location() + ) + return last_updated or "Never updated" def get_dataset_info(self, dataset: DatasetMetadata) -> EnrichedSchema: schema = self.schema_service.get_schema(dataset) statistics_dataframe = self.athena_adapter.query( dataset, self._build_query(schema) ) - last_updated = self.s3_adapter.get_last_updated_time(dataset.s3_file_location()) + last_updated = self.get_last_updated_time(dataset) return EnrichedSchema( metadata=self._enrich_metadata(schema, statistics_dataframe, last_updated), columns=self._enrich_columns(schema, statistics_dataframe), diff --git a/api/api/application/services/format_service.py b/api/api/application/services/format_service.py index 584de3e..dda5370 100644 --- a/api/api/application/services/format_service.py +++ b/api/api/application/services/format_service.py @@ -9,5 +9,7 @@ class FormatService: def from_df_to_mimetype(df: DataFrame, mime_type: MimeType): if mime_type == MimeType.TEXT_CSV: return df.to_csv(quoting=csv.QUOTE_NONNUMERIC, index=False) + elif mime_type == MimeType.BINARY: + return df.to_parquet(engine="pyarrow") else: return df.to_dict(orient="index") diff --git a/api/api/application/services/schema_infer_service.py b/api/api/application/services/schema_infer_service.py index 9a51f25..9ebdc69 100644 --- a/api/api/application/services/schema_infer_service.py +++ b/api/api/application/services/schema_infer_service.py @@ -1,12 +1,16 @@ -from io import StringIO -from typing import List, Union, Any +from typing import List, Any +from pathlib import Path import pandas as pd from api.application.services.schema_validation import validate_schema -from api.common.config.constants import CONTENT_ENCODING from api.common.config.layers import Layer from api.common.custom_exceptions import UserError +from api.common.data_handlers import ( + construct_chunked_dataframe, + delete_incoming_raw_file, + get_dataframe_from_chunk_type, +) from api.common.value_transformers import clean_column_name from api.domain.data_types import extract_athena_types @@ -21,11 +25,10 @@ def infer_schema( domain: str, dataset: str, sensitivity: str, - file_content: Union[bytes, str], + file_path: Path, ) -> dict[str, Any]: - dataframe = self._construct_dataframe(file_content) + dataframe = self._construct_single_chunk_dataframe(file_path) columns = self._infer_columns(dataframe) - schema = Schema( metadata=SchemaMetadata( layer=layer, @@ -36,13 +39,19 @@ ), columns=columns, ) - validate_schema(schema) + try: + validate_schema(schema) + finally: + # We need to delete the incoming file from the local file system + # regardless of whether the schema validation was successful or not + delete_incoming_raw_file(schema, file_path) return schema.dict(exclude={"metadata": {"version"}}) - def _construct_dataframe(self, file_content: Union[bytes, str]) -> pd.DataFrame: - parsed_contents = StringIO(str(file_content, CONTENT_ENCODING)) + def _construct_single_chunk_dataframe(self, file_path: 
Path) -> pd.DataFrame: try: - return pd.read_csv(parsed_contents, encoding=CONTENT_ENCODING, sep=",") + for chunk in construct_chunked_dataframe(file_path): + # We only validate a schema based on the first chunk + return get_dataframe_from_chunk_type(chunk) except ValueError as error: raise UserError( f"The dataset you have provided is not formatted correctly: {self._clean_error(error.args[0])}" ) diff --git a/api/api/application/services/schema_service.py b/api/api/application/services/schema_service.py index e78453e..8fd6b17 100644 --- a/api/api/application/services/schema_service.py +++ b/api/api/application/services/schema_service.py @@ -36,7 +36,7 @@ def __init__( def get_schema( self, dataset: Type[DatasetMetadata], latest: bool = False ) -> Schema: - if latest: + if latest or not dataset.get_version(): schema_dict = self.dynamodb_adapter.get_latest_schema(dataset) else: schema_dict = self.dynamodb_adapter.get_schema(dataset) @@ -90,10 +90,15 @@ def delete_schema(self, dataset: Type[DatasetMetadata]) -> int: return self.dynamodb_adapter.delete_schema(dataset) def delete_schemas(self, dataset: Type[DatasetMetadata]) -> int: - dataset.version = self.get_latest_schema_version(dataset) - for i in range(dataset.version): - dataset.version = i - self.dynamodb_adapter.delete_schema(dataset) + max_version = self.get_latest_schema_version(dataset) + for i in range(max_version): + metadata = DatasetMetadata( + layer=dataset.layer, + domain=dataset.domain, + dataset=dataset.dataset, + version=i + 1, + ) + self.dynamodb_adapter.delete_schema(metadata) def upload_schema(self, schema: Schema) -> str: schema.metadata.version = FIRST_SCHEMA_VERSION_NUMBER diff --git a/api/api/common/config/constants.py b/api/api/common/config/constants.py index 36f7ca3..60f6ccb 100644 --- a/api/api/common/config/constants.py +++ b/api/api/common/config/constants.py @@ -3,6 +3,9 @@ FILENAME_WITH_TIMESTAMP_REGEX = r"[a-zA-Z0-9:_\-]+.csv$" CONTENT_ENCODING = "utf-8" +# https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types/Common_types +VALID_FILE_MIME_TYPES = ["text/csv", "application/octet-stream"] +VALID_FILE_EXTENSIONS = ["csv", "parquet"] TAG_KEYS_REGEX = BASE_REGEX + "{1,128}$" TAG_VALUES_REGEX = BASE_REGEX + "{0,256}$" @@ -29,7 +32,10 @@ DATASET_ROWS_QUERY_LIMIT = 100_000 # 200MB DATASET_SIZE_QUERY_LIMIT = 200_000_000 - +MB_1 = 1024 * 1024 +CHUNK_SIZE = 50 +CHUNK_SIZE_MB = MB_1 * CHUNK_SIZE +PARQUET_CHUNK_SIZE = 10000 FIRST_SCHEMA_VERSION_NUMBER = 1 SCHEMA_VERSION_INCREMENT = 1 diff --git a/api/api/common/custom_exceptions.py b/api/api/common/custom_exceptions.py index 437b519..c9b189e 100644 --- a/api/api/common/custom_exceptions.py +++ b/api/api/common/custom_exceptions.py @@ -63,6 +63,10 @@ class UnprocessableDatasetError(UserError): pass +class InvalidFileUploadError(UserError): + pass + + # Specifically handled in global handler ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/api/api/common/data_handlers.py b/api/api/common/data_handlers.py new file mode 100644 index 0000000..9930369 --- /dev/null +++ b/api/api/common/data_handlers.py @@ -0,0 +1,111 @@ +import os +import psutil +from typing import Any +from pathlib import Path + +import pandas as pd +import pyarrow as pa +import pyarrow.parquet as pq +from fastapi import UploadFile, File +from pandas.io.parsers import TextFileReader + +from api.common.logger import AppLogger +from api.common.config.constants import ( + CHUNK_SIZE_MB, + PARQUET_CHUNK_SIZE, + CONTENT_ENCODING, +) +from api.domain.schema import Schema + +CHUNK_SIZE = 200_000 + + 
+def store_file_to_disk( + extension: str, id: str, file: UploadFile = File(...), to_chunk: bool = False +) -> Path: + file_path = Path(f"{id}-{file.filename}") + AppLogger.info( + f"Writing incoming file chunks ({CHUNK_SIZE_MB} bytes each) to disk [{file.filename}]" + ) + AppLogger.info(f"Available disk space: {psutil.disk_usage('/').free / (2 ** 30)}GB") + + if extension == "csv": + store_csv_file_to_disk(file_path, to_chunk, file) + elif extension == "parquet": + store_parquet_file_to_disk(file_path, to_chunk, file) + return file_path + + +def store_csv_file_to_disk( + file_path: Path, to_chunk: bool, file: UploadFile = File(...) +): + with open(file_path, "wb") as incoming_file: + while contents := file.file.read(CHUNK_SIZE_MB): + incoming_file.write(contents) + + if to_chunk: + incoming_file.close() + break + + +def store_parquet_file_to_disk( + file_path: Path, to_chunk: bool, file: UploadFile = File(...) +): + parquet_file = pq.ParquetFile(file.file) + # Guard against an empty Parquet file, where no writer is ever created + writer = None + for index, batch in enumerate(parquet_file.iter_batches(PARQUET_CHUNK_SIZE)): + if index == 0: + writer = pq.ParquetWriter(file_path.as_posix(), batch.schema) + + table = pa.Table.from_batches([batch]) + writer.write_table(table) + + if to_chunk: + break + if writer: + writer.close() + + +def construct_chunked_dataframe( + file_path: Path, +) -> TextFileReader | Any | None: + # Loads the file from the local path and splits it into chunks for processing. + # When loading CSV, pandas returns an iterable TextFileReader, whereas PyArrow + # returns an iterable of pyarrow.RecordBatch for Parquet; each chunk is passed + # through get_dataframe_from_chunk_type to obtain a DataFrame-compatible format + extension = file_path.as_posix().split(".")[-1].lower() + if extension == "csv": + return pd.read_csv( + file_path, encoding=CONTENT_ENCODING, sep=",", chunksize=CHUNK_SIZE + ) + elif extension == "parquet": + parquet_file = pq.ParquetFile(file_path.as_posix()) + return parquet_file.iter_batches(batch_size=CHUNK_SIZE) + + +def get_dataframe_from_chunk_type( + chunk: TextFileReader | Any, +) -> pd.DataFrame: + # Convert a chunk into a pandas-compatible format: CSV chunks are already + # DataFrames, while PyArrow Parquet RecordBatches need a to_pandas() call + if isinstance(chunk, pd.DataFrame): + return chunk + elif isinstance(chunk, pa.RecordBatch): + return chunk.to_pandas() + + +def delete_incoming_raw_file( + schema: Schema, file_path: Path, raw_file_identifier: str = None +): + raw_file_identifier_string = f"Raw file identifier: {raw_file_identifier}" + try: + os.remove(file_path.name) + AppLogger.info( + f"""Temporary upload file for {schema.metadata.string_representation()} deleted. {raw_file_identifier_string if raw_file_identifier is not None else ''}""" + ) + except (FileNotFoundError, TypeError) as error: + AppLogger.error( + f"Temporary upload file for {schema.metadata.string_representation()} not deleted. {raw_file_identifier_string if raw_file_identifier is not None else ''}. 
Detail: {error}" + ) diff --git a/api/api/controller/auth.py b/api/api/controller/auth.py index 6791a71..fbcb64c 100644 --- a/api/api/controller/auth.py +++ b/api/api/controller/auth.py @@ -65,9 +65,7 @@ async def redirect_oauth_token_request(request: Request): } payload = await _load_json_bytes_to_dict(request) - response = requests.post( - IDENTITY_PROVIDER_TOKEN_URL, headers=headers, data=payload, timeout=30 - ) + response = requests.post(IDENTITY_PROVIDER_TOKEN_URL, headers=headers, data=payload) return response.json() @@ -116,11 +114,7 @@ async def _get_access_token(auth, code, cognito_user_login_client_id): "code": code, } response = requests.post( - IDENTITY_PROVIDER_TOKEN_URL, - auth=auth, - headers=headers, - data=payload, - timeout=30, + IDENTITY_PROVIDER_TOKEN_URL, auth=auth, headers=headers, data=payload ) response_content = json.loads(response.content.decode(CONTENT_ENCODING)) access_token = response_content["access_token"] diff --git a/api/api/controller/datasets.py b/api/api/controller/datasets.py index ca05beb..6c4e759 100644 --- a/api/api/controller/datasets.py +++ b/api/api/controller/datasets.py @@ -1,8 +1,6 @@ import os -from pathlib import Path from typing import Optional -import psutil from fastapi import APIRouter, Request from fastapi import UploadFile, File, Response, Security from fastapi import status as http_status @@ -16,26 +14,35 @@ secure_endpoint, get_subject_id, ) +from api.application.services.authorisation.dataset_access_evaluator import ( + DatasetAccessEvaluator, +) from api.application.services.data_service import DataService + from api.application.services.delete_service import DeleteService from api.application.services.format_service import FormatService from api.application.services.schema_service import SchemaService +from api.common.data_handlers import store_file_to_disk from api.common.utilities import strtobool from api.common.config.auth import Action from api.common.config.constants import ( BASE_API_PATH, LOWERCASE_ROUTE_DESCRIPTION, LOWERCASE_REGEX, + VALID_FILE_MIME_TYPES, + VALID_FILE_EXTENSIONS, ) from api.common.config.layers import Layer from api.common.custom_exceptions import ( SchemaNotFoundError, UserError, + InvalidFileUploadError, ) from api.common.logger import AppLogger from api.common.utilities import construct_dataset_metadata from api.domain.dataset_filters import DatasetFilters from api.domain.dataset_metadata import DatasetMetadata +from api.domain.schema_metadata import SchemaMetadata from api.domain.metadata_search import metadata_search_query from api.domain.mime_type import MimeType from api.domain.sql_query import SQLQuery @@ -48,6 +55,7 @@ data_service = DataService() delete_service = DeleteService() schema_service = SchemaService() +data_access_evaluator = DatasetAccessEvaluator() datasets_router = APIRouter( @@ -63,7 +71,9 @@ status_code=http_status.HTTP_200_OK, ) async def list_all_datasets( - tag_filters: DatasetFilters = DatasetFilters(), enriched: Optional[bool] = False + request: Request, + tag_filters: DatasetFilters = DatasetFilters(), + enriched: Optional[bool] = False, ): """ ## List datasets @@ -88,9 +98,25 @@ async def list_all_datasets( a `READ` permission, e.g.: `READ_ALL`, `READ_PUBLIC`, `READ_PRIVATE`, `READ_PROTECTED_{DOMAIN}` ### Click `Try it out` to use the endpoint - """ - return data_service.list_datasets(query=tag_filters, enriched=enriched) + subject_id = get_subject_id(request) + datasets = data_access_evaluator.get_authorised_datasets( + subject_id, Action.READ, tag_filters + ) + + class 
EnrichedMetadata(SchemaMetadata): + last_updated_date: str + + if enriched: + return [ + EnrichedMetadata( + **metadata.dict(), + last_updated_date=data_service.get_last_updated_time(metadata), + ) + for metadata in datasets + ] + else: + return datasets if not CATALOG_DISABLED: @@ -348,7 +374,7 @@ def upload_data( """ ## Upload dataset - Given a schema has been uploaded you can upload data which matches that schema. Uploading a CSV file via this endpoint + Given a schema has been uploaded, you can upload data which matches that schema. Uploading a CSV or Parquet file via this endpoint ensures that the data matches the schema and that it is consistent and sanitised. Should any errors be detected during upload, these are sent back in the response to facilitate you fixing the issues. @@ -394,9 +420,18 @@ """ try: + extension = file.filename.split(".")[-1].lower() + if ( + file.content_type not in VALID_FILE_MIME_TYPES + and extension not in VALID_FILE_EXTENSIONS + ): + raise InvalidFileUploadError( + f"This file type {extension} is not supported." + ) + subject_id = get_subject_id(request) job_id = generate_uuid() - incoming_file_path = store_file_to_disk(job_id, file) + incoming_file_path = store_file_to_disk(extension, job_id, file) raw_filename, version, job_id = data_service.upload_dataset( subject_id, job_id, @@ -418,24 +453,6 @@ raise UserError(message=error.args[0]) -def store_file_to_disk(id: str, file: UploadFile = File(...)) -> Path: - file_path = Path(f"{id}-{file.filename}") - chunk_size_mb = 50 - mb_1 = 1024 * 1024 - - with open(file_path, "wb") as incoming_file: - while contents := file.file.read(mb_1 * chunk_size_mb): - AppLogger.info( - f"Writing incoming file chunk ({chunk_size_mb}MB) to disk [{file.filename}]" - ) - AppLogger.info( - f"Available disk space: {psutil.disk_usage('/').free / (2 ** 30)}GB" - ) - incoming_file.write(contents) - - return file_path - - @datasets_router.post( "/{layer}/{domain}/{dataset}/query", dependencies=[Security(secure_dataset_endpoint, scopes=[Action.READ])], @@ -451,6 +468,7 @@ "text/csv": { "example": 'col1;col2;col3\n"123","something","500"\n"456","something else","600"' }, + "application/octet-stream": {}, } } }, @@ -519,6 +537,13 @@ ... ``` + ### Parquet + + To get a Parquet response, the `Accept` header has to be set to `application/octet-stream`; this can be set below. The response will be the raw Parquet + binary result. + + We recommend using this endpoint programmatically. 
+ ### Accepted permissions In order to use this endpoint you need a `READ` permission with appropriate sensitivity level permission, @@ -600,7 +625,7 @@ async def query_large_dataset( def _format_query_output(df: DataFrame, mime_type: MimeType) -> Response: formatted_output = FormatService.from_df_to_mimetype(df, mime_type) - if mime_type == MimeType.TEXT_CSV: + if mime_type in [MimeType.TEXT_CSV, MimeType.BINARY]: return PlainTextResponse(status_code=200, content=formatted_output) else: return formatted_output diff --git a/api/api/controller/schema.py b/api/api/controller/schema.py index bb88110..7872ca8 100644 --- a/api/api/controller/schema.py +++ b/api/api/controller/schema.py @@ -12,12 +12,17 @@ BASE_API_PATH, LOWERCASE_REGEX, LOWERCASE_ROUTE_DESCRIPTION, + VALID_FILE_MIME_TYPES, + VALID_FILE_EXTENSIONS, ) from api.common.config.layers import Layer from api.common.custom_exceptions import ( AWSServiceError, + InvalidFileUploadError, ) +from api.common.data_handlers import store_file_to_disk from api.common.logger import AppLogger +from api.domain.Jobs.Job import generate_uuid from api.domain.schema import Schema delete_service = DeleteService() @@ -50,7 +55,7 @@ async def generate_schema( output of this endpoint in the Schema Upload endpoint. ⚠️ WARNING: - - The first 50MB of the uploaded file (regardless of size) are used to infer the schema + - The first 50MB (if the file is CSV) or the first 10,000 rows (if Parquet) of the uploaded file are used to infer the schema, regardless of its size - Consider uploading a representative sample of your dataset (e.g.: the first 10,000 rows) instead of uploading the entire large file which could take a long time ### Inputs @@ -79,17 +84,20 @@ ### Click `Try it out` to use the endpoint """ - infer_contents = get_first_mb_of_file(file) + extension = file.filename.split(".")[-1].lower() + if ( + file.content_type not in VALID_FILE_MIME_TYPES + and extension not in VALID_FILE_EXTENSIONS + ): + raise InvalidFileUploadError(f"This file type {extension} is not supported.") + + job_id = generate_uuid() + incoming_file_path = store_file_to_disk(extension, job_id, file, to_chunk=True) return schema_infer_service.infer_schema( - layer, domain, dataset, sensitivity, infer_contents + layer, domain, dataset, sensitivity, incoming_file_path ) -def get_first_mb_of_file(file: UploadFile, chunk_size_mb: int = 50) -> bytes: - mb_1 = 1024 * 1024 - return file.file.read(mb_1 * chunk_size_mb) - - @schema_router.post( "", status_code=http_status.HTTP_201_CREATED, diff --git a/api/api/domain/data_types.py b/api/api/domain/data_types.py index 2bbfbe2..2dbdc23 100644 --- a/api/api/domain/data_types.py +++ b/api/api/domain/data_types.py @@ -9,6 +9,7 @@ class NumericType(StrEnum): INTEGER = "integer" + INT = "int" MIXED_INTEGER_FLOAT = "mixed-integer-float" FLOATING = "floating" TINYINT = "tinyint" @@ -63,7 +64,7 @@ class AthenaDataType(Enum): DECIMAL = NumericType.DECIMAL DOUBLE = NumericType.DOUBLE FLOAT = NumericType.FLOAT - INTEGER = NumericType.INTEGER + INT = NumericType.INT SMALLINT = NumericType.SMALLINT STRING = StringType.STRING TIMESTAMP = TimestampType.TIMESTAMP @@ -76,7 +77,7 @@ PandasDataType.DATE: AthenaDataType.DATE, PandasDataType.DATETIME: AthenaDataType.DATE, PandasDataType.DECIMAL: AthenaDataType.DECIMAL, - PandasDataType.INTEGER: AthenaDataType.INTEGER, + PandasDataType.INTEGER: AthenaDataType.INT, PandasDataType.FLOATING: AthenaDataType.DOUBLE, PandasDataType.MIXED: AthenaDataType.STRING, 
PandasDataType.MIXED_INTEGER: AthenaDataType.STRING, diff --git a/api/api/domain/metadata_search.py b/api/api/domain/metadata_search.py index 6cf4555..c736725 100644 --- a/api/api/domain/metadata_search.py +++ b/api/api/domain/metadata_search.py @@ -11,7 +11,7 @@ DATA_TYPE_COLUMN = "data_type" # fmt: off -METADATA_QUERY = Template( +METADATA_QUERY = Template( # nosec f""" SELECT * FROM ( SELECT @@ -40,7 +40,7 @@ FROM "{GLUE_CATALOGUE_DB_NAME}"."{METADATA_CATALOGUE_DB_NAME}" ) WHERE {{{{ where_clause }}}} -""" # nosec B608 +""" ) # fmt: on diff --git a/api/api/domain/mime_type.py b/api/api/domain/mime_type.py index 32e1319..30ac79b 100644 --- a/api/api/domain/mime_type.py +++ b/api/api/domain/mime_type.py @@ -6,6 +6,7 @@ class MimeType(StrEnum): APPLICATION_JSON = "application/json" TEXT_CSV = "text/csv" + BINARY = "application/octet-stream" @staticmethod def to_mimetype(mime_type: str): diff --git a/api/api/entry.py b/api/api/entry.py index 1d257ca..738b7b5 100644 --- a/api/api/entry.py +++ b/api/api/entry.py @@ -185,6 +185,7 @@ async def get_permissions_ui(): ) async def get_datasets_ui(action: Action, request: Request): subject_id = parse_token(request.cookies.get(RAPID_ACCESS_TOKEN)).subject + datasets = upload_service.get_authorised_datasets(subject_id, action) return [dataset.to_dict() for dataset in datasets] diff --git a/api/test/api/adapter/test_dynamodb_adapter.py b/api/test/api/adapter/test_dynamodb_adapter.py index 02fda36..5230a4b 100644 --- a/api/test/api/adapter/test_dynamodb_adapter.py +++ b/api/test/api/adapter/test_dynamodb_adapter.py @@ -1015,7 +1015,7 @@ def setup_method(self): Column( name="colname1", partition_index=0, - data_type="integer", + data_type="int", allow_null=False, ), Column( @@ -1049,7 +1049,7 @@ def test_store_schema(self): { "name": "colname1", "partition_index": 0, - "data_type": "integer", + "data_type": "int", "allow_null": False, "format": None, }, @@ -1154,7 +1154,7 @@ def test_get_latest_schema(self, result, expected): self.schema_table.query.assert_called_once_with( KeyConditionExpression=Key("PK").eq("raw/domain/dataset"), - FilterExpression=Attr("IsLatestVersion").eq(True), + ScanIndexForward=False, ) assert res == expected diff --git a/api/test/api/adapter/test_glue_adapter.py b/api/test/api/adapter/test_glue_adapter.py index 0f16489..541c415 100644 --- a/api/test/api/adapter/test_glue_adapter.py +++ b/api/test/api/adapter/test_glue_adapter.py @@ -41,7 +41,7 @@ def setup_method(self): Column( name="colname1", partition_index=0, - data_type="integer", + data_type="int", allow_null=True, ), Column( @@ -74,7 +74,7 @@ def test_create_table(self): "StoredAsSubDirectories": False, }, "PartitionKeys": [ - {"Name": "colname1", "Type": "integer"}, + {"Name": "colname1", "Type": "int"}, ], "TableType": "EXTERNAL_TABLE", "Parameters": { diff --git a/api/test/api/adapter/test_s3_adapter.py b/api/test/api/adapter/test_s3_adapter.py index 9005c31..d769af4 100644 --- a/api/test/api/adapter/test_s3_adapter.py +++ b/api/test/api/adapter/test_s3_adapter.py @@ -144,8 +144,9 @@ class TestS3AdapterDataRetrieval: def setup_method(self): self.mock_s3_client = Mock() + self.s3_bucket = "bucket" self.persistence_adapter = S3Adapter( - s3_client=self.mock_s3_client, s3_bucket="dataset" + s3_client=self.mock_s3_client, s3_bucket=self.s3_bucket ) def test_retrieve_data(self): @@ -157,7 +158,7 @@ def test_find_raw_file_when_file_exists(self): DatasetMetadata("raw", "domain", "dataset", 1), "filename.csv" ) self.mock_s3_client.get_object.assert_called_once_with( - 
Bucket="dataset", Key="raw_data/raw/domain/dataset/1/filename.csv" + Bucket=self.s3_bucket, Key="raw_data/raw/domain/dataset/1/filename.csv" ) def test_throws_error_for_find_raw_file_when_file_does_not_exist(self): @@ -174,7 +175,66 @@ def test_throws_error_for_find_raw_file_when_file_does_not_exist(self): ) self.mock_s3_client.get_object.assert_called_once_with( - Bucket="dataset", Key="raw_data/raw/domain/dataset/2/bad_file" + Bucket=self.s3_bucket, Key="raw_data/raw/domain/dataset/2/bad_file" + ) + + def test_get_last_updated_time_success(self): + self.mock_s3_client.get_paginator.return_value.paginate.return_value = [ + { + "NextToken": "xxx", + "ResponseMetadata": {"key": "value"}, + "Contents": [ + { + "Key": "data/layer/domain/dataset/1/123-456-789_111-222-333.parquet", + "LastModified": "2020-01-03", + }, + { + "Key": "data/layer/domain/dataset/1/123-456-789_444-555-666.parquet", + "LastModified": "2020-01-03", + }, + { + "Key": "data/layer/domain/dataset/1/123-456-789_777-888-999.parquet", + "LastModified": "2020-01-03", + }, + { + "Key": "data/layer/domain/dataset/1/999-999-999_111-888-999.parquet", + "LastModified": "2020-01-03", + }, + { + "Key": "data/layer/domain/dataset/2/888-888-888_777-888-999.parquet", + "LastModified": "2020-01-28", + }, + ], + "Name": "data-bucket", + "Prefix": "data/layer/domain/dataset", + "EncodingType": "url", + } + ] + + res = self.persistence_adapter.get_last_updated_time("path") + assert res == "2020-01-28" + self.mock_s3_client.get_paginator.assert_called_once_with("list_objects_v2") + self.mock_s3_client.get_paginator.return_value.paginate.assert_called_once_with( + Bucket=self.s3_bucket, Prefix="path" + ) + + def test_get_last_updated_time_when_empty(self): + self.mock_s3_client.get_paginator.return_value.paginate.return_value = [ + { + "NextToken": "xxx", + "ResponseMetadata": {"key": "value"}, + "KeyCount": 0, + "Name": "data-bucket", + "Prefix": "data/layer/domain/dataset", + "EncodingType": "url", + } + ] + + res = self.persistence_adapter.get_last_updated_time("path") + assert res is None + self.mock_s3_client.get_paginator.assert_called_once_with("list_objects_v2") + self.mock_s3_client.get_paginator.return_value.paginate.assert_called_once_with( + Bucket=self.s3_bucket, Prefix="path" ) @@ -238,7 +298,7 @@ def test_deletion_of_dataset_files_with_no_partitions(self): ), "123-456-789.csv", ) - self.mock_s3_client.get_paginator.assert_called_once_with("list_objects") + self.mock_s3_client.get_paginator.assert_called_once_with("list_objects_v2") self.mock_s3_client.get_paginator.return_value.paginate.assert_called_once_with( Bucket="data-bucket", Prefix="data/layer/domain/dataset/1" ) @@ -304,7 +364,7 @@ def test_deletion_of_dataset_files_with_partitions(self): self.persistence_adapter.delete_dataset_files( DatasetMetadata("layer", "domain", "dataset", 1), "123-456-789.csv" ) - self.mock_s3_client.get_paginator.assert_called_once_with("list_objects") + self.mock_s3_client.get_paginator.assert_called_once_with("list_objects_v2") self.mock_s3_client.get_paginator.return_value.paginate.assert_called_once_with( Bucket="data-bucket", Prefix="data/layer/domain/dataset/1" ) @@ -327,8 +387,6 @@ def test_deletion_of_dataset_files_with_partitions(self): ) def test_deletion_of_dataset_files_when_error_is_thrown(self): - self.mock_s3_client.get_paginator.return_value.paginate.return_value = {} - self.mock_s3_client.delete_objects.return_value = { "Errors": [ { @@ -346,16 +404,30 @@ def test_deletion_of_dataset_files_when_error_is_thrown(self): ] } msg = 
"The item \\[123-456-789.csv\\] could not be deleted. Please contact your administrator." - + self.persistence_adapter.list_files_from_path = Mock( + return_value=["data/123-456-789.csv"] + ) with pytest.raises(AWSServiceError, match=msg): self.persistence_adapter.delete_dataset_files( DatasetMetadata("layer", "domain", "dataset", 3), "123-456-789.csv" ) - self.mock_s3_client.get_paginator.assert_called_once_with("list_objects") - self.mock_s3_client.get_paginator.return_value.paginate.assert_called_once_with( - Bucket="data-bucket", Prefix="data/layer/domain/dataset/3" + self.persistence_adapter.list_files_from_path.assert_called_once_with( + "data/layer/domain/dataset/3" + ) + + def test_no_deletion_is_attempted_if_there_are_no_files(self): + self.persistence_adapter.list_files_from_path = Mock(return_value=[""]) + self.persistence_adapter._delete_objects = Mock() + + self.persistence_adapter.delete_dataset_files( + DatasetMetadata("layer", "domain", "dataset", 3), "123-456-789.csv" + ) + + self.persistence_adapter.list_files_from_path.assert_called_once_with( + "data/layer/domain/dataset/3" ) + self.mock_s3_client.delete_objects.assert_not_called() def test_deletion_of_raw_files(self): self.mock_s3_client.list_objects.return_value = { @@ -489,7 +561,7 @@ def test_list_raw_files(self): "2020-11-15T16:00:00-file3.csv", ] - self.mock_s3_client.get_paginator.assert_called_once_with("list_objects") + self.mock_s3_client.get_paginator.assert_called_once_with("list_objects_v2") self.mock_s3_client.get_paginator.return_value.paginate.assert_called_once_with( Bucket="my-bucket", Prefix="raw_data/layer/my_domain/my_dataset/1" ) @@ -510,7 +582,7 @@ def test_list_raw_files_when_empty(self): ) assert raw_files == [] - self.mock_s3_client.get_paginator.assert_called_once_with("list_objects") + self.mock_s3_client.get_paginator.assert_called_once_with("list_objects_v2") self.mock_s3_client.get_paginator.return_value.paginate.assert_called_once_with( Bucket="my-bucket", Prefix="raw_data/layer/my_domain/my_dataset/2" ) @@ -523,7 +595,7 @@ def test_list_raw_files_when_empty_response(self): ) assert raw_files == [] - self.mock_s3_client.get_paginator.assert_called_once_with("list_objects") + self.mock_s3_client.get_paginator.assert_called_once_with("list_objects_v2") self.mock_s3_client.get_paginator.return_value.paginate.assert_called_once_with( Bucket="my-bucket", Prefix="raw_data/layer/my_domain/my_dataset/1" ) diff --git a/api/test/api/application/services/authorisation/test_dataset_access_evaluator.py b/api/test/api/application/services/authorisation/test_dataset_access_evaluator.py index 11bfb5d..d0810a9 100644 --- a/api/test/api/application/services/authorisation/test_dataset_access_evaluator.py +++ b/api/test/api/application/services/authorisation/test_dataset_access_evaluator.py @@ -30,12 +30,13 @@ def test_layer_permission_converter(self): assert actual == expected @pytest.mark.parametrize( - "permission, expected_filters", + "permission, input_filters, expected_filters", [ ( PermissionItem( id="READ_ALL_ALL", layer="ALL", sensitivity="ALL", type="READ" ), + DatasetFilters(), DatasetFilters( sensitivity=["PUBLIC", "PRIVATE", "PROTECTED"], layer=["raw", "layer"], @@ -48,6 +49,7 @@ def test_layer_permission_converter(self): sensitivity="ALL", type="WRITE", ), + DatasetFilters(), DatasetFilters( sensitivity=["PUBLIC", "PRIVATE", "PROTECTED"], layer=["raw"] ), @@ -59,6 +61,7 @@ def test_layer_permission_converter(self): sensitivity="PUBLIC", type="WRITE", ), + DatasetFilters(), 
DatasetFilters(sensitivity=["PUBLIC"], layer=["raw", "layer"]), ), ( @@ -69,15 +72,39 @@ def test_layer_permission_converter(self): type="READ", domain="TEST", ), + DatasetFilters(), DatasetFilters(sensitivity=["PROTECTED"], layer=["raw"], domain="TEST"), ), + ( + PermissionItem( + id="READ_ALL_ALL", layer="ALL", sensitivity="ALL", type="READ" + ), + DatasetFilters(key_only_tags=["tag1"]), + DatasetFilters( + sensitivity=["PUBLIC", "PRIVATE", "PROTECTED"], + layer=["raw", "layer"], + key_only_tags=["tag1"], + ), + ), + ( + PermissionItem( + id="WRITE_ALL_PUBLIC", + layer="ALL", + sensitivity="PUBLIC", + type="WRITE", + ), + DatasetFilters(sensitivity="ALL"), + DatasetFilters(sensitivity=["PUBLIC"], layer=["raw", "layer"]), + ), ], ) - def test_extract_datasets_from_permission(self, permission, expected_filters): + def test_extract_datasets_from_permission( + self, permission, input_filters, expected_filters + ): self.schema_service.get_schema_metadatas = Mock() self.schema_service.get_schema_metadatas.return_value = "dataset" - res = self.evaluator.extract_datasets_from_permission(permission) + res = self.evaluator.extract_datasets_from_permission(permission, input_filters) self.schema_service.get_schema_metadatas.assert_called_once_with( expected_filters @@ -265,13 +292,15 @@ def test_get_authorised_datasets(self, action: Action, permission_mask: list[boo self.permission_service.get_subject_permissions = Mock(return_value=permissions) self.evaluator.fetch_datasets = Mock(return_value=["dataset"]) - res = self.evaluator.get_authorised_datasets(subject_id, action) + res = self.evaluator.get_authorised_datasets( + subject_id, action, "dataset_filter" + ) assert res == ["dataset"] self.permission_service.get_subject_permissions.assert_called_once_with( subject_id ) self.evaluator.fetch_datasets.assert_called_once_with( - list(compress(permissions, permission_mask)) + list(compress(permissions, permission_mask)), "dataset_filter" ) def test_can_access_dataset_success(self): diff --git a/api/test/api/application/services/test_data_service.py b/api/test/api/application/services/test_data_service.py index d98a89e..23d61e2 100644 --- a/api/test/api/application/services/test_data_service.py +++ b/api/test/api/application/services/test_data_service.py @@ -8,9 +8,7 @@ from api.application.services.data_service import ( DataService, - construct_chunked_dataframe, ) -from api.common.config.constants import CONTENT_ENCODING from api.common.custom_exceptions import ( UserError, AWSServiceError, @@ -57,7 +55,7 @@ def setup_method(self): Column( name="colname1", partition_index=0, - data_type="integer", + data_type="int", allow_null=True, ), Column( @@ -81,16 +79,6 @@ def chunked_dataframe_values( mock_test_file_reader.__iter__.return_value = dataframes return mock_test_file_reader - # Dataset chunking ------------------------------- - @patch("api.application.services.data_service.pd") - def test_construct_chunked_dataframe(self, mock_pd): - path = Path("file/path") - - construct_chunked_dataframe(path) - mock_pd.read_csv.assert_called_once_with( - path, encoding=CONTENT_ENCODING, sep=",", chunksize=200_000 - ) - # Upload Dataset ------------------------------------- @patch("api.application.services.data_service.UploadJob") @@ -160,7 +148,7 @@ def test_generates_permanent_filename(self, mock_uuid): # Process Upload @patch.object(DataService, "validate_incoming_data") @patch.object(DataService, "process_chunks") - @patch.object(DataService, "delete_incoming_raw_file") + 
@patch("api.application.services.data_service.delete_incoming_raw_file") @patch.object(DataService, "load_partitions") def test_process_upload_calls_relevant_methods( self, @@ -205,7 +193,7 @@ def test_process_upload_calls_relevant_methods( self.job_service.update_step.assert_has_calls(expected_update_step_calls) self.job_service.succeed.assert_called_once_with(upload_job) - @patch.object(DataService, "delete_incoming_raw_file") + @patch("api.application.services.data_service.delete_incoming_raw_file") @patch.object(DataService, "validate_incoming_data") def test_deletes_incoming_file_from_disk_and_fails_job_if_any_error_during_processing( self, @@ -417,7 +405,7 @@ def test_processes_each_dataset_chunk_with_overwrite_behaviour( Column( name="colname1", partition_index=0, - data_type="integer", + data_type="int", allow_null=True, ), Column( @@ -592,7 +580,7 @@ def setup_method(self): Column( name="colname1", partition_index=0, - data_type="integer", + data_type="int", allow_null=False, ), Column( @@ -619,6 +607,29 @@ def setup_method(self): ) self.s3_adapter.get_last_updated_time.return_value = "2022-03-01 11:03:49+00:00" + def test_get_last_updated_time(self): + time = "2022-03-01 11:03:49+00:00" + self.s3_adapter.get_last_updated_time.return_value = time + + last_updated_time = self.data_service.get_last_updated_time( + self.valid_schema.metadata + ) + assert last_updated_time == "2022-03-01 11:03:49+00:00" + self.s3_adapter.get_last_updated_time.assert_called_once_with( + self.valid_schema.metadata.s3_file_location() + ) + + def test_get_last_updated_time_empty(self): + self.s3_adapter.get_last_updated_time.return_value = None + + last_updated_time = self.data_service.get_last_updated_time( + self.valid_schema.metadata + ) + assert last_updated_time == "Never updated" + self.s3_adapter.get_last_updated_time.assert_called_once_with( + self.valid_schema.metadata.s3_file_location() + ) + def test_get_schema_information(self): expected_schema = EnrichedSchema( metadata=EnrichedSchemaMetadata( @@ -636,7 +647,7 @@ def test_get_schema_information(self): EnrichedColumn( name="colname1", partition_index=0, - data_type="integer", + data_type="int", allow_null=False, ), EnrichedColumn( @@ -677,9 +688,6 @@ def test_get_schema_information(self): ), ) assert actual_schema == expected_schema - self.s3_adapter.get_last_updated_time.assert_called_once_with( - dataset_metadata.s3_file_location() - ) def test_get_schema_information_for_multiple_dates(self): valid_schema = Schema( @@ -695,7 +703,7 @@ def test_get_schema_information_for_multiple_dates(self): Column( name="colname1", partition_index=0, - data_type="integer", + data_type="int", allow_null=False, ), Column( @@ -731,7 +739,7 @@ def test_get_schema_information_for_multiple_dates(self): EnrichedColumn( name="colname1", partition_index=0, - data_type="integer", + data_type="int", allow_null=False, ), EnrichedColumn( @@ -797,7 +805,7 @@ def test_get_schema_size_for_datasets_with_no_dates(self): Column( name="colname1", partition_index=0, - data_type="integer", + data_type="int", allow_null=False, ) ], @@ -818,7 +826,7 @@ def test_get_schema_size_for_datasets_with_no_dates(self): EnrichedColumn( name="colname1", partition_index=0, - data_type="integer", + data_type="int", allow_null=False, ) ], @@ -1040,137 +1048,3 @@ def test_fails_query_job_upon_any_other_error_and_raises_error(self, error): self.job_service.update_step.assert_has_calls(expected_job_calls) self.job_service.set_results_url.assert_not_called() 
self.job_service.fail.assert_called_once_with(query_job, ["the error message"]) - - -class TestListDatasets: - def setup_method(self): - self.s3_adapter = Mock() - self.schema_service = Mock() - self.data_service = DataService( - self.s3_adapter, - None, - None, - None, - self.schema_service, - ) - - def test_list_datasets_not_enriched_success(self): - mock_schemas = [ - SchemaMetadata( - layer="raw", - domain="domain1", - dataset="dataset1", - version=2, - sensitivity="PUBLIC", - ), - SchemaMetadata( - layer="raw", - domain="domain12", - dataset="dataset_c", - version=4, - sensitivity="PRIVATE", - ), - ] - expected = [ - { - "dataset": "dataset1", - "description": "", - "domain": "domain1", - "is_latest_version": True, - "key_only_tags": [], - "key_value_tags": {}, - "layer": "raw", - "owners": None, - "sensitivity": "PUBLIC", - "update_behaviour": "APPEND", - "version": 2, - }, - { - "dataset": "dataset_c", - "description": "", - "domain": "domain12", - "is_latest_version": True, - "key_only_tags": [], - "key_value_tags": {}, - "layer": "raw", - "owners": None, - "sensitivity": "PRIVATE", - "update_behaviour": "APPEND", - "version": 4, - }, - ] - - self.schema_service.get_schema_metadatas.return_value = mock_schemas - - response = self.data_service.list_datasets("query") - assert response == expected - - def test_list_datasets_not_enriched_empty(self): - mock_schemas = [] - - self.schema_service.get_schema_metadatas.return_value = mock_schemas - - response = self.data_service.list_datasets("query") - assert response == [] - - def test_list_datasets_enriched_success(self): - mock_schemas = [ - SchemaMetadata( - layer="raw", - domain="domain1", - dataset="dataset1", - version=2, - sensitivity="PUBLIC", - ), - SchemaMetadata( - layer="raw", - domain="domain12", - dataset="dataset_c", - version=4, - sensitivity="PRIVATE", - ), - ] - expected = [ - { - "dataset": "dataset1", - "description": "", - "domain": "domain1", - "is_latest_version": True, - "key_only_tags": [], - "key_value_tags": {}, - "layer": "raw", - "owners": None, - "sensitivity": "PUBLIC", - "update_behaviour": "APPEND", - "version": 2, - "last_updated_date": "date1", - }, - { - "dataset": "dataset_c", - "description": "", - "domain": "domain12", - "is_latest_version": True, - "key_only_tags": [], - "key_value_tags": {}, - "layer": "raw", - "owners": None, - "sensitivity": "PRIVATE", - "update_behaviour": "APPEND", - "version": 4, - "last_updated_date": "date2", - }, - ] - - self.s3_adapter.get_last_updated_time.side_effect = ["date1", "date2"] - self.schema_service.get_schema_metadatas.return_value = mock_schemas - - response = self.data_service.list_datasets("query", enriched=True) - assert response == expected - - def test_list_datasets_enriched_empty(self): - mock_schemas = [] - - self.schema_service.get_schema_metadatas.return_value = mock_schemas - - response = self.data_service.list_datasets("query", enriched=True) - assert response == [] diff --git a/api/test/api/application/services/test_dataset_validation.py b/api/test/api/application/services/test_dataset_validation.py index 32505b2..1eeb8b0 100644 --- a/api/test/api/application/services/test_dataset_validation.py +++ b/api/test/api/application/services/test_dataset_validation.py @@ -39,7 +39,7 @@ def setup_method(self): Column( name="colname1", partition_index=0, - data_type="integer", + data_type="int", allow_null=True, ), Column( @@ -136,7 +136,7 @@ def test_invalid_when_partition_column_with_illegal_characters(self): Column( name="colname1", partition_index=0, - 
data_type="integer", + data_type="int", allow_null=True, ), Column( @@ -221,7 +221,7 @@ def test_checks_for_unacceptable_null_values(self, dataframe: pd.DataFrame): Column( name="col1", partition_index=None, - data_type="integer", + data_type="int", allow_null=False, ), Column( @@ -419,7 +419,7 @@ def test_return_error_message_when_not_accepted_null_values(self): Column( name="col3", partition_index=None, - data_type="integer", + data_type="int", allow_null=False, ), ], @@ -461,7 +461,7 @@ def test_return_error_message_when_not_correct_datatypes(self): Column( name="col3", partition_index=None, - data_type="integer", + data_type="int", allow_null=False, ), Column( @@ -484,7 +484,7 @@ def test_return_error_message_when_not_correct_datatypes(self): except DatasetValidationError as error: assert error.message == [ "Column [col2] has an incorrect data type. Expected boolean, received string", - "Column [col3] has an incorrect data type. Expected integer, received string", + "Column [col3] has an incorrect data type. Expected int, received string", "Column [col4] has an incorrect data type. Expected double, received string", ] @@ -530,7 +530,7 @@ def test_return_error_message_when_dataset_has_illegal_chars_in_partition_column Column( name="col5", partition_index=4, - data_type="integer", + data_type="int", allow_null=False, ), ], @@ -586,7 +586,7 @@ def test_return_list_of_validation_error_messages_when_multiple_validation_steps Column( name="col5", partition_index=None, - data_type="integer", + data_type="int", allow_null=False, ), ], @@ -598,7 +598,7 @@ def test_return_list_of_validation_error_messages_when_multiple_validation_steps assert error.message == [ "Column [col4] does not match specified date format in at least one row", "Column [col3] does not allow null values", - "Column [col5] has an incorrect data type. Expected integer, received string", + "Column [col5] has an incorrect data type. 
Expected int, received string", "Partition column [col1] has values with illegal characters '/'", "Partition column [col2] has values with illegal characters '/'", ] @@ -738,7 +738,7 @@ def test_raises_error_if_provided_date_is_not_valid(self): Column( name="value", partition_index=None, - data_type="integer", + data_type="int", allow_null=False, ), ], diff --git a/api/test/api/application/services/test_partitioning_service.py b/api/test/api/application/services/test_partitioning_service.py index 387e120..e046229 100644 --- a/api/test/api/application/services/test_partitioning_service.py +++ b/api/test/api/application/services/test_partitioning_service.py @@ -153,7 +153,7 @@ def test_handles_one_partition(self): def test_handles_no_partitions(self): # Forcing Int64 for testing purposes as parsing & validation should occur before partitioning # noqa E501 - column_dtype = "integer" + column_dtype = "int" schema = Schema( metadata=SchemaMetadata( diff --git a/api/test/api/application/services/test_schema_infer_service.py b/api/test/api/application/services/test_schema_infer_service.py index 26de2ba..c130de3 100644 --- a/api/test/api/application/services/test_schema_infer_service.py +++ b/api/test/api/application/services/test_schema_infer_service.py @@ -1,3 +1,6 @@ +import tempfile +import os +from pathlib import Path from unittest.mock import patch import pytest @@ -32,14 +35,14 @@ def test_infer_schema(self): Column( name="colname2", partition_index=None, - data_type="integer", + data_type="int", allow_null=True, format=None, ), Column( name="col_name_3", partition_index=None, - data_type="integer", + data_type="int", allow_null=True, format=None, ), @@ -53,29 +56,39 @@ def test_infer_schema(self): ], ).dict(exclude={"metadata": {"version"}}) file_content = b"colname1,colname2,Col name 3,Col/name 4! 
\nsomething,123,1,True\notherthing,123,3,False\n\n" + temp_out_path = tempfile.mkstemp(suffix=".csv")[1] + path = Path(temp_out_path) + with open(path, "wb") as file: + file.write(file_content) actual_schema = self.infer_schema_service.infer_schema( - "raw", "mydomain", "mydataset", "PUBLIC", file_content + "raw", "mydomain", "mydataset", "PUBLIC", path ) assert actual_schema == expected_schema + os.remove(temp_out_path) - @patch("api.application.services.schema_infer_service.pd") - def test_raises_error_when_parsing_provided_file_fails(self, mock_pd): - file_content = b"" - - mock_pd.read_csv.side_effect = ValueError("Some message") + @patch("api.application.services.schema_infer_service.construct_chunked_dataframe") + def test_raises_error_when_parsing_provided_file_fails( + self, mock_construct_chunked_dataframe + ): + mock_construct_chunked_dataframe.side_effect = ValueError("Some message") with pytest.raises(UserError): self.infer_schema_service.infer_schema( - "raw", "mydomain", "mydataset", "PUBLIC", file_content + "raw", "mydomain", "mydataset", "PUBLIC", Path("xxx-yyy.csv") ) def test_raises_error_when_some_rows_contain_too_many_values(self): file_content = ( b"colname1,colname2\n" b"value1,value2\n" b"value1,value2,EXTRA_VALUE\n" ) + temp_out_path = tempfile.mkstemp(suffix=".csv")[1] + path = Path(temp_out_path) + with open(path, "wb") as file: + file.write(file_content) with pytest.raises(UserError): self.infer_schema_service.infer_schema( - "raw", "mydomain", "mydataset", "PUBLIC", file_content + "raw", "mydomain", "mydataset", "PUBLIC", path ) + os.remove(temp_out_path) diff --git a/api/test/api/application/services/test_schema_service.py b/api/test/api/application/services/test_schema_service.py index 10549e2..46d668a 100644 --- a/api/test/api/application/services/test_schema_service.py +++ b/api/test/api/application/services/test_schema_service.py @@ -1,4 +1,4 @@ -from unittest.mock import Mock +from unittest.mock import Mock, call import pytest from botocore.exceptions import ClientError @@ -14,6 +14,7 @@ ConflictError, UserError, ) +from api.domain.dataset_metadata import DatasetMetadata from api.domain.schema import Schema, Column from api.domain.schema_metadata import Owner, SchemaMetadata @@ -40,7 +41,7 @@ def setup_method(self): Column( name="colname1", partition_index=0, - data_type="integer", + data_type="int", allow_null=False, ), Column( @@ -97,7 +98,7 @@ def test_check_for_protected_domain_success(self): Column( name="colname1", partition_index=0, - data_type="integer", + data_type="int", allow_null=True, ), ], @@ -124,7 +125,7 @@ def test_check_for_protected_domain_fails(self): Column( name="colname1", partition_index=0, - data_type="integer", + data_type="int", allow_null=True, ), ], @@ -160,7 +161,7 @@ def test_upload_schema_throws_error_when_schema_invalid(self): Column( name="colname1", partition_index=invalid_partition_index, - data_type="integer", + data_type="int", allow_null=True, ) ], @@ -196,7 +197,7 @@ def setup_method(self): Column( name="colname1", partition_index=0, - data_type="integer", + data_type="int", allow_null=False, ), Column( @@ -249,7 +250,7 @@ def test_update_schema_throws_error_when_schema_invalid(self): Column( name="colname1", partition_index=invalid_partition_index, - data_type="integer", + data_type="int", allow_null=True, ) ], @@ -276,24 +277,6 @@ def test_update_schema_for_protected_domain_failure(self): ): self.schema_service.update_schema(new_schema) - # TODO: Fix this test - # def 
test_update_schema_when_crawler_raises_error(self): - # new_schema = self.valid_updated_schema - # expected_schema = self.valid_updated_schema.copy(deep=True) - # expected_schema.metadata.version = 2 - - # self.schema_service.get_schema = Mock(return_value=self.valid_schema) - # self.glue_adapter.create_table.side_effect = TableCreationError( - # "error occurred" - # ) - - # with pytest.raises(TableCreationError, match="error occurred"): - # self.schema_service.update_schema(new_schema) - - # self.glue_adapter.create_table.assert_called_once_with(new_schema) - # self.schema_service.store_schema.assert_not_called() - # self.schema_service.deprecate_schema.assert_not_called() - def test_update_schema_success(self): original_schema = self.valid_schema original_schema.metadata.version = 2 @@ -369,7 +352,7 @@ def setup_method(self): Column( name="colname1", partition_index=0, - data_type="integer", + data_type="int", allow_null=False, ), Column( @@ -412,6 +395,15 @@ def test_get_schema_success_latest(self): assert res == self.schema self.dynamodb_adapter.get_latest_schema.assert_called_once_with(self.metadata) + def test_get_schema_with_no_version_success(self): + self.dynamodb_adapter.get_latest_schema = Mock(return_value=self.schema_dict) + metadata = DatasetMetadata("raw", "some", "other") + + res = self.schema_service.get_schema(metadata) + + assert res == self.schema + self.dynamodb_adapter.get_latest_schema.assert_called_once_with(metadata) + def test_get_schema_raises_exception(self): self.dynamodb_adapter.get_schema = Mock(return_value=None) @@ -445,3 +437,32 @@ def test_get_latest_schema_version_no_schema(self): res = self.schema_service.get_latest_schema_version(self.metadata) assert res == 1 self.dynamodb_adapter.get_latest_schema.assert_called_once_with(self.metadata) + + +class TestDeleteSchema: + def setup_method(self): + self.dynamodb_adapter = Mock() + self.schema_service = SchemaService( + self.dynamodb_adapter, + None, + None, + ) + + def test_delete_schemas(self): + self.schema_service.get_latest_schema_version = Mock(return_value=3) + metadata = DatasetMetadata("layer", "domain", "dataset") + + self.schema_service.delete_schemas(metadata) + self.dynamodb_adapter.delete_schema.assert_has_calls( + [ + call(DatasetMetadata("layer", "domain", "dataset", 1)), + call(DatasetMetadata("layer", "domain", "dataset", 2)), + call(DatasetMetadata("layer", "domain", "dataset", 3)), + ] + ) + + def test_delete_schema(self): + metadata = DatasetMetadata("layer", "domain", "dataset", 1) + self.schema_service.delete_schema(metadata) + + self.dynamodb_adapter.delete_schema.assert_called_once_with(metadata) diff --git a/api/test/api/application/services/test_schema_validation.py b/api/test/api/application/services/test_schema_validation.py index 8e32b12..16d0450 100644 --- a/api/test/api/application/services/test_schema_validation.py +++ b/api/test/api/application/services/test_schema_validation.py @@ -29,7 +29,7 @@ def setup_method(self): Column( name="colname1", partition_index=0, - data_type="integer", + data_type="int", allow_null=False, ), Column( @@ -87,7 +87,7 @@ def test_is_invalid_schema_with_domain_name_containing_hyphen(self): Column( name="colname1", partition_index=None, - data_type="integer", + data_type="int", allow_null=True, ) ], @@ -110,7 +110,7 @@ def test_is_invalid_schema_with_dataset_name_containing_hyphen(self): Column( name="colname1", partition_index=None, - data_type="integer", + data_type="int", allow_null=True, ) ], @@ -177,7 +177,7 @@ def 
test_is_invalid_schema_with_duplicate_column_name(self): Column( name="colname1", partition_index=0, - data_type="integer", + data_type="int", allow_null=False, ), Column( @@ -206,7 +206,7 @@ def test_is_invalid_schema_with_empty_domain(self): Column( name="colname1", partition_index=0, - data_type="integer", + data_type="int", allow_null=False, ), Column( @@ -231,7 +231,7 @@ def test_is_invalid_schema_with_empty_dataset(self): Column( name="colname1", partition_index=0, - data_type="integer", + data_type="int", allow_null=False, ), Column( @@ -293,7 +293,7 @@ def test_is_invalid_schema_with_invalid_column_name(self, col_name: str): Column( name=col_name, partition_index=0, - data_type="integer", + data_type="int", allow_null=False, ) ], @@ -315,7 +315,7 @@ def test_is_invalid_schema_with_duplicate_partition_number(self): Column( name="colname1", partition_index=0, - data_type="integer", + data_type="int", allow_null=False, ), Column( @@ -343,7 +343,7 @@ def test_is_invalid_schema_with_negative_partition_number(self): Column( name="colname1", partition_index=-1, - data_type="integer", + data_type="int", allow_null=False, ), Column( @@ -373,7 +373,7 @@ def test_is_invalid_schema_with_partition_number_higher_than_the_number_of_parti Column( name="colname1", partition_index=2, - data_type="integer", + data_type="int", allow_null=False, ), Column( @@ -408,7 +408,7 @@ def test_is_invalid_schema_when_all_columns_are_partitioned(self): Column( name="colname1", partition_index=1, - data_type="integer", + data_type="int", allow_null=False, ), Column( @@ -436,7 +436,7 @@ def test_is_invalid_schema_when_partitioned_columns_allow_null_values(self): Column( name="colname1", partition_index=1, - data_type="integer", + data_type="int", allow_null=False, ), Column( @@ -1014,7 +1014,7 @@ def test_is_invalid_when_schema_for_upload_has_invalid_owners_email_address( Column( name="colname1", partition_index=0, - data_type="integer", + data_type="int", allow_null=False, ), Column( @@ -1052,7 +1052,7 @@ def test_is_invalid_when_schema_for_upload_has_no_owners(self, owners: List[Owne Column( name="colname1", partition_index=0, - data_type="integer", + data_type="int", allow_null=False, ), Column( @@ -1095,7 +1095,7 @@ def test_is_invalid_when_domain_has_incorrect_format(self, domain): Column( name="colname1", partition_index=0, - data_type="integer", + data_type="int", allow_null=False, ), Column( @@ -1140,7 +1140,7 @@ def test_is_invalid_when_dataset_has_incorrect_format(self, dataset): Column( name="colname1", partition_index=0, - data_type="integer", + data_type="int", allow_null=False, ), Column( diff --git a/api/test/api/common/controller_test_utils.py b/api/test/api/common/controller_test_utils.py index 3e3d67f..0d8387d 100644 --- a/api/test/api/common/controller_test_utils.py +++ b/api/test/api/common/controller_test_utils.py @@ -16,11 +16,15 @@ class BaseClientTest(ABC): @classmethod def setup_class(cls): cls.client = TestClient(app, raise_server_exceptions=False) + app.dependency_overrides[ secure_dataset_endpoint ] = mock_secure_dataset_endpoint() app.dependency_overrides[secure_endpoint] = mock_secure_endpoint() + app.user_middleware.clear() + app.middleware_stack = app.build_middleware_stack() + @classmethod def teardown_class(cls): app.dependency_overrides[secure_dataset_endpoint] = {} diff --git a/api/test/api/common/test_data_handlers.py b/api/test/api/common/test_data_handlers.py new file mode 100644 index 0000000..954c140 --- /dev/null +++ b/api/test/api/common/test_data_handlers.py @@ -0,0 
+1,120 @@
+import os
+import tempfile
+from pathlib import Path
+
+from fastapi import UploadFile
+from unittest.mock import patch, Mock
+from pandas.testing import assert_frame_equal
+
+import pandas as pd
+
+from api.common.config.constants import CONTENT_ENCODING
+from api.common.data_handlers import (
+    CHUNK_SIZE,
+    construct_chunked_dataframe,
+    store_file_to_disk,
+    store_csv_file_to_disk,
+    store_parquet_file_to_disk,
+)
+
+
+class TestStoreFileToDisk:
+    @patch("api.common.data_handlers.store_csv_file_to_disk")
+    def test_store_file_to_disk_csv_file(self, mock_store_csv_file_to_disk):
+        mock_file = UploadFile(filename="test.csv", file=None)
+        extension = "csv"
+        id = "xxx-yyy"
+        store_file_to_disk(extension, id, mock_file)
+
+        path = Path("xxx-yyy-test.csv")
+        mock_store_csv_file_to_disk.assert_called_once_with(path, False, mock_file)
+
+    @patch("api.common.data_handlers.store_csv_file_to_disk")
+    def test_store_file_to_disk_csv_file_chunked(self, mock_store_csv_file_to_disk):
+        mock_file = UploadFile(filename="test.csv", file=None)
+        extension = "csv"
+        id = "xxx-yyy"
+        to_chunk = True
+        store_file_to_disk(extension, id, mock_file, to_chunk)
+
+        path = Path("xxx-yyy-test.csv")
+        mock_store_csv_file_to_disk.assert_called_once_with(path, True, mock_file)
+
+    @patch("api.common.data_handlers.store_parquet_file_to_disk")
+    def test_store_file_to_disk_parquet(self, mock_store_parquet_file_to_disk):
+        mock_file = UploadFile(filename="test.parquet", file=None)
+        extension = "parquet"
+        id = "xxx-yyy"
+        store_file_to_disk(extension, id, mock_file)
+
+        path = Path("xxx-yyy-test.parquet")
+        mock_store_parquet_file_to_disk.assert_called_once_with(path, False, mock_file)
+
+    @patch("api.common.data_handlers.store_parquet_file_to_disk")
+    def test_store_file_to_disk_parquet_chunked(self, mock_store_parquet_file_to_disk):
+        mock_file = UploadFile(filename="test.parquet", file=None)
+        extension = "parquet"
+        id = "xxx-yyy"
+        to_chunk = True
+        store_file_to_disk(extension, id, mock_file, to_chunk)
+
+        path = Path("xxx-yyy-test.parquet")
+        mock_store_parquet_file_to_disk.assert_called_once_with(path, True, mock_file)
+
+
+class TestStoreCSVFileToDisk:
+    def test_store_csv_file_to_disk(self):
+        file_data = open("./test/api/resources/test_csv.csv", "rb")
+        mock_file = UploadFile(filename="test.csv", file=file_data)
+        temp_out_path = tempfile.mkstemp()[1]
+        path = Path(temp_out_path)
+        store_csv_file_to_disk(path, False, mock_file)
+
+        df1 = pd.read_csv("./test/api/resources/test_csv.csv")
+        df2 = pd.read_csv(temp_out_path)
+
+        assert_frame_equal(df1, df2)
+        os.remove(temp_out_path)
+
+
+class TestStoreParquetFileToDisk:
+    def test_store_parquet_file_to_disk(self):
+        file_data = open("./test/api/resources/test_parquet.parquet", "rb")
+        mock_file = UploadFile(filename="test.parquet", file=file_data)
+        temp_out_path = tempfile.mkstemp()[1]
+        path = Path(temp_out_path)
+        store_parquet_file_to_disk(path, False, mock_file)
+
+        df1 = pd.read_parquet(
+            "./test/api/resources/test_parquet.parquet", engine="pyarrow"
+        )
+        df2 = pd.read_parquet(temp_out_path, engine="pyarrow")
+
+        assert_frame_equal(df1, df2)
+        os.remove(temp_out_path)
+
+
+class TestConstructChunkedDataframe:
+    @patch("api.common.data_handlers.pd")
+    def test_construct_chunked_dataframe_csv(self, mock_pd):
+        path = Path("file/path.csv")
+
+        construct_chunked_dataframe(path)
+        mock_pd.read_csv.assert_called_once_with(
+            path, encoding=CONTENT_ENCODING, sep=",", chunksize=CHUNK_SIZE
+        )
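+
+    # A minimal sketch of the dispatch these two tests assume: based on the
+    # path suffix, construct_chunked_dataframe is expected to stream CSV files
+    # through pd.read_csv(chunksize=CHUNK_SIZE) and Parquet files through
+    # pq.ParquetFile(...).iter_batches(batch_size=CHUNK_SIZE).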
+
+    @patch("api.common.data_handlers.pq")
+    def test_construct_chunked_dataframe_parquet(self, mock_pq):
+        path = Path("file/path.parquet")
+        mock_parquet_file = Mock()
+        mock_pq.ParquetFile.return_value = mock_parquet_file
+
+        construct_chunked_dataframe(path)
+        mock_pq.ParquetFile.assert_called_once_with("file/path.parquet")
+        mock_parquet_file.iter_batches.assert_called_once_with(batch_size=CHUNK_SIZE)
diff --git a/api/test/api/controller/test_datasets.py b/api/test/api/controller/test_datasets.py
index 13cb5fe..9e07410 100644
--- a/api/test/api/controller/test_datasets.py
+++ b/api/test/api/controller/test_datasets.py
@@ -6,14 +6,17 @@
 from api.adapter.athena_adapter import AthenaAdapter
 from api.adapter.s3_adapter import S3Adapter
+from api.application.services.authorisation.dataset_access_evaluator import (
+    DatasetAccessEvaluator,
+)
 from api.application.services.data_service import DataService
 from api.application.services.delete_service import DeleteService
-from api.application.services.schema_service import SchemaService
 from api.common.custom_exceptions import (
     UserError,
     DatasetValidationError,
     SchemaNotFoundError,
 )
+from api.common.config.auth import Action
 from api.common.config.constants import BASE_API_PATH
 from api.domain.dataset_filters import DatasetFilters
 from api.domain.dataset_metadata import DatasetMetadata
@@ -53,7 +56,7 @@ def test_calls_data_upload_service_successfully(
             headers={"Authorization": "Bearer test-token"},
         )
 
-        mock_store_file_to_disk.assert_called_once_with(job_id, ANY)
+        mock_store_file_to_disk.assert_called_once_with("csv", job_id, ANY)
         mock_upload_dataset.assert_called_once_with(
             subject_id,
             job_id,
@@ -105,7 +108,7 @@ def test_calls_data_upload_service_with_latest_version_when_none_provided(
             headers={"Authorization": "Bearer test-token"},
         )
 
-        mock_store_file_to_disk.assert_called_once_with(job_id, ANY)
+        mock_store_file_to_disk.assert_called_once_with("csv", job_id, ANY)
         mock_upload_dataset.assert_called_once_with(
             subject_id,
             job_id,
@@ -124,6 +127,68 @@ def test_calls_data_upload_service_with_latest_version_when_none_provided(
             }
         }
 
+    @patch("api.controller.datasets.construct_dataset_metadata")
+    @patch.object(DataService, "upload_dataset")
+    @patch("api.controller.datasets.store_file_to_disk")
+    @patch("api.controller.datasets.get_subject_id")
+    @patch("api.controller.datasets.generate_uuid")
+    def test_calls_data_upload_service_successfully_parquet(
+        self,
+        mock_generate_uuid,
+        mock_get_subject_id,
+        mock_store_file_to_disk,
+        mock_upload_dataset,
+        mock_construct_dataset_metadata,
+    ):
+        file_content = b"some,content"
+        incoming_file_path = Path("filename.parquet")
+        incoming_file_name = "filename.parquet"
+        raw_file_identifier = "123-456-789"
+        subject_id = "subject_id"
+        job_id = "abc-123"
+        mock_construct_dataset_metadata.return_value = DatasetMetadata(
+            "layer", "domain", "dataset", 14
+        )
+
+        mock_generate_uuid.return_value = job_id
+        mock_get_subject_id.return_value = subject_id
+        mock_store_file_to_disk.return_value = incoming_file_path
+        mock_upload_dataset.return_value = (
+            f"{raw_file_identifier}.parquet",
+            5,
+            "abc-123",
+        )
+
+        response = self.client.post(
+            f"{BASE_API_PATH}/datasets/layer/domain/dataset",
+            files={
+                "file": (incoming_file_name, file_content, "application/octet-stream")
+            },
+            headers={"Authorization": "Bearer test-token"},
+        )
+
+        mock_store_file_to_disk.assert_called_once_with("parquet", job_id, ANY)
+        mock_upload_dataset.assert_called_once_with(
+            subject_id,
+            job_id,
+            DatasetMetadata("layer", "domain", "dataset", 14),
+            incoming_file_path,
+        )
+
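+        # The 202 below is assumed to reflect asynchronous handling: the
+        # upload appears to be accepted and queued as a processing job, so the
+        # response carries a job_id to poll rather than a final result.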
+        assert response.status_code == 202
+        assert response.json() == {
+            "details": {
+                "original_filename": "filename.parquet",
+                "raw_filename": "123-456-789.parquet",
+                "dataset_version": 5,
+                "status": "Data processing",
+                "job_id": "abc-123",
+            }
+        }
+
     @patch.object(DataService, "upload_dataset")
     @patch("api.controller.datasets.store_file_to_disk")
     @patch("api.controller.datasets.get_subject_id")
@@ -153,7 +215,7 @@ def test_calls_data_upload_service_with_version_successfully(
             headers={"Authorization": "Bearer test-token"},
         )
 
-        mock_store_file_to_disk.assert_called_once_with(job_id, ANY)
+        mock_store_file_to_disk.assert_called_once_with("csv", job_id, ANY)
         mock_upload_dataset.assert_called_once_with(
             subject_id,
             job_id,
@@ -172,6 +234,60 @@ def test_calls_data_upload_service_with_version_successfully(
             }
         }
 
+    @patch.object(DataService, "upload_dataset")
+    @patch("api.controller.datasets.store_file_to_disk")
+    @patch("api.controller.datasets.get_subject_id")
+    @patch("api.controller.datasets.generate_uuid")
+    def test_calls_data_upload_service_with_version_successfully_parquet(
+        self,
+        mock_generate_uuid,
+        mock_get_subject_id,
+        mock_store_file_to_disk,
+        mock_upload_dataset,
+    ):
+        job_id = "abc-123"
+        file_content = b"some,content"
+        incoming_file_path = Path("filename.parquet")
+        incoming_file_name = "filename.parquet"
+        raw_file_identifier = "123-456-789"
+        subject_id = "subject_id"
+
+        mock_generate_uuid.return_value = job_id
+        mock_get_subject_id.return_value = subject_id
+        mock_store_file_to_disk.return_value = incoming_file_path
+        mock_upload_dataset.return_value = (
+            f"{raw_file_identifier}.parquet",
+            2,
+            "abc-123",
+        )
+
+        response = self.client.post(
+            f"{BASE_API_PATH}/datasets/layer/domain/dataset?version=2",
+            files={
+                "file": (incoming_file_name, file_content, "application/octet-stream")
+            },
+            headers={"Authorization": "Bearer test-token"},
+        )
+
+        mock_store_file_to_disk.assert_called_once_with("parquet", job_id, ANY)
+        mock_upload_dataset.assert_called_once_with(
+            subject_id,
+            job_id,
+            DatasetMetadata("layer", "domain", "dataset", 2),
+            incoming_file_path,
+        )
+
+        assert response.status_code == 202
+        assert response.json() == {
+            "details": {
+                "original_filename": "filename.parquet",
+                "raw_filename": "123-456-789.parquet",
+                "dataset_version": 2,
+                "status": "Data processing",
+                "job_id": "abc-123",
+            }
+        }
+
     def test_calls_data_upload_service_fails_when_domain_uppercase(self):
         file_content = b"some,content"
         incoming_file_name = "filename.csv"
@@ -187,6 +303,19 @@ def test_calls_data_upload_service_fails_when_domain_uppercase(self):
             "details": ["domain -> was required to be lowercase only."]
         }
 
+    def test_calls_data_upload_service_fails_when_filetype_is_invalid(self):
+        file_content = b"some content"
+        incoming_file_name = "filename.txt"
+
+        response = self.client.post(
+            f"{BASE_API_PATH}/datasets/raw/domain/dataset",
+            files={"file": (incoming_file_name, file_content, "text/plain")},
+            headers={"Authorization": "Bearer test-token"},
+        )
+
+        assert response.status_code == 400
+        assert response.json() == {"details": "This file type txt, is not supported."}
+
     @patch.object(DataService, "upload_dataset")
     @patch("api.controller.datasets.store_file_to_disk")
     @patch("api.controller.datasets.get_subject_id")
@@ -271,10 +400,10 @@ def test_raises_error_when_schema_does_not_exist(
 
 
 class TestListDatasets(BaseClientTest):
-    @patch.object(S3Adapter, "get_last_updated_time")
-    @patch.object(SchemaService, "get_schema_metadatas")
-    def 
test_returns_enriched_schema_metadata_for_all_datasets( - self, mock_get_schema_metadatas, mock_get_last_updated_time + @patch.object(DatasetAccessEvaluator, "get_authorised_datasets") + @patch("api.controller.datasets.get_subject_id") + def test_returns_metadata_for_all_datasets( + self, mock_get_subject_id, mock_get_authorised_datasets ): metadata_response = [ SchemaMetadata( @@ -296,78 +425,9 @@ def test_returns_enriched_schema_metadata_for_all_datasets( sensitivity="PUBLIC", ), ] - - mock_get_schema_metadatas.return_value = metadata_response - mock_get_last_updated_time.side_effect = ["1234", "23456"] - expected_response = [ - { - "layer": "layer", - "domain": "domain1", - "dataset": "dataset1", - "sensitivity": "PUBLIC", - "version": 1, - "description": "", - "key_value_tags": {"tag1": "value1"}, - "key_only_tags": [], - "owners": None, - "update_behaviour": "APPEND", - "is_latest_version": True, - "last_updated_date": "1234", - }, - { - "layer": "layer", - "domain": "domain2", - "dataset": "dataset2", - "sensitivity": "PUBLIC", - "version": 1, - "description": "some test description", - "key_value_tags": {"tag2": "value2"}, - "key_only_tags": [], - "update_behaviour": "APPEND", - "owners": None, - "is_latest_version": True, - "last_updated_date": "23456", - }, - ] - - expected_query = DatasetFilters() - - response = self.client.post( - f"{BASE_API_PATH}/datasets?enriched=True", - headers={"Authorization": "Bearer test-token"}, - # Not passing a JSON body here to filter by tags - ) - - _, kwargs = mock_get_schema_metadatas.call_args - assert expected_query == kwargs.get("query") - - assert response.status_code == 200 - assert response.json() == expected_response - - @patch.object(SchemaService, "get_schema_metadatas") - def test_returns_schema_metadata_for_all_datasets(self, mock_get_schema_metadatas): - metadata_response = [ - SchemaMetadata( - layer="layer", - domain="domain1", - dataset="dataset1", - key_value_tags={"tag1": "value1"}, - description="", - version=1, - sensitivity="PUBLIC", - ), - SchemaMetadata( - layer="layer", - domain="domain2", - dataset="dataset2", - key_value_tags={"tag2": "value2"}, - version=1, - description="some test description", - sensitivity="PUBLIC", - ), - ] - - mock_get_schema_metadatas.return_value = metadata_response + subject_id = "subject_id" + mock_get_subject_id.return_value = subject_id + mock_get_authorised_datasets.return_value = metadata_response expected_response = [ { @@ -406,16 +466,21 @@ def test_returns_schema_metadata_for_all_datasets(self, mock_get_schema_metadata # Not passing a JSON body here to filter by tags ) - _, kwargs = mock_get_schema_metadatas.call_args - assert expected_query == kwargs.get("query") + mock_get_authorised_datasets.assert_called_once_with( + subject_id, Action.READ, expected_query + ) assert response.status_code == 200 assert response.json() == expected_response - @patch.object(SchemaService, "get_schema_metadatas") + @patch.object(DatasetAccessEvaluator, "get_authorised_datasets") + @patch("api.controller.datasets.get_subject_id") def test_returns_metadata_for_datasets_with_certain_tags( - self, mock_get_schema_metadatas + self, mock_get_subject_id, mock_get_authorised_datasets ): + subject_id = "abc-123" + mock_get_subject_id.return_value = subject_id + metadata_response = [ SchemaMetadata( layer="layer", @@ -437,7 +502,7 @@ def test_returns_metadata_for_datasets_with_certain_tags( ), ] - mock_get_schema_metadatas.return_value = metadata_response + mock_get_authorised_datasets.return_value = 
metadata_response expected_response = [ { @@ -481,16 +546,21 @@ def test_returns_metadata_for_datasets_with_certain_tags( json={"tags": tag_filters}, ) - _, kwargs = mock_get_schema_metadatas.call_args - assert expected_query_object == kwargs.get("query") + mock_get_authorised_datasets.assert_called_once_with( + subject_id, Action.READ, expected_query_object + ) assert response.status_code == 200 assert response.json() == expected_response @patch.object(S3Adapter, "get_last_updated_time") - @patch.object(SchemaService, "get_schema_metadatas") + @patch.object(DatasetAccessEvaluator, "get_authorised_datasets") + @patch("api.controller.datasets.get_subject_id") def test_returns_enriched_metadata_for_datasets_with_certain_sensitivity( - self, mock_get_enriched_datasets_metadata, mock_get_last_updated_time + self, + mock_get_subject_id, + mock_get_authorised_datasets, + mock_get_last_updated_time, ): metadata_response = [ SchemaMetadata( @@ -512,9 +582,12 @@ def test_returns_enriched_metadata_for_datasets_with_certain_sensitivity( description="some test description", ), ] - + subject_id = "abc-123" + mock_get_subject_id.return_value = subject_id mock_get_last_updated_time.side_effect = ["1234", "23456"] - mock_get_enriched_datasets_metadata.return_value = metadata_response + mock_get_authorised_datasets.return_value = metadata_response + + expected_query_object = DatasetFilters(sensitivity="PUBLIC") expected_response = [ { @@ -547,17 +620,16 @@ def test_returns_enriched_metadata_for_datasets_with_certain_sensitivity( }, ] - expected_query_object = DatasetFilters(sensitivity="PUBLIC") - response = self.client.post( f"{BASE_API_PATH}/datasets?enriched=true", headers={"Authorization": "Bearer test-token"}, json={"sensitivity": "PUBLIC"}, ) - _, kwargs = mock_get_enriched_datasets_metadata.call_args + mock_get_authorised_datasets.assert_called_once_with( + subject_id, Action.READ, expected_query_object + ) - assert expected_query_object == kwargs.get("query") assert response.status_code == 200 assert response.json() == expected_response @@ -620,7 +692,7 @@ def test_returns_metadata_for_all_datasets(self, mock_get_dataset_info): Column( name="colname2", partition_index=None, - data_type="integer", + data_type="int", allow_null=True, format=None, ), @@ -666,7 +738,7 @@ def test_returns_metadata_for_all_datasets_for_latest_verion_when_none_provided( Column( name="colname2", partition_index=None, - data_type="integer", + data_type="int", allow_null=True, format=None, ), @@ -897,7 +969,7 @@ def test_returns_error_from_query_request_when_format_is_unsupported( assert response.status_code == 400 assert response.json() == { - "details": "Provided value for Accept header parameter [text/plain] is not supported. Supported formats: application/json, text/csv" + "details": "Provided value for Accept header parameter [text/plain] is not supported. 
Supported formats: application/json, text/csv, application/octet-stream" } @pytest.mark.parametrize( diff --git a/api/test/api/controller/test_layers.py b/api/test/api/controller/test_layers.py index 6a4fc61..cdfc7d7 100644 --- a/api/test/api/controller/test_layers.py +++ b/api/test/api/controller/test_layers.py @@ -4,6 +4,7 @@ class TestLayersList(BaseClientTest): def test_returns_expected_layers(self): + response = self.client.get( f"{BASE_API_PATH}/layers", headers={"Authorization": "Bearer test-token"}, diff --git a/api/test/api/controller/test_schema.py b/api/test/api/controller/test_schema.py index 6ff43fa..7f28aa7 100644 --- a/api/test/api/controller/test_schema.py +++ b/api/test/api/controller/test_schema.py @@ -1,5 +1,6 @@ +from pathlib import Path from typing import Tuple, Dict -from unittest.mock import patch +from unittest.mock import patch, ANY from api.application.services.delete_service import DeleteService from api.application.services.schema_infer_service import SchemaInferService @@ -360,7 +361,11 @@ def test_returns_cleans_up_if_upload_fails( class TestSchemaGeneration(BaseClientTest): @patch.object(SchemaInferService, "infer_schema") - def test_returns_schema_from_a_csv_file(self, mock_infer_schema): + @patch("api.controller.schema.store_file_to_disk") + @patch("api.controller.schema.generate_uuid") + def test_returns_schema_from_a_csv_file( + self, mock_generate_uuid, mock_store_file_to_disk, mock_infer_schema + ): expected_response = Schema( metadata=SchemaMetadata( layer="raw", @@ -380,7 +385,7 @@ def test_returns_schema_from_a_csv_file(self, mock_infer_schema): Column( name="colname2", partition_index=None, - data_type="integer", + data_type="int", allow_null=True, format=None, ), @@ -388,6 +393,10 @@ def test_returns_schema_from_a_csv_file(self, mock_infer_schema): ) file_content = b"colname1,colname2\nsomething,123\notherthing,456\n\n" file_name = "filename.csv" + job_id = "abc-123" + incoming_file_path = Path(file_name) + mock_generate_uuid.return_value = job_id + mock_store_file_to_disk.return_value = incoming_file_path mock_infer_schema.return_value = expected_response response = self.client.post( @@ -396,16 +405,94 @@ def test_returns_schema_from_a_csv_file(self, mock_infer_schema): headers={"Authorization": "Bearer test-token"}, ) mock_infer_schema.assert_called_once_with( - "raw", "mydomain", "mydataset", "PUBLIC", file_content + "raw", "mydomain", "mydataset", "PUBLIC", incoming_file_path + ) + mock_store_file_to_disk.assert_called_once_with( + "csv", job_id, ANY, to_chunk=True ) assert response.status_code == 200 assert response.json() == expected_response @patch.object(SchemaInferService, "infer_schema") - def test_bad_request_when_schema_is_invalid(self, mock_infer_schema): + @patch("api.controller.schema.store_file_to_disk") + @patch("api.controller.schema.generate_uuid") + def test_returns_schema_from_a_parquet_file( + self, mock_generate_uuid, mock_store_file_to_disk, mock_infer_schema + ): + expected_response = Schema( + metadata=SchemaMetadata( + layer="raw", + domain="mydomain", + dataset="mydataset", + sensitivity="PUBLIC", + owners=[Owner(name="owner", email="owner@email.com")], + ), + columns=[ + Column( + name="colname1", + partition_index=None, + data_type="object", + allow_null=True, + format=None, + ), + Column( + name="colname2", + partition_index=None, + data_type="Int64", + allow_null=True, + format=None, + ), + ], + ) + file_content = b"colname1,colname2\nsomething,123\notherthing,456\n\n" + file_name = "filename.parquet" + job_id = 
"abc-123" + incoming_file_path = Path(file_name) + mock_generate_uuid.return_value = job_id + mock_store_file_to_disk.return_value = incoming_file_path + mock_infer_schema.return_value = expected_response + + response = self.client.post( + f"{BASE_API_PATH}/schema/raw/PUBLIC/mydomain/mydataset/generate", + files={"file": (file_name, file_content, "application/octest-stream")}, + headers={"Authorization": "Bearer test-token"}, + ) + mock_infer_schema.assert_called_once_with( + "raw", "mydomain", "mydataset", "PUBLIC", incoming_file_path + ) + mock_store_file_to_disk.assert_called_once_with( + "parquet", job_id, ANY, to_chunk=True + ) + + assert response.status_code == 200 + assert response.json() == expected_response + + def test_bad_request_when_filetype_is_invalid(self): + file_content = b"some content" + file_name = "filename.txt" + + response = self.client.post( + f"{BASE_API_PATH}/schema/raw/PUBLIC/mydomain/mydataset/generate", + files={"file": (file_name, file_content, "text/plain")}, + headers={"Authorization": "Bearer test-token"}, + ) + + assert response.status_code == 400 + assert response.json() == {"details": "This file type txt, is not supported."} + + @patch.object(SchemaInferService, "infer_schema") + @patch("api.controller.schema.store_file_to_disk") + @patch("api.controller.schema.generate_uuid") + def test_bad_request_when_schema_is_invalid( + self, mock_generate_uuid, mock_store_file_to_disk, mock_infer_schema + ): file_content = b"colname1,colname2\nsomething,123\notherthing,456\n\n" file_name = "filename.csv" + job_id = "abc-123" + incoming_file_path = Path(file_name) + mock_generate_uuid.return_value = job_id + mock_store_file_to_disk.return_value = incoming_file_path error_message = "The schema is wrong" mock_infer_schema.side_effect = SchemaValidationError(error_message) @@ -415,7 +502,10 @@ def test_bad_request_when_schema_is_invalid(self, mock_infer_schema): headers={"Authorization": "Bearer test-token"}, ) mock_infer_schema.assert_called_once_with( - "raw", "mydomain", "mydataset", "PUBLIC", file_content + "raw", "mydomain", "mydataset", "PUBLIC", incoming_file_path + ) + mock_store_file_to_disk.assert_called_once_with( + "csv", job_id, ANY, to_chunk=True ) assert response.status_code == 400 diff --git a/api/test/api/domain/test_schema.py b/api/test/api/domain/test_schema.py index a99afb7..fc4310b 100644 --- a/api/test/api/domain/test_schema.py +++ b/api/test/api/domain/test_schema.py @@ -21,7 +21,7 @@ def setup_method(self): Column( name="colname1", partition_index=1, - data_type="integer", + data_type="int", allow_null=True, ), Column( @@ -61,7 +61,7 @@ def test_gets_partition_numbers(self): assert actual_partitions_numbers == expected_partitions_numbers def test_get_data_types(self): - expected_data_types = {"integer", "string", "boolean"} + expected_data_types = {"int", "string", "boolean"} actual_data_types = self.schema.get_data_types() @@ -80,7 +80,7 @@ def test_get_partition_columns(self): Column( name="colname1", partition_index=1, - data_type="integer", + data_type="int", allow_null=True, format=None, ), @@ -94,7 +94,7 @@ def test_get_partition_columns_for_glue(self): "Name": "colname2", "Type": "string", }, - {"Name": "colname1", "Type": "integer"}, + {"Name": "colname1", "Type": "int"}, ] assert res == expected diff --git a/api/test/e2e/setup_e2e_tests.py b/api/test/e2e/setup_e2e_tests.py index d8e99fa..8ecbbef 100644 --- a/api/test/e2e/setup_e2e_tests.py +++ b/api/test/e2e/setup_e2e_tests.py @@ -1,5 +1,6 @@ import json import os + import requests from 
requests.auth import HTTPBasicAuth @@ -42,8 +43,8 @@ def create_protected_domain(token: str, domain: str): return response.status_code, json.loads(response.content.decode("utf-8")) -def upload_dataset(token: str, file_path: str, domain: str, dataset: str): - post_url = f"{BASE_URL}/datasets/{domain}/{dataset}" +def upload_dataset(token: str, file_path: str, layer: str, domain: str, dataset: str): + post_url = f"{BASE_URL}/datasets/{layer}/{domain}/{dataset}" headers = {"Authorization": "Bearer " + token} filename = os.path.basename(file_path) files = {"file": (filename, open(file_path, "rb"))} @@ -59,13 +60,16 @@ def upload_dataset(token: str, file_path: str, domain: str, dataset: str): for file in files: with open(os.path.join(SCHEMA_PATH, file), "r") as f: schema = json.load(f) - print(f) res = upload_schema(token, schema) print(res) print( upload_dataset( - token, os.path.join(PATH, "test_journey_file.csv"), "test_e2e", "query" + token, + os.path.join(PATH, "test_journey_file.csv"), + "default", + "test_e2e", + "query", ) ) @@ -73,6 +77,7 @@ def upload_dataset(token: str, file_path: str, domain: str, dataset: str): upload_dataset( token, os.path.join(PATH, "test_journey_file.csv"), + "default", "test_e2e_protected", "do_not_delete", ) diff --git a/api/test/e2e/test_files/schemas/test_e2e-delete.json b/api/test/e2e/test_files/schemas/test_e2e-delete.json index 148e1d4..ac118a4 100644 --- a/api/test/e2e/test_files/schemas/test_e2e-delete.json +++ b/api/test/e2e/test_files/schemas/test_e2e-delete.json @@ -1,5 +1,6 @@ { "metadata": { + "layer": "default", "domain": "test_e2e", "dataset": "delete", "sensitivity": "PUBLIC", @@ -20,42 +21,42 @@ { "name": "year", "partition_index": null, - "data_type": "Int64", + "data_type": "int", "allow_null": true, "format": null }, { "name": "month", "partition_index": null, - "data_type": "Int64", + "data_type": "int", "allow_null": true, "format": null }, { "name": "destination", "partition_index": null, - "data_type": "object", + "data_type": "string", "allow_null": true, "format": null }, { "name": "arrival", "partition_index": null, - "data_type": "object", + "data_type": "string", "allow_null": true, "format": null }, { "name": "type", "partition_index": null, - "data_type": "object", + "data_type": "string", "allow_null": true, "format": null }, { "name": "status", "partition_index": null, - "data_type": "object", + "data_type": "string", "allow_null": true, "format": null } diff --git a/api/test/e2e/test_files/schemas/test_e2e-query.json b/api/test/e2e/test_files/schemas/test_e2e-query.json index 18b54f9..64ab1c7 100644 --- a/api/test/e2e/test_files/schemas/test_e2e-query.json +++ b/api/test/e2e/test_files/schemas/test_e2e-query.json @@ -1,5 +1,6 @@ { "metadata": { + "layer": "default", "domain": "test_e2e", "dataset": "query", "sensitivity": "PUBLIC", @@ -19,42 +20,42 @@ { "name": "year", "partition_index": null, - "data_type": "Int64", + "data_type": "int", "allow_null": true, "format": null }, { "name": "month", "partition_index": null, - "data_type": "Int64", + "data_type": "int", "allow_null": true, "format": null }, { "name": "destination", "partition_index": null, - "data_type": "object", + "data_type": "string", "allow_null": true, "format": null }, { "name": "arrival", "partition_index": null, - "data_type": "object", + "data_type": "string", "allow_null": true, "format": null }, { "name": "type", "partition_index": null, - "data_type": "object", + "data_type": "string", "allow_null": true, "format": null }, { "name": "status", 
"partition_index": null, - "data_type": "object", + "data_type": "string", "allow_null": true, "format": null } diff --git a/api/test/e2e/test_files/schemas/test_e2e-update_v1.json b/api/test/e2e/test_files/schemas/test_e2e-update_v1.json index 3b94be6..77d26b3 100644 --- a/api/test/e2e/test_files/schemas/test_e2e-update_v1.json +++ b/api/test/e2e/test_files/schemas/test_e2e-update_v1.json @@ -1,5 +1,6 @@ { "metadata": { + "layer": "default", "domain": "test_e2e", "dataset": "update", "sensitivity": "PUBLIC", @@ -20,32 +21,32 @@ "columns": [ { "name": "year", - "data_type": "Int64", + "data_type": "int", "allow_null": true }, { "name": "month", - "data_type": "Int64", + "data_type": "int", "allow_null": true }, { "name": "destination", - "data_type": "object", + "data_type": "string", "allow_null": true }, { "name": "arrival", - "data_type": "object", + "data_type": "string", "allow_null": true }, { "name": "type", - "data_type": "object", + "data_type": "string", "allow_null": true }, { "name": "status", - "data_type": "object", + "data_type": "string", "allow_null": true } ] diff --git a/api/test/e2e/test_files/schemas/test_e2e-update_v2.json b/api/test/e2e/test_files/schemas/test_e2e-update_v2.json index 3f200eb..d3167d9 100644 --- a/api/test/e2e/test_files/schemas/test_e2e-update_v2.json +++ b/api/test/e2e/test_files/schemas/test_e2e-update_v2.json @@ -1,5 +1,6 @@ { "metadata": { + "layer": "default", "domain": "test_e2e", "dataset": "update", "sensitivity": "PRIVATE", @@ -21,49 +22,49 @@ { "name": "year", "partition_index": null, - "data_type": "Int64", + "data_type": "int", "allow_null": true, "format": null }, { "name": "month", "partition_index": null, - "data_type": "Float64", + "data_type": "double", "allow_null": true, "format": null }, { "name": "destination", "partition_index": null, - "data_type": "object", + "data_type": "string", "allow_null": true, "format": null }, { "name": "arrival", "partition_index": null, - "data_type": "object", + "data_type": "string", "allow_null": true, "format": null }, { "name": "type", "partition_index": null, - "data_type": "object", + "data_type": "string", "allow_null": true, "format": null }, { "name": "status", "partition_index": null, - "data_type": "object", + "data_type": "string", "allow_null": true, "format": null }, { "name": "newcolumn", "partition_index": null, - "data_type": "Int64", + "data_type": "int", "allow_null": true, "format": null } diff --git a/api/test/e2e/test_files/schemas/test_e2e-upload.json b/api/test/e2e/test_files/schemas/test_e2e-upload.json index 2dc33af..57559a1 100644 --- a/api/test/e2e/test_files/schemas/test_e2e-upload.json +++ b/api/test/e2e/test_files/schemas/test_e2e-upload.json @@ -1,5 +1,6 @@ { "metadata": { + "layer": "default", "domain": "test_e2e", "dataset": "upload", "sensitivity": "PUBLIC", @@ -20,42 +21,42 @@ { "name": "year", "partition_index": null, - "data_type": "Int64", + "data_type": "int", "allow_null": true, "format": null }, { "name": "month", "partition_index": null, - "data_type": "Int64", + "data_type": "int", "allow_null": true, "format": null }, { "name": "destination", "partition_index": null, - "data_type": "object", + "data_type": "string", "allow_null": true, "format": null }, { "name": "arrival", "partition_index": null, - "data_type": "object", + "data_type": "string", "allow_null": true, "format": null }, { "name": "type", "partition_index": null, - "data_type": "object", + "data_type": "string", "allow_null": true, "format": null }, { "name": "status", "partition_index": 
null, - "data_type": "object", + "data_type": "string", "allow_null": true, "format": null } diff --git a/api/test/e2e/test_files/schemas/test_e2e_protected-do_not_delete.json b/api/test/e2e/test_files/schemas/test_e2e_protected-do_not_delete.json index ee89eac..c67347c 100644 --- a/api/test/e2e/test_files/schemas/test_e2e_protected-do_not_delete.json +++ b/api/test/e2e/test_files/schemas/test_e2e_protected-do_not_delete.json @@ -1,61 +1,62 @@ { - "metadata": { - "domain": "test_e2e_protected", - "dataset": "do_not_delete", - "sensitivity": "PROTECTED", - "description": "A test dataset", - "key_value_tags": {}, - "key_only_tags": [], - "owners": [ - { - "name": "test_e2e_protected", - "email": "test_e2e_protected@email.com" - } - ], - "update_behaviour": "OVERWRITE" - }, - "columns": [ - { - "name": "year", - "partition_index": null, - "data_type": "Int64", - "allow_null": true, - "format": null - }, - { - "name": "month", - "partition_index": null, - "data_type": "Int64", - "allow_null": true, - "format": null - }, - { - "name": "destination", - "partition_index": null, - "data_type": "object", - "allow_null": true, - "format": null - }, - { - "name": "arrival", - "partition_index": null, - "data_type": "object", - "allow_null": true, - "format": null - }, - { - "name": "type", - "partition_index": null, - "data_type": "object", - "allow_null": true, - "format": null - }, - { - "name": "status", - "partition_index": null, - "data_type": "object", - "allow_null": true, - "format": null - } - ] + "metadata": { + "layer": "default", + "domain": "test_e2e_protected", + "dataset": "do_not_delete", + "sensitivity": "PROTECTED", + "description": "A test dataset", + "key_value_tags": {}, + "key_only_tags": [], + "owners": [ + { + "name": "test_e2e_protected", + "email": "test_e2e_protected@email.com" + } + ], + "update_behaviour": "OVERWRITE" + }, + "columns": [ + { + "name": "year", + "partition_index": null, + "data_type": "int", + "allow_null": true, + "format": null + }, + { + "name": "month", + "partition_index": null, + "data_type": "int", + "allow_null": true, + "format": null + }, + { + "name": "destination", + "partition_index": null, + "data_type": "string", + "allow_null": true, + "format": null + }, + { + "name": "arrival", + "partition_index": null, + "data_type": "string", + "allow_null": true, + "format": null + }, + { + "name": "type", + "partition_index": null, + "data_type": "string", + "allow_null": true, + "format": null + }, + { + "name": "status", + "partition_index": null, + "data_type": "string", + "allow_null": true, + "format": null + } + ] } diff --git a/api/test/e2e/test_journey.py b/api/test/e2e/test_journey.py index a8e2ade..aace815 100644 --- a/api/test/e2e/test_journey.py +++ b/api/test/e2e/test_journey.py @@ -28,24 +28,28 @@ class BaseJourneyTest(ABC): schema_endpoint = f"{base_url}/schema" e2e_test_domain = "test_e2e" - - schemas_directory = "data/schemas" - data_directory = f"data/{e2e_test_domain}" - raw_data_directory = f"raw_data/{e2e_test_domain}" + layer = "default" + schemas_directory = "schemas" + data_directory = f"data/{layer}/{e2e_test_domain}" + raw_data_directory = f"raw_data/{layer}/{e2e_test_domain}" filename = "test_journey_file.csv" - def upload_dataset_url(self, domain: str, dataset: str) -> str: - return f"{self.datasets_endpoint}/{domain}/{dataset}" + def upload_dataset_url(self, layer: str, domain: str, dataset: str) -> str: + return f"{self.datasets_endpoint}/{layer}/{domain}/{dataset}" - def query_dataset_url(self, domain: str, dataset: 
str, version: int = 0) -> str: - return f"{self.datasets_endpoint}/{domain}/{dataset}/query?version={version}" + def query_dataset_url( + self, layer: str, domain: str, dataset: str, version: int = 0 + ) -> str: + return f"{self.datasets_endpoint}/{layer}/{domain}/{dataset}/query?version={version}" - def info_dataset_url(self, domain: str, dataset: str, version: int = 0) -> str: - return f"{self.datasets_endpoint}/{domain}/{dataset}/info?version={version}" + def info_dataset_url( + self, layer: str, domain: str, dataset: str, version: int = 0 + ) -> str: + return f"{self.datasets_endpoint}/{layer}/{domain}/{dataset}/info?version={version}" - def list_dataset_raw_files_url(self, domain: str, dataset: str) -> str: - return f"{self.datasets_endpoint}/{domain}/{dataset}/1/files" + def list_dataset_raw_files_url(self, layer: str, domain: str, dataset: str) -> str: + return f"{self.datasets_endpoint}/{layer}/{domain}/{dataset}/1/files" def create_protected_domain_url(self, domain: str) -> str: return f"{self.base_url}/protected_domains/{domain}" @@ -56,8 +60,10 @@ def list_protected_domain_url(self) -> str: def modify_subjects_permissions_url(self) -> str: return f"{self.base_url}/subjects/permissions" - def delete_data_url(self, domain: str, dataset: str, raw_filename: str) -> str: - return f"{self.datasets_endpoint}/{domain}/{dataset}/1/{raw_filename}" + def delete_data_url( + self, layer: str, domain: str, dataset: str, raw_filename: str + ) -> str: + return f"{self.datasets_endpoint}/{layer}/{domain}/{dataset}/1/{raw_filename}" def permissions_url(self) -> str: return f"{self.base_url}/permissions" @@ -82,13 +88,13 @@ def test_status_always_accessible(self): class TestUnauthenticatedJourneys(BaseJourneyTest): def test_query_is_forbidden_when_no_token_provided(self): - url = self.query_dataset_url("mydomain", "unknowndataset") + url = self.query_dataset_url(self.layer, "mydomain", "unknowndataset") response = requests.post(url) assert response.status_code == HTTPStatus.FORBIDDEN def test_upload_is_forbidden_when_no_token_provided(self): files = {"file": (self.filename, open("./test/e2e/" + self.filename, "rb"))} - url = self.upload_dataset_url(self.e2e_test_domain, "upload") + url = self.upload_dataset_url(self.layer, self.e2e_test_domain, "upload") response = requests.post(url, files=files) assert response.status_code == HTTPStatus.FORBIDDEN @@ -104,7 +110,6 @@ def test_list_permissions_is_forbidden_when_no_token_provided(self): class TestUnauthorisedJourney(BaseJourneyTest): def setup_class(self): token_url = f"https://{DOMAIN_NAME}/oauth2/token" - write_all_credentials = get_secret( secret_name=f"{RESOURCE_PREFIX}_E2E_TEST_CLIENT_WRITE_ALL" ) @@ -121,13 +126,13 @@ def setup_class(self): payload = {"grant_type": "client_credentials", "client_id": cognito_client_id} response = requests.post(token_url, auth=auth, headers=headers, json=payload) + res = json.loads(response.content.decode(CONTENT_ENCODING)) if response.status_code != HTTPStatus.OK: raise AuthenticationFailedError(f"{response.status_code}") - self.token = json.loads(response.content.decode(CONTENT_ENCODING))[ - "access_token" - ] + res = json.loads(response.content.decode(CONTENT_ENCODING)) + self.token = res["access_token"] # Utils ------------- @@ -135,14 +140,13 @@ def generate_auth_headers(self): return {"Authorization": f"Bearer {self.token}"} # Tests ------------- - def test_query_existing_dataset_when_not_authorised_to_read(self): - url = self.query_dataset_url(self.e2e_test_domain, "query") + url = 
self.query_dataset_url(self.layer, self.e2e_test_domain, "query") response = requests.post(url, headers=self.generate_auth_headers()) assert response.status_code == HTTPStatus.UNAUTHORIZED def test_existing_dataset_info_when_not_authorised_to_read(self): - url = self.info_dataset_url(self.e2e_test_domain, "query") + url = self.info_dataset_url(self.layer, self.e2e_test_domain, "query") response = requests.get(url, headers=self.generate_auth_headers()) assert response.status_code == HTTPStatus.UNAUTHORIZED @@ -191,15 +195,14 @@ def setup_class(self): def generate_auth_headers(self): return {"Authorization": f"Bearer {self.token}"} - def upload_test_file_to_(self, data_directory: str, domain: str, filename: str): + def upload_test_file_to_(self, data_directory: str, dataset: str, filename: str): self.s3_client.put_object( Bucket=DATA_BUCKET, - Key=f"{data_directory}/{domain}/1/{filename}", + Key=f"{data_directory}/{dataset}/1/{filename}", Body=open("./test/e2e/" + self.filename, "rb"), ) # Tests ------------- - def test_list_when_authorised(self): response = requests.post( self.datasets_endpoint, @@ -210,7 +213,7 @@ def test_list_when_authorised(self): def test_uploads_when_authorised(self): files = {"file": (self.filename, open("./test/e2e/" + self.filename, "rb"))} - upload_url = self.upload_dataset_url(self.e2e_test_domain, "upload") + upload_url = self.upload_dataset_url(self.layer, self.e2e_test_domain, "upload") response = requests.post( upload_url, headers=self.generate_auth_headers(), files=files ) @@ -218,25 +221,27 @@ def test_uploads_when_authorised(self): assert response.status_code == HTTPStatus.ACCEPTED raw_filename = json.loads(response.text)["details"]["raw_filename"] - delete_url = self.delete_data_url(self.e2e_test_domain, "upload", raw_filename) + delete_url = self.delete_data_url( + self.layer, self.e2e_test_domain, "upload", raw_filename + ) requests.delete(delete_url, headers=self.generate_auth_headers()) def test_gets_existing_dataset_info_when_authorised(self): url = self.info_dataset_url( - domain=self.e2e_test_domain, dataset="query", version=1 + layer=self.layer, domain=self.e2e_test_domain, dataset="query", version=1 ) response = requests.get(url, headers=(self.generate_auth_headers())) assert response.status_code == HTTPStatus.OK def test_queries_non_existing_dataset_when_authorised(self): - url = self.query_dataset_url("mydomain", "unknowndataset") + url = self.query_dataset_url(self.layer, "mydomain", "unknowndataset") response = requests.post(url, headers=self.generate_auth_headers()) assert response.status_code == HTTPStatus.NOT_FOUND def test_queries_existing_dataset_as_csv_when_authorised(self): url = self.query_dataset_url( - domain=self.e2e_test_domain, dataset="query", version=1 + layer=self.layer, domain=self.e2e_test_domain, dataset="query", version=1 ) headers = { "Accept": "text/csv", @@ -246,7 +251,9 @@ def test_queries_existing_dataset_as_csv_when_authorised(self): assert response.status_code == HTTPStatus.OK def test_fails_to_query_when_authorised_and_sql_injection_attempted(self): - url = self.query_dataset_url(domain=self.e2e_test_domain, dataset="query") + url = self.query_dataset_url( + layer=self.layer, domain=self.e2e_test_domain, dataset="query" + ) body = {"filter": "';DROP TABLE test_e2e--"} response = requests.post(url, headers=(self.generate_auth_headers()), json=body) assert response.status_code == HTTPStatus.FORBIDDEN @@ -254,15 +261,19 @@ def test_fails_to_query_when_authorised_and_sql_injection_attempted(self): def 
test_deletes_existing_data_when_authorised(self): # Upload files directly to relevant directories in S3 self.upload_test_file_to_( - self.raw_data_directory, domain="delete", filename="test_journey_file.csv" + self.raw_data_directory, + dataset="delete", + filename="test_journey_file.csv", ) self.upload_test_file_to_( - self.data_directory, domain="delete", filename="test_journey_file.parquet" + self.data_directory, + dataset="delete", + filename="test_journey_file.parquet", ) # Get available raw dataset names list_raw_files_url = self.list_dataset_raw_files_url( - domain=self.e2e_test_domain, dataset="delete" + layer=self.layer, domain=self.e2e_test_domain, dataset="delete" ) available_datasets_response = requests.get( list_raw_files_url, headers=(self.generate_auth_headers()) @@ -275,6 +286,7 @@ def test_deletes_existing_data_when_authorised(self): # Delete chosen dataset file (raw file and actual data file) first_dataset_file = response_list[0] delete_raw_data_url = self.delete_data_url( + layer=self.layer, domain=self.e2e_test_domain, dataset="delete", raw_filename=first_dataset_file, @@ -345,7 +357,7 @@ def test_uploads_new_schema_version(self): "arn:aws:glue:{region}:{account_id}:crawler/{glue_crawler}".format( region=AWS_REGION, account_id=AWS_ACCOUNT, - glue_crawler=f"{RESOURCE_PREFIX}_crawler/test_e2e/update", + glue_crawler=f"{RESOURCE_PREFIX}_crawler/default/test_e2e/update", ) ) @@ -362,16 +374,16 @@ def test_uploads_new_schema_version(self): Delete={ "Objects": [ { - "Key": f"{self.schemas_directory}/PUBLIC/test_e2e/update/1/schema.json" + "Key": f"{self.schemas_directory}/default/PUBLIC/test_e2e/update/1/schema.json" }, { - "Key": f"{self.schemas_directory}/PUBLIC/test_e2e/update/2/schema.json" + "Key": f"{self.schemas_directory}/default/PUBLIC/test_e2e/update/2/schema.json" }, ] }, ) self.glue_client.delete_crawler( - Name=f"{RESOURCE_PREFIX}_crawler/test_e2e/update" + Name=f"{RESOURCE_PREFIX}_crawler/default/test_e2e/update" ) @@ -422,18 +434,22 @@ def test_lists_all_permissions_contains_all_default_permissions(self): expected_permissions = [ "READ_ALL", "WRITE_ALL", - "READ_PUBLIC", - "WRITE_PUBLIC", - "READ_PRIVATE", - "WRITE_PRIVATE", + "READ_ALL_PUBLIC", + "WRITE_ALL_PUBLIC", + "READ_ALL_PRIVATE", + "WRITE_ALL_PRIVATE", + "READ_DEFAULT_PUBLIC", + "WRITE_DEFAULT_PUBLIC", + "READ_DEFAULT_PRIVATE", + "WRITE_DEFAULT_PRIVATE", "DATA_ADMIN", "USER_ADMIN", ] response_json = response.json() - assert response.status_code == HTTPStatus.OK - assert all((permission in response_json for permission in expected_permissions)) + for permission in expected_permissions: + assert permission in response_json def test_lists_subject_permissions(self): response = requests.get( @@ -542,15 +558,15 @@ def test_create_protected_domain(self): # Not authorised to access existing protected domain url = self.query_dataset_url( - domain="test_e2e_protected", dataset="do_not_delete" + layer="default", domain="test_e2e_protected", dataset="do_not_delete" ) response = requests.post(url, headers=self.generate_auth_headers()) assert response.status_code == HTTPStatus.UNAUTHORIZED def test_allows_access_to_protected_domain_when_granted_permission(self): - self.assume_permissions(["READ_PROTECTED_TEST_E2E_PROTECTED"]) + self.assume_permissions(["READ_DEFAULT_PROTECTED_TEST_E2E_PROTECTED"]) - url = self.query_dataset_url("test_e2e_protected", "do_not_delete") + url = self.query_dataset_url("default", "test_e2e_protected", "do_not_delete") response = requests.post(url, headers=self.generate_auth_headers()) 
assert response.status_code == HTTPStatus.OK diff --git a/api/test/scripts/delete_protected_domain_permission.py b/api/test/scripts/delete_protected_domain_permission.py index 931e1b8..3e6c8b8 100644 --- a/api/test/scripts/delete_protected_domain_permission.py +++ b/api/test/scripts/delete_protected_domain_permission.py @@ -12,7 +12,7 @@ RESOURCE_PREFIX = os.environ["RESOURCE_PREFIX"] DATA_BUCKET = os.environ["DATA_BUCKET"] DYNAMO_PERMISSIONS_TABLE_NAME = f"{RESOURCE_PREFIX}_users_permissions" -PROTECTED_PATH = "data/schemas/PROTECTED/" +PROTECTED_PATH = "schemas/PROTECTED/" PROTECTED_DOMAIN_PERMISSIONS_PARAMETER_NAME = ( f"{RESOURCE_PREFIX}_protected_domain_permissions" ) diff --git a/api/test/test_utils.py b/api/test/test_utils.py index a84b4e4..345e512 100644 --- a/api/test/test_utils.py +++ b/api/test/test_utils.py @@ -45,7 +45,7 @@ def mock_schema_response(): { "name": "colname1", "partition_index": 0, - "data_type": "integer", + "data_type": "int", "allow_null": True, } ], diff --git a/infrastructure/blocks/app-cluster/main.tf b/infrastructure/blocks/app-cluster/main.tf index 6eeb894..9994bbf 100644 --- a/infrastructure/blocks/app-cluster/main.tf +++ b/infrastructure/blocks/app-cluster/main.tf @@ -13,8 +13,8 @@ module "app_cluster" { cognito_user_pool_id = data.terraform_remote_state.auth-state.outputs.cognito_user_pool_id cognito_user_login_app_credentials_secrets_name = data.terraform_remote_state.auth-state.outputs.cognito_user_app_secret_manager_name - permissions_table = data.terraform_remote_state.auth-state.outputs.user_permission_table_name permissions_table_arn = data.terraform_remote_state.auth-state.outputs.user_permission_table_arn + schema_table_arn = data.terraform_remote_state.data-workflow-state.outputs.schema_table_arn application_version = var.application_version domain_name = var.domain_name @@ -40,8 +40,8 @@ module "app_cluster" { } data "terraform_remote_state" "vpc-state" { - backend = "s3" - + backend = "s3" + workspace = terraform.workspace config = { key = "vpc/terraform.tfstate" bucket = var.state_bucket diff --git a/infrastructure/blocks/data-workflow/main.tf b/infrastructure/blocks/data-workflow/main.tf index 7544397..523bc1c 100644 --- a/infrastructure/blocks/data-workflow/main.tf +++ b/infrastructure/blocks/data-workflow/main.tf @@ -10,22 +10,9 @@ module "data_workflow" { resource-name-prefix = var.resource-name-prefix aws_account = var.aws_account aws_region = var.aws_region - data_s3_bucket_arn = data.terraform_remote_state.s3-state.outputs.s3_bucket_arn - data_s3_bucket_name = data.terraform_remote_state.s3-state.outputs.s3_bucket_name - vpc_id = data.terraform_remote_state.vpc-state.outputs.vpc_id - private_subnet = data.terraform_remote_state.vpc-state.outputs.private_subnets_ids[0] tags = var.tags } -data "terraform_remote_state" "vpc-state" { - backend = "s3" - - config = { - key = "vpc/terraform.tfstate" - bucket = var.state_bucket - } -} - data "terraform_remote_state" "s3-state" { backend = "s3" workspace = terraform.workspace diff --git a/infrastructure/blocks/data-workflow/output.tf b/infrastructure/blocks/data-workflow/output.tf index f3f349b..18cb014 100644 --- a/infrastructure/blocks/data-workflow/output.tf +++ b/infrastructure/blocks/data-workflow/output.tf @@ -1,6 +1,6 @@ -output "tags" { - value = var.tags - description = "The tags used in the project" +output "athena_query_output_bucket_arn" { + value = module.data_workflow.athena_query_result_output_bucket_arn + description = "Output S3 bucket ARN for Athena query results" } output 
"athena_workgroup_arn" { @@ -8,12 +8,12 @@ output "athena_workgroup_arn" { description = "Query workgroup for Athena" } -output "athena_query_output_bucket_arn" { - value = module.data_workflow.athena_query_result_output_bucket_arn - description = "Output S3 bucket for Athena query results" +output "schema_table_arn" { + value = module.data_workflow.schema_table_arn + description = "The ARN of the DynamoDB schema table" } -output "glue_catalog_arn" { - value = module.data_workflow.glue_catalog_arn - description = "Catalog database arn" +output "tags" { + value = module.data_workflow.tags + description = "The tags used in the project" } diff --git a/infrastructure/blocks/pipeline/iam.tf b/infrastructure/blocks/pipeline/iam.tf index 307f275..6d5db51 100644 --- a/infrastructure/blocks/pipeline/iam.tf +++ b/infrastructure/blocks/pipeline/iam.tf @@ -204,19 +204,11 @@ resource "aws_iam_policy" "pipeline_glue_access" { { "Effect" : "Allow", "Action" : [ - "glue:DeleteCrawler", "glue:DeleteTable", "glue:TagResource", ], "Resource" : "*" }, - { - "Effect" : "Allow", - "Action" : [ - "glue:GetTags", - ], - "Resource" : "arn:aws:glue:${var.aws_region}:${var.aws_account}:crawler/*" - } ], "Version" : "2012-10-17" }) diff --git a/infrastructure/blocks/pipeline/main.tf b/infrastructure/blocks/pipeline/main.tf index 8511039..7a433ed 100644 --- a/infrastructure/blocks/pipeline/main.tf +++ b/infrastructure/blocks/pipeline/main.tf @@ -64,7 +64,8 @@ data "template_file" "initialise-runner" { } data "terraform_remote_state" "vpc-state" { - backend = "s3" + backend = "s3" + workspace = "prod" config = { key = "vpc/terraform.tfstate" diff --git a/infrastructure/blocks/vpc/main.tf b/infrastructure/blocks/vpc/main.tf index e056b3f..f4d6a37 100644 --- a/infrastructure/blocks/vpc/main.tf +++ b/infrastructure/blocks/vpc/main.tf @@ -4,20 +4,19 @@ terraform { } } -module "core_vpc" { - source = "../../modules/aws-core/vpc" - tags = var.tags - vpc_name = "${var.resource-name-prefix}_vpc" - vpc_cidr_range = "10.1.0.0/16" +data "aws_availability_zones" "available" {} - private_subnet_size = 6 - /* the minimum subnet size in aws is /28 https://aws.amazon.com/vpc/faqs/ */ - private_subnet_cidrs = ["10.1.10.0/28", "10.1.11.0/28", "10.1.12.0/28", "10.1.13.0/28", "10.1.14.0/28", "10.1.15.0/28"] - private_subnet_offset = 2 - private_subnet_prefix = "${var.resource-name-prefix}_private_" - public_subnet_size = 3 - public_subnet_cidrs = ["10.1.1.0/28", "10.1.2.0/28", "10.1.3.0/28"] - public_subnet_prefix = "${var.resource-name-prefix}_public_" +module "vpc" { + source = "terraform-aws-modules/vpc/aws" + version = "5.1.0" + + name = "${var.resource-name-prefix}_vpc" + cidr = "10.0.0.0/16" + azs = data.aws_availability_zones.available.names + private_subnets = ["10.0.1.0/24", "10.0.2.0/24", "10.0.3.0/24"] + public_subnets = ["10.0.4.0/24", "10.0.5.0/24", "10.0.6.0/24"] + enable_nat_gateway = true + enable_dns_hostnames = true } diff --git a/infrastructure/blocks/vpc/output.tf b/infrastructure/blocks/vpc/output.tf index 8286843..3d8a040 100644 --- a/infrastructure/blocks/vpc/output.tf +++ b/infrastructure/blocks/vpc/output.tf @@ -1,14 +1,14 @@ output "vpc_id" { - value = module.core_vpc.vpc_id + value = module.vpc.vpc_id description = "The id of the vpc for the app" } output "private_subnets_ids" { - value = module.core_vpc.private_subnet_ids + value = module.vpc.private_subnets description = "The ids of the private subnets" } output "public_subnets_ids" { - value = module.core_vpc.public_subnet_ids + value = 
module.vpc.public_subnets description = "The ids of the public subnets" } diff --git a/infrastructure/modules/app-cluster/README.md b/infrastructure/modules/app-cluster/README.md index b44d7cc..1cf3d17 100644 --- a/infrastructure/modules/app-cluster/README.md +++ b/infrastructure/modules/app-cluster/README.md @@ -1,4 +1,4 @@ - + ## Requirements No requirements. @@ -125,4 +125,4 @@ No modules. | [rapid\_metric\_log\_error\_alarm\_arn](#output\_rapid\_metric\_log\_error\_alarm\_arn) | The arn of the log error alarm metric | | [route\_53\_validation\_record\_fqdns](#output\_route\_53\_validation\_record\_fqdns) | The fqdns of the route53 validation records for the certificate | | [service\_table\_arn](#output\_service\_table\_arn) | The arn of the dynamoDB table that stores the user service | - + diff --git a/infrastructure/modules/app-cluster/cloudtrail.tf b/infrastructure/modules/app-cluster/cloudtrail.tf index 1992574..fef204a 100644 --- a/infrastructure/modules/app-cluster/cloudtrail.tf +++ b/infrastructure/modules/app-cluster/cloudtrail.tf @@ -86,7 +86,7 @@ resource "aws_kms_key" "access_logs_key" { description = "This key is used to encrypt the access log objects" policy = data.aws_iam_policy_document.access_logs_key_policy.json tags = var.tags - enable_key_rotation = true + enable_key_rotation = false } resource "aws_cloudwatch_log_group" "access_logs_log_group" { @@ -295,7 +295,8 @@ resource "aws_cloudtrail" "access_logs_trail" { values = [ aws_dynamodb_table.service_table.arn, - var.permissions_table_arn + var.permissions_table_arn, + var.schema_table_arn, ] } } diff --git a/infrastructure/modules/app-cluster/load_balancer.tf b/infrastructure/modules/app-cluster/load_balancer.tf index 1cd0f51..070a889 100644 --- a/infrastructure/modules/app-cluster/load_balancer.tf +++ b/infrastructure/modules/app-cluster/load_balancer.tf @@ -90,6 +90,10 @@ resource "aws_security_group" "load_balancer_security_group" { description = "Allow all egress" } tags = var.tags + + lifecycle { + create_before_destroy = true + } } resource "aws_lb_target_group" "target_group" { diff --git a/infrastructure/modules/app-cluster/main.tf b/infrastructure/modules/app-cluster/main.tf index 0126f62..e8d0a49 100644 --- a/infrastructure/modules/app-cluster/main.tf +++ b/infrastructure/modules/app-cluster/main.tf @@ -90,39 +90,6 @@ resource "aws_iam_policy" "app_athena_query_access" { "arn:aws:athena:${var.aws_region}:${var.aws_account}:workgroup/${var.resource-name-prefix}_athena_workgroup" ] }, - { - "Sid" : "Glue", - "Effect" : "Allow", - "Action" : [ - "glue:GetTable", - "glue:GetTables", - "glue:GetPartitions", - "glue:GetDatabase", - "glue:GetDatabases", - "glue:UpdateTable", - "glue:BatchDeleteTable" - ], - "Resource" : [ - "arn:aws:glue:${var.aws_region}:${var.aws_account}:catalog", - "arn:aws:glue:${var.aws_region}:${var.aws_account}:database/${var.resource-name-prefix}_catalogue_db", - "arn:aws:glue:${var.aws_region}:${var.aws_account}:table/${var.resource-name-prefix}_catalogue_db/*" - ] - }, - { - "Sid" : "GlueCrawler", - "Effect" : "Allow", - "Action" : [ - "glue:CreateCrawler", - "glue:DeleteCrawler", - "glue:GetCrawler", - "glue:GetCrawlers", - "glue:StartCrawler", - "glue:TagResource", - "iam:GetRole", - "iam:PassRole" - ], - "Resource" : "arn:aws:glue:${var.aws_region}:${var.aws_account}:crawler/${var.resource-name-prefix}_crawler/*" - }, { "Sid" : "DataBucket", "Effect" : "Allow", @@ -153,6 +120,34 @@ resource "aws_iam_policy" "app_athena_query_access" { }) } +resource "aws_iam_policy" "app_glue_access" 
{ + name = "${var.resource-name-prefix}-app_glue_access" + description = "Allow application instance to access Glue" + tags = var.tags + + policy = jsonencode({ + "Version" : "2012-10-17", + "Statement" : [ + { + "Effect" : "Allow", + "Action" : [ + "glue:GetTable", + "glue:GetTables", + "glue:GetPartitions", + "glue:GetDatabase", + "glue:GetDatabases", + "glue:UpdateTable", + "glue:BatchDeleteTable", + "glue:CreateTable" + ], + "Resource" : [ + "*" + ] + }, + ] + }) +} + resource "aws_iam_policy" "app_cognito_access" { name = "${var.resource-name-prefix}-app_cognito_access" description = "Allow application instance to access Cognito" @@ -204,7 +199,8 @@ resource "aws_iam_policy" "app_dynamodb_access" { "dynamodb:UpdateItem" ], Resource : [ - "arn:aws:dynamodb:${var.aws_region}:${var.aws_account}:table/${var.permissions_table}", + var.permissions_table_arn, + var.schema_table_arn, aws_dynamodb_table.service_table.arn, "${aws_dynamodb_table.service_table.arn}/index/*" ] @@ -254,27 +250,6 @@ resource "aws_iam_policy" "app_tags_access" { }) } -resource "aws_iam_policy" "app_glue_services_passrole" { - name = "${var.resource-name-prefix}-app-glue-services-passrole" - description = "Allow application instance to passrole to glue access" - tags = var.tags - - policy = jsonencode({ - "Version" : "2012-10-17", - "Statement" : [ - { - Effect : "Allow", - Action : ["iam:PassRole"], - Resource : "arn:aws:iam::${var.aws_account}:role/${var.resource-name-prefix}-glue_services_access" - } - ] - }) -} - -resource "aws_iam_role_policy_attachment" "role_app_glue_services_passrole_policy_attachment" { - role = aws_iam_role.ecsTaskExecutionRole.name - policy_arn = aws_iam_policy.app_glue_services_passrole.arn -} resource "aws_iam_role_policy_attachment" "role_s3_access_policy_attachment" { role = aws_iam_role.ecsTaskExecutionRole.name @@ -296,6 +271,11 @@ resource "aws_iam_role_policy_attachment" "role_athena_access_policy_attachment" policy_arn = aws_iam_policy.app_athena_query_access.arn } +resource "aws_iam_role_policy_attachment" "role_glue_access_policy_attachment" { + role = aws_iam_role.ecsTaskExecutionRole.name + policy_arn = aws_iam_policy.app_glue_access.arn +} + resource "aws_iam_role_policy_attachment" "role_tags_access_policy_attachment" { role = aws_iam_role.ecsTaskExecutionRole.name policy_arn = aws_iam_policy.app_tags_access.arn diff --git a/infrastructure/modules/app-cluster/outputs.tf b/infrastructure/modules/app-cluster/outputs.tf index 8093036..0571b32 100644 --- a/infrastructure/modules/app-cluster/outputs.tf +++ b/infrastructure/modules/app-cluster/outputs.tf @@ -46,3 +46,7 @@ output "service_table_arn" { value = aws_dynamodb_table.service_table.arn description = "The arn of the dynamoDB table that stores the user service" } + +output "application_version" { + value = var.application_version +} diff --git a/infrastructure/modules/app-cluster/routing.tf b/infrastructure/modules/app-cluster/routing.tf index 4ef49ee..c2a3d88 100644 --- a/infrastructure/modules/app-cluster/routing.tf +++ b/infrastructure/modules/app-cluster/routing.tf @@ -10,8 +10,6 @@ resource "aws_acm_certificate" "rapid-certificate" { domain_name = var.domain_name validation_method = "DNS" - subject_alternative_names = ["ckan.${var.domain_name}"] - tags = var.tags lifecycle { diff --git a/infrastructure/modules/app-cluster/variables.tf b/infrastructure/modules/app-cluster/variables.tf index 45e9f2b..f76334e 100644 --- a/infrastructure/modules/app-cluster/variables.tf +++ b/infrastructure/modules/app-cluster/variables.tf 
@@ -123,14 +123,14 @@ variable "athena_query_output_bucket_arn" { description = "The S3 bucket ARN where Athena stores its query results. This bucket is created dynamically with a unique name in the data-workflow module. Reference it by remote state, module output or ARN string directly" } -variable "permissions_table" { +variable "permissions_table_arn" { type = string - description = "Users permissions table in dynamoDB" + description = "Users permissions table arn in dynamoDB" } -variable "permissions_table_arn" { +variable "schema_table_arn" { type = string - description = "Users permissions table arn in dynamoDB" + description = "The ARN of the schema table in dynamoDB" } variable "cognito_user_pool_id" { diff --git a/infrastructure/modules/auth/README.md b/infrastructure/modules/auth/README.md index b1cebd0..e8db51d 100644 --- a/infrastructure/modules/auth/README.md +++ b/infrastructure/modules/auth/README.md @@ -1,4 +1,4 @@ - + ## Requirements No requirements. @@ -7,8 +7,8 @@ No requirements. | Name | Version | |------|---------| -| [aws](#provider\_aws) | 5.6.2 | -| [random](#provider\_random) | 3.5.1 | +| [aws](#provider\_aws) | n/a | +| [random](#provider\_random) | n/a | ## Modules @@ -82,4 +82,4 @@ No modules. | [user\_permission\_table\_arn](#output\_user\_permission\_table\_arn) | The arn of the dynamoDB table that stores permissions | | [user\_permission\_table\_name](#output\_user\_permission\_table\_name) | The name of the dynamoDB table that stores permissions | | [user\_pool\_endpoint](#output\_user\_pool\_endpoint) | The Cognito rapid user pool endpoint | - + diff --git a/infrastructure/modules/auth/data.tf b/infrastructure/modules/auth/data.tf new file mode 100644 index 0000000..9d978b9 --- /dev/null +++ b/infrastructure/modules/auth/data.tf @@ -0,0 +1,17 @@ +locals { + layer_permissions = concat([for layer in var.layers : upper(layer)], ["ALL"]) + data_permissions = merge(var.master_data_permissions, flatten([ + for action in var.data_actions : flatten([ + for layer in local.layer_permissions : [ + for sensitivity in var.global_data_sensitivities : + { + "${action}_${layer}_${sensitivity}" = { + type = action + sensitivity = sensitivity + layer = layer + } + } + ] + ]) + ])...) +} diff --git a/infrastructure/modules/auth/db.tf b/infrastructure/modules/auth/db.tf index 412521c..a3b8d5f 100644 --- a/infrastructure/modules/auth/db.tf +++ b/infrastructure/modules/auth/db.tf @@ -35,7 +35,7 @@ resource "aws_dynamodb_table_item" "data_permissions" { hash_key = aws_dynamodb_table.permissions_table.hash_key range_key = aws_dynamodb_table.permissions_table.range_key - for_each = var.data_permissions + for_each = local.data_permissions item = < 0 ? var.public_subnet_cidrs[count.index] : cidrsubnet(var.vpc_cidr_range, var.public_subnet_size, count.index) - availability_zone = data.aws_availability_zones.available.names[count.index] - map_public_ip_on_launch = true - - tags = merge({ - Name = "${local.public_subnet_prefix}_${replace( - data.aws_availability_zones.available.names[count.index], - "-", - "_", - )}" - Scope = "public" }, - var.tags) -} - -# Private subnets -resource "aws_subnet" "private_subnet" { - count = length(data.aws_availability_zones.available.names) - vpc_id = aws_vpc.core.id - cidr_block = length(var.private_subnet_cidrs) > 0 ? 
var.private_subnet_cidrs[count.index] : cidrsubnet(var.vpc_cidr_range, var.private_subnet_size, var.private_subnet_offset + count.index) - availability_zone = data.aws_availability_zones.available.names[count.index] - - tags = merge({ - Name = "${local.private_subnet_prefix}_${replace( - data.aws_availability_zones.available.names[count.index], - "-", - "_", - )}" - Scope = "private" }, - var.tags) -} - -# IGW - Internet Gateway -resource "aws_internet_gateway" "core_igw" { - vpc_id = aws_vpc.core.id - - tags = merge({ - Name = "${var.vpc_name}_igw" - }, var.tags) -} - -# EIPs for NAT Gateways -resource "aws_eip" "core_nat_gw_eip" { - count = length(data.aws_availability_zones.available.names) - vpc = true - - depends_on = [aws_internet_gateway.core_igw] - - tags = merge({ - Name = "${var.vpc_name}_nat_gw_eip_${replace( - data.aws_availability_zones.available.names[count.index], - "-", - "_", - )}" - }, var.tags) -} - -# NAT Gateways for private subnets -resource "aws_nat_gateway" "core_nat_gw" { - count = length(data.aws_availability_zones.available.names) - subnet_id = aws_subnet.public_subnet[count.index].id - allocation_id = aws_eip.core_nat_gw_eip[count.index].id - - tags = merge({ - Name = "${var.vpc_name}_nat_gw_${replace( - data.aws_availability_zones.available.names[count.index], - "-", - "_", - )}" - }, var.tags) -} - -# Route tables for public/private subnets -resource "aws_route_table" "core_main_route_table" { - vpc_id = aws_vpc.core.id - - tags = merge({ - Name = "${var.vpc_name}_main_route_table_name" - }, var.tags) -} - -resource "aws_route_table" "core_private_route_table" { - count = length(data.aws_availability_zones.available.names) - vpc_id = aws_vpc.core.id - - tags = merge({ - Name = "${var.vpc_name}_private_route_table_${replace( - data.aws_availability_zones.available.names[count.index], - "-", - "_", - )}" - }, var.tags) -} - -# Default public route through the IGW -resource "aws_route" "core_main_route_table_public_default_route" { - route_table_id = aws_route_table.core_main_route_table.id - destination_cidr_block = "0.0.0.0/0" - gateway_id = aws_internet_gateway.core_igw.id -} - -# Default private route through the NAT gateways -resource "aws_route" "core_private_route_table_default_route" { - count = length(data.aws_availability_zones.available.names) - route_table_id = aws_route_table.core_private_route_table[count.index].id - destination_cidr_block = "0.0.0.0/0" - nat_gateway_id = aws_nat_gateway.core_nat_gw[count.index].id -} - -# Route table associations -resource "aws_main_route_table_association" "core_main_route_table_association" { - vpc_id = aws_vpc.core.id - route_table_id = aws_route_table.core_main_route_table.id -} - -resource "aws_route_table_association" "core_private_route_table_association" { - count = length(data.aws_availability_zones.available.names) - subnet_id = aws_subnet.private_subnet[count.index].id - route_table_id = aws_route_table.core_private_route_table[count.index].id -} diff --git a/infrastructure/modules/aws-core/vpc/outputs.tf b/infrastructure/modules/aws-core/vpc/outputs.tf deleted file mode 100644 index 830e81c..0000000 --- a/infrastructure/modules/aws-core/vpc/outputs.tf +++ /dev/null @@ -1,14 +0,0 @@ -output "vpc_id" { - value = aws_vpc.core.id - description = "The ID of the created VPC" -} - -output "public_subnet_ids" { - value = aws_subnet.public_subnet[*].id - description = "A list of public subnet IDs" -} - -output "private_subnet_ids" { - value = aws_subnet.private_subnet[*].id - description = "A list of private 
subnet IDs" -} diff --git a/infrastructure/modules/aws-core/vpc/variables.tf b/infrastructure/modules/aws-core/vpc/variables.tf deleted file mode 100644 index c63cd88..0000000 --- a/infrastructure/modules/aws-core/vpc/variables.tf +++ /dev/null @@ -1,79 +0,0 @@ -variable "tags" { - type = map(string) - description = "A map of tags to apply to all VPC resources" - default = {} -} - -variable "vpc_name" { - type = string - description = "The name of the VPC" - default = "core_vpc" -} - -variable "vpc_cidr_range" { - type = string - description = "The IP address space to use for the VPC" - default = "10.0.0.0/16" -} - -variable "public_subnet_cidrs" { - type = list(string) - description = "A list of CIDRs for the public subnets. Needs to be the same amount as subnets in the Availability Zone you are deploying into (probably 3)" - default = [] -} - -variable "public_subnet_size" { - type = number - description = "The size of the public subnet (default: 1022 usable addresses)" - default = "6" -} - -variable "private_subnet_cidrs" { - type = list(string) - description = "A list of CIDRs for the private subnets. Needs to be the same amount as subnets in the Availability Zone you are deploying into (probably 3)" - default = [] -} - -variable "private_subnet_size" { - type = number - description = "The size of the private subnet (default: 1022 usable addresses)" - default = 6 -} - -variable "private_subnet_offset" { - type = number - description = "The amount of IP space between the public and the private subnet" - default = 32 -} - -variable "public_subnet_prefix" { - type = string - description = "The prefix to attach to the name of the public subnets" - default = "" -} - -variable "private_subnet_prefix" { - type = string - description = "The prefix to attach to the name of the private subnets" - default = "" -} - -variable "enable_dns_support" { - type = bool - description = "Whether or not to enable VPC DNS support" - default = true -} - -variable "enable_dns_hostnames" { - type = bool - description = "Whether or not to enable VPC DNS hostname support" - default = true -} - -locals { - # We are using locals for this because there already is a fair amount of function calling inside the actual resources - public_subnet_prefix = length(var.public_subnet_prefix) > 0 ? var.public_subnet_prefix : "${var.vpc_name}_public_subnet" - private_subnet_prefix = length(var.private_subnet_prefix) > 0 ? var.private_subnet_prefix : "${var.vpc_name}_private_subnet" -} - -data "aws_availability_zones" "available" {} diff --git a/infrastructure/modules/aws-core/vpc/versions.tf b/infrastructure/modules/aws-core/vpc/versions.tf deleted file mode 100644 index 1ee8068..0000000 --- a/infrastructure/modules/aws-core/vpc/versions.tf +++ /dev/null @@ -1,4 +0,0 @@ - -terraform { - required_version = ">= 1.3.5" -} diff --git a/infrastructure/modules/data-workflow/README.md b/infrastructure/modules/data-workflow/README.md index a013b1c..0e28b19 100644 --- a/infrastructure/modules/data-workflow/README.md +++ b/infrastructure/modules/data-workflow/README.md @@ -1,4 +1,4 @@ - + ## Requirements No requirements. @@ -57,4 +57,4 @@ No modules. 
| [athena\_workgroup\_arn](#output\_athena\_workgroup\_arn) | Query workgroup for Athena | | [glue\_catalog\_arn](#output\_glue\_catalog\_arn) | Catalog database arn | | [tags](#output\_tags) | The tags used in the project | - + diff --git a/infrastructure/modules/data-workflow/dynamodb.tf b/infrastructure/modules/data-workflow/dynamodb.tf new file mode 100644 index 0000000..ea2884e --- /dev/null +++ b/infrastructure/modules/data-workflow/dynamodb.tf @@ -0,0 +1,23 @@ +resource "aws_dynamodb_table" "schema_table" { + # checkov:skip=CKV_AWS_119:No need for customer managed keys + name = "${var.resource-name-prefix}_schema_table" + hash_key = "PK" + range_key = "SK" + billing_mode = "PAY_PER_REQUEST" + stream_enabled = true + attribute { + name = "PK" + type = "S" + } + + attribute { + name = "SK" + type = "N" + } + + point_in_time_recovery { + enabled = true + } + + tags = var.tags +} diff --git a/infrastructure/modules/data-workflow/glue-components.tf b/infrastructure/modules/data-workflow/glue-components.tf deleted file mode 100644 index f9d75f9..0000000 --- a/infrastructure/modules/data-workflow/glue-components.tf +++ /dev/null @@ -1,118 +0,0 @@ -resource "aws_glue_catalog_database" "catalogue_db" { - name = "${var.resource-name-prefix}_catalogue_db" -} - -resource "aws_glue_connection" "glue_connection" { - name = "${var.resource-name-prefix}-s3-network-connection" - connection_type = "NETWORK" - - physical_connection_requirements { - availability_zone = data.aws_availability_zones.available.names[0] - security_group_id_list = [aws_security_group.glue_connection_sg.id] - subnet_id = var.private_subnet - } -} - -resource "aws_glue_catalog_table" "metadata" { - name = "${var.resource-name-prefix}_metadata_table" - database_name = aws_glue_catalog_database.catalogue_db.name - - table_type = "EXTERNAL_TABLE" - - parameters = { - "EXTERNAL" = "TRUE" - } - - storage_descriptor { - location = "s3://${var.data_s3_bucket_name}/data/schemas" - input_format = "org.apache.hadoop.mapred.TextInputFormat" - output_format = "org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat" - - columns { - name = "metadata" - type = "struct>,update_behaviour:string>" - } - - columns { - name = "columns" - type = "array>" - comment = "" - } - - ser_de_info { - serialization_library = "org.openx.data.jsonserde.JsonSerDe" - } - } -} - -resource "aws_iam_role" "glue_service_role" { - name = "${var.resource-name-prefix}-glue_services_access" - description = "Allow AWS Glue service to access S3 via crawler" - tags = var.tags - - assume_role_policy = < + ## Requirements No requirements. @@ -28,7 +28,6 @@ No requirements. 
|------|------| | [aws_s3_bucket.logs](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket) | resource | | [aws_s3_bucket.this](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket) | resource | -| [aws_s3_bucket_ownership_controls.logs](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_ownership_controls) | resource | | [aws_s3_bucket_policy.log_bucket_policy](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_policy) | resource | | [aws_s3_bucket_public_access_block.logs](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_public_access_block) | resource | | [aws_s3_bucket_public_access_block.this](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_public_access_block) | resource | @@ -63,4 +62,4 @@ No requirements. ## Outputs No outputs. - + diff --git a/infrastructure/modules/rapid/variables.tf b/infrastructure/modules/rapid/variables.tf index 5ccf856..c29ce61 100644 --- a/infrastructure/modules/rapid/variables.tf +++ b/infrastructure/modules/rapid/variables.tf @@ -13,7 +13,7 @@ variable "app-replica-count-max" { variable "application_version" { type = string description = "The version number for the application image (e.g.: v1.0.4, v1.0.x-latest, etc.)" - default = "v6.0.1" + default = "v6.2.1" } variable "ui_version" { diff --git a/infrastructure/modules/ui/README.md b/infrastructure/modules/ui/README.md index a50a559..f6203ef 100644 --- a/infrastructure/modules/ui/README.md +++ b/infrastructure/modules/ui/README.md @@ -1,4 +1,4 @@ - + ## Requirements No requirements. @@ -7,10 +7,10 @@ No requirements. | Name | Version | |------|---------| -| [aws](#provider\_aws) | 5.6.2 | -| [aws.us\_east](#provider\_aws.us\_east) | 5.6.2 | -| [null](#provider\_null) | 3.2.1 | -| [random](#provider\_random) | 3.5.1 | +| [aws](#provider\_aws) | n/a | +| [aws.us\_east](#provider\_aws.us\_east) | n/a | +| [null](#provider\_null) | n/a | +| [random](#provider\_random) | n/a | ## Modules @@ -33,7 +33,6 @@ No modules. | [aws_route53_record.route-to-cloudfront](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/route53_record) | resource | | [aws_s3_bucket.rapid_ui](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket) | resource | | [aws_s3_bucket_acl.rapid_ui_storage_acl](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_acl) | resource | -| [aws_s3_bucket_ownership_controls.rapid_ui](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_ownership_controls) | resource | | [aws_s3_bucket_policy.s3](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_policy) | resource | | [aws_s3_bucket_website_configuration.rapid_ui_website](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_website_configuration) | resource | | [aws_wafv2_ip_set.this](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/wafv2_ip_set) | resource | @@ -68,4 +67,4 @@ No modules. 
| [bucket\_public\_arn](#output\_bucket\_public\_arn) | The arn of the public S3 bucket |
| [bucket\_website\_domain](#output\_bucket\_website\_domain) | The domain of the website endpoint |
| [tags](#output\_tags) | The tags used in the project |
- 
+ 
diff --git a/infrastructure/modules/ui/scripts/ui.sh.tpl b/infrastructure/modules/ui/scripts/ui.sh.tpl
index d9e9568..7aeb118 100644
--- a/infrastructure/modules/ui/scripts/ui.sh.tpl
+++ b/infrastructure/modules/ui/scripts/ui.sh.tpl
@@ -13,4 +13,4 @@ aws s3 cp . s3://${BUCKET_ID} --recursive
 
 cd ..
 
-rm -rf ./out
+rm -rf ./out
\ No newline at end of file
diff --git a/infrastructure/scripts/initialisation-script.sh.tpl b/infrastructure/scripts/initialisation-script.sh.tpl
index f8e1c1a..73a1d86 100644
--- a/infrastructure/scripts/initialisation-script.sh.tpl
+++ b/infrastructure/scripts/initialisation-script.sh.tpl
@@ -49,12 +49,12 @@ sudo usermod -a -G docker ubuntu
 # Need to run these commands as the ubuntu user for correct permissions
 sudo -u ubuntu mkdir /home/ubuntu/actions-runner
 cd /home/ubuntu/actions-runner
-sudo -u ubuntu curl -o actions-runner-linux-x64-2.299.1.tar.gz -L https://github.com/actions/runner/releases/download/v2.299.1/actions-runner-linux-x64-2.299.1.tar.gz
-sudo -u ubuntu tar xzf ./actions-runner-linux-x64-2.299.1.tar.gz
+sudo -u ubuntu curl -o actions-runner-linux-x64-2.307.1.tar.gz -L https://github.com/actions/runner/releases/download/v2.307.1/actions-runner-linux-x64-2.307.1.tar.gz
+sudo -u ubuntu tar xzf ./actions-runner-linux-x64-2.307.1.tar.gz
 sudo -u ubuntu ./config.sh --url https://github.com/no10ds --token "${runner-registration-token}" --name Data-F1-Pipeline-Runner --unattended --replace
 
 # Run the GitHub Actions Runner
 sudo -u ubuntu ./run.sh &
 
 # # Configure the GitHub Actions Runner to start on reboot
-# sudo crontab -l -u ubuntu | echo "@reboot sudo -u ubuntu /home/ubuntu/actions-runner/run.sh &" | sudo crontab -u ubuntu -
+sudo crontab -l -u ubuntu | echo "@reboot sudo -u ubuntu /home/ubuntu/actions-runner/run.sh &" | sudo crontab -u ubuntu -
\ No newline at end of file
diff --git a/ui/jest.config.js b/ui/jest.config.js
index 3584933..f9f3c6a 100644
--- a/ui/jest.config.js
+++ b/ui/jest.config.js
@@ -18,7 +18,8 @@ const customJestConfig = {
     '<rootDir>/.next/',
     '<rootDir>/node_modules/',
     '<rootDir>/coverage',
-    '<rootDir>/dist'
+    '<rootDir>/dist',
+    '<rootDir>/playwright'
   ],
   moduleNameMapper: {
     '^@/(.*)$': '<rootDir>/src/$1'
diff --git a/ui/package.json b/ui/package.json
index 3973284..e4035e3 100644
--- a/ui/package.json
+++ b/ui/package.json
@@ -9,7 +9,8 @@
     "start": "next start",
     "lint": "next lint",
     "test:watch": "DEBUG_PRINT_LIMIT=1000000 jest --watch",
-    "test:all": "next lint && tsc --noemit && jest"
+    "test:all": "next lint && tsc --noemit && jest",
+    "test:debug": "jest --watch --runInBand --silent=false --collectCoverage --testTimeout=30000"
   },
   "dependencies": {
     "@emotion/cache": "^11.10.5",
@@ -62,4 +63,4 @@
     "storybook-addon-designs": "^6.3.1",
     "typescript": "5.1.6"
   }
-}
+}
\ No newline at end of file
diff --git a/ui/playwright.config.ts b/ui/playwright.config.ts
new file mode 100644
index 0000000..8f5c959
--- /dev/null
+++ b/ui/playwright.config.ts
@@ -0,0 +1,23 @@
+import { defineConfig, devices } from '@playwright/test';
+import dotenv from 'dotenv';
+import path from 'path';
+
+dotenv.config({ path: path.resolve(process.cwd(), '.env.local') });
+
+export default defineConfig({
+  projects: [
+    // Setup project
+    { name: 'setup', testMatch: 'auth.setup.ts' },
+    {
+      name: 'chromium',
+      use: {
+        ...devices['Desktop Chrome'],
+        // Use prepared auth state.
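+        // Written by auth.setup.ts (the 'setup' project above) after it signs in.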
+ storageState: 'playwright/.auth/user.json', + }, + dependencies: ['setup'], + }, + ], +}); + diff --git a/ui/playwright/auth.setup.ts b/ui/playwright/auth.setup.ts new file mode 100644 index 0000000..e8f7a23 --- /dev/null +++ b/ui/playwright/auth.setup.ts @@ -0,0 +1,48 @@ +import { test as setup, expect } from '@playwright/test'; +import { SecretsManager } from 'aws-sdk' + +const client = new SecretsManager({ region: process.env.AWS_REGION }) +const authFile = 'playwright/.auth/user.json'; +const domain = process.env.DOMAIN; +const secretName = `${process.env.RESOURCE_PREFIX}_UI_TEST_USER` + + +export async function getSecretValue( + secretName: string +): Promise { + return new Promise((resolve, reject) => { + client.getSecretValue({ SecretId: secretName }, function (err, data) { + if (err) { + reject(err) + } else { + resolve(data.SecretString) + } + }) + }) +} + + +setup('authenticate', async ({ page }) => { + const secret = JSON.parse(await getSecretValue(secretName) as string) + await page.goto(domain); + await page.goto(`${domain}/login`); + + await page.locator('[data-testid="login-link"]').click(); + + await page.locator('[placeholder="Username"]').nth(1).click(); + + await page.locator('[placeholder="Password"]').nth(1).click(); + + await page.locator('[placeholder="Password"]').nth(1).fill(`${secret['password']}`); + + await page.locator('[placeholder="Username"]').nth(1).click(); + + await page.locator('[placeholder="Username"]').nth(1).click(); + + await page.locator('[placeholder="Username"]').nth(1).fill(`${secret['username']}`); + + await page.locator('text=Sign in').nth(3).click(); + await expect(page).toHaveURL(domain); + + await page.context().storageState({ path: authFile }); +}); \ No newline at end of file diff --git a/ui/playwright/gapminder.csv b/ui/playwright/gapminder.csv new file mode 100644 index 0000000..a03dc5a --- /dev/null +++ b/ui/playwright/gapminder.csv @@ -0,0 +1,1705 @@ +country,year,pop,continent,lifeexp,gdppercap +Afghanistan,1952,8425333,Asia,28.801,779.4453145 +Afghanistan,1957,9240934,Asia,30.332,820.8530296 +Afghanistan,1962,10267083,Asia,31.997,853.10071 +Afghanistan,1967,11537966,Asia,34.02,836.1971382 +Afghanistan,1972,13079460,Asia,36.088,739.9811058 +Afghanistan,1977,14880372,Asia,38.438,786.11336 +Afghanistan,1982,12881816,Asia,39.854,978.0114388 +Afghanistan,1987,13867957,Asia,40.822,852.3959448 +Afghanistan,1992,16317921,Asia,41.674,649.3413952 +Afghanistan,1997,22227415,Asia,41.763,635.341351 +Afghanistan,2002,25268405,Asia,42.129,726.7340548 +Afghanistan,2007,31889923,Asia,43.828,974.5803384 +Albania,1952,1282697,Europe,55.23,1601.056136 +Albania,1957,1476505,Europe,59.28,1942.284244 +Albania,1962,1728137,Europe,64.82,2312.888958 +Albania,1967,1984060,Europe,66.22,2760.196931 +Albania,1972,2263554,Europe,67.69,3313.422188 +Albania,1977,2509048,Europe,68.93,3533.00391 +Albania,1982,2780097,Europe,70.42,3630.880722 +Albania,1987,3075321,Europe,72,3738.932735 +Albania,1992,3326498,Europe,71.581,2497.437901 +Albania,1997,3428038,Europe,72.95,3193.054604 +Albania,2002,3508512,Europe,75.651,4604.211737 +Albania,2007,3600523,Europe,76.423,5937.029526 +Algeria,1952,9279525,Africa,43.077,2449.008185 +Algeria,1957,10270856,Africa,45.685,3013.976023 +Algeria,1962,11000948,Africa,48.303,2550.81688 +Algeria,1967,12760499,Africa,51.407,3246.991771 +Algeria,1972,14760787,Africa,54.518,4182.663766 +Algeria,1977,17152804,Africa,58.014,4910.416756 +Algeria,1982,20033753,Africa,61.368,5745.160213 +Algeria,1987,23254956,Africa,65.799,5681.358539 
+Algeria,1992,26298373,Africa,67.744,5023.216647 +Algeria,1997,29072015,Africa,69.152,4797.295051 +Algeria,2002,31287142,Africa,70.994,5288.040382 +Algeria,2007,33333216,Africa,72.301,6223.367465 +Angola,1952,4232095,Africa,30.015,3520.610273 +Angola,1957,4561361,Africa,31.999,3827.940465 +Angola,1962,4826015,Africa,34,4269.276742 +Angola,1967,5247469,Africa,35.985,5522.776375 +Angola,1972,5894858,Africa,37.928,5473.288005 +Angola,1977,6162675,Africa,39.483,3008.647355 +Angola,1982,7016384,Africa,39.942,2756.953672 +Angola,1987,7874230,Africa,39.906,2430.208311 +Angola,1992,8735988,Africa,40.647,2627.845685 +Angola,1997,9875024,Africa,40.963,2277.140884 +Angola,2002,10866106,Africa,41.003,2773.287312 +Angola,2007,12420476,Africa,42.731,4797.231267 +Argentina,1952,17876956,Americas,62.485,5911.315053 +Argentina,1957,19610538,Americas,64.399,6856.856212 +Argentina,1962,21283783,Americas,65.142,7133.166023 +Argentina,1967,22934225,Americas,65.634,8052.953021 +Argentina,1972,24779799,Americas,67.065,9443.038526 +Argentina,1977,26983828,Americas,68.481,10079.02674 +Argentina,1982,29341374,Americas,69.942,8997.897412 +Argentina,1987,31620918,Americas,70.774,9139.671389 +Argentina,1992,33958947,Americas,71.868,9308.41871 +Argentina,1997,36203463,Americas,73.275,10967.28195 +Argentina,2002,38331121,Americas,74.34,8797.640716 +Argentina,2007,40301927,Americas,75.32,12779.37964 +Australia,1952,8691212,Oceania,69.12,10039.59564 +Australia,1957,9712569,Oceania,70.33,10949.64959 +Australia,1962,10794968,Oceania,70.93,12217.22686 +Australia,1967,11872264,Oceania,71.1,14526.12465 +Australia,1972,13177000,Oceania,71.93,16788.62948 +Australia,1977,14074100,Oceania,73.49,18334.19751 +Australia,1982,15184200,Oceania,74.74,19477.00928 +Australia,1987,16257249,Oceania,76.32,21888.88903 +Australia,1992,17481977,Oceania,77.56,23424.76683 +Australia,1997,18565243,Oceania,78.83,26997.93657 +Australia,2002,19546792,Oceania,80.37,30687.75473 +Australia,2007,20434176,Oceania,81.235,34435.36744 +Austria,1952,6927772,Europe,66.8,6137.076492 +Austria,1957,6965860,Europe,67.48,8842.59803 +Austria,1962,7129864,Europe,69.54,10750.72111 +Austria,1967,7376998,Europe,70.14,12834.6024 +Austria,1972,7544201,Europe,70.63,16661.6256 +Austria,1977,7568430,Europe,72.17,19749.4223 +Austria,1982,7574613,Europe,73.18,21597.08362 +Austria,1987,7578903,Europe,74.94,23687.82607 +Austria,1992,7914969,Europe,76.04,27042.01868 +Austria,1997,8069876,Europe,77.51,29095.92066 +Austria,2002,8148312,Europe,78.98,32417.60769 +Austria,2007,8199783,Europe,79.829,36126.4927 +Bahrain,1952,120447,Asia,50.939,9867.084765 +Bahrain,1957,138655,Asia,53.832,11635.79945 +Bahrain,1962,171863,Asia,56.923,12753.27514 +Bahrain,1967,202182,Asia,59.923,14804.6727 +Bahrain,1972,230800,Asia,63.3,18268.65839 +Bahrain,1977,297410,Asia,65.593,19340.10196 +Bahrain,1982,377967,Asia,69.052,19211.14731 +Bahrain,1987,454612,Asia,70.75,18524.02406 +Bahrain,1992,529491,Asia,72.601,19035.57917 +Bahrain,1997,598561,Asia,73.925,20292.01679 +Bahrain,2002,656397,Asia,74.795,23403.55927 +Bahrain,2007,708573,Asia,75.635,29796.04834 +Bangladesh,1952,46886859,Asia,37.484,684.2441716 +Bangladesh,1957,51365468,Asia,39.348,661.6374577 +Bangladesh,1962,56839289,Asia,41.216,686.3415538 +Bangladesh,1967,62821884,Asia,43.453,721.1860862 +Bangladesh,1972,70759295,Asia,45.252,630.2336265 +Bangladesh,1977,80428306,Asia,46.923,659.8772322 +Bangladesh,1982,93074406,Asia,50.009,676.9818656 +Bangladesh,1987,103764241,Asia,52.819,751.9794035 +Bangladesh,1992,113704579,Asia,56.018,837.8101643 
+Bangladesh,1997,123315288,Asia,59.412,972.7700352 +Bangladesh,2002,135656790,Asia,62.013,1136.39043 +Bangladesh,2007,150448339,Asia,64.062,1391.253792 +Belgium,1952,8730405,Europe,68,8343.105127 +Belgium,1957,8989111,Europe,69.24,9714.960623 +Belgium,1962,9218400,Europe,70.25,10991.20676 +Belgium,1967,9556500,Europe,70.94,13149.04119 +Belgium,1972,9709100,Europe,71.44,16672.14356 +Belgium,1977,9821800,Europe,72.8,19117.97448 +Belgium,1982,9856303,Europe,73.93,20979.84589 +Belgium,1987,9870200,Europe,75.35,22525.56308 +Belgium,1992,10045622,Europe,76.46,25575.57069 +Belgium,1997,10199787,Europe,77.53,27561.19663 +Belgium,2002,10311970,Europe,78.32,30485.88375 +Belgium,2007,10392226,Europe,79.441,33692.60508 +Benin,1952,1738315,Africa,38.223,1062.7522 +Benin,1957,1925173,Africa,40.358,959.6010805 +Benin,1962,2151895,Africa,42.618,949.4990641 +Benin,1967,2427334,Africa,44.885,1035.831411 +Benin,1972,2761407,Africa,47.014,1085.796879 +Benin,1977,3168267,Africa,49.19,1029.161251 +Benin,1982,3641603,Africa,50.904,1277.897616 +Benin,1987,4243788,Africa,52.337,1225.85601 +Benin,1992,4981671,Africa,53.919,1191.207681 +Benin,1997,6066080,Africa,54.777,1232.975292 +Benin,2002,7026113,Africa,54.406,1372.877931 +Benin,2007,8078314,Africa,56.728,1441.284873 +Bolivia,1952,2883315,Americas,40.414,2677.326347 +Bolivia,1957,3211738,Americas,41.89,2127.686326 +Bolivia,1962,3593918,Americas,43.428,2180.972546 +Bolivia,1967,4040665,Americas,45.032,2586.886053 +Bolivia,1972,4565872,Americas,46.714,2980.331339 +Bolivia,1977,5079716,Americas,50.023,3548.097832 +Bolivia,1982,5642224,Americas,53.859,3156.510452 +Bolivia,1987,6156369,Americas,57.251,2753.69149 +Bolivia,1992,6893451,Americas,59.957,2961.699694 +Bolivia,1997,7693188,Americas,62.05,3326.143191 +Bolivia,2002,8445134,Americas,63.883,3413.26269 +Bolivia,2007,9119152,Americas,65.554,3822.137084 +Bosnia and Herzegovina,1952,2791000,Europe,53.82,973.5331948 +Bosnia and Herzegovina,1957,3076000,Europe,58.45,1353.989176 +Bosnia and Herzegovina,1962,3349000,Europe,61.93,1709.683679 +Bosnia and Herzegovina,1967,3585000,Europe,64.79,2172.352423 +Bosnia and Herzegovina,1972,3819000,Europe,67.45,2860.16975 +Bosnia and Herzegovina,1977,4086000,Europe,69.86,3528.481305 +Bosnia and Herzegovina,1982,4172693,Europe,70.69,4126.613157 +Bosnia and Herzegovina,1987,4338977,Europe,71.14,4314.114757 +Bosnia and Herzegovina,1992,4256013,Europe,72.178,2546.781445 +Bosnia and Herzegovina,1997,3607000,Europe,73.244,4766.355904 +Bosnia and Herzegovina,2002,4165416,Europe,74.09,6018.975239 +Bosnia and Herzegovina,2007,4552198,Europe,74.852,7446.298803 +Botswana,1952,442308,Africa,47.622,851.2411407 +Botswana,1957,474639,Africa,49.618,918.2325349 +Botswana,1962,512764,Africa,51.52,983.6539764 +Botswana,1967,553541,Africa,53.298,1214.709294 +Botswana,1972,619351,Africa,56.024,2263.611114 +Botswana,1977,781472,Africa,59.319,3214.857818 +Botswana,1982,970347,Africa,61.484,4551.14215 +Botswana,1987,1151184,Africa,63.622,6205.88385 +Botswana,1992,1342614,Africa,62.745,7954.111645 +Botswana,1997,1536536,Africa,52.556,8647.142313 +Botswana,2002,1630347,Africa,46.634,11003.60508 +Botswana,2007,1639131,Africa,50.728,12569.85177 +Brazil,1952,56602560,Americas,50.917,2108.944355 +Brazil,1957,65551171,Americas,53.285,2487.365989 +Brazil,1962,76039390,Americas,55.665,3336.585802 +Brazil,1967,88049823,Americas,57.632,3429.864357 +Brazil,1972,100840058,Americas,59.504,4985.711467 +Brazil,1977,114313951,Americas,61.489,6660.118654 +Brazil,1982,128962939,Americas,63.336,7030.835878 
+Brazil,1987,142938076,Americas,65.205,7807.095818 +Brazil,1992,155975974,Americas,67.057,6950.283021 +Brazil,1997,168546719,Americas,69.388,7957.980824 +Brazil,2002,179914212,Americas,71.006,8131.212843 +Brazil,2007,190010647,Americas,72.39,9065.800825 +Bulgaria,1952,7274900,Europe,59.6,2444.286648 +Bulgaria,1957,7651254,Europe,66.61,3008.670727 +Bulgaria,1962,8012946,Europe,69.51,4254.337839 +Bulgaria,1967,8310226,Europe,70.42,5577.0028 +Bulgaria,1972,8576200,Europe,70.9,6597.494398 +Bulgaria,1977,8797022,Europe,70.81,7612.240438 +Bulgaria,1982,8892098,Europe,71.08,8224.191647 +Bulgaria,1987,8971958,Europe,71.34,8239.854824 +Bulgaria,1992,8658506,Europe,71.19,6302.623438 +Bulgaria,1997,8066057,Europe,70.32,5970.38876 +Bulgaria,2002,7661799,Europe,72.14,7696.777725 +Bulgaria,2007,7322858,Europe,73.005,10680.79282 +Burkina Faso,1952,4469979,Africa,31.975,543.2552413 +Burkina Faso,1957,4713416,Africa,34.906,617.1834648 +Burkina Faso,1962,4919632,Africa,37.814,722.5120206 +Burkina Faso,1967,5127935,Africa,40.697,794.8265597 +Burkina Faso,1972,5433886,Africa,43.591,854.7359763 +Burkina Faso,1977,5889574,Africa,46.137,743.3870368 +Burkina Faso,1982,6634596,Africa,48.122,807.1985855 +Burkina Faso,1987,7586551,Africa,49.557,912.0631417 +Burkina Faso,1992,8878303,Africa,50.26,931.7527731 +Burkina Faso,1997,10352843,Africa,50.324,946.2949618 +Burkina Faso,2002,12251209,Africa,50.65,1037.645221 +Burkina Faso,2007,14326203,Africa,52.295,1217.032994 +Burundi,1952,2445618,Africa,39.031,339.2964587 +Burundi,1957,2667518,Africa,40.533,379.5646281 +Burundi,1962,2961915,Africa,42.045,355.2032273 +Burundi,1967,3330989,Africa,43.548,412.9775136 +Burundi,1972,3529983,Africa,44.057,464.0995039 +Burundi,1977,3834415,Africa,45.91,556.1032651 +Burundi,1982,4580410,Africa,47.471,559.603231 +Burundi,1987,5126023,Africa,48.211,621.8188189 +Burundi,1992,5809236,Africa,44.736,631.6998778 +Burundi,1997,6121610,Africa,45.326,463.1151478 +Burundi,2002,7021078,Africa,47.36,446.4035126 +Burundi,2007,8390505,Africa,49.58,430.0706916 +Cambodia,1952,4693836,Asia,39.417,368.4692856 +Cambodia,1957,5322536,Asia,41.366,434.0383364 +Cambodia,1962,6083619,Asia,43.415,496.9136476 +Cambodia,1967,6960067,Asia,45.415,523.4323142 +Cambodia,1972,7450606,Asia,40.317,421.6240257 +Cambodia,1977,6978607,Asia,31.22,524.9721832 +Cambodia,1982,7272485,Asia,50.957,624.4754784 +Cambodia,1987,8371791,Asia,53.914,683.8955732 +Cambodia,1992,10150094,Asia,55.803,682.3031755 +Cambodia,1997,11782962,Asia,56.534,734.28517 +Cambodia,2002,12926707,Asia,56.752,896.2260153 +Cambodia,2007,14131858,Asia,59.723,1713.778686 +Cameroon,1952,5009067,Africa,38.523,1172.667655 +Cameroon,1957,5359923,Africa,40.428,1313.048099 +Cameroon,1962,5793633,Africa,42.643,1399.607441 +Cameroon,1967,6335506,Africa,44.799,1508.453148 +Cameroon,1972,7021028,Africa,47.049,1684.146528 +Cameroon,1977,7959865,Africa,49.355,1783.432873 +Cameroon,1982,9250831,Africa,52.961,2367.983282 +Cameroon,1987,10780667,Africa,54.985,2602.664206 +Cameroon,1992,12467171,Africa,54.314,1793.163278 +Cameroon,1997,14195809,Africa,52.199,1694.337469 +Cameroon,2002,15929988,Africa,49.856,1934.011449 +Cameroon,2007,17696293,Africa,50.43,2042.09524 +Canada,1952,14785584,Americas,68.75,11367.16112 +Canada,1957,17010154,Americas,69.96,12489.95006 +Canada,1962,18985849,Americas,71.3,13462.48555 +Canada,1967,20819767,Americas,72.13,16076.58803 +Canada,1972,22284500,Americas,72.88,18970.57086 +Canada,1977,23796400,Americas,74.21,22090.88306 +Canada,1982,25201900,Americas,75.76,22898.79214 
+Canada,1987,26549700,Americas,76.86,26626.51503 +Canada,1992,28523502,Americas,77.95,26342.88426 +Canada,1997,30305843,Americas,78.61,28954.92589 +Canada,2002,31902268,Americas,79.77,33328.96507 +Canada,2007,33390141,Americas,80.653,36319.23501 +Central African Republic,1952,1291695,Africa,35.463,1071.310713 +Central African Republic,1957,1392284,Africa,37.464,1190.844328 +Central African Republic,1962,1523478,Africa,39.475,1193.068753 +Central African Republic,1967,1733638,Africa,41.478,1136.056615 +Central African Republic,1972,1927260,Africa,43.457,1070.013275 +Central African Republic,1977,2167533,Africa,46.775,1109.374338 +Central African Republic,1982,2476971,Africa,48.295,956.7529907 +Central African Republic,1987,2840009,Africa,50.485,844.8763504 +Central African Republic,1992,3265124,Africa,49.396,747.9055252 +Central African Republic,1997,3696513,Africa,46.066,740.5063317 +Central African Republic,2002,4048013,Africa,43.308,738.6906068 +Central African Republic,2007,4369038,Africa,44.741,706.016537 +Chad,1952,2682462,Africa,38.092,1178.665927 +Chad,1957,2894855,Africa,39.881,1308.495577 +Chad,1962,3150417,Africa,41.716,1389.817618 +Chad,1967,3495967,Africa,43.601,1196.810565 +Chad,1972,3899068,Africa,45.569,1104.103987 +Chad,1977,4388260,Africa,47.383,1133.98495 +Chad,1982,4875118,Africa,49.517,797.9081006 +Chad,1987,5498955,Africa,51.051,952.386129 +Chad,1992,6429417,Africa,51.724,1058.0643 +Chad,1997,7562011,Africa,51.573,1004.961353 +Chad,2002,8835739,Africa,50.525,1156.18186 +Chad,2007,10238807,Africa,50.651,1704.063724 +Chile,1952,6377619,Americas,54.745,3939.978789 +Chile,1957,7048426,Americas,56.074,4315.622723 +Chile,1962,7961258,Americas,57.924,4519.094331 +Chile,1967,8858908,Americas,60.523,5106.654313 +Chile,1972,9717524,Americas,63.441,5494.024437 +Chile,1977,10599793,Americas,67.052,4756.763836 +Chile,1982,11487112,Americas,70.565,5095.665738 +Chile,1987,12463354,Americas,72.492,5547.063754 +Chile,1992,13572994,Americas,74.126,7596.125964 +Chile,1997,14599929,Americas,75.816,10118.05318 +Chile,2002,15497046,Americas,77.86,10778.78385 +Chile,2007,16284741,Americas,78.553,13171.63885 +China,1952,556263528.0,Asia,44,400.4486107 +China,1957,637408000,Asia,50.54896,575.9870009 +China,1962,665770000,Asia,44.50136,487.6740183 +China,1967,754550000,Asia,58.38112,612.7056934 +China,1972,862030000,Asia,63.11888,676.9000921 +China,1977,943455000,Asia,63.96736,741.2374699 +China,1982,1000281000,Asia,65.525,962.4213805 +China,1987,1084035000,Asia,67.274,1378.904018 +China,1992,1164970000,Asia,68.69,1655.784158 +China,1997,1230075000,Asia,70.426,2289.234136 +China,2002,1280400000,Asia,72.028,3119.280896 +China,2007,1318683096,Asia,72.961,4959.114854 +Colombia,1952,12350771,Americas,50.643,2144.115096 +Colombia,1957,14485993,Americas,55.118,2323.805581 +Colombia,1962,17009885,Americas,57.863,2492.351109 +Colombia,1967,19764027,Americas,59.963,2678.729839 +Colombia,1972,22542890,Americas,61.623,3264.660041 +Colombia,1977,25094412,Americas,63.837,3815.80787 +Colombia,1982,27764644,Americas,66.653,4397.575659 +Colombia,1987,30964245,Americas,67.768,4903.2191 +Colombia,1992,34202721,Americas,68.421,5444.648617 +Colombia,1997,37657830,Americas,70.313,6117.361746 +Colombia,2002,41008227,Americas,71.682,5755.259962 +Colombia,2007,44227550,Americas,72.889,7006.580419 +Comoros,1952,153936,Africa,40.715,1102.990936 +Comoros,1957,170928,Africa,42.46,1211.148548 +Comoros,1962,191689,Africa,44.467,1406.648278 +Comoros,1967,217378,Africa,46.472,1876.029643 
+Comoros,1972,250027,Africa,48.944,1937.577675 +Comoros,1977,304739,Africa,50.939,1172.603047 +Comoros,1982,348643,Africa,52.933,1267.100083 +Comoros,1987,395114,Africa,54.926,1315.980812 +Comoros,1992,454429,Africa,57.939,1246.90737 +Comoros,1997,527982,Africa,60.66,1173.618235 +Comoros,2002,614382,Africa,62.974,1075.811558 +Comoros,2007,710960,Africa,65.152,986.1478792 +"Congo, Dem. Rep.",1952,14100005,Africa,39.143,780.5423257 +"Congo, Dem. Rep.",1957,15577932,Africa,40.652,905.8602303 +"Congo, Dem. Rep.",1962,17486434,Africa,42.122,896.3146335 +"Congo, Dem. Rep.",1967,19941073,Africa,44.056,861.5932424 +"Congo, Dem. Rep.",1972,23007669,Africa,45.989,904.8960685 +"Congo, Dem. Rep.",1977,26480870,Africa,47.804,795.757282 +"Congo, Dem. Rep.",1982,30646495,Africa,47.784,673.7478181 +"Congo, Dem. Rep.",1987,35481645,Africa,47.412,672.774812 +"Congo, Dem. Rep.",1992,41672143,Africa,45.548,457.7191807 +"Congo, Dem. Rep.",1997,47798986,Africa,42.587,312.188423 +"Congo, Dem. Rep.",2002,55379852,Africa,44.966,241.1658765 +"Congo, Dem. Rep.",2007,64606759,Africa,46.462,277.5518587 +"Congo, Rep.",1952,854885,Africa,42.111,2125.621418 +"Congo, Rep.",1957,940458,Africa,45.053,2315.056572 +"Congo, Rep.",1962,1047924,Africa,48.435,2464.783157 +"Congo, Rep.",1967,1179760,Africa,52.04,2677.939642 +"Congo, Rep.",1972,1340458,Africa,54.907,3213.152683 +"Congo, Rep.",1977,1536769,Africa,55.625,3259.178978 +"Congo, Rep.",1982,1774735,Africa,56.695,4879.507522 +"Congo, Rep.",1987,2064095,Africa,57.47,4201.194937 +"Congo, Rep.",1992,2409073,Africa,56.433,4016.239529 +"Congo, Rep.",1997,2800947,Africa,52.962,3484.164376 +"Congo, Rep.",2002,3328795,Africa,52.97,3484.06197 +"Congo, Rep.",2007,3800610,Africa,55.322,3632.557798 +Costa Rica,1952,926317,Americas,57.206,2627.009471 +Costa Rica,1957,1112300,Americas,60.026,2990.010802 +Costa Rica,1962,1345187,Americas,62.842,3460.937025 +Costa Rica,1967,1588717,Americas,65.424,4161.727834 +Costa Rica,1972,1834796,Americas,67.849,5118.146939 +Costa Rica,1977,2108457,Americas,70.75,5926.876967 +Costa Rica,1982,2424367,Americas,73.45,5262.734751 +Costa Rica,1987,2799811,Americas,74.752,5629.915318 +Costa Rica,1992,3173216,Americas,75.713,6160.416317 +Costa Rica,1997,3518107,Americas,77.26,6677.045314 +Costa Rica,2002,3834934,Americas,78.123,7723.447195 +Costa Rica,2007,4133884,Americas,78.782,9645.06142 +Cote d'Ivoire,1952,2977019,Africa,40.477,1388.594732 +Cote d'Ivoire,1957,3300000,Africa,42.469,1500.895925 +Cote d'Ivoire,1962,3832408,Africa,44.93,1728.869428 +Cote d'Ivoire,1967,4744870,Africa,47.35,2052.050473 +Cote d'Ivoire,1972,6071696,Africa,49.801,2378.201111 +Cote d'Ivoire,1977,7459574,Africa,52.374,2517.736547 +Cote d'Ivoire,1982,9025951,Africa,53.983,2602.710169 +Cote d'Ivoire,1987,10761098,Africa,54.655,2156.956069 +Cote d'Ivoire,1992,12772596,Africa,52.044,1648.073791 +Cote d'Ivoire,1997,14625967,Africa,47.991,1786.265407 +Cote d'Ivoire,2002,16252726,Africa,46.832,1648.800823 +Cote d'Ivoire,2007,18013409,Africa,48.328,1544.750112 +Croatia,1952,3882229,Europe,61.21,3119.23652 +Croatia,1957,3991242,Europe,64.77,4338.231617 +Croatia,1962,4076557,Europe,67.13,5477.890018 +Croatia,1967,4174366,Europe,68.5,6960.297861 +Croatia,1972,4225310,Europe,69.61,9164.090127 +Croatia,1977,4318673,Europe,70.64,11305.38517 +Croatia,1982,4413368,Europe,70.46,13221.82184 +Croatia,1987,4484310,Europe,71.52,13822.58394 +Croatia,1992,4494013,Europe,72.527,8447.794873 +Croatia,1997,4444595,Europe,73.68,9875.604515 +Croatia,2002,4481020,Europe,74.876,11628.38895 
+Croatia,2007,4493312,Europe,75.748,14619.22272 +Cuba,1952,6007797,Americas,59.421,5586.53878 +Cuba,1957,6640752,Americas,62.325,6092.174359 +Cuba,1962,7254373,Americas,65.246,5180.75591 +Cuba,1967,8139332,Americas,68.29,5690.268015 +Cuba,1972,8831348,Americas,70.723,5305.445256 +Cuba,1977,9537988,Americas,72.649,6380.494966 +Cuba,1982,9789224,Americas,73.717,7316.918107 +Cuba,1987,10239839,Americas,74.174,7532.924763 +Cuba,1992,10723260,Americas,74.414,5592.843963 +Cuba,1997,10983007,Americas,76.151,5431.990415 +Cuba,2002,11226999,Americas,77.158,6340.646683 +Cuba,2007,11416987,Americas,78.273,8948.102923 +Czech Republic,1952,9125183,Europe,66.87,6876.14025 +Czech Republic,1957,9513758,Europe,69.03,8256.343918 +Czech Republic,1962,9620282,Europe,69.9,10136.86713 +Czech Republic,1967,9835109,Europe,70.38,11399.44489 +Czech Republic,1972,9862158,Europe,70.29,13108.4536 +Czech Republic,1977,10161915,Europe,70.71,14800.16062 +Czech Republic,1982,10303704,Europe,70.96,15377.22855 +Czech Republic,1987,10311597,Europe,71.58,16310.4434 +Czech Republic,1992,10315702,Europe,72.4,14297.02122 +Czech Republic,1997,10300707,Europe,74.01,16048.51424 +Czech Republic,2002,10256295,Europe,75.51,17596.21022 +Czech Republic,2007,10228744,Europe,76.486,22833.30851 +Denmark,1952,4334000,Europe,70.78,9692.385245 +Denmark,1957,4487831,Europe,71.81,11099.65935 +Denmark,1962,4646899,Europe,72.35,13583.31351 +Denmark,1967,4838800,Europe,72.96,15937.21123 +Denmark,1972,4991596,Europe,73.47,18866.20721 +Denmark,1977,5088419,Europe,74.69,20422.9015 +Denmark,1982,5117810,Europe,74.63,21688.04048 +Denmark,1987,5127024,Europe,74.8,25116.17581 +Denmark,1992,5171393,Europe,75.33,26406.73985 +Denmark,1997,5283663,Europe,76.11,29804.34567 +Denmark,2002,5374693,Europe,77.18,32166.50006 +Denmark,2007,5468120,Europe,78.332,35278.41874 +Djibouti,1952,63149,Africa,34.812,2669.529475 +Djibouti,1957,71851,Africa,37.328,2864.969076 +Djibouti,1962,89898,Africa,39.693,3020.989263 +Djibouti,1967,127617,Africa,42.074,3020.050513 +Djibouti,1972,178848,Africa,44.366,3694.212352 +Djibouti,1977,228694,Africa,46.519,3081.761022 +Djibouti,1982,305991,Africa,48.812,2879.468067 +Djibouti,1987,311025,Africa,50.04,2880.102568 +Djibouti,1992,384156,Africa,51.604,2377.156192 +Djibouti,1997,417908,Africa,53.157,1895.016984 +Djibouti,2002,447416,Africa,53.373,1908.260867 +Djibouti,2007,496374,Africa,54.791,2082.481567 +Dominican Republic,1952,2491346,Americas,45.928,1397.717137 +Dominican Republic,1957,2923186,Americas,49.828,1544.402995 +Dominican Republic,1962,3453434,Americas,53.459,1662.137359 +Dominican Republic,1967,4049146,Americas,56.751,1653.723003 +Dominican Republic,1972,4671329,Americas,59.631,2189.874499 +Dominican Republic,1977,5302800,Americas,61.788,2681.9889 +Dominican Republic,1982,5968349,Americas,63.727,2861.092386 +Dominican Republic,1987,6655297,Americas,66.046,2899.842175 +Dominican Republic,1992,7351181,Americas,68.457,3044.214214 +Dominican Republic,1997,7992357,Americas,69.957,3614.101285 +Dominican Republic,2002,8650322,Americas,70.847,4563.808154 +Dominican Republic,2007,9319622,Americas,72.235,6025.374752 +Ecuador,1952,3548753,Americas,48.357,3522.110717 +Ecuador,1957,4058385,Americas,51.356,3780.546651 +Ecuador,1962,4681707,Americas,54.64,4086.114078 +Ecuador,1967,5432424,Americas,56.678,4579.074215 +Ecuador,1972,6298651,Americas,58.796,5280.99471 +Ecuador,1977,7278866,Americas,61.31,6679.62326 +Ecuador,1982,8365850,Americas,64.342,7213.791267 +Ecuador,1987,9545158,Americas,67.231,6481.776993 
+Ecuador,1992,10748394,Americas,69.613,7103.702595 +Ecuador,1997,11911819,Americas,72.312,7429.455877 +Ecuador,2002,12921234,Americas,74.173,5773.044512 +Ecuador,2007,13755680,Americas,74.994,6873.262326 +Egypt,1952,22223309,Africa,41.893,1418.822445 +Egypt,1957,25009741,Africa,44.444,1458.915272 +Egypt,1962,28173309,Africa,46.992,1693.335853 +Egypt,1967,31681188,Africa,49.293,1814.880728 +Egypt,1972,34807417,Africa,51.137,2024.008147 +Egypt,1977,38783863,Africa,53.319,2785.493582 +Egypt,1982,45681811,Africa,56.006,3503.729636 +Egypt,1987,52799062,Africa,59.797,3885.46071 +Egypt,1992,59402198,Africa,63.674,3794.755195 +Egypt,1997,66134291,Africa,67.217,4173.181797 +Egypt,2002,73312559,Africa,69.806,4754.604414 +Egypt,2007,80264543,Africa,71.338,5581.180998 +El Salvador,1952,2042865,Americas,45.262,3048.3029 +El Salvador,1957,2355805,Americas,48.57,3421.523218 +El Salvador,1962,2747687,Americas,52.307,3776.803627 +El Salvador,1967,3232927,Americas,55.855,4358.595393 +El Salvador,1972,3790903,Americas,58.207,4520.246008 +El Salvador,1977,4282586,Americas,56.696,5138.922374 +El Salvador,1982,4474873,Americas,56.604,4098.344175 +El Salvador,1987,4842194,Americas,63.154,4140.442097 +El Salvador,1992,5274649,Americas,66.798,4444.2317 +El Salvador,1997,5783439,Americas,69.535,5154.825496 +El Salvador,2002,6353681,Americas,70.734,5351.568666 +El Salvador,2007,6939688,Americas,71.878,5728.353514 +Equatorial Guinea,1952,216964,Africa,34.482,375.6431231 +Equatorial Guinea,1957,232922,Africa,35.983,426.0964081 +Equatorial Guinea,1962,249220,Africa,37.485,582.8419714 +Equatorial Guinea,1967,259864,Africa,38.987,915.5960025 +Equatorial Guinea,1972,277603,Africa,40.516,672.4122571 +Equatorial Guinea,1977,192675,Africa,42.024,958.5668124 +Equatorial Guinea,1982,285483,Africa,43.662,927.8253427 +Equatorial Guinea,1987,341244,Africa,45.664,966.8968149 +Equatorial Guinea,1992,387838,Africa,47.545,1132.055034 +Equatorial Guinea,1997,439971,Africa,48.245,2814.480755 +Equatorial Guinea,2002,495627,Africa,49.348,7703.4959 +Equatorial Guinea,2007,551201,Africa,51.579,12154.08975 +Eritrea,1952,1438760,Africa,35.928,328.9405571 +Eritrea,1957,1542611,Africa,38.047,344.1618859 +Eritrea,1962,1666618,Africa,40.158,380.9958433 +Eritrea,1967,1820319,Africa,42.189,468.7949699 +Eritrea,1972,2260187,Africa,44.142,514.3242082 +Eritrea,1977,2512642,Africa,44.535,505.7538077 +Eritrea,1982,2637297,Africa,43.89,524.8758493 +Eritrea,1987,2915959,Africa,46.453,521.1341333 +Eritrea,1992,3668440,Africa,49.991,582.8585102 +Eritrea,1997,4058319,Africa,53.378,913.47079 +Eritrea,2002,4414865,Africa,55.24,765.3500015 +Eritrea,2007,4906585,Africa,58.04,641.3695236 +Ethiopia,1952,20860941,Africa,34.078,362.1462796 +Ethiopia,1957,22815614,Africa,36.667,378.9041632 +Ethiopia,1962,25145372,Africa,40.059,419.4564161 +Ethiopia,1967,27860297,Africa,42.115,516.1186438 +Ethiopia,1972,30770372,Africa,43.515,566.2439442 +Ethiopia,1977,34617799,Africa,44.51,556.8083834 +Ethiopia,1982,38111756,Africa,44.916,577.8607471 +Ethiopia,1987,42999530,Africa,46.684,573.7413142 +Ethiopia,1992,52088559,Africa,48.091,421.3534653 +Ethiopia,1997,59861301,Africa,49.402,515.8894013 +Ethiopia,2002,67946797,Africa,50.725,530.0535319 +Ethiopia,2007,76511887,Africa,52.947,690.8055759 +Finland,1952,4090500,Europe,66.55,6424.519071 +Finland,1957,4324000,Europe,67.49,7545.415386 +Finland,1962,4491443,Europe,68.75,9371.842561 +Finland,1967,4605744,Europe,69.83,10921.63626 +Finland,1972,4639657,Europe,70.87,14358.8759 +Finland,1977,4738902,Europe,72.52,15605.42283 
+Finland,1982,4826933,Europe,74.55,18533.15761 +Finland,1987,4931729,Europe,74.83,21141.01223 +Finland,1992,5041039,Europe,75.7,20647.16499 +Finland,1997,5134406,Europe,77.13,23723.9502 +Finland,2002,5193039,Europe,78.37,28204.59057 +Finland,2007,5238460,Europe,79.313,33207.0844 +France,1952,42459667,Europe,67.41,7029.809327 +France,1957,44310863,Europe,68.93,8662.834898 +France,1962,47124000,Europe,70.51,10560.48553 +France,1967,49569000,Europe,71.55,12999.91766 +France,1972,51732000,Europe,72.38,16107.19171 +France,1977,53165019,Europe,73.83,18292.63514 +France,1982,54433565,Europe,74.89,20293.89746 +France,1987,55630100,Europe,76.34,22066.44214 +France,1992,57374179,Europe,77.46,24703.79615 +France,1997,58623428,Europe,78.64,25889.78487 +France,2002,59925035,Europe,79.59,28926.03234 +France,2007,61083916,Europe,80.657,30470.0167 +Gabon,1952,420702,Africa,37.003,4293.476475 +Gabon,1957,434904,Africa,38.999,4976.198099 +Gabon,1962,455661,Africa,40.489,6631.459222 +Gabon,1967,489004,Africa,44.598,8358.761987 +Gabon,1972,537977,Africa,48.69,11401.94841 +Gabon,1977,706367,Africa,52.79,21745.57328 +Gabon,1982,753874,Africa,56.564,15113.36194 +Gabon,1987,880397,Africa,60.19,11864.40844 +Gabon,1992,985739,Africa,61.366,13522.15752 +Gabon,1997,1126189,Africa,60.461,14722.84188 +Gabon,2002,1299304,Africa,56.761,12521.71392 +Gabon,2007,1454867,Africa,56.735,13206.48452 +Gambia,1952,284320,Africa,30,485.2306591 +Gambia,1957,323150,Africa,32.065,520.9267111 +Gambia,1962,374020,Africa,33.896,599.650276 +Gambia,1967,439593,Africa,35.857,734.7829124 +Gambia,1972,517101,Africa,38.308,756.0868363 +Gambia,1977,608274,Africa,41.842,884.7552507 +Gambia,1982,715523,Africa,45.58,835.8096108 +Gambia,1987,848406,Africa,49.265,611.6588611 +Gambia,1992,1025384,Africa,52.644,665.6244126 +Gambia,1997,1235767,Africa,55.861,653.7301704 +Gambia,2002,1457766,Africa,58.041,660.5855997 +Gambia,2007,1688359,Africa,59.448,752.7497265 +Germany,1952,69145952,Europe,67.5,7144.114393 +Germany,1957,71019069,Europe,69.1,10187.82665 +Germany,1962,73739117,Europe,70.3,12902.46291 +Germany,1967,76368453,Europe,70.8,14745.62561 +Germany,1972,78717088,Europe,71,18016.18027 +Germany,1977,78160773,Europe,72.5,20512.92123 +Germany,1982,78335266,Europe,73.8,22031.53274 +Germany,1987,77718298,Europe,74.847,24639.18566 +Germany,1992,80597764,Europe,76.07,26505.30317 +Germany,1997,82011073,Europe,77.34,27788.88416 +Germany,2002,82350671,Europe,78.67,30035.80198 +Germany,2007,82400996,Europe,79.406,32170.37442 +Ghana,1952,5581001,Africa,43.149,911.2989371 +Ghana,1957,6391288,Africa,44.779,1043.561537 +Ghana,1962,7355248,Africa,46.452,1190.041118 +Ghana,1967,8490213,Africa,48.072,1125.69716 +Ghana,1972,9354120,Africa,49.875,1178.223708 +Ghana,1977,10538093,Africa,51.756,993.2239571 +Ghana,1982,11400338,Africa,53.744,876.032569 +Ghana,1987,14168101,Africa,55.729,847.0061135 +Ghana,1992,16278738,Africa,57.501,925.060154 +Ghana,1997,18418288,Africa,58.556,1005.245812 +Ghana,2002,20550751,Africa,58.453,1111.984578 +Ghana,2007,22873338,Africa,60.022,1327.60891 +Greece,1952,7733250,Europe,65.86,3530.690067 +Greece,1957,8096218,Europe,67.86,4916.299889 +Greece,1962,8448233,Europe,69.51,6017.190733 +Greece,1967,8716441,Europe,71,8513.097016 +Greece,1972,8888628,Europe,72.34,12724.82957 +Greece,1977,9308479,Europe,73.68,14195.52428 +Greece,1982,9786480,Europe,75.24,15268.42089 +Greece,1987,9974490,Europe,76.67,16120.52839 +Greece,1992,10325429,Europe,77.03,17541.49634 +Greece,1997,10502372,Europe,77.869,18747.69814 
+Greece,2002,10603863,Europe,78.256,22514.2548 +Greece,2007,10706290,Europe,79.483,27538.41188 +Guatemala,1952,3146381,Americas,42.023,2428.237769 +Guatemala,1957,3640876,Americas,44.142,2617.155967 +Guatemala,1962,4208858,Americas,46.954,2750.364446 +Guatemala,1967,4690773,Americas,50.016,3242.531147 +Guatemala,1972,5149581,Americas,53.738,4031.408271 +Guatemala,1977,5703430,Americas,56.029,4879.992748 +Guatemala,1982,6395630,Americas,58.137,4820.49479 +Guatemala,1987,7326406,Americas,60.782,4246.485974 +Guatemala,1992,8486949,Americas,63.373,4439.45084 +Guatemala,1997,9803875,Americas,66.322,4684.313807 +Guatemala,2002,11178650,Americas,68.978,4858.347495 +Guatemala,2007,12572928,Americas,70.259,5186.050003 +Guinea,1952,2664249,Africa,33.609,510.1964923 +Guinea,1957,2876726,Africa,34.558,576.2670245 +Guinea,1962,3140003,Africa,35.753,686.3736739 +Guinea,1967,3451418,Africa,37.197,708.7595409 +Guinea,1972,3811387,Africa,38.842,741.6662307 +Guinea,1977,4227026,Africa,40.762,874.6858643 +Guinea,1982,4710497,Africa,42.891,857.2503577 +Guinea,1987,5650262,Africa,45.552,805.5724718 +Guinea,1992,6990574,Africa,48.576,794.3484384 +Guinea,1997,8048834,Africa,51.455,869.4497668 +Guinea,2002,8807818,Africa,53.676,945.5835837 +Guinea,2007,9947814,Africa,56.007,942.6542111 +Guinea-Bissau,1952,580653,Africa,32.5,299.850319 +Guinea-Bissau,1957,601095,Africa,33.489,431.7904566 +Guinea-Bissau,1962,627820,Africa,34.488,522.0343725 +Guinea-Bissau,1967,601287,Africa,35.492,715.5806402 +Guinea-Bissau,1972,625361,Africa,36.486,820.2245876 +Guinea-Bissau,1977,745228,Africa,37.465,764.7259628 +Guinea-Bissau,1982,825987,Africa,39.327,838.1239671 +Guinea-Bissau,1987,927524,Africa,41.245,736.4153921 +Guinea-Bissau,1992,1050938,Africa,43.266,745.5398706 +Guinea-Bissau,1997,1193708,Africa,44.873,796.6644681 +Guinea-Bissau,2002,1332459,Africa,45.504,575.7047176 +Guinea-Bissau,2007,1472041,Africa,46.388,579.231743 +Haiti,1952,3201488,Americas,37.579,1840.366939 +Haiti,1957,3507701,Americas,40.696,1726.887882 +Haiti,1962,3880130,Americas,43.59,1796.589032 +Haiti,1967,4318137,Americas,46.243,1452.057666 +Haiti,1972,4698301,Americas,48.042,1654.456946 +Haiti,1977,4908554,Americas,49.923,1874.298931 +Haiti,1982,5198399,Americas,51.461,2011.159549 +Haiti,1987,5756203,Americas,53.636,1823.015995 +Haiti,1992,6326682,Americas,55.089,1456.309517 +Haiti,1997,6913545,Americas,56.671,1341.726931 +Haiti,2002,7607651,Americas,58.137,1270.364932 +Haiti,2007,8502814,Americas,60.916,1201.637154 +Honduras,1952,1517453,Americas,41.912,2194.926204 +Honduras,1957,1770390,Americas,44.665,2220.487682 +Honduras,1962,2090162,Americas,48.041,2291.156835 +Honduras,1967,2500689,Americas,50.924,2538.269358 +Honduras,1972,2965146,Americas,53.884,2529.842345 +Honduras,1977,3055235,Americas,57.402,3203.208066 +Honduras,1982,3669448,Americas,60.909,3121.760794 +Honduras,1987,4372203,Americas,64.492,3023.096699 +Honduras,1992,5077347,Americas,66.399,3081.694603 +Honduras,1997,5867957,Americas,67.659,3160.454906 +Honduras,2002,6677328,Americas,68.565,3099.72866 +Honduras,2007,7483763,Americas,70.198,3548.330846 +"Hong Kong, China",1952,2125900,Asia,60.96,3054.421209 +"Hong Kong, China",1957,2736300,Asia,64.75,3629.076457 +"Hong Kong, China",1962,3305200,Asia,67.65,4692.648272 +"Hong Kong, China",1967,3722800,Asia,70,6197.962814 +"Hong Kong, China",1972,4115700,Asia,72,8315.928145 +"Hong Kong, China",1977,4583700,Asia,73.6,11186.14125 +"Hong Kong, China",1982,5264500,Asia,75.45,14560.53051 +"Hong Kong, China",1987,5584510,Asia,76.2,20038.47269 +"Hong 
Kong, China",1992,5829696,Asia,77.601,24757.60301 +"Hong Kong, China",1997,6495918,Asia,80,28377.63219 +"Hong Kong, China",2002,6762476,Asia,81.495,30209.01516 +"Hong Kong, China",2007,6980412,Asia,82.208,39724.97867 +Hungary,1952,9504000,Europe,64.03,5263.673816 +Hungary,1957,9839000,Europe,66.41,6040.180011 +Hungary,1962,10063000,Europe,67.96,7550.359877 +Hungary,1967,10223422,Europe,69.5,9326.64467 +Hungary,1972,10394091,Europe,69.76,10168.65611 +Hungary,1977,10637171,Europe,69.95,11674.83737 +Hungary,1982,10705535,Europe,69.39,12545.99066 +Hungary,1987,10612740,Europe,69.58,12986.47998 +Hungary,1992,10348684,Europe,69.17,10535.62855 +Hungary,1997,10244684,Europe,71.04,11712.7768 +Hungary,2002,10083313,Europe,72.59,14843.93556 +Hungary,2007,9956108,Europe,73.338,18008.94444 +Iceland,1952,147962,Europe,72.49,7267.688428 +Iceland,1957,165110,Europe,73.47,9244.001412 +Iceland,1962,182053,Europe,73.68,10350.15906 +Iceland,1967,198676,Europe,73.73,13319.89568 +Iceland,1972,209275,Europe,74.46,15798.06362 +Iceland,1977,221823,Europe,76.11,19654.96247 +Iceland,1982,233997,Europe,76.99,23269.6075 +Iceland,1987,244676,Europe,77.23,26923.20628 +Iceland,1992,259012,Europe,78.77,25144.39201 +Iceland,1997,271192,Europe,78.95,28061.09966 +Iceland,2002,288030,Europe,80.5,31163.20196 +Iceland,2007,301931,Europe,81.757,36180.78919 +India,1952,372000000,Asia,37.373,546.5657493 +India,1957,409000000,Asia,40.249,590.061996 +India,1962,454000000,Asia,43.605,658.3471509 +India,1967,506000000,Asia,47.193,700.7706107 +India,1972,567000000,Asia,50.651,724.032527 +India,1977,634000000,Asia,54.208,813.337323 +India,1982,708000000,Asia,56.596,855.7235377 +India,1987,788000000,Asia,58.553,976.5126756 +India,1992,872000000,Asia,60.223,1164.406809 +India,1997,959000000,Asia,61.765,1458.817442 +India,2002,1034172547,Asia,62.879,1746.769454 +India,2007,1110396331,Asia,64.698,2452.210407 +Indonesia,1952,82052000,Asia,37.468,749.6816546 +Indonesia,1957,90124000,Asia,39.918,858.9002707 +Indonesia,1962,99028000,Asia,42.518,849.2897701 +Indonesia,1967,109343000,Asia,45.964,762.4317721 +Indonesia,1972,121282000,Asia,49.203,1111.107907 +Indonesia,1977,136725000,Asia,52.702,1382.702056 +Indonesia,1982,153343000,Asia,56.159,1516.872988 +Indonesia,1987,169276000,Asia,60.137,1748.356961 +Indonesia,1992,184816000,Asia,62.681,2383.140898 +Indonesia,1997,199278000,Asia,66.041,3119.335603 +Indonesia,2002,211060000,Asia,68.588,2873.91287 +Indonesia,2007,223547000,Asia,70.65,3540.651564 +Iran,1952,17272000,Asia,44.869,3035.326002 +Iran,1957,19792000,Asia,47.181,3290.257643 +Iran,1962,22874000,Asia,49.325,4187.329802 +Iran,1967,26538000,Asia,52.469,5906.731805 +Iran,1972,30614000,Asia,55.234,9613.818607 +Iran,1977,35480679,Asia,57.702,11888.59508 +Iran,1982,43072751,Asia,59.62,7608.334602 +Iran,1987,51889696,Asia,63.04,6642.881371 +Iran,1992,60397973,Asia,65.742,7235.653188 +Iran,1997,63327987,Asia,68.042,8263.590301 +Iran,2002,66907826,Asia,69.451,9240.761975 +Iran,2007,69453570,Asia,70.964,11605.71449 +Iraq,1952,5441766,Asia,45.32,4129.766056 +Iraq,1957,6248643,Asia,48.437,6229.333562 +Iraq,1962,7240260,Asia,51.457,8341.737815 +Iraq,1967,8519282,Asia,54.459,8931.459811 +Iraq,1972,10061506,Asia,56.95,9576.037596 +Iraq,1977,11882916,Asia,60.413,14688.23507 +Iraq,1982,14173318,Asia,62.038,14517.90711 +Iraq,1987,16543189,Asia,65.044,11643.57268 +Iraq,1992,17861905,Asia,59.461,3745.640687 +Iraq,1997,20775703,Asia,58.811,3076.239795 +Iraq,2002,24001816,Asia,57.046,4390.717312 +Iraq,2007,27499638,Asia,59.545,4471.061906 
+Ireland,1952,2952156,Europe,66.91,5210.280328 +Ireland,1957,2878220,Europe,68.9,5599.077872 +Ireland,1962,2830000,Europe,70.29,6631.597314 +Ireland,1967,2900100,Europe,71.08,7655.568963 +Ireland,1972,3024400,Europe,71.28,9530.772896 +Ireland,1977,3271900,Europe,72.03,11150.98113 +Ireland,1982,3480000,Europe,73.1,12618.32141 +Ireland,1987,3539900,Europe,74.36,13872.86652 +Ireland,1992,3557761,Europe,75.467,17558.81555 +Ireland,1997,3667233,Europe,76.122,24521.94713 +Ireland,2002,3879155,Europe,77.783,34077.04939 +Ireland,2007,4109086,Europe,78.885,40675.99635 +Israel,1952,1620914,Asia,65.39,4086.522128 +Israel,1957,1944401,Asia,67.84,5385.278451 +Israel,1962,2310904,Asia,69.39,7105.630706 +Israel,1967,2693585,Asia,70.75,8393.741404 +Israel,1972,3095893,Asia,71.63,12786.93223 +Israel,1977,3495918,Asia,73.06,13306.61921 +Israel,1982,3858421,Asia,74.45,15367.0292 +Israel,1987,4203148,Asia,75.6,17122.47986 +Israel,1992,4936550,Asia,76.93,18051.52254 +Israel,1997,5531387,Asia,78.269,20896.60924 +Israel,2002,6029529,Asia,79.696,21905.59514 +Israel,2007,6426679,Asia,80.745,25523.2771 +Italy,1952,47666000,Europe,65.94,4931.404155 +Italy,1957,49182000,Europe,67.81,6248.656232 +Italy,1962,50843200,Europe,69.24,8243.58234 +Italy,1967,52667100,Europe,71.06,10022.40131 +Italy,1972,54365564,Europe,72.19,12269.27378 +Italy,1977,56059245,Europe,73.48,14255.98475 +Italy,1982,56535636,Europe,74.98,16537.4835 +Italy,1987,56729703,Europe,76.42,19207.23482 +Italy,1992,56840847,Europe,77.44,22013.64486 +Italy,1997,57479469,Europe,78.82,24675.02446 +Italy,2002,57926999,Europe,80.24,27968.09817 +Italy,2007,58147733,Europe,80.546,28569.7197 +Jamaica,1952,1426095,Americas,58.53,2898.530881 +Jamaica,1957,1535090,Americas,62.61,4756.525781 +Jamaica,1962,1665128,Americas,65.61,5246.107524 +Jamaica,1967,1861096,Americas,67.51,6124.703451 +Jamaica,1972,1997616,Americas,69,7433.889293 +Jamaica,1977,2156814,Americas,70.11,6650.195573 +Jamaica,1982,2298309,Americas,71.21,6068.05135 +Jamaica,1987,2326606,Americas,71.77,6351.237495 +Jamaica,1992,2378618,Americas,71.766,7404.923685 +Jamaica,1997,2531311,Americas,72.262,7121.924704 +Jamaica,2002,2664659,Americas,72.047,6994.774861 +Jamaica,2007,2780132,Americas,72.567,7320.880262 +Japan,1952,86459025,Asia,63.03,3216.956347 +Japan,1957,91563009,Asia,65.5,4317.694365 +Japan,1962,95831757,Asia,68.73,6576.649461 +Japan,1967,100825279,Asia,71.43,9847.788607 +Japan,1972,107188273,Asia,73.42,14778.78636 +Japan,1977,113872473,Asia,75.38,16610.37701 +Japan,1982,118454974,Asia,77.11,19384.10571 +Japan,1987,122091325,Asia,78.67,22375.94189 +Japan,1992,124329269,Asia,79.36,26824.89511 +Japan,1997,125956499,Asia,80.69,28816.58499 +Japan,2002,127065841,Asia,82,28604.5919 +Japan,2007,127467972,Asia,82.603,31656.06806 +Jordan,1952,607914,Asia,43.158,1546.907807 +Jordan,1957,746559,Asia,45.669,1886.080591 +Jordan,1962,933559,Asia,48.126,2348.009158 +Jordan,1967,1255058,Asia,51.629,2741.796252 +Jordan,1972,1613551,Asia,56.528,2110.856309 +Jordan,1977,1937652,Asia,61.134,2852.351568 +Jordan,1982,2347031,Asia,63.739,4161.415959 +Jordan,1987,2820042,Asia,65.869,4448.679912 +Jordan,1992,3867409,Asia,68.015,3431.593647 +Jordan,1997,4526235,Asia,69.772,3645.379572 +Jordan,2002,5307470,Asia,71.263,3844.917194 +Jordan,2007,6053193,Asia,72.535,4519.461171 +Kenya,1952,6464046,Africa,42.27,853.540919 +Kenya,1957,7454779,Africa,44.686,944.4383152 +Kenya,1962,8678557,Africa,47.949,896.9663732 +Kenya,1967,10191512,Africa,50.654,1056.736457 +Kenya,1972,12044785,Africa,53.559,1222.359968 
+Kenya,1977,14500404,Africa,56.155,1267.613204 +Kenya,1982,17661452,Africa,58.766,1348.225791 +Kenya,1987,21198082,Africa,59.339,1361.936856 +Kenya,1992,25020539,Africa,59.285,1341.921721 +Kenya,1997,28263827,Africa,54.407,1360.485021 +Kenya,2002,31386842,Africa,50.992,1287.514732 +Kenya,2007,35610177,Africa,54.11,1463.249282 +"Korea, Dem. Rep.",1952,8865488,Asia,50.056,1088.277758 +"Korea, Dem. Rep.",1957,9411381,Asia,54.081,1571.134655 +"Korea, Dem. Rep.",1962,10917494,Asia,56.656,1621.693598 +"Korea, Dem. Rep.",1967,12617009,Asia,59.942,2143.540609 +"Korea, Dem. Rep.",1972,14781241,Asia,63.983,3701.621503 +"Korea, Dem. Rep.",1977,16325320,Asia,67.159,4106.301249 +"Korea, Dem. Rep.",1982,17647518,Asia,69.1,4106.525293 +"Korea, Dem. Rep.",1987,19067554,Asia,70.647,4106.492315 +"Korea, Dem. Rep.",1992,20711375,Asia,69.978,3726.063507 +"Korea, Dem. Rep.",1997,21585105,Asia,67.727,1690.756814 +"Korea, Dem. Rep.",2002,22215365,Asia,66.662,1646.758151 +"Korea, Dem. Rep.",2007,23301725,Asia,67.297,1593.06548 +"Korea, Rep.",1952,20947571,Asia,47.453,1030.592226 +"Korea, Rep.",1957,22611552,Asia,52.681,1487.593537 +"Korea, Rep.",1962,26420307,Asia,55.292,1536.344387 +"Korea, Rep.",1967,30131000,Asia,57.716,2029.228142 +"Korea, Rep.",1972,33505000,Asia,62.612,3030.87665 +"Korea, Rep.",1977,36436000,Asia,64.766,4657.22102 +"Korea, Rep.",1982,39326000,Asia,67.123,5622.942464 +"Korea, Rep.",1987,41622000,Asia,69.81,8533.088805 +"Korea, Rep.",1992,43805450,Asia,72.244,12104.27872 +"Korea, Rep.",1997,46173816,Asia,74.647,15993.52796 +"Korea, Rep.",2002,47969150,Asia,77.045,19233.98818 +"Korea, Rep.",2007,49044790,Asia,78.623,23348.13973 +Kuwait,1952,160000,Asia,55.565,108382.3529 +Kuwait,1957,212846,Asia,58.033,113523.1329 +Kuwait,1962,358266,Asia,60.47,95458.11176 +Kuwait,1967,575003,Asia,64.624,80894.88326 +Kuwait,1972,841934,Asia,67.712,109347.867 +Kuwait,1977,1140357,Asia,69.343,59265.47714 +Kuwait,1982,1497494,Asia,71.309,31354.03573 +Kuwait,1987,1891487,Asia,74.174,28118.42998 +Kuwait,1992,1418095,Asia,75.19,34932.91959 +Kuwait,1997,1765345,Asia,76.156,40300.61996 +Kuwait,2002,2111561,Asia,76.904,35110.10566 +Kuwait,2007,2505559,Asia,77.588,47306.98978 +Lebanon,1952,1439529,Asia,55.928,4834.804067 +Lebanon,1957,1647412,Asia,59.489,6089.786934 +Lebanon,1962,1886848,Asia,62.094,5714.560611 +Lebanon,1967,2186894,Asia,63.87,6006.983042 +Lebanon,1972,2680018,Asia,65.421,7486.384341 +Lebanon,1977,3115787,Asia,66.099,8659.696836 +Lebanon,1982,3086876,Asia,66.983,7640.519521 +Lebanon,1987,3089353,Asia,67.926,5377.091329 +Lebanon,1992,3219994,Asia,69.292,6890.806854 +Lebanon,1997,3430388,Asia,70.265,8754.96385 +Lebanon,2002,3677780,Asia,71.028,9313.93883 +Lebanon,2007,3921278,Asia,71.993,10461.05868 +Lesotho,1952,748747,Africa,42.138,298.8462121 +Lesotho,1957,813338,Africa,45.047,335.9971151 +Lesotho,1962,893143,Africa,47.747,411.8006266 +Lesotho,1967,996380,Africa,48.492,498.6390265 +Lesotho,1972,1116779,Africa,49.767,496.5815922 +Lesotho,1977,1251524,Africa,52.208,745.3695408 +Lesotho,1982,1411807,Africa,55.078,797.2631074 +Lesotho,1987,1599200,Africa,57.18,773.9932141 +Lesotho,1992,1803195,Africa,59.685,977.4862725 +Lesotho,1997,1982823,Africa,55.558,1186.147994 +Lesotho,2002,2046772,Africa,44.593,1275.184575 +Lesotho,2007,2012649,Africa,42.592,1569.331442 +Liberia,1952,863308,Africa,38.48,575.5729961 +Liberia,1957,975950,Africa,39.486,620.9699901 +Liberia,1962,1112796,Africa,40.502,634.1951625 +Liberia,1967,1279406,Africa,41.536,713.6036483 +Liberia,1972,1482628,Africa,42.614,803.0054535 
+Liberia,1977,1703617,Africa,43.764,640.3224383 +Liberia,1982,1956875,Africa,44.852,572.1995694 +Liberia,1987,2269414,Africa,46.027,506.1138573 +Liberia,1992,1912974,Africa,40.802,636.6229191 +Liberia,1997,2200725,Africa,42.221,609.1739508 +Liberia,2002,2814651,Africa,43.753,531.4823679 +Liberia,2007,3193942,Africa,45.678,414.5073415 +Libya,1952,1019729,Africa,42.723,2387.54806 +Libya,1957,1201578,Africa,45.289,3448.284395 +Libya,1962,1441863,Africa,47.808,6757.030816 +Libya,1967,1759224,Africa,50.227,18772.75169 +Libya,1972,2183877,Africa,52.773,21011.49721 +Libya,1977,2721783,Africa,57.442,21951.21176 +Libya,1982,3344074,Africa,62.155,17364.27538 +Libya,1987,3799845,Africa,66.234,11770.5898 +Libya,1992,4364501,Africa,68.755,9640.138501 +Libya,1997,4759670,Africa,71.555,9467.446056 +Libya,2002,5368585,Africa,72.737,9534.677467 +Libya,2007,6036914,Africa,73.952,12057.49928 +Madagascar,1952,4762912,Africa,36.681,1443.011715 +Madagascar,1957,5181679,Africa,38.865,1589.20275 +Madagascar,1962,5703324,Africa,40.848,1643.38711 +Madagascar,1967,6334556,Africa,42.881,1634.047282 +Madagascar,1972,7082430,Africa,44.851,1748.562982 +Madagascar,1977,8007166,Africa,46.881,1544.228586 +Madagascar,1982,9171477,Africa,48.969,1302.878658 +Madagascar,1987,10568642,Africa,49.35,1155.441948 +Madagascar,1992,12210395,Africa,52.214,1040.67619 +Madagascar,1997,14165114,Africa,54.978,986.2958956 +Madagascar,2002,16473477,Africa,57.286,894.6370822 +Madagascar,2007,19167654,Africa,59.443,1044.770126 +Malawi,1952,2917802,Africa,36.256,369.1650802 +Malawi,1957,3221238,Africa,37.207,416.3698064 +Malawi,1962,3628608,Africa,38.41,427.9010856 +Malawi,1967,4147252,Africa,39.487,495.5147806 +Malawi,1972,4730997,Africa,41.766,584.6219709 +Malawi,1977,5637246,Africa,43.767,663.2236766 +Malawi,1982,6502825,Africa,45.642,632.8039209 +Malawi,1987,7824747,Africa,47.457,635.5173634 +Malawi,1992,10014249,Africa,49.42,563.2000145 +Malawi,1997,10419991,Africa,47.495,692.2758103 +Malawi,2002,11824495,Africa,45.009,665.4231186 +Malawi,2007,13327079,Africa,48.303,759.3499101 +Malaysia,1952,6748378,Asia,48.463,1831.132894 +Malaysia,1957,7739235,Asia,52.102,1810.066992 +Malaysia,1962,8906385,Asia,55.737,2036.884944 +Malaysia,1967,10154878,Asia,59.371,2277.742396 +Malaysia,1972,11441462,Asia,63.01,2849.09478 +Malaysia,1977,12845381,Asia,65.256,3827.921571 +Malaysia,1982,14441916,Asia,68,4920.355951 +Malaysia,1987,16331785,Asia,69.5,5249.802653 +Malaysia,1992,18319502,Asia,70.693,7277.912802 +Malaysia,1997,20476091,Asia,71.938,10132.90964 +Malaysia,2002,22662365,Asia,73.044,10206.97794 +Malaysia,2007,24821286,Asia,74.241,12451.6558 +Mali,1952,3838168,Africa,33.685,452.3369807 +Mali,1957,4241884,Africa,35.307,490.3821867 +Mali,1962,4690372,Africa,36.936,496.1743428 +Mali,1967,5212416,Africa,38.487,545.0098873 +Mali,1972,5828158,Africa,39.977,581.3688761 +Mali,1977,6491649,Africa,41.714,686.3952693 +Mali,1982,6998256,Africa,43.916,618.0140641 +Mali,1987,7634008,Africa,46.364,684.1715576 +Mali,1992,8416215,Africa,48.388,739.014375 +Mali,1997,9384984,Africa,49.903,790.2579846 +Mali,2002,10580176,Africa,51.818,951.4097518 +Mali,2007,12031795,Africa,54.467,1042.581557 +Mauritania,1952,1022556,Africa,40.543,743.1159097 +Mauritania,1957,1076852,Africa,42.338,846.1202613 +Mauritania,1962,1146757,Africa,44.248,1055.896036 +Mauritania,1967,1230542,Africa,46.289,1421.145193 +Mauritania,1972,1332786,Africa,48.437,1586.851781 +Mauritania,1977,1456688,Africa,50.852,1497.492223 +Mauritania,1982,1622136,Africa,53.599,1481.150189 
+Mauritania,1987,1841240,Africa,56.145,1421.603576 +Mauritania,1992,2119465,Africa,58.333,1361.369784 +Mauritania,1997,2444741,Africa,60.43,1483.136136 +Mauritania,2002,2828858,Africa,62.247,1579.019543 +Mauritania,2007,3270065,Africa,64.164,1803.151496 +Mauritius,1952,516556,Africa,50.986,1967.955707 +Mauritius,1957,609816,Africa,58.089,2034.037981 +Mauritius,1962,701016,Africa,60.246,2529.067487 +Mauritius,1967,789309,Africa,61.557,2475.387562 +Mauritius,1972,851334,Africa,62.944,2575.484158 +Mauritius,1977,913025,Africa,64.93,3710.982963 +Mauritius,1982,992040,Africa,66.711,3688.037739 +Mauritius,1987,1042663,Africa,68.74,4783.586903 +Mauritius,1992,1096202,Africa,69.745,6058.253846 +Mauritius,1997,1149818,Africa,70.736,7425.705295 +Mauritius,2002,1200206,Africa,71.954,9021.815894 +Mauritius,2007,1250882,Africa,72.801,10956.99112 +Mexico,1952,30144317,Americas,50.789,3478.125529 +Mexico,1957,35015548,Americas,55.19,4131.546641 +Mexico,1962,41121485,Americas,58.299,4581.609385 +Mexico,1967,47995559,Americas,60.11,5754.733883 +Mexico,1972,55984294,Americas,62.361,6809.40669 +Mexico,1977,63759976,Americas,65.032,7674.929108 +Mexico,1982,71640904,Americas,67.405,9611.147541 +Mexico,1987,80122492,Americas,69.498,8688.156003 +Mexico,1992,88111030,Americas,71.455,9472.384295 +Mexico,1997,95895146,Americas,73.67,9767.29753 +Mexico,2002,102479927,Americas,74.902,10742.44053 +Mexico,2007,108700891,Americas,76.195,11977.57496 +Mongolia,1952,800663,Asia,42.244,786.5668575 +Mongolia,1957,882134,Asia,45.248,912.6626085 +Mongolia,1962,1010280,Asia,48.251,1056.353958 +Mongolia,1967,1149500,Asia,51.253,1226.04113 +Mongolia,1972,1320500,Asia,53.754,1421.741975 +Mongolia,1977,1528000,Asia,55.491,1647.511665 +Mongolia,1982,1756032,Asia,57.489,2000.603139 +Mongolia,1987,2015133,Asia,60.222,2338.008304 +Mongolia,1992,2312802,Asia,61.271,1785.402016 +Mongolia,1997,2494803,Asia,63.625,1902.2521 +Mongolia,2002,2674234,Asia,65.033,2140.739323 +Mongolia,2007,2874127,Asia,66.803,3095.772271 +Montenegro,1952,413834,Europe,59.164,2647.585601 +Montenegro,1957,442829,Europe,61.448,3682.259903 +Montenegro,1962,474528,Europe,63.728,4649.593785 +Montenegro,1967,501035,Europe,67.178,5907.850937 +Montenegro,1972,527678,Europe,70.636,7778.414017 +Montenegro,1977,560073,Europe,73.066,9595.929905 +Montenegro,1982,562548,Europe,74.101,11222.58762 +Montenegro,1987,569473,Europe,74.865,11732.51017 +Montenegro,1992,621621,Europe,75.435,7003.339037 +Montenegro,1997,692651,Europe,75.445,6465.613349 +Montenegro,2002,720230,Europe,73.981,6557.194282 +Montenegro,2007,684736,Europe,74.543,9253.896111 +Morocco,1952,9939217,Africa,42.873,1688.20357 +Morocco,1957,11406350,Africa,45.423,1642.002314 +Morocco,1962,13056604,Africa,47.924,1566.353493 +Morocco,1967,14770296,Africa,50.335,1711.04477 +Morocco,1972,16660670,Africa,52.862,1930.194975 +Morocco,1977,18396941,Africa,55.73,2370.619976 +Morocco,1982,20198730,Africa,59.65,2702.620356 +Morocco,1987,22987397,Africa,62.677,2755.046991 +Morocco,1992,25798239,Africa,65.393,2948.047252 +Morocco,1997,28529501,Africa,67.66,2982.101858 +Morocco,2002,31167783,Africa,69.615,3258.495584 +Morocco,2007,33757175,Africa,71.164,3820.17523 +Mozambique,1952,6446316,Africa,31.286,468.5260381 +Mozambique,1957,7038035,Africa,33.779,495.5868333 +Mozambique,1962,7788944,Africa,36.161,556.6863539 +Mozambique,1967,8680909,Africa,38.113,566.6691539 +Mozambique,1972,9809596,Africa,40.328,724.9178037 +Mozambique,1977,11127868,Africa,42.495,502.3197334 +Mozambique,1982,12587223,Africa,42.795,462.2114149 
+Mozambique,1987,12891952,Africa,42.861,389.8761846 +Mozambique,1992,13160731,Africa,44.284,410.8968239 +Mozambique,1997,16603334,Africa,46.344,472.3460771 +Mozambique,2002,18473780,Africa,44.026,633.6179466 +Mozambique,2007,19951656,Africa,42.082,823.6856205 +Myanmar,1952,20092996,Asia,36.319,331 +Myanmar,1957,21731844,Asia,41.905,350 +Myanmar,1962,23634436,Asia,45.108,388 +Myanmar,1967,25870271,Asia,49.379,349 +Myanmar,1972,28466390,Asia,53.07,357 +Myanmar,1977,31528087,Asia,56.059,371 +Myanmar,1982,34680442,Asia,58.056,424 +Myanmar,1987,38028578,Asia,58.339,385 +Myanmar,1992,40546538,Asia,59.32,347 +Myanmar,1997,43247867,Asia,60.328,415 +Myanmar,2002,45598081,Asia,59.908,611 +Myanmar,2007,47761980,Asia,62.069,944 +Namibia,1952,485831,Africa,41.725,2423.780443 +Namibia,1957,548080,Africa,45.226,2621.448058 +Namibia,1962,621392,Africa,48.386,3173.215595 +Namibia,1967,706640,Africa,51.159,3793.694753 +Namibia,1972,821782,Africa,53.867,3746.080948 +Namibia,1977,977026,Africa,56.437,3876.485958 +Namibia,1982,1099010,Africa,58.968,4191.100511 +Namibia,1987,1278184,Africa,60.835,3693.731337 +Namibia,1992,1554253,Africa,61.999,3804.537999 +Namibia,1997,1774766,Africa,58.909,3899.52426 +Namibia,2002,1972153,Africa,51.479,4072.324751 +Namibia,2007,2055080,Africa,52.906,4811.060429 +Nepal,1952,9182536,Asia,36.157,545.8657229 +Nepal,1957,9682338,Asia,37.686,597.9363558 +Nepal,1962,10332057,Asia,39.393,652.3968593 +Nepal,1967,11261690,Asia,41.472,676.4422254 +Nepal,1972,12412593,Asia,43.971,674.7881296 +Nepal,1977,13933198,Asia,46.748,694.1124398 +Nepal,1982,15796314,Asia,49.594,718.3730947 +Nepal,1987,17917180,Asia,52.537,775.6324501 +Nepal,1992,20326209,Asia,55.727,897.7403604 +Nepal,1997,23001113,Asia,59.426,1010.892138 +Nepal,2002,25873917,Asia,61.34,1057.206311 +Nepal,2007,28901790,Asia,63.785,1091.359778 +Netherlands,1952,10381988,Europe,72.13,8941.571858 +Netherlands,1957,11026383,Europe,72.99,11276.19344 +Netherlands,1962,11805689,Europe,73.23,12790.84956 +Netherlands,1967,12596822,Europe,73.82,15363.25136 +Netherlands,1972,13329874,Europe,73.75,18794.74567 +Netherlands,1977,13852989,Europe,75.24,21209.0592 +Netherlands,1982,14310401,Europe,76.05,21399.46046 +Netherlands,1987,14665278,Europe,76.83,23651.32361 +Netherlands,1992,15174244,Europe,77.42,26790.94961 +Netherlands,1997,15604464,Europe,78.03,30246.13063 +Netherlands,2002,16122830,Europe,78.53,33724.75778 +Netherlands,2007,16570613,Europe,79.762,36797.93332 +New Zealand,1952,1994794,Oceania,69.39,10556.57566 +New Zealand,1957,2229407,Oceania,70.26,12247.39532 +New Zealand,1962,2488550,Oceania,71.24,13175.678 +New Zealand,1967,2728150,Oceania,71.52,14463.91893 +New Zealand,1972,2929100,Oceania,71.89,16046.03728 +New Zealand,1977,3164900,Oceania,72.22,16233.7177 +New Zealand,1982,3210650,Oceania,73.84,17632.4104 +New Zealand,1987,3317166,Oceania,74.32,19007.19129 +New Zealand,1992,3437674,Oceania,76.33,18363.32494 +New Zealand,1997,3676187,Oceania,77.55,21050.41377 +New Zealand,2002,3908037,Oceania,79.11,23189.80135 +New Zealand,2007,4115771,Oceania,80.204,25185.00911 +Nicaragua,1952,1165790,Americas,42.314,3112.363948 +Nicaragua,1957,1358828,Americas,45.432,3457.415947 +Nicaragua,1962,1590597,Americas,48.632,3634.364406 +Nicaragua,1967,1865490,Americas,51.884,4643.393534 +Nicaragua,1972,2182908,Americas,55.151,4688.593267 +Nicaragua,1977,2554598,Americas,57.47,5486.371089 +Nicaragua,1982,2979423,Americas,59.298,3470.338156 +Nicaragua,1987,3344353,Americas,62.008,2955.984375 +Nicaragua,1992,4017939,Americas,65.843,2170.151724 
+Nicaragua,1997,4609572,Americas,68.426,2253.023004 +Nicaragua,2002,5146848,Americas,70.836,2474.548819 +Nicaragua,2007,5675356,Americas,72.899,2749.320965 +Niger,1952,3379468,Africa,37.444,761.879376 +Niger,1957,3692184,Africa,38.598,835.5234025 +Niger,1962,4076008,Africa,39.487,997.7661127 +Niger,1967,4534062,Africa,40.118,1054.384891 +Niger,1972,5060262,Africa,40.546,954.2092363 +Niger,1977,5682086,Africa,41.291,808.8970728 +Niger,1982,6437188,Africa,42.598,909.7221354 +Niger,1987,7332638,Africa,44.555,668.3000228 +Niger,1992,8392818,Africa,47.391,581.182725 +Niger,1997,9666252,Africa,51.313,580.3052092 +Niger,2002,11140655,Africa,54.496,601.0745012 +Niger,2007,12894865,Africa,56.867,619.6768924 +Nigeria,1952,33119096,Africa,36.324,1077.281856 +Nigeria,1957,37173340,Africa,37.802,1100.592563 +Nigeria,1962,41871351,Africa,39.36,1150.927478 +Nigeria,1967,47287752,Africa,41.04,1014.514104 +Nigeria,1972,53740085,Africa,42.821,1698.388838 +Nigeria,1977,62209173,Africa,44.514,1981.951806 +Nigeria,1982,73039376,Africa,45.826,1576.97375 +Nigeria,1987,81551520,Africa,46.886,1385.029563 +Nigeria,1992,93364244,Africa,47.472,1619.848217 +Nigeria,1997,106207839,Africa,47.464,1624.941275 +Nigeria,2002,119901274,Africa,46.608,1615.286395 +Nigeria,2007,135031164,Africa,46.859,2013.977305 +Norway,1952,3327728,Europe,72.67,10095.42172 +Norway,1957,3491938,Europe,73.44,11653.97304 +Norway,1962,3638919,Europe,73.47,13450.40151 +Norway,1967,3786019,Europe,74.08,16361.87647 +Norway,1972,3933004,Europe,74.34,18965.05551 +Norway,1977,4043205,Europe,75.37,23311.34939 +Norway,1982,4114787,Europe,75.97,26298.63531 +Norway,1987,4186147,Europe,75.89,31540.9748 +Norway,1992,4286357,Europe,77.32,33965.66115 +Norway,1997,4405672,Europe,78.32,41283.16433 +Norway,2002,4535591,Europe,79.05,44683.97525 +Norway,2007,4627926,Europe,80.196,49357.19017 +Oman,1952,507833,Asia,37.578,1828.230307 +Oman,1957,561977,Asia,40.08,2242.746551 +Oman,1962,628164,Asia,43.165,2924.638113 +Oman,1967,714775,Asia,46.988,4720.942687 +Oman,1972,829050,Asia,52.143,10618.03855 +Oman,1977,1004533,Asia,57.367,11848.34392 +Oman,1982,1301048,Asia,62.728,12954.79101 +Oman,1987,1593882,Asia,67.734,18115.22313 +Oman,1992,1915208,Asia,71.197,18616.70691 +Oman,1997,2283635,Asia,72.499,19702.05581 +Oman,2002,2713462,Asia,74.193,19774.83687 +Oman,2007,3204897,Asia,75.64,22316.19287 +Pakistan,1952,41346560,Asia,43.436,684.5971438 +Pakistan,1957,46679944,Asia,45.557,747.0835292 +Pakistan,1962,53100671,Asia,47.67,803.3427418 +Pakistan,1967,60641899,Asia,49.8,942.4082588 +Pakistan,1972,69325921,Asia,51.929,1049.938981 +Pakistan,1977,78152686,Asia,54.043,1175.921193 +Pakistan,1982,91462088,Asia,56.158,1443.429832 +Pakistan,1987,105186881,Asia,58.245,1704.686583 +Pakistan,1992,120065004,Asia,60.838,1971.829464 +Pakistan,1997,135564834,Asia,61.818,2049.350521 +Pakistan,2002,153403524,Asia,63.61,2092.712441 +Pakistan,2007,169270617,Asia,65.483,2605.94758 +Panama,1952,940080,Americas,55.191,2480.380334 +Panama,1957,1063506,Americas,59.201,2961.800905 +Panama,1962,1215725,Americas,61.817,3536.540301 +Panama,1967,1405486,Americas,64.071,4421.009084 +Panama,1972,1616384,Americas,66.216,5364.249663 +Panama,1977,1839782,Americas,68.681,5351.912144 +Panama,1982,2036305,Americas,70.472,7009.601598 +Panama,1987,2253639,Americas,71.523,7034.779161 +Panama,1992,2484997,Americas,72.462,6618.74305 +Panama,1997,2734531,Americas,73.738,7113.692252 +Panama,2002,2990875,Americas,74.712,7356.031934 +Panama,2007,3242173,Americas,75.537,9809.185636 
+Paraguay,1952,1555876,Americas,62.649,1952.308701 +Paraguay,1957,1770902,Americas,63.196,2046.154706 +Paraguay,1962,2009813,Americas,64.361,2148.027146 +Paraguay,1967,2287985,Americas,64.951,2299.376311 +Paraguay,1972,2614104,Americas,65.815,2523.337977 +Paraguay,1977,2984494,Americas,66.353,3248.373311 +Paraguay,1982,3366439,Americas,66.874,4258.503604 +Paraguay,1987,3886512,Americas,67.378,3998.875695 +Paraguay,1992,4483945,Americas,68.225,4196.411078 +Paraguay,1997,5154123,Americas,69.4,4247.400261 +Paraguay,2002,5884491,Americas,70.755,3783.674243 +Paraguay,2007,6667147,Americas,71.752,4172.838464 +Peru,1952,8025700,Americas,43.902,3758.523437 +Peru,1957,9146100,Americas,46.263,4245.256698 +Peru,1962,10516500,Americas,49.096,4957.037982 +Peru,1967,12132200,Americas,51.445,5788.09333 +Peru,1972,13954700,Americas,55.448,5937.827283 +Peru,1977,15990099,Americas,58.447,6281.290855 +Peru,1982,18125129,Americas,61.406,6434.501797 +Peru,1987,20195924,Americas,64.134,6360.943444 +Peru,1992,22430449,Americas,66.458,4446.380924 +Peru,1997,24748122,Americas,68.386,5838.347657 +Peru,2002,26769436,Americas,69.906,5909.020073 +Peru,2007,28674757,Americas,71.421,7408.905561 +Philippines,1952,22438691,Asia,47.752,1272.880995 +Philippines,1957,26072194,Asia,51.334,1547.944844 +Philippines,1962,30325264,Asia,54.757,1649.552153 +Philippines,1967,35356600,Asia,56.393,1814.12743 +Philippines,1972,40850141,Asia,58.065,1989.37407 +Philippines,1977,46850962,Asia,60.06,2373.204287 +Philippines,1982,53456774,Asia,62.082,2603.273765 +Philippines,1987,60017788,Asia,64.151,2189.634995 +Philippines,1992,67185766,Asia,66.458,2279.324017 +Philippines,1997,75012988,Asia,68.564,2536.534925 +Philippines,2002,82995088,Asia,70.303,2650.921068 +Philippines,2007,91077287,Asia,71.688,3190.481016 +Poland,1952,25730551,Europe,61.31,4029.329699 +Poland,1957,28235346,Europe,65.77,4734.253019 +Poland,1962,30329617,Europe,67.64,5338.752143 +Poland,1967,31785378,Europe,69.61,6557.152776 +Poland,1972,33039545,Europe,70.85,8006.506993 +Poland,1977,34621254,Europe,70.67,9508.141454 +Poland,1982,36227381,Europe,71.32,8451.531004 +Poland,1987,37740710,Europe,70.98,9082.351172 +Poland,1992,38370697,Europe,70.99,7738.881247 +Poland,1997,38654957,Europe,72.75,10159.58368 +Poland,2002,38625976,Europe,74.67,12002.23908 +Poland,2007,38518241,Europe,75.563,15389.92468 +Portugal,1952,8526050,Europe,59.82,3068.319867 +Portugal,1957,8817650,Europe,61.51,3774.571743 +Portugal,1962,9019800,Europe,64.39,4727.954889 +Portugal,1967,9103000,Europe,66.6,6361.517993 +Portugal,1972,8970450,Europe,69.26,9022.247417 +Portugal,1977,9662600,Europe,70.41,10172.48572 +Portugal,1982,9859650,Europe,72.77,11753.84291 +Portugal,1987,9915289,Europe,74.06,13039.30876 +Portugal,1992,9927680,Europe,74.86,16207.26663 +Portugal,1997,10156415,Europe,75.97,17641.03156 +Portugal,2002,10433867,Europe,77.29,19970.90787 +Portugal,2007,10642836,Europe,78.098,20509.64777 +Puerto Rico,1952,2227000,Americas,64.28,3081.959785 +Puerto Rico,1957,2260000,Americas,68.54,3907.156189 +Puerto Rico,1962,2448046,Americas,69.62,5108.34463 +Puerto Rico,1967,2648961,Americas,71.1,6929.277714 +Puerto Rico,1972,2847132,Americas,72.16,9123.041742 +Puerto Rico,1977,3080828,Americas,73.44,9770.524921 +Puerto Rico,1982,3279001,Americas,73.75,10330.98915 +Puerto Rico,1987,3444468,Americas,74.63,12281.34191 +Puerto Rico,1992,3585176,Americas,73.911,14641.58711 +Puerto Rico,1997,3759430,Americas,74.917,16999.4333 +Puerto Rico,2002,3859606,Americas,77.778,18855.60618 +Puerto 
Rico,2007,3942491,Americas,78.746,19328.70901 +Reunion,1952,257700,Africa,52.724,2718.885295 +Reunion,1957,308700,Africa,55.09,2769.451844 +Reunion,1962,358900,Africa,57.666,3173.72334 +Reunion,1967,414024,Africa,60.542,4021.175739 +Reunion,1972,461633,Africa,64.274,5047.658563 +Reunion,1977,492095,Africa,67.064,4319.804067 +Reunion,1982,517810,Africa,69.885,5267.219353 +Reunion,1987,562035,Africa,71.913,5303.377488 +Reunion,1992,622191,Africa,73.615,6101.255823 +Reunion,1997,684810,Africa,74.772,6071.941411 +Reunion,2002,743981,Africa,75.744,6316.1652 +Reunion,2007,798094,Africa,76.442,7670.122558 +Romania,1952,16630000,Europe,61.05,3144.613186 +Romania,1957,17829327,Europe,64.1,3943.370225 +Romania,1962,18680721,Europe,66.8,4734.997586 +Romania,1967,19284814,Europe,66.8,6470.866545 +Romania,1972,20662648,Europe,69.21,8011.414402 +Romania,1977,21658597,Europe,69.46,9356.39724 +Romania,1982,22356726,Europe,69.66,9605.314053 +Romania,1987,22686371,Europe,69.53,9696.273295 +Romania,1992,22797027,Europe,69.36,6598.409903 +Romania,1997,22562458,Europe,69.72,7346.547557 +Romania,2002,22404337,Europe,71.322,7885.360081 +Romania,2007,22276056,Europe,72.476,10808.47561 +Rwanda,1952,2534927,Africa,40,493.3238752 +Rwanda,1957,2822082,Africa,41.5,540.2893983 +Rwanda,1962,3051242,Africa,43,597.4730727 +Rwanda,1967,3451079,Africa,44.1,510.9637142 +Rwanda,1972,3992121,Africa,44.6,590.5806638 +Rwanda,1977,4657072,Africa,45,670.0806011 +Rwanda,1982,5507565,Africa,46.218,881.5706467 +Rwanda,1987,6349365,Africa,44.02,847.991217 +Rwanda,1992,7290203,Africa,23.599,737.0685949 +Rwanda,1997,7212583,Africa,36.087,589.9445051 +Rwanda,2002,7852401,Africa,43.413,785.6537648 +Rwanda,2007,8860588,Africa,46.242,863.0884639 +Sao Tome and Principe,1952,60011,Africa,46.471,879.5835855 +Sao Tome and Principe,1957,61325,Africa,48.945,860.7369026 +Sao Tome and Principe,1962,65345,Africa,51.893,1071.551119 +Sao Tome and Principe,1967,70787,Africa,54.425,1384.840593 +Sao Tome and Principe,1972,76595,Africa,56.48,1532.985254 +Sao Tome and Principe,1977,86796,Africa,58.55,1737.561657 +Sao Tome and Principe,1982,98593,Africa,60.351,1890.218117 +Sao Tome and Principe,1987,110812,Africa,61.728,1516.525457 +Sao Tome and Principe,1992,125911,Africa,62.742,1428.777814 +Sao Tome and Principe,1997,145608,Africa,63.306,1339.076036 +Sao Tome and Principe,2002,170372,Africa,64.337,1353.09239 +Sao Tome and Principe,2007,199579,Africa,65.528,1598.435089 +Saudi Arabia,1952,4005677,Asia,39.875,6459.554823 +Saudi Arabia,1957,4419650,Asia,42.868,8157.591248 +Saudi Arabia,1962,4943029,Asia,45.914,11626.41975 +Saudi Arabia,1967,5618198,Asia,49.901,16903.04886 +Saudi Arabia,1972,6472756,Asia,53.886,24837.42865 +Saudi Arabia,1977,8128505,Asia,58.69,34167.7626 +Saudi Arabia,1982,11254672,Asia,63.012,33693.17525 +Saudi Arabia,1987,14619745,Asia,66.295,21198.26136 +Saudi Arabia,1992,16945857,Asia,68.768,24841.61777 +Saudi Arabia,1997,21229759,Asia,70.533,20586.69019 +Saudi Arabia,2002,24501530,Asia,71.626,19014.54118 +Saudi Arabia,2007,27601038,Asia,72.777,21654.83194 +Senegal,1952,2755589,Africa,37.278,1450.356983 +Senegal,1957,3054547,Africa,39.329,1567.653006 +Senegal,1962,3430243,Africa,41.454,1654.988723 +Senegal,1967,3965841,Africa,43.563,1612.404632 +Senegal,1972,4588696,Africa,45.815,1597.712056 +Senegal,1977,5260855,Africa,48.879,1561.769116 +Senegal,1982,6147783,Africa,52.379,1518.479984 +Senegal,1987,7171347,Africa,55.769,1441.72072 +Senegal,1992,8307920,Africa,58.196,1367.899369 +Senegal,1997,9535314,Africa,60.187,1392.368347 
+Senegal,2002,10870037,Africa,61.6,1519.635262 +Senegal,2007,12267493,Africa,63.062,1712.472136 +Serbia,1952,6860147,Europe,57.996,3581.459448 +Serbia,1957,7271135,Europe,61.685,4981.090891 +Serbia,1962,7616060,Europe,64.531,6289.629157 +Serbia,1967,7971222,Europe,66.914,7991.707066 +Serbia,1972,8313288,Europe,68.7,10522.06749 +Serbia,1977,8686367,Europe,70.3,12980.66956 +Serbia,1982,9032824,Europe,70.162,15181.0927 +Serbia,1987,9230783,Europe,71.218,15870.87851 +Serbia,1992,9826397,Europe,71.659,9325.068238 +Serbia,1997,10336594,Europe,72.232,7914.320304 +Serbia,2002,10111559,Europe,73.213,7236.075251 +Serbia,2007,10150265,Europe,74.002,9786.534714 +Sierra Leone,1952,2143249,Africa,30.331,879.7877358 +Sierra Leone,1957,2295678,Africa,31.57,1004.484437 +Sierra Leone,1962,2467895,Africa,32.767,1116.639877 +Sierra Leone,1967,2662190,Africa,34.113,1206.043465 +Sierra Leone,1972,2879013,Africa,35.4,1353.759762 +Sierra Leone,1977,3140897,Africa,36.788,1348.285159 +Sierra Leone,1982,3464522,Africa,38.445,1465.010784 +Sierra Leone,1987,3868905,Africa,40.006,1294.447788 +Sierra Leone,1992,4260884,Africa,38.333,1068.696278 +Sierra Leone,1997,4578212,Africa,39.897,574.6481576 +Sierra Leone,2002,5359092,Africa,41.012,699.489713 +Sierra Leone,2007,6144562,Africa,42.568,862.5407561 +Singapore,1952,1127000,Asia,60.396,2315.138227 +Singapore,1957,1445929,Asia,63.179,2843.104409 +Singapore,1962,1750200,Asia,65.798,3674.735572 +Singapore,1967,1977600,Asia,67.946,4977.41854 +Singapore,1972,2152400,Asia,69.521,8597.756202 +Singapore,1977,2325300,Asia,70.795,11210.08948 +Singapore,1982,2651869,Asia,71.76,15169.16112 +Singapore,1987,2794552,Asia,73.56,18861.53081 +Singapore,1992,3235865,Asia,75.788,24769.8912 +Singapore,1997,3802309,Asia,77.158,33519.4766 +Singapore,2002,4197776,Asia,78.77,36023.1054 +Singapore,2007,4553009,Asia,79.972,47143.17964 +Slovak Republic,1952,3558137,Europe,64.36,5074.659104 +Slovak Republic,1957,3844277,Europe,67.45,6093.26298 +Slovak Republic,1962,4237384,Europe,70.33,7481.107598 +Slovak Republic,1967,4442238,Europe,70.98,8412.902397 +Slovak Republic,1972,4593433,Europe,70.35,9674.167626 +Slovak Republic,1977,4827803,Europe,70.45,10922.66404 +Slovak Republic,1982,5048043,Europe,70.8,11348.54585 +Slovak Republic,1987,5199318,Europe,71.08,12037.26758 +Slovak Republic,1992,5302888,Europe,71.38,9498.467723 +Slovak Republic,1997,5383010,Europe,72.71,12126.23065 +Slovak Republic,2002,5410052,Europe,73.8,13638.77837 +Slovak Republic,2007,5447502,Europe,74.663,18678.31435 +Slovenia,1952,1489518,Europe,65.57,4215.041741 +Slovenia,1957,1533070,Europe,67.85,5862.276629 +Slovenia,1962,1582962,Europe,69.15,7402.303395 +Slovenia,1967,1646912,Europe,69.18,9405.489397 +Slovenia,1972,1694510,Europe,69.82,12383.4862 +Slovenia,1977,1746919,Europe,70.97,15277.03017 +Slovenia,1982,1861252,Europe,71.063,17866.72175 +Slovenia,1987,1945870,Europe,72.25,18678.53492 +Slovenia,1992,1999210,Europe,73.64,14214.71681 +Slovenia,1997,2011612,Europe,75.13,17161.10735 +Slovenia,2002,2011497,Europe,76.66,20660.01936 +Slovenia,2007,2009245,Europe,77.926,25768.25759 +Somalia,1952,2526994,Africa,32.978,1135.749842 +Somalia,1957,2780415,Africa,34.977,1258.147413 +Somalia,1962,3080153,Africa,36.981,1369.488336 +Somalia,1967,3428839,Africa,38.977,1284.73318 +Somalia,1972,3840161,Africa,40.973,1254.576127 +Somalia,1977,4353666,Africa,41.974,1450.992513 +Somalia,1982,5828892,Africa,42.955,1176.807031 +Somalia,1987,6921858,Africa,44.501,1093.244963 +Somalia,1992,6099799,Africa,39.658,926.9602964 
+Somalia,1997,6633514,Africa,43.795,930.5964284 +Somalia,2002,7753310,Africa,45.936,882.0818218 +Somalia,2007,9118773,Africa,48.159,926.1410683 +South Africa,1952,14264935,Africa,45.009,4725.295531 +South Africa,1957,16151549,Africa,47.985,5487.104219 +South Africa,1962,18356657,Africa,49.951,5768.729717 +South Africa,1967,20997321,Africa,51.927,7114.477971 +South Africa,1972,23935810,Africa,53.696,7765.962636 +South Africa,1977,27129932,Africa,55.527,8028.651439 +South Africa,1982,31140029,Africa,58.161,8568.266228 +South Africa,1987,35933379,Africa,60.834,7825.823398 +South Africa,1992,39964159,Africa,61.888,7225.069258 +South Africa,1997,42835005,Africa,60.236,7479.188244 +South Africa,2002,44433622,Africa,53.365,7710.946444 +South Africa,2007,43997828,Africa,49.339,9269.657808 +Spain,1952,28549870,Europe,64.94,3834.034742 +Spain,1957,29841614,Europe,66.66,4564.80241 +Spain,1962,31158061,Europe,69.69,5693.843879 +Spain,1967,32850275,Europe,71.44,7993.512294 +Spain,1972,34513161,Europe,73.06,10638.75131 +Spain,1977,36439000,Europe,74.39,13236.92117 +Spain,1982,37983310,Europe,76.3,13926.16997 +Spain,1987,38880702,Europe,76.9,15764.98313 +Spain,1992,39549438,Europe,77.57,18603.06452 +Spain,1997,39855442,Europe,78.77,20445.29896 +Spain,2002,40152517,Europe,79.78,24835.47166 +Spain,2007,40448191,Europe,80.941,28821.0637 +Sri Lanka,1952,7982342,Asia,57.593,1083.53203 +Sri Lanka,1957,9128546,Asia,61.456,1072.546602 +Sri Lanka,1962,10421936,Asia,62.192,1074.47196 +Sri Lanka,1967,11737396,Asia,64.266,1135.514326 +Sri Lanka,1972,13016733,Asia,65.042,1213.39553 +Sri Lanka,1977,14116836,Asia,65.949,1348.775651 +Sri Lanka,1982,15410151,Asia,68.757,1648.079789 +Sri Lanka,1987,16495304,Asia,69.011,1876.766827 +Sri Lanka,1992,17587060,Asia,70.379,2153.739222 +Sri Lanka,1997,18698655,Asia,70.457,2664.477257 +Sri Lanka,2002,19576783,Asia,70.815,3015.378833 +Sri Lanka,2007,20378239,Asia,72.396,3970.095407 +Sudan,1952,8504667,Africa,38.635,1615.991129 +Sudan,1957,9753392,Africa,39.624,1770.337074 +Sudan,1962,11183227,Africa,40.87,1959.593767 +Sudan,1967,12716129,Africa,42.858,1687.997641 +Sudan,1972,14597019,Africa,45.083,1659.652775 +Sudan,1977,17104986,Africa,47.8,2202.988423 +Sudan,1982,20367053,Africa,50.338,1895.544073 +Sudan,1987,24725960,Africa,51.744,1507.819159 +Sudan,1992,28227588,Africa,53.556,1492.197043 +Sudan,1997,32160729,Africa,55.373,1632.210764 +Sudan,2002,37090298,Africa,56.369,1993.398314 +Sudan,2007,42292929,Africa,58.556,2602.394995 +Swaziland,1952,290243,Africa,41.407,1148.376626 +Swaziland,1957,326741,Africa,43.424,1244.708364 +Swaziland,1962,370006,Africa,44.992,1856.182125 +Swaziland,1967,420690,Africa,46.633,2613.101665 +Swaziland,1972,480105,Africa,49.552,3364.836625 +Swaziland,1977,551425,Africa,52.537,3781.410618 +Swaziland,1982,649901,Africa,55.561,3895.384018 +Swaziland,1987,779348,Africa,57.678,3984.839812 +Swaziland,1992,962344,Africa,58.474,3553.0224 +Swaziland,1997,1054486,Africa,54.289,3876.76846 +Swaziland,2002,1130269,Africa,43.869,4128.116943 +Swaziland,2007,1133066,Africa,39.613,4513.480643 +Sweden,1952,7124673,Europe,71.86,8527.844662 +Sweden,1957,7363802,Europe,72.49,9911.878226 +Sweden,1962,7561588,Europe,73.37,12329.44192 +Sweden,1967,7867931,Europe,74.16,15258.29697 +Sweden,1972,8122293,Europe,74.72,17832.02464 +Sweden,1977,8251648,Europe,75.44,18855.72521 +Sweden,1982,8325260,Europe,76.42,20667.38125 +Sweden,1987,8421403,Europe,77.19,23586.92927 +Sweden,1992,8718867,Europe,78.16,23880.01683 +Sweden,1997,8897619,Europe,79.39,25266.59499 
+Sweden,2002,8954175,Europe,80.04,29341.63093 +Sweden,2007,9031088,Europe,80.884,33859.74835 +Switzerland,1952,4815000,Europe,69.62,14734.23275 +Switzerland,1957,5126000,Europe,70.56,17909.48973 +Switzerland,1962,5666000,Europe,71.32,20431.0927 +Switzerland,1967,6063000,Europe,72.77,22966.14432 +Switzerland,1972,6401400,Europe,73.78,27195.11304 +Switzerland,1977,6316424,Europe,75.39,26982.29052 +Switzerland,1982,6468126,Europe,76.21,28397.71512 +Switzerland,1987,6649942,Europe,77.41,30281.70459 +Switzerland,1992,6995447,Europe,78.03,31871.5303 +Switzerland,1997,7193761,Europe,79.37,32135.32301 +Switzerland,2002,7361757,Europe,80.62,34480.95771 +Switzerland,2007,7554661,Europe,81.701,37506.41907 +Syria,1952,3661549,Asia,45.883,1643.485354 +Syria,1957,4149908,Asia,48.284,2117.234893 +Syria,1962,4834621,Asia,50.305,2193.037133 +Syria,1967,5680812,Asia,53.655,1881.923632 +Syria,1972,6701172,Asia,57.296,2571.423014 +Syria,1977,7932503,Asia,61.195,3195.484582 +Syria,1982,9410494,Asia,64.59,3761.837715 +Syria,1987,11242847,Asia,66.974,3116.774285 +Syria,1992,13219062,Asia,69.249,3340.542768 +Syria,1997,15081016,Asia,71.527,4014.238972 +Syria,2002,17155814,Asia,73.053,4090.925331 +Syria,2007,19314747,Asia,74.143,4184.548089 +Taiwan,1952,8550362,Asia,58.5,1206.947913 +Taiwan,1957,10164215,Asia,62.4,1507.86129 +Taiwan,1962,11918938,Asia,65.2,1822.879028 +Taiwan,1967,13648692,Asia,67.5,2643.858681 +Taiwan,1972,15226039,Asia,69.39,4062.523897 +Taiwan,1977,16785196,Asia,70.59,5596.519826 +Taiwan,1982,18501390,Asia,72.16,7426.354774 +Taiwan,1987,19757799,Asia,73.4,11054.56175 +Taiwan,1992,20686918,Asia,74.26,15215.6579 +Taiwan,1997,21628605,Asia,75.25,20206.82098 +Taiwan,2002,22454239,Asia,76.99,23235.42329 +Taiwan,2007,23174294,Asia,78.4,28718.27684 +Tanzania,1952,8322925,Africa,41.215,716.6500721 +Tanzania,1957,9452826,Africa,42.974,698.5356073 +Tanzania,1962,10863958,Africa,44.246,722.0038073 +Tanzania,1967,12607312,Africa,45.757,848.2186575 +Tanzania,1972,14706593,Africa,47.62,915.9850592 +Tanzania,1977,17129565,Africa,49.919,962.4922932 +Tanzania,1982,19844382,Africa,50.608,874.2426069 +Tanzania,1987,23040630,Africa,51.535,831.8220794 +Tanzania,1992,26605473,Africa,50.44,825.682454 +Tanzania,1997,30686889,Africa,48.466,789.1862231 +Tanzania,2002,34593779,Africa,49.651,899.0742111 +Tanzania,2007,38139640,Africa,52.517,1107.482182 +Thailand,1952,21289402,Asia,50.848,757.7974177 +Thailand,1957,25041917,Asia,53.63,793.5774148 +Thailand,1962,29263397,Asia,56.061,1002.199172 +Thailand,1967,34024249,Asia,58.285,1295.46066 +Thailand,1972,39276153,Asia,60.405,1524.358936 +Thailand,1977,44148285,Asia,62.494,1961.224635 +Thailand,1982,48827160,Asia,64.597,2393.219781 +Thailand,1987,52910342,Asia,66.084,2982.653773 +Thailand,1992,56667095,Asia,67.298,4616.896545 +Thailand,1997,60216677,Asia,67.521,5852.625497 +Thailand,2002,62806748,Asia,68.564,5913.187529 +Thailand,2007,65068149,Asia,70.616,7458.396327 +Togo,1952,1219113,Africa,38.596,859.8086567 +Togo,1957,1357445,Africa,41.208,925.9083202 +Togo,1962,1528098,Africa,43.922,1067.53481 +Togo,1967,1735550,Africa,46.769,1477.59676 +Togo,1972,2056351,Africa,49.759,1649.660188 +Togo,1977,2308582,Africa,52.887,1532.776998 +Togo,1982,2644765,Africa,55.471,1344.577953 +Togo,1987,3154264,Africa,56.941,1202.201361 +Togo,1992,3747553,Africa,58.061,1034.298904 +Togo,1997,4320890,Africa,58.39,982.2869243 +Togo,2002,4977378,Africa,57.561,886.2205765 +Togo,2007,5701579,Africa,58.42,882.9699438 +Trinidad and Tobago,1952,662850,Americas,59.1,3023.271928 +Trinidad and 
Tobago,1957,764900,Americas,61.8,4100.3934 +Trinidad and Tobago,1962,887498,Americas,64.9,4997.523971 +Trinidad and Tobago,1967,960155,Americas,65.4,5621.368472 +Trinidad and Tobago,1972,975199,Americas,65.9,6619.551419 +Trinidad and Tobago,1977,1039009,Americas,68.3,7899.554209 +Trinidad and Tobago,1982,1116479,Americas,68.832,9119.528607 +Trinidad and Tobago,1987,1191336,Americas,69.582,7388.597823 +Trinidad and Tobago,1992,1183669,Americas,69.862,7370.990932 +Trinidad and Tobago,1997,1138101,Americas,69.465,8792.573126 +Trinidad and Tobago,2002,1101832,Americas,68.976,11460.60023 +Trinidad and Tobago,2007,1056608,Americas,69.819,18008.50924 +Tunisia,1952,3647735,Africa,44.6,1468.475631 +Tunisia,1957,3950849,Africa,47.1,1395.232468 +Tunisia,1962,4286552,Africa,49.579,1660.30321 +Tunisia,1967,4786986,Africa,52.053,1932.360167 +Tunisia,1972,5303507,Africa,55.602,2753.285994 +Tunisia,1977,6005061,Africa,59.837,3120.876811 +Tunisia,1982,6734098,Africa,64.048,3560.233174 +Tunisia,1987,7724976,Africa,66.894,3810.419296 +Tunisia,1992,8523077,Africa,70.001,4332.720164 +Tunisia,1997,9231669,Africa,71.973,4876.798614 +Tunisia,2002,9770575,Africa,73.042,5722.895655 +Tunisia,2007,10276158,Africa,73.923,7092.923025 +Turkey,1952,22235677,Europe,43.585,1969.10098 +Turkey,1957,25670939,Europe,48.079,2218.754257 +Turkey,1962,29788695,Europe,52.098,2322.869908 +Turkey,1967,33411317,Europe,54.336,2826.356387 +Turkey,1972,37492953,Europe,57.005,3450.69638 +Turkey,1977,42404033,Europe,59.507,4269.122326 +Turkey,1982,47328791,Europe,61.036,4241.356344 +Turkey,1987,52881328,Europe,63.108,5089.043686 +Turkey,1992,58179144,Europe,66.146,5678.348271 +Turkey,1997,63047647,Europe,68.835,6601.429915 +Turkey,2002,67308928,Europe,70.845,6508.085718 +Turkey,2007,71158647,Europe,71.777,8458.276384 +Uganda,1952,5824797,Africa,39.978,734.753484 +Uganda,1957,6675501,Africa,42.571,774.3710692 +Uganda,1962,7688797,Africa,45.344,767.2717398 +Uganda,1967,8900294,Africa,48.051,908.9185217 +Uganda,1972,10190285,Africa,51.016,950.735869 +Uganda,1977,11457758,Africa,50.35,843.7331372 +Uganda,1982,12939400,Africa,49.849,682.2662268 +Uganda,1987,15283050,Africa,51.509,617.7244065 +Uganda,1992,18252190,Africa,48.825,644.1707969 +Uganda,1997,21210254,Africa,44.578,816.559081 +Uganda,2002,24739869,Africa,47.813,927.7210018 +Uganda,2007,29170398,Africa,51.542,1056.380121 +United Kingdom,1952,50430000,Europe,69.18,9979.508487 +United Kingdom,1957,51430000,Europe,70.42,11283.17795 +United Kingdom,1962,53292000,Europe,70.76,12477.17707 +United Kingdom,1967,54959000,Europe,71.36,14142.85089 +United Kingdom,1972,56079000,Europe,72.01,15895.11641 +United Kingdom,1977,56179000,Europe,72.76,17428.74846 +United Kingdom,1982,56339704,Europe,74.04,18232.42452 +United Kingdom,1987,56981620,Europe,75.007,21664.78767 +United Kingdom,1992,57866349,Europe,76.42,22705.09254 +United Kingdom,1997,58808266,Europe,77.218,26074.53136 +United Kingdom,2002,59912431,Europe,78.471,29478.99919 +United Kingdom,2007,60776238,Europe,79.425,33203.26128 +United States,1952,157553000,Americas,68.44,13990.48208 +United States,1957,171984000,Americas,69.49,14847.12712 +United States,1962,186538000,Americas,70.21,16173.14586 +United States,1967,198712000,Americas,70.76,19530.36557 +United States,1972,209896000,Americas,71.34,21806.03594 +United States,1977,220239000,Americas,73.38,24072.63213 +United States,1982,232187835,Americas,74.65,25009.55914 +United States,1987,242803533,Americas,75.02,29884.35041 +United States,1992,256894189,Americas,76.09,32003.93224 +United 
States,1997,272911760,Americas,76.81,35767.43303 +United States,2002,287675526,Americas,77.31,39097.09955 +United States,2007,301139947,Americas,78.242,42951.65309 +Uruguay,1952,2252965,Americas,66.071,5716.766744 +Uruguay,1957,2424959,Americas,67.044,6150.772969 +Uruguay,1962,2598466,Americas,68.253,5603.357717 +Uruguay,1967,2748579,Americas,68.468,5444.61962 +Uruguay,1972,2829526,Americas,68.673,5703.408898 +Uruguay,1977,2873520,Americas,69.481,6504.339663 +Uruguay,1982,2953997,Americas,70.805,6920.223051 +Uruguay,1987,3045153,Americas,71.918,7452.398969 +Uruguay,1992,3149262,Americas,72.752,8137.004775 +Uruguay,1997,3262838,Americas,74.223,9230.240708 +Uruguay,2002,3363085,Americas,75.307,7727.002004 +Uruguay,2007,3447496,Americas,76.384,10611.46299 +Venezuela,1952,5439568,Americas,55.088,7689.799761 +Venezuela,1957,6702668,Americas,57.907,9802.466526 +Venezuela,1962,8143375,Americas,60.77,8422.974165 +Venezuela,1967,9709552,Americas,63.479,9541.474188 +Venezuela,1972,11515649,Americas,65.712,10505.25966 +Venezuela,1977,13503563,Americas,67.456,13143.95095 +Venezuela,1982,15620766,Americas,68.557,11152.41011 +Venezuela,1987,17910182,Americas,70.19,9883.584648 +Venezuela,1992,20265563,Americas,71.15,10733.92631 +Venezuela,1997,22374398,Americas,72.146,10165.49518 +Venezuela,2002,24287670,Americas,72.766,8605.047831 +Venezuela,2007,26084662,Americas,73.747,11415.80569 +Vietnam,1952,26246839,Asia,40.412,605.0664917 +Vietnam,1957,28998543,Asia,42.887,676.2854478 +Vietnam,1962,33796140,Asia,45.363,772.0491602 +Vietnam,1967,39463910,Asia,47.838,637.1232887 +Vietnam,1972,44655014,Asia,50.254,699.5016441 +Vietnam,1977,50533506,Asia,55.764,713.5371196 +Vietnam,1982,56142181,Asia,58.816,707.2357863 +Vietnam,1987,62826491,Asia,62.82,820.7994449 +Vietnam,1992,69940728,Asia,67.662,989.0231487 +Vietnam,1997,76048996,Asia,70.672,1385.896769 +Vietnam,2002,80908147,Asia,73.017,1764.456677 +Vietnam,2007,85262356,Asia,74.249,2441.576404 +West Bank and Gaza,1952,1030585,Asia,43.16,1515.592329 +West Bank and Gaza,1957,1070439,Asia,45.671,1827.067742 +West Bank and Gaza,1962,1133134,Asia,48.127,2198.956312 +West Bank and Gaza,1967,1142636,Asia,51.631,2649.715007 +West Bank and Gaza,1972,1089572,Asia,56.532,3133.409277 +West Bank and Gaza,1977,1261091,Asia,60.765,3682.831494 +West Bank and Gaza,1982,1425876,Asia,64.406,4336.032082 +West Bank and Gaza,1987,1691210,Asia,67.046,5107.197384 +West Bank and Gaza,1992,2104779,Asia,69.718,6017.654756 +West Bank and Gaza,1997,2826046,Asia,71.096,7110.667619 +West Bank and Gaza,2002,3389578,Asia,72.37,4515.487575 +West Bank and Gaza,2007,4018332,Asia,73.422,3025.349798 +"Yemen, Rep.",1952,4963829,Asia,32.548,781.7175761 +"Yemen, Rep.",1957,5498090,Asia,33.97,804.8304547 +"Yemen, Rep.",1962,6120081,Asia,35.18,825.6232006 +"Yemen, Rep.",1967,6740785,Asia,36.984,862.4421463 +"Yemen, Rep.",1972,7407075,Asia,39.848,1265.047031 +"Yemen, Rep.",1977,8403990,Asia,44.175,1829.765177 +"Yemen, Rep.",1982,9657618,Asia,49.113,1977.55701 +"Yemen, Rep.",1987,11219340,Asia,52.922,1971.741538 +"Yemen, Rep.",1992,13367997,Asia,55.599,1879.496673 +"Yemen, Rep.",1997,15826497,Asia,58.02,2117.484526 +"Yemen, Rep.",2002,18701257,Asia,60.308,2234.820827 +"Yemen, Rep.",2007,22211743,Asia,62.698,2280.769906 +Zambia,1952,2672000,Africa,42.038,1147.388831 +Zambia,1957,3016000,Africa,44.077,1311.956766 +Zambia,1962,3421000,Africa,46.023,1452.725766 +Zambia,1967,3900000,Africa,47.768,1777.077318 +Zambia,1972,4506497,Africa,50.107,1773.498265 +Zambia,1977,5216550,Africa,51.386,1588.688299 
+Zambia,1982,6100407,Africa,51.821,1408.678565 +Zambia,1987,7272406,Africa,50.821,1213.315116 +Zambia,1992,8381163,Africa,46.1,1210.884633 +Zambia,1997,9417789,Africa,40.238,1071.353818 +Zambia,2002,10595811,Africa,39.193,1071.613938 +Zambia,2007,11746035,Africa,42.384,1271.211593 +Zimbabwe,1952,3080907,Africa,48.451,406.8841148 +Zimbabwe,1957,3646340,Africa,50.469,518.7642681 +Zimbabwe,1962,4277736,Africa,52.358,527.2721818 +Zimbabwe,1967,4995432,Africa,53.995,569.7950712 +Zimbabwe,1972,5861135,Africa,55.635,799.3621758 +Zimbabwe,1977,6642107,Africa,57.674,685.5876821 +Zimbabwe,1982,7636524,Africa,60.363,788.8550411 +Zimbabwe,1987,9216418,Africa,62.351,706.1573059 +Zimbabwe,1992,10704340,Africa,60.377,693.4207856 +Zimbabwe,1997,11404948,Africa,46.809,792.4499603 +Zimbabwe,2002,11926563,Africa,39.989,672.0386227 +Zimbabwe,2007,12311143,Africa,43.487,469.7092981 \ No newline at end of file diff --git a/ui/playwright/test-data-flow.spec.ts b/ui/playwright/test-data-flow.spec.ts new file mode 100644 index 0000000..9c8c3cb --- /dev/null +++ b/ui/playwright/test-data-flow.spec.ts @@ -0,0 +1,87 @@ +import { test, expect } from '@playwright/test'; +import { v4 } from 'uuid'; +import fs from 'fs'; + +const domain = process.env.DOMAIN; +const datasetName = `ui_test_dataset_${v4().replace('-', '_').slice(0, 8)}` +const filePath = 'playwright/gapminder.csv' +const downloadPath = `playwright/.downloads/${datasetName}` + +test('test', async ({ page }) => { + await page.goto(domain); + + // Create a schema + await page.locator('div[role="button"]:has-text("Create Schema")').click(); + await expect(page).toHaveURL(`${domain}/schema/create`); + await page.locator('[data-testid="field-level"]').selectOption('PUBLIC'); + await page.locator('[data-testid="field-layer"]').selectOption('default'); + await page.locator('[data-testid="field-domain"]').click(); + await page.locator('[data-testid="field-domain"]').fill('ui_test_domain'); + await page.locator('[data-testid="field-title"]').click(); + await page.locator('[data-testid="field-title"]').fill(datasetName); + await page.locator('[data-testid="field-file"]').click(); + await page.locator('[data-testid="field-file"]').setInputFiles(filePath); + await page.locator('[data-testid="submit"]').click(); + await page.locator('input[name="ownerEmail"]').click(); + await page.locator('input[name="ownerEmail"]').fill('ui_test@email.com'); + await page.locator('input[name="ownerName"]').click(); + await page.locator('input[name="ownerName"]').fill('ui_test'); + await page.locator('button:has-text("Create Schema")').click(); + // @ts-ignore + const schemaCreatedElement = await page.waitForSelector('.MuiAlertTitle-root', { text: 'Schema Created' }); + + expect(await schemaCreatedElement.innerText()).toEqual('Schema Created'); + + // Upload a dataset + await page.getByRole('button', { name: 'Upload data' }).click(); + await page.getByTestId('select-layer').getByRole('combobox').click(); + await page.getByRole('option', { name: 'default' }).click(); + await page.getByTestId('select-domain').getByRole('combobox').click(); + await page.getByRole('option', { name: 'ui_test_domain' }).click(); + await page.getByTestId('select-dataset').getByRole('combobox').click(); + await page.getByRole('option', { name: datasetName }).click(); + await page.getByTestId('upload').click(); + await page.getByTestId('upload').setInputFiles(filePath); + await page.getByTestId('submit').click(); + + expect(await page.getByText('Data uploaded successfully').textContent()).toEqual('Status: Data 
uploaded successfully') + + // Download the dataset + await page.getByRole('button', { name: 'Download data' }).click(); + await page.getByTestId('select-layer').getByRole('combobox').click(); + await page.getByRole('option', { name: 'default' }).click(); + await page.getByTestId('select-domain').getByRole('combobox').click(); + await page.getByRole('option', { name: 'ui_test_domain' }).click(); + await page.getByTestId('select-dataset').getByRole('combobox').click(); + await page.getByRole('option', { name: datasetName }).click(); + await page.getByTestId('submit').click(); + await page.locator('div').filter({ hasText: 'Row Limit' }).locator('div').nth(1).click(); + await page.getByPlaceholder('30').fill('200'); + const downloadPromise = page.waitForEvent('download'); + await page.getByRole('button', { name: 'Download', exact: true }).click(); + const download = await downloadPromise; + await download.saveAs(downloadPath) + + expect(fs.existsSync(downloadPath)).toBeTruthy() + + fs.rm(downloadPath, (err) => { + err ? console.error(err) : console.log("Download deleted") + }) + + // Delete the dataset + await page.getByRole('button', { name: 'Delete data' }).click(); + await page.locator('div[role="button"]:has-text("Delete data")').click(); + await page.getByTestId('select-layer').getByRole('combobox').click(); + await page.getByRole('option', { name: 'default' }).click(); + await page.getByTestId('select-domain').getByRole('combobox').click(); + await page.getByRole('option', { name: 'ui_test_domain' }).click(); + await page.getByTestId('select-dataset').getByRole('combobox').click(); + await page.getByRole('option', { name: datasetName }).click(); + await page.getByTestId('submit').click(); + + // @ts-ignore + const datasetDeletedElement = await page.waitForSelector('.MuiAlertTitle-root', { text: `Dataset deleted: default/ui_test_domain/${datasetName}` }); + + expect(await datasetDeletedElement.innerText()).toEqual(`Dataset deleted: default/ui_test_domain/${datasetName}`); + +}); \ No newline at end of file diff --git a/ui/playwright/test-homepage.spec.ts b/ui/playwright/test-homepage.spec.ts new file mode 100644 index 0000000..eb4bedf --- /dev/null +++ b/ui/playwright/test-homepage.spec.ts @@ -0,0 +1,18 @@ +import { test, expect } from '@playwright/test'; + +const domain = process.env.DOMAIN + +test('test', async ({ page }) => { + await page.goto(domain); + await page.getByRole('button', { name: 'Create User' }).click(); + await page.getByRole('button', { name: 'Modify User' }).click(); + await page.getByRole('button', { name: 'Download data' }).click(); + await page.getByRole('button', { name: 'Upload data' }).click(); + await page.getByRole('button', { name: 'Create Schema' }).click(); + await page.getByRole('button', { name: 'Task Status' }).click(); + await page.getByRole('link', { name: 'Home' }).click(); + await page.getByRole('link', { name: 'Create User' }).nth(1).click(); + await page.getByRole('link', { name: 'Home' }).click(); + await page.getByRole('button', { name: 'account of current user' }).click(); + await page.getByText('Logout').click(); +}); \ No newline at end of file diff --git a/ui/playwright/test-user-flow.spec.ts b/ui/playwright/test-user-flow.spec.ts new file mode 100644 index 0000000..03bdc16 --- /dev/null +++ b/ui/playwright/test-user-flow.spec.ts @@ -0,0 +1,21 @@ +import { test, expect } from '@playwright/test'; + +const domain = process.env.DOMAIN; +const user = `${process.env.RESOURCE_PREFIX}_ui_test_user` + +test('test', async ({ page }) => { + await 
page.goto(domain); + + // Click div[role="button"]:has-text("Modify User") + await page.locator('div[role="button"]:has-text("Modify User")').click(); + await expect(page).toHaveURL(`${domain}/subject/modify`); + + await page.locator('[data-testid="field-user"]').selectOption({ 'label': user }) + await page.locator('[data-testid="submit-button"]').click(); + + await page.getByRole('row', { name: 'DATA_ADMIN' }).getByRole('button').click(); + await page.getByTestId('select-type').selectOption('DATA_ADMIN'); + await page.getByRole('row').filter({ hasText: 'ActionDATA_ADMIN' }).getByRole('button').click(); + await page.getByTestId('submit').click(); + await expect(page).toHaveURL(/success/); +}); \ No newline at end of file diff --git a/ui/src/__tests__/app.test.tsx b/ui/src/__tests__/app.test.tsx index f9495cd..1a5614b 100644 --- a/ui/src/__tests__/app.test.tsx +++ b/ui/src/__tests__/app.test.tsx @@ -1,5 +1,5 @@ import fetchMock from 'jest-fetch-mock' -import { renderWithProviders } from '@/utils/test-utils' +import { renderWithProviders } from '@/lib/test-utils' import AppPage from '@/pages/_app' jest.useFakeTimers() diff --git a/ui/src/__tests__/catalog.test.tsx b/ui/src/__tests__/catalog.test.tsx index dfc64a0..40b7791 100644 --- a/ui/src/__tests__/catalog.test.tsx +++ b/ui/src/__tests__/catalog.test.tsx @@ -1,6 +1,6 @@ import { screen, waitForElementToBeRemoved } from '@testing-library/react' import fetchMock from 'jest-fetch-mock' -import { renderWithProviders } from '@/utils/test-utils' +import { renderWithProviders } from '@/lib/test-utils' import CatalogPage from '@/pages/catalog/[search]' import { MetadataSearchResponse } from '@/service/types' diff --git a/ui/src/__tests__/data/delete.test.tsx b/ui/src/__tests__/data/delete.test.tsx index e309d2d..30e1a51 100644 --- a/ui/src/__tests__/data/delete.test.tsx +++ b/ui/src/__tests__/data/delete.test.tsx @@ -1,14 +1,12 @@ import { - fireEvent, screen, waitFor, waitForElementToBeRemoved, - within } from '@testing-library/react' import userEvent from '@testing-library/user-event' import fetchMock from 'jest-fetch-mock' import DeletePage from '@/pages/data/delete' -import { mockDataSetsList, renderWithProviders } from '@/utils/test-utils' +import { mockDataset, mockDataSetsList, renderWithProviders, selectAutocompleteOption } from '@/lib/test-utils' import { DeleteDatasetResponse } from '@/service/types' describe('Page: Delete page', () => { @@ -19,26 +17,20 @@ describe('Page: Delete page', () => { it('renders', async () => { fetchMock.mockResponseOnce(JSON.stringify(mockDataSetsList), { status: 200 }) - renderWithProviders() + renderWithProviders() await waitForElementToBeRemoved(() => screen.queryByRole('progressbar')) const datasetDropdown = screen.getByTestId('select-dataset') expect(datasetDropdown).toBeVisible() - for (const key in mockDataSetsList) { - mockDataSetsList[key].forEach - for (const { dataset } of mockDataSetsList[key]) { - const option = within(datasetDropdown).getByRole('option', { name: dataset }) - expect(option).toBeInTheDocument() - expect(option).toHaveValue(`${key}/${dataset}`) - } - } - }) + expect(screen.getByTestId('submit')).toBeInTheDocument() + } + ) it('error on fetch', async () => { fetchMock.mockReject(new Error('fake error message')) - renderWithProviders() + renderWithProviders() await waitForElementToBeRemoved(() => screen.queryByRole('progressbar')) await waitFor(async () => { @@ -49,20 +41,15 @@ describe('Page: Delete page', () => { describe('on submit', () => { it('success', async () => { 
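Note on the three Playwright specs added above (test-data-flow, test-homepage, test-user-flow): each resolves the deployment to test against from process.env.DOMAIN, and the user-flow spec additionally derives its subject name from RESOURCE_PREFIX. A minimal configuration sketch that would centralise the host rather than repeating page.goto(domain) in every spec; the filename and testDir here are assumptions, since the actual Playwright config is not shown in this section of the patch:

import { defineConfig } from '@playwright/test'

// Hypothetical playwright.config.ts, not part of this patch section.
export default defineConfig({
  testDir: './playwright',
  use: {
    // With baseURL set once here, specs could call page.goto('/')
    // instead of repeating page.goto(process.env.DOMAIN) in each test.
    baseURL: process.env.DOMAIN
  }
})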
fetchMock.mockResponseOnce(JSON.stringify(mockDataSetsList), { status: 200 }) - renderWithProviders() + renderWithProviders() await waitForElementToBeRemoved(() => screen.queryByRole('progressbar')) - userEvent.selectOptions( - screen.getByTestId('select-dataset'), - mockDataSetsList['Pizza'][0].dataset - ) - await userEvent.click(screen.getByTestId('submit')) await waitFor(async () => { expect(fetchMock).toHaveBeenLastCalledWith( - '/api/datasets/Pizza/bit_complicated', + '/api/datasets/layer/domain/dataset', expect.objectContaining({ credentials: 'include', method: 'DELETE' @@ -75,13 +62,17 @@ describe('Page: Delete page', () => { const mockSuccess: DeleteDatasetResponse = { details: 'dataset successfully deleted' } - fetchMock.mockResponses( [JSON.stringify(mockDataSetsList), { status: 200 }], [JSON.stringify(mockSuccess), { status: 200 }] ) renderWithProviders() await waitForElementToBeRemoved(() => screen.queryByRole('progressbar')) + + selectAutocompleteOption('select-layer', 'layer') + selectAutocompleteOption('select-domain', 'Pizza') + selectAutocompleteOption('select-dataset', 'bit_complicated') + await userEvent.click(screen.getByTestId('submit')) await waitFor(async () => { @@ -89,13 +80,13 @@ describe('Page: Delete page', () => { }) expect( - screen.getByText('Dataset deleted: Pizza/bit_complicated') + screen.getByText('Dataset deleted: layer/Pizza/bit_complicated') ).toBeInTheDocument() }) it('api error', async () => { fetchMock.mockResponseOnce(JSON.stringify(mockDataSetsList), { status: 200 }) - renderWithProviders() + renderWithProviders() await waitForElementToBeRemoved(() => screen.queryByRole('progressbar')) diff --git a/ui/src/__tests__/data/download.test.tsx b/ui/src/__tests__/data/download.test.tsx index dd4db36..b16f4d6 100644 --- a/ui/src/__tests__/data/download.test.tsx +++ b/ui/src/__tests__/data/download.test.tsx @@ -2,11 +2,10 @@ import { screen, waitFor, waitForElementToBeRemoved, - within } from '@testing-library/react' import userEvent from '@testing-library/user-event' import fetchMock from 'jest-fetch-mock' -import { mockDataSetsList, renderWithProviders } from '@/utils/test-utils' +import { mockDataset, mockDataSetsList, renderWithProviders } from '@/lib/test-utils' import DownloadPage from '@/pages/data/download/' const pushSpy = jest.fn() @@ -26,38 +25,23 @@ describe('Page: Download page', () => { it('renders dataset drodown', async () => { fetchMock.mockResponseOnce(JSON.stringify(mockDataSetsList), { status: 200 }) - renderWithProviders() + renderWithProviders() await waitForElementToBeRemoved(() => screen.queryByRole('progressbar')) const datasetDropdown = screen.getByTestId('select-dataset') expect(datasetDropdown).toBeVisible() - for (const key in mockDataSetsList) { - mockDataSetsList[key].forEach - for (const { dataset } of mockDataSetsList[key]) { - const option = within(datasetDropdown).getByRole('option', { - name: dataset - }) - expect(option).toBeInTheDocument() - expect(option).toHaveValue(`${key}/${dataset}`) - } - } - }) + expect(screen.getByTestId('submit')).toBeInTheDocument() + } + ) - it('renders version', async () => { + it('renders dataset-selector', async () => { fetchMock.mockResponseOnce(JSON.stringify(mockDataSetsList), { status: 200 }) - renderWithProviders() - await waitForElementToBeRemoved(() => screen.queryByRole('progressbar')) + renderWithProviders() + await waitFor(async () => { - expect(screen.getByTestId('select-version')).toBeInTheDocument() - }) - ;[...Array(2).keys()].forEach((i) => { - expect( - 
within(screen.getByTestId('select-version')).getByRole('option', { - name: (i + 1).toString() - }) - ).toBeInTheDocument() + expect(screen.getByTestId('select-layer')).toBeInTheDocument() }) }) @@ -73,24 +57,21 @@ describe('Page: Download page', () => { it('on submit', async () => { fetchMock.mockResponseOnce(JSON.stringify(mockDataSetsList), { status: 200 }) - renderWithProviders() + renderWithProviders() await waitForElementToBeRemoved(() => screen.queryByRole('progressbar')) - await waitFor(async () => { - expect(screen.getByTestId('select-version')).toBeInTheDocument() - }) await userEvent.click(screen.getByTestId('submit')) await waitFor(async () => { expect(pushSpy).toHaveBeenCalledWith( - `/data/download/Pizza/bit_complicated?version=3` + `/data/download/layer/domain/dataset?version=1` ) }) }) it('should display helper text when there is no data', async () => { fetchMock.mockResponseOnce(JSON.stringify({}), { status: 200 }) - renderWithProviders() + renderWithProviders() await waitForElementToBeRemoved(() => screen.queryByRole('progressbar')) await waitFor(async () => { expect(screen.getByTestId('no-data-helper')).toBeInTheDocument() diff --git a/ui/src/__tests__/data/upload.test.tsx b/ui/src/__tests__/data/upload.test.tsx index dd07edf..0ae028e 100644 --- a/ui/src/__tests__/data/upload.test.tsx +++ b/ui/src/__tests__/data/upload.test.tsx @@ -3,14 +3,14 @@ import { screen, waitFor, waitForElementToBeRemoved, - within } from '@testing-library/react' import userEvent from '@testing-library/user-event' import fetchMock from 'jest-fetch-mock' -import { mockDataSetsList, renderWithProviders } from '@/utils/test-utils' +import { mockDataset, mockDataSetsList, renderWithProviders, selectAutocompleteOption } from '@/lib/test-utils' import UploadPage from '@/pages/data/upload' import { UploadDatasetResponse } from '@/service/types' + const pushSpy = jest.fn() jest.mock('next/router', () => ({ ...jest.requireActual('next/router'), @@ -27,31 +27,21 @@ describe('Page: Upload page', () => { }) it('renders', async () => { + fetchMock.mockResponseOnce(JSON.stringify(mockDataSetsList), { status: 200 }) - renderWithProviders() + renderWithProviders() await waitForElementToBeRemoved(() => screen.queryByRole('progressbar')) const datasetDropdown = screen.getByTestId('select-dataset') expect(datasetDropdown).toBeVisible() - for (const key in mockDataSetsList) { - mockDataSetsList[key].forEach - for (const { dataset } of mockDataSetsList[key]) { - const option = within(datasetDropdown).getByRole('option', { - name: dataset - }) - expect(option).toBeInTheDocument() - expect(option).toHaveValue(`${key}/${dataset}`) - } - } - expect(screen.getByTestId('upload')).toBeInTheDocument() }) it('error on fetch', async () => { fetchMock.mockReject(new Error('fake error message')) - renderWithProviders() + renderWithProviders() await waitForElementToBeRemoved(() => screen.queryByRole('progressbar')) @@ -69,20 +59,20 @@ describe('Page: Upload page', () => { await waitForElementToBeRemoved(() => screen.queryByRole('progressbar')) + + selectAutocompleteOption('select-layer', 'layer') + selectAutocompleteOption('select-domain', 'Pizza') + selectAutocompleteOption('select-dataset', 'bit_complicated') + await fireEvent.change(screen.getByTestId('upload'), { target: { files: [file] } }) - userEvent.selectOptions( - screen.getByTestId('select-dataset'), - mockDataSetsList['Pizza'][0].dataset - ) - await userEvent.click(screen.getByTestId('submit')) await waitFor(async () => { expect(fetchMock).toHaveBeenLastCalledWith( - 
'/api/datasets/Pizza/bit_complicated', + '/api/datasets/layer/Pizza/bit_complicated?version=3', expect.objectContaining({ body: new FormData(), credentials: 'include', @@ -106,9 +96,9 @@ describe('Page: Upload page', () => { fetchMock.mockResponses( [JSON.stringify(mockDataSetsList), { status: 200 }], [JSON.stringify(mockSuccess), { status: 200 }], - [JSON.stringify({ status: 'FAILED' }), { status: 200 }] + [JSON.stringify({ status: "FAILED" }), { status: 200 }] ) - renderWithProviders() + renderWithProviders() await waitForElementToBeRemoved(() => screen.queryByRole('progressbar')) await userEvent.click(screen.getByTestId('submit')) @@ -141,9 +131,9 @@ describe('Page: Upload page', () => { fetchMock.mockResponses( [JSON.stringify(mockDataSetsList), { status: 200 }], [JSON.stringify(mockSuccess), { status: 200 }], - [JSON.stringify({ status: 'SUCCESS' }), { status: 200 }] + [JSON.stringify({ status: "SUCCESS" }), { status: 200 }] ) - renderWithProviders() + renderWithProviders() await waitForElementToBeRemoved(() => screen.queryByRole('progressbar')) await userEvent.click(screen.getByTestId('submit')) @@ -176,9 +166,9 @@ describe('Page: Upload page', () => { fetchMock.mockResponses( [JSON.stringify(mockDataSetsList), { status: 200 }], [JSON.stringify(mockSuccess), { status: 200 }], - [JSON.stringify({ status: 'IN PROGRESS' }), { status: 200 }] + [JSON.stringify({ status: "IN PROGRESS" }), { status: 200 }] ) - renderWithProviders() + renderWithProviders() await waitForElementToBeRemoved(() => screen.queryByRole('progressbar')) await userEvent.click(screen.getByTestId('submit')) @@ -196,14 +186,12 @@ describe('Page: Upload page', () => { }) }) - it('api error', async () => { + it('api error upload', async () => { fetchMock.mockResponseOnce(JSON.stringify(mockDataSetsList), { status: 200 }) - renderWithProviders() - + renderWithProviders() await waitForElementToBeRemoved(() => screen.queryByRole('progressbar')) fetchMock.mockReject(new Error('fake error message')) - await userEvent.click(screen.getByTestId('submit')) await waitFor(async () => { diff --git a/ui/src/__tests__/index.test.tsx b/ui/src/__tests__/index.test.tsx index b34a54f..b5347de 100644 --- a/ui/src/__tests__/index.test.tsx +++ b/ui/src/__tests__/index.test.tsx @@ -1,6 +1,6 @@ import { screen, waitFor } from '@testing-library/react' import fetchMock from 'jest-fetch-mock' -import { renderWithProviders } from '@/utils/test-utils' +import { renderWithProviders } from '@/lib/test-utils' import IndexPage from '@/pages/index' import { MethodsResponse } from '@/service/types' diff --git a/ui/src/__tests__/login.test.tsx b/ui/src/__tests__/login.test.tsx index 30d06c7..097c1f2 100644 --- a/ui/src/__tests__/login.test.tsx +++ b/ui/src/__tests__/login.test.tsx @@ -1,6 +1,6 @@ import { screen, waitFor, waitForElementToBeRemoved } from '@testing-library/react' import fetchMock from 'jest-fetch-mock' -import { renderWithProviders } from '@/utils/test-utils' +import { renderWithProviders } from '@/lib/test-utils' import LoginPage from '@/pages/login' import { AuthResponse, GetLoginResponse } from '@/service/types' diff --git a/ui/src/__tests__/schema/create.test.tsx b/ui/src/__tests__/schema/create.test.tsx index 904b8ef..73ae851 100644 --- a/ui/src/__tests__/schema/create.test.tsx +++ b/ui/src/__tests__/schema/create.test.tsx @@ -1,7 +1,7 @@ -import { fireEvent, screen, waitFor } from '@testing-library/react' +import { fireEvent, screen, waitFor, waitForElementToBeRemoved } from '@testing-library/react' import userEvent from 
'@testing-library/user-event' import fetchMock from 'jest-fetch-mock' -import { renderWithProviders } from '@/utils/test-utils' +import { renderWithProviders } from '@/lib/test-utils' import SchemaCreatePage from '@/pages/schema/create' const mockProps = jest.fn() @@ -44,25 +44,31 @@ describe('Page: Upload page', () => { }) it('renders', async () => { + fetchMock.mockResponseOnce(JSON.stringify(['default']), { status: 200 }) renderWithProviders() + await waitForElementToBeRemoved(() => screen.queryByRole('progressbar')) + expect(screen.getByTestId('submit')).toBeInTheDocument() expect(screen.getByTestId('field-level')).toBeInTheDocument() + expect(screen.getByTestId('field-layer')).toBeInTheDocument() expect(screen.getByTestId('field-domain')).toBeInTheDocument() expect(screen.getByTestId('field-title')).toBeInTheDocument() expect(screen.getByTestId('field-file')).toBeInTheDocument() }) describe('on submit', () => { + const file = new File(['test'], 'testfile.txt', { type: 'text/plain' }) const formData = new FormData() formData.append('file', file) it('errors', async () => { + fetchMock.mockResponseOnce(JSON.stringify(['default']), { status: 200 }) renderWithProviders() - await userEvent.click(screen.getByTestId('submit')) + await waitForElementToBeRemoved(() => screen.queryByRole('progressbar')) - // expect(screen.queryByText('Required')).toHaveLength(2) + await userEvent.click(screen.getByTestId('submit')) await waitFor(async () => { expect(screen.queryAllByText('Required')).toHaveLength(3) @@ -74,10 +80,14 @@ describe('Page: Upload page', () => { }) it('success', async () => { + fetchMock.mockResponseOnce(JSON.stringify(['default']), { status: 200 }) fetchMock.mockResponseOnce(JSON.stringify(mockGenerate)) renderWithProviders() + await waitForElementToBeRemoved(() => screen.queryByRole('progressbar')) + userEvent.selectOptions(screen.getByTestId('field-level'), 'PUBLIC') + userEvent.selectOptions(screen.getByTestId('field-layer'), 'default') await userEvent.type(screen.getByTestId('field-domain'), 'my-domain') await userEvent.type(screen.getByTestId('field-title'), 'my-title') await fireEvent.change(screen.getByTestId('field-file'), { @@ -88,7 +98,7 @@ describe('Page: Upload page', () => { await waitFor(async () => { expect(fetchMock).toHaveBeenCalledWith( - '/api/schema/PUBLIC/my-domain/my-title/generate', + '/api/schema/default/PUBLIC/my-domain/my-title/generate', { body: formData, credentials: 'include', diff --git a/ui/src/__tests__/subject/create.test.tsx b/ui/src/__tests__/subject/create.test.tsx index e42c780..111beb9 100644 --- a/ui/src/__tests__/subject/create.test.tsx +++ b/ui/src/__tests__/subject/create.test.tsx @@ -1,35 +1,9 @@ import { screen, waitFor, waitForElementToBeRemoved } from '@testing-library/react' import userEvent from '@testing-library/user-event' import fetchMock from 'jest-fetch-mock' -import { renderWithProviders } from '@/utils/test-utils' +import { renderWithProviders, mockPermissionUiResponse } from '@/lib/test-utils' import SubjectCreatePage from '@/pages/subject/create/index' -const mockUiData: { [key: string]: { [key: string]: string }[] } = { - ADMIN: [ - { - display_name: 'Data', - display_name_full: 'Data admin', - name: 'DATA_ADMIN' - }, - { - display_name: 'User', - display_name_full: 'User admin', - name: 'USER_ADMIN' - } - ], - GLOBAL_WRITE: [ - { - display_name: 'ALL', - display_name_full: 'Read all', - name: 'READ_ALL' - }, - { - display_name: 'PRIVATE', - display_name_full: 'Read private', - name: 'READ_PRIVATE' - } - ] -} const pushSpy = 
jest.fn() jest.mock('next/router', () => ({ @@ -47,7 +21,7 @@ describe('Page: Subject Create', () => { }) it('renders', async () => { - fetchMock.mockResponseOnce(JSON.stringify(mockUiData), { status: 200 }) + fetchMock.mockResponseOnce(JSON.stringify(mockPermissionUiResponse), { status: 200 }) renderWithProviders() await waitForElementToBeRemoved(() => screen.queryByRole('progressbar')) @@ -55,20 +29,10 @@ describe('Page: Subject Create', () => { expect(screen.queryByTestId('field-email')).not.toBeInTheDocument() expect(screen.getByTestId('field-name')).toBeInTheDocument() expect(screen.getByTestId('submit')).toBeInTheDocument() - - expect(screen.getByText('Management Permissions')).toBeInTheDocument() - expect(screen.getByText('Global Write Permissions')).toBeInTheDocument() - - for (const key in mockUiData) { - mockUiData[key].forEach - for (const { display_name } of mockUiData[key]) { - expect(screen.getByRole('button', { name: display_name })).toBeInTheDocument() - } - } }) it('user prompts email field', async () => { - fetchMock.mockResponseOnce(JSON.stringify(mockUiData), { status: 200 }) + fetchMock.mockResponseOnce(JSON.stringify(mockPermissionUiResponse), { status: 200 }) renderWithProviders() await waitForElementToBeRemoved(() => screen.queryByRole('progressbar')) @@ -79,22 +43,27 @@ describe('Page: Subject Create', () => { }) describe('on submit', () => { - const mockData = { - client_name: 'James Bond', - client_secret: 'secret-code-word', // pragma: allowlist secret - client_id: 'id-abc123', - permissions: ['DATA_ADMIN', 'READ_PRIVATE'] - } it('client success', async () => { - fetchMock.mockResponseOnce(JSON.stringify(mockUiData), { status: 200 }) + + const mockData = { + client_name: 'James Bond', + client_secret: 'secret-code-word', + client_id: 'id-abc123', + permissions: ['DATA_ADMIN', 'READ_PRIVATE'] + } + + fetchMock.mockResponseOnce(JSON.stringify(mockPermissionUiResponse), { status: 200 }) renderWithProviders() await waitForElementToBeRemoved(() => screen.queryByRole('progressbar')) userEvent.selectOptions(screen.getByTestId('field-type'), 'Client') await userEvent.type(screen.getByTestId('field-name'), 'James Bond') - await userEvent.click(screen.getByText('Data')) - await userEvent.click(screen.getByText('PRIVATE')) + + userEvent.selectOptions(screen.getByTestId('select-type'), 'WRITE') + userEvent.selectOptions(screen.getByTestId('select-layer'), 'ALL') + userEvent.selectOptions(screen.getByTestId('select-sensitivity'), 'ALL') + await userEvent.click(screen.getByTestId('AddIcon')) await userEvent.click(screen.getByTestId('submit')) fetchMock.mockResponseOnce(JSON.stringify(mockData), { status: 200 }) @@ -103,7 +72,7 @@ describe('Page: Subject Create', () => { expect(fetchMock).toHaveBeenCalledWith( '/api/client', expect.objectContaining({ - body: '{"permissions":["DATA_ADMIN","READ_PRIVATE"],"client_name":"James Bond"}' + body: '{"permissions":["WRITE_ALL"],"client_name":"James Bond"}' }) ) }) @@ -120,31 +89,34 @@ describe('Page: Subject Create', () => { }) }) - it(' user success', async () => { + it('user success', async () => { const mockData = { username: 'user-abc', user_id: 'id-abc123', email: 'test@example.com' } - fetchMock.mockResponseOnce(JSON.stringify(mockUiData), { status: 200 }) + fetchMock.mockResponseOnce(JSON.stringify(mockPermissionUiResponse), { status: 200 }) renderWithProviders() await waitForElementToBeRemoved(() => screen.queryByRole('progressbar')) userEvent.selectOptions(screen.getByTestId('field-type'), 'User') await 
userEvent.type(screen.getByTestId('field-name'), 'James Bond') await userEvent.type(screen.getByTestId('field-email'), 'test@example.com') - await userEvent.click(screen.getByText('Data')) - await userEvent.click(screen.getByText('PRIVATE')) - await userEvent.click(screen.getByTestId('submit')) + userEvent.selectOptions(screen.getByTestId('select-type'), 'WRITE') + userEvent.selectOptions(screen.getByTestId('select-layer'), 'ALL') + userEvent.selectOptions(screen.getByTestId('select-sensitivity'), 'ALL') + await userEvent.click(screen.getByTestId('AddIcon')) + await new Promise((r) => setTimeout(r, 2000)); + await userEvent.click(screen.getByTestId('submit')) fetchMock.mockResponseOnce(JSON.stringify(mockData), { status: 200 }) await waitFor(async () => { expect(fetchMock).toHaveBeenCalledWith( '/api/user', expect.objectContaining({ - body: '{"permissions":["DATA_ADMIN","READ_PRIVATE"],"username":"James Bond","email":"test@example.com"}' + body: '{"permissions":["WRITE_ALL"],"username":"James Bond","email":"test@example.com"}' }) ) }) @@ -163,14 +135,16 @@ describe('Page: Subject Create', () => { it('server error', async () => { const error = 'server error message' - fetchMock.mockResponseOnce(JSON.stringify(mockUiData), { status: 200 }) + fetchMock.mockResponseOnce(JSON.stringify(mockPermissionUiResponse), { status: 200 }) renderWithProviders() await waitForElementToBeRemoved(() => screen.queryByRole('progressbar')) userEvent.selectOptions(screen.getByTestId('field-type'), 'Client') await userEvent.type(screen.getByTestId('field-name'), 'James Bond') - await userEvent.click(screen.getByText('Data')) - await userEvent.click(screen.getByText('PRIVATE')) + userEvent.selectOptions(screen.getByTestId('select-type'), 'WRITE') + userEvent.selectOptions(screen.getByTestId('select-layer'), 'ALL') + userEvent.selectOptions(screen.getByTestId('select-sensitivity'), 'ALL') + await userEvent.click(screen.getByTestId('AddIcon')) await userEvent.click(screen.getByTestId('submit')) fetchMock.mockReject(new Error(error)) diff --git a/ui/src/__tests__/subject/modify.test.tsx b/ui/src/__tests__/subject/modify.test.tsx index 7be2a96..7f12c2f 100644 --- a/ui/src/__tests__/subject/modify.test.tsx +++ b/ui/src/__tests__/subject/modify.test.tsx @@ -6,7 +6,7 @@ import { } from '@testing-library/react' import userEvent from '@testing-library/user-event' import fetchMock from 'jest-fetch-mock' -import { renderWithProviders } from '@/utils/test-utils' +import { renderWithProviders } from '@/lib/test-utils' import SubjectModifyPage from '@/pages/subject/modify/index' const mockData: Array> = [ diff --git a/ui/src/__tests__/tasks.test.tsx b/ui/src/__tests__/tasks.test.tsx index d36de38..23a880c 100644 --- a/ui/src/__tests__/tasks.test.tsx +++ b/ui/src/__tests__/tasks.test.tsx @@ -1,6 +1,6 @@ import { screen, waitForElementToBeRemoved } from '@testing-library/react' import fetchMock from 'jest-fetch-mock' -import { renderWithProviders } from '@/utils/test-utils' +import { renderWithProviders } from '@/lib/test-utils' import TasksPage from '@/pages/tasks/index' import { AllJobsResponse } from '@/service/types' diff --git a/ui/src/components/Autocomplete/Autocomplete.tsx b/ui/src/components/Autocomplete/Autocomplete.tsx new file mode 100644 index 0000000..1c40523 --- /dev/null +++ b/ui/src/components/Autocomplete/Autocomplete.tsx @@ -0,0 +1,29 @@ +import { ComponentProps } from 'react' +import { + Autocomplete as BaseAutocomplete, +} from '@mui/material' + +import { styled } from '@mui/system'; + + +export const 
Autocomplete = styled(BaseAutocomplete)<ComponentProps<typeof BaseAutocomplete>>` + .MuiInputBase-input { + padding: 4px 15px 0px 15px; + height: 100%; + width: 100%; + font-size: 13px; + font-weight: 400 + } + ` + + +export const GroupHeader = styled('div')(() => ({ + position: 'sticky', + top: '-8px', + padding: '4px 10px', + backgroundColor: 'lightgrey' +})); + +export const GroupItems = styled('ul')({ + padding: 0, +}); diff --git a/ui/src/components/Button/Button.test.tsx b/ui/src/components/Button/Button.test.tsx index ab59e7f..8297303 100644 --- a/ui/src/components/Button/Button.test.tsx +++ b/ui/src/components/Button/Button.test.tsx @@ -1,5 +1,5 @@ import { screen } from '@testing-library/react' -import { renderWithProviders } from '@/utils/test-utils' +import { renderWithProviders } from '@/lib/test-utils' import Button from './Button' describe('Button', () => { diff --git a/ui/src/components/ConditionalWrapper/ConditionalWrapper.test.tsx b/ui/src/components/ConditionalWrapper/ConditionalWrapper.test.tsx index 259d4a5..d8876b0 100644 --- a/ui/src/components/ConditionalWrapper/ConditionalWrapper.test.tsx +++ b/ui/src/components/ConditionalWrapper/ConditionalWrapper.test.tsx @@ -1,6 +1,6 @@ import { screen } from '@testing-library/react' import ConditionalWrapper from './ConditionalWrapper' -import { renderWithProviders } from '@/utils/test-utils' +import { renderWithProviders } from '@/lib/test-utils' import { FC } from 'react' const Content: FC = () => <div>test</div>
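The Autocomplete module added above wraps MUI's component with compact input styling and exports GroupHeader/GroupItems, which the DatasetSelector introduced in the next hunk uses to render sticky layer/domain group headings. A usage sketch under those assumptions; the option values here are illustrative only:

import { TextField } from '@mui/material'
import { Autocomplete, GroupHeader, GroupItems } from './Autocomplete'

// Illustrative option shape; the real selector groups datasets by layer/domain.
type Opt = { layer: string; domain: string; dataset: string }

const options: Opt[] = [
  { layer: 'default', domain: 'sales', dataset: 'orders' },
  { layer: 'default', domain: 'sales', dataset: 'refunds' }
]

export const GroupedExample = () => (
  <Autocomplete
    options={options}
    groupBy={(option) => `${(option as Opt).layer}/${(option as Opt).domain}`}
    getOptionLabel={(option) => (option as Opt).dataset}
    renderInput={(params) => <TextField {...params} size="small" />}
    renderGroup={(params) => (
      <li key={params.key}>
        <GroupHeader>{params.group}</GroupHeader>
        <GroupItems>{params.children}</GroupItems>
      </li>
    )}
  />
)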
diff --git a/ui/src/components/DatasetSelector/DatasetSelector.tsx b/ui/src/components/DatasetSelector/DatasetSelector.tsx new file mode 100644 index 0000000..23233e4 --- /dev/null +++ b/ui/src/components/DatasetSelector/DatasetSelector.tsx @@ -0,0 +1,166 @@ + +import { Row } from '@/components' +import { TextField, Typography } from '@mui/material' +import { useEffect, useState } from 'react' +import { Autocomplete, GroupHeader, GroupItems } from '../Autocomplete/Autocomplete' +import FormControl from '../FormControl/FormControl' +import { Dataset } from "@/service/types" + + +const DatasetSelector = ({ datasetsList, setParentDataset, enableVersionSelector = true }) => { + + const [maxVersion, setMaxVersion] = useState(0) + const [filteredDatasetsList, setFilteredDatasetsList] = useState([]) + const [layerFilteredDatasetsList, setLayerFilteredDatasetsList] = useState([]) + const [layer, setLayer] = useState('') + const [domain, setDomain] = useState('') + const [dataset, setDataset] = useState(null) + const [version, setVersion] = useState(1) + + useEffect(() => { + if (datasetsList) { + let filteredList = datasetsList; + if (layer) { + filteredList = filteredList.filter((dataset) => dataset.layer === layer); + + if (domain) { + filteredList = filteredList.filter((dataset) => dataset.domain === domain); + } + } + setFilteredDatasetsList(filteredList); + setLayerFilteredDatasetsList(filteredList); + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [layer, domain]) + + useEffect(() => { + if (datasetsList) { + setFilteredDatasetsList(datasetsList) + setLayerFilteredDatasetsList(datasetsList) + } + }, [datasetsList]) + + const handleDomainSelect = (value) => { + if (value) { + const splits = value.split('/') + const layer = splits[0] + const domain = splits[1] + setLayer(layer) + setDomain(domain) + } + else { + setDomain(null) + } + setDataset(null) + } + + const handleLayerSelect = (value) => { + if (value) { setLayer(value); setDomain(null) } + else { setLayer(null), setDataset(null), setDataset(null) } + } + + useEffect(() => { + let version = 0 + if (dataset) { + version = dataset.version + setLayer(dataset.layer) + setDomain(dataset.domain) + } + else { + setLayer(null) + setDomain(null) + } + + setVersion(version) + setMaxVersion(version) + setParentDataset(dataset) + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [dataset]) + + + useEffect(() => { + if (dataset) { + dataset.version = version + setParentDataset(dataset) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [version]) + + const getUniqueLayers = () => [...new Set(datasetsList.map((dataset) => dataset.layer))] + const getUniqueDomains = () => [...new Set(layerFilteredDatasetsList.map((dataset) => `${dataset.layer}/${dataset.domain}`))] + + return ( + <> + + Layer + + (option as string) || ""} + renderInput={(params) => } + value={getUniqueLayers().length === 1 ? datasetsList[0].layer : layer || null} + onChange={(_, newValue) => { + handleLayerSelect(newValue); + }} + data-testid='select-layer' + /> + + Domain + + (option as string).split('/')[1] || ""} + renderInput={(params) => } + value={layer && domain ? `${layer}/${domain}` : null} + onChange={(_, newValue) => { + handleDomainSelect(newValue); + }} + data-testid='select-domain' + /> + + Dataset + + `${(dataset as Dataset).layer}-${(dataset as Dataset).domain}`} + getOptionLabel={(dataset) => (dataset as unknown as Dataset).dataset || ""} + renderInput={(params) => } + renderGroup={(params) => ( +
<li key={params.key}> + <GroupHeader>{params.group}</GroupHeader> + <GroupItems>{params.children}</GroupItems> + </li> + )} + defaultValue={undefined} + value={dataset} + onChange={(_, newValue) => { + setDataset(newValue as unknown as Dataset); + }} + data-testid='select-dataset' + /> +
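For reference, the cascading selection above reduces to two pure steps: narrow the full dataset list by the chosen layer and then by domain, and encode domain options as layer/domain composite keys so that identically named domains in different layers stay distinct. A simplified, self-contained sketch, with the Dataset shape inferred from its usage in this patch:

interface Dataset {
  layer: string
  domain: string
  dataset: string
  version: number
}

// Narrow the selectable datasets by the current layer and domain choices.
const filterDatasets = (all: Dataset[], layer?: string, domain?: string): Dataset[] =>
  all.filter((d) => (!layer || d.layer === layer) && (!domain || d.domain === domain))

// Composite keys keep "default/sales" and "raw/sales" apart in the dropdown.
const uniqueDomainKeys = (all: Dataset[]): string[] => [
  ...new Set(all.map((d) => `${d.layer}/${d.domain}`))
]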
    + {(enableVersionSelector && maxVersion != 0) && ( + + Select version + i + 1)} + renderInput={(params) => } + onChange={(_, newValue) => { + setVersion(newValue as unknown as number); + }} + value={version} + data-testid='select-version' + /> + + )} +
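The conditional block above only renders the version dropdown once a dataset is selected (maxVersion is reset to 0 otherwise), with options running from 1 up to the dataset's latest version. A sketch of that option generation, consistent with the (_, i) => i + 1 fragment visible in the hunk:

// Options 1..maxVersion for the version selector; returns [] when no
// dataset is selected (maxVersion === 0), which hides the control.
const versionOptions = (maxVersion: number): number[] =>
  Array.from({ length: maxVersion }, (_, i) => i + 1)

versionOptions(3) // [1, 2, 3]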
    + + ) +} + + +export default DatasetSelector; diff --git a/ui/src/components/FormControl/FormControl.tsx b/ui/src/components/FormControl/FormControl.tsx new file mode 100644 index 0000000..bb1f3b6 --- /dev/null +++ b/ui/src/components/FormControl/FormControl.tsx @@ -0,0 +1,14 @@ +import { FormControl as BaseFormControl } from '@mui/material' +import { ComponentProps } from 'react' +import { styled } from '@mui/system'; + + +const FormControl = styled(BaseFormControl) >` + .MuiInputBase-root { + margin-bottom: 4px; + height: 32px !important; + padding: 0px; + } +` + +export default FormControl; \ No newline at end of file diff --git a/ui/src/components/Icon/svg/logo.svg b/ui/src/components/Icon/svg/logo.svg index fc94f20..17c5932 100644 --- a/ui/src/components/Icon/svg/logo.svg +++ b/ui/src/components/Icon/svg/logo.svg @@ -1 +1 @@ - + \ No newline at end of file diff --git a/ui/src/components/Icon/svg/search-alt.svg b/ui/src/components/Icon/svg/search-alt.svg index 68b2e80..d1bbf7e 100644 --- a/ui/src/components/Icon/svg/search-alt.svg +++ b/ui/src/components/Icon/svg/search-alt.svg @@ -1 +1 @@ - + \ No newline at end of file diff --git a/ui/src/components/PermissionsTable/PermissionsTable.tsx b/ui/src/components/PermissionsTable/PermissionsTable.tsx new file mode 100644 index 0000000..7aa25ae --- /dev/null +++ b/ui/src/components/PermissionsTable/PermissionsTable.tsx @@ -0,0 +1,281 @@ +import { Select } from '@/components' +import { zodResolver } from '@hookform/resolvers/zod' +import { Typography } from '@mui/material' +import { Controller, useForm, FieldValues } from 'react-hook-form' +import { z } from 'zod' +import { Permission, ActionEnum, SensitivityEnum } from '@/service' +import { isDataPermission } from '@/service/permissions' +import { PermissionUiResponse } from '@/service/types' +import { useEffect, useState } from 'react' +import { cloneDeep } from 'lodash' +import IconButton from '@mui/material/IconButton'; +import AddIcon from '@mui/icons-material/Add'; +import RemoveIcon from '@mui/icons-material/Remove'; +import Table from '@mui/material/Table'; +import TableBody from '@mui/material/TableBody'; +import TableCell from '@mui/material/TableCell'; +import TableContainer from '@mui/material/TableContainer'; +import TableHead from '@mui/material/TableHead'; +import TableRow from '@mui/material/TableRow'; + + +type ActionType = z.infer +type PermissionType = z.infer +type SensitivityType = z.infer + + +const PermissionsTable = ({ permissionsListData, fieldArrayReturn }: { permissionsListData: PermissionUiResponse, fieldArrayReturn: FieldValues }) => { + + const [filteredPermissionsListData, setFilteredPermissionsListData] = useState({}) + const [permissionsAtMax, setPermissionsAtMax] = useState(false) + + const removePermissionAsAnOption = (permission: PermissionType, permissionsList: PermissionUiResponse) => { + const { type, layer, sensitivity, domain } = permission; + const typeList = permissionsList[type]; + const layerList = typeList?.[layer]; + const sensitivityList = layerList?.[sensitivity]; + + switch (true) { + // Scenario for protected permission + case Boolean(domain): + // Remove the domain + if (domain in sensitivityList) { + delete sensitivityList[domain]; + } + + // Remove the sensitivity if there are no domains left + if (!Object.keys(sensitivityList)?.length) { + delete layerList[sensitivity]; + + // Remove the layer if there are no sensitivities left + if (!Object.keys(layerList)?.length) { + delete typeList[layer]; + } + } + break; + case 
Boolean(sensitivity): + // Remove the sensitivity + if (sensitivity in layerList) { + delete layerList[sensitivity]; + } + + // Remove the layer if there are no sensitivities left + if (!Object.keys(layerList)?.length || sensitivity === "ALL") { + delete typeList[layer]; + + // Remove the type if there are no layers left + if (!Object.keys(typeList)?.length || layer === "ALL") { + delete permissionsList[type]; + } + } + break; + + // Scenario for admin permissions + default: + delete permissionsList[type]; + break; + } + + return permissionsList; + }; + + + const { fields, append, remove } = fieldArrayReturn + + const { control, trigger, watch, reset, setError, setValue } = useForm({ + resolver: zodResolver(Permission) + }) + + // Remove any of the selected permissions from being an option + useEffect(() => { + let amendedPermissions = cloneDeep(permissionsListData) + fields.forEach((permission) => { + amendedPermissions = removePermissionAsAnOption(permission, amendedPermissions) + }) + setFilteredPermissionsListData(amendedPermissions) + + }, [fields, permissionsListData]); + + // Set Permissions at max + useEffect(() => { + if (Object.keys(filteredPermissionsListData).length === 0) { + setPermissionsAtMax(true) + } + else { + setPermissionsAtMax(false) + } + }, [filteredPermissionsListData]) + + + const generateOptions = (items) => items.map((item) => { + return + }) + + return ( + + + + + + Type + Layer + Sensitivity + Domain + + + + {(fields || []).map((item, index) => + ( + + remove(index)} + > + + + + + {item.type} + + + {item.layer} + + + {item.sensitivity} + + + {item.domain} + + ) + )} + {!permissionsAtMax && + + { + const result = trigger(undefined, { shouldFocus: true }); + if (result) { + const permissionToAdd = watch() + // Triggers an error if the domain is not set for protected sensitivity + if (isDataPermission(permissionToAdd) && permissionToAdd.sensitivity === "PROTECTED" && permissionToAdd.domain === undefined) { + setError("domain", { type: "custom", message: "Required" }); + } + else { + append(permissionToAdd) + reset({ + type: undefined, + layer: undefined, + sensitivity: undefined, + domain: undefined, + }) + } + } + }} + > + + + + + ( + + )} + /> + + + ( + isDataPermission(watch()) && + + )} + /> + + + ( + isDataPermission(watch()) && watch('layer') && + + ) + } + /> + + + ( + isDataPermission(watch()) && watch('sensitivity') === 'PROTECTED' && + ) + } + /> + + } + +
    +
    + ) +} + +export default PermissionsTable diff --git a/ui/src/components/SchemaCreate.tsx b/ui/src/components/SchemaCreate.tsx index 93c90da..378da37 100644 --- a/ui/src/components/SchemaCreate.tsx +++ b/ui/src/components/SchemaCreate.tsx @@ -1,4 +1,4 @@ -import { createSchema, schemaCreateSchema } from '@/service' +import { createSchema, schemaCreateSchema, GlobalSensitivities, ProtectedSensitivity } from '@/service' import { CreateSchemaResponse, GenerateSchemaResponse, @@ -18,9 +18,25 @@ import Select from './Select/Select' import SimpleTable from './SimpleTable/SimpleTable' import TextField from './TextField/TextField' -const dataTypes = ['Int64', 'Float64', 'object', 'date', 'boolean'] -function CreateSchema({ schemaData }: { schemaData: GenerateSchemaResponse }) { +const dataTypes = [ + "bigint", + "boolean", + "char", + "date", + "decimal", + "double", + "float", + "int", + "smallint", + "string", + "timestamp", + "tinyint", + "varchar" +] + + +function CreateSchema({ schemaData, layersData }: { schemaData: GenerateSchemaResponse, layersData: string[] }) { const [newSchemaData, setNewSchemaData] = useState(schemaData) const [keyValueTag, setKeyValueTag] = useState({ key: '', value: '' }) const [valueTag, setValueTag] = useState('') @@ -104,7 +120,29 @@ function CreateSchema({ schemaData }: { schemaData: GenerateSchemaResponse }) { Sensitivity Level field.onChange(e.target.value)} diff --git a/ui/src/components/Select/Select.tsx b/ui/src/components/Select/Select.tsx index a315fdf..7baf3bb 100644 --- a/ui/src/components/Select/Select.tsx +++ b/ui/src/components/Select/Select.tsx @@ -1,5 +1,4 @@ import { - FormControl, FormHelperText, InputLabel, MenuItem, @@ -8,9 +7,11 @@ import { } from '@mui/material' import { ComponentProps, FC, forwardRef, useId } from 'react' import SelectCheckbox from './SelectCheckbox' +import FormControl from '../FormControl/FormControl' import { Props } from './types' -const StyledBasicSelect = styled(BasicSelect)>` + +const StyledBasicSelect = styled(BasicSelect) >` .MuiInputBase-input { padding: 4px 15px 0px 15px; height: 100%; @@ -21,14 +22,6 @@ const StyledBasicSelect = styled(BasicSelect) } ` -const StyledFormControl = styled(FormControl)>` - .MuiInputBase-root { - margin-bottom: 4px; - height: 32px !important; - padding: 0px; - } -` - const Select: FC = forwardRef( ( { checkboxes, data = [], fullWidth = true, children, error, helperText, ...props }, @@ -45,7 +38,7 @@ const Select: FC = forwardRef( } return ( - + {checkboxes ? 
( @@ -61,7 +54,7 @@ const Select: FC = forwardRef( )} {!!helperText && {helperText}} - + ) } ) diff --git a/ui/src/components/SimpleTable/SimpleTable.test.tsx b/ui/src/components/SimpleTable/SimpleTable.test.tsx index ccaae66..53324ee 100644 --- a/ui/src/components/SimpleTable/SimpleTable.test.tsx +++ b/ui/src/components/SimpleTable/SimpleTable.test.tsx @@ -1,6 +1,6 @@ /* eslint-disable @typescript-eslint/no-explicit-any */ import SimpleTable from './SimpleTable' -import { renderWithProviders, screen } from '@/utils/test-utils' +import { renderWithProviders, screen } from '@/lib/test-utils' describe('SimpleTable', () => { it('empty row', async () => { diff --git a/ui/src/components/UploadProgress/UploadProgress.tsx b/ui/src/components/UploadProgress/UploadProgress.tsx index 8994544..5f69cee 100644 --- a/ui/src/components/UploadProgress/UploadProgress.tsx +++ b/ui/src/components/UploadProgress/UploadProgress.tsx @@ -75,4 +75,4 @@ const UploadProgress = ({ uploadSuccessDetails, setDisableUpload }: { uploadSucc ) } -export default UploadProgress; +export default UploadProgress; \ No newline at end of file diff --git a/ui/src/pages/_app.tsx b/ui/src/pages/_app.tsx index 770a945..0f7e0af 100644 --- a/ui/src/pages/_app.tsx +++ b/ui/src/pages/_app.tsx @@ -1,11 +1,12 @@ import { ThemeProvider } from '@/components' import { CacheProvider, EmotionCache } from '@emotion/react' -import createEmotionCache from '@/utils/createEmotionCache' +import createEmotionCache from '@/lib/createEmotionCache' import { ErrorBoundary } from 'react-error-boundary' import { ReactNode, useEffect } from 'react' import { AppProps } from 'next/app' import { NextPage } from 'next' import { QueryClient, QueryClientProvider } from '@tanstack/react-query' +import { ReactQueryDevtools } from '@tanstack/react-query-devtools' import Head from 'next/head' import { useRouter } from 'next/router' import ErrorBoundryComponent from '@/components/ErrorBoundryComponent' @@ -78,6 +79,7 @@ export default function MyApp({ rAPId {getLayout()} + diff --git a/ui/src/pages/_document.tsx b/ui/src/pages/_document.tsx index 0b7dd95..11e6451 100644 --- a/ui/src/pages/_document.tsx +++ b/ui/src/pages/_document.tsx @@ -7,7 +7,7 @@ import Document, { } from 'next/document' import createEmotionServer from '@emotion/server/create-instance' import theme from '@/style/theme' -import createEmotionCache from '@/utils/createEmotionCache' +import createEmotionCache from '@/lib/createEmotionCache' import { ReactNode } from 'react' type DocumentProps = { diff --git a/ui/src/pages/data/delete/index.tsx b/ui/src/pages/data/delete/index.tsx index b61c182..75f6174 100644 --- a/ui/src/pages/data/delete/index.tsx +++ b/ui/src/pages/data/delete/index.tsx @@ -1,13 +1,14 @@ -import { AccountLayout, Alert, Button, Card, Row, Select } from '@/components' +import { AccountLayout, Alert, Button, Card } from '@/components' import ErrorCard from '@/components/ErrorCard/ErrorCard' +import DatasetSelector from '@/components/DatasetSelector/DatasetSelector' import { deleteDataset, getDatasetsUi } from '@/service' -import { DeleteDatasetResponse } from '@/service/types' -import { FormControl, LinearProgress, Typography } from '@mui/material' +import { Dataset, DeleteDatasetResponse } from '@/service/types' +import { LinearProgress, Typography } from '@mui/material' import { useMutation, useQuery } from '@tanstack/react-query' -import { useEffect, useState } from 'react' +import { useState } from 'react' -function DeleteDataset() { - const [dataset, setDataset] = useState('') 
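The delete page rework continuing below (and the download and upload pages after it) replaces the old 'domain/dataset' string state with a typed Dataset object supplied by DatasetSelector, so every data page builds its API path the same way. A sketch of that construction; field names are taken from this patch's Dataset usage:

// Sketch only; mirrors the path passed to mutate() in the pages below.
type DatasetRef = { layer: string; domain: string; dataset: string }

// e.g. DELETE /api/datasets/default/ui_test_domain/my_dataset
const datasetPath = (d: DatasetRef): string => `${d.layer}/${d.domain}/${d.dataset}`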
+function DeleteDataset({ datasetInput = null }: { datasetInput?: Dataset }) { + const [dataset, setDataset] = useState(datasetInput) const [deleteDatasetSuccessDetails, setDeleteDatasetSuccessDetails] = useState< string | undefined >() @@ -32,13 +33,6 @@ function DeleteDataset() { } }) - useEffect(() => { - if (datasetsList && Object.keys(datasetsList).length > 0) { - const firstKey = Object.keys(datasetsList)[0] - setDataset(`${firstKey}/${datasetsList[firstKey][0].dataset}`) - } - }, [datasetsList]) - if (isDatasetsListLoading) { return } @@ -51,7 +45,7 @@ function DeleteDataset() {
    { event.preventDefault() - await mutate({ path: dataset }) + await mutate({ path: `${dataset.layer}/${dataset.domain}/${dataset.dataset}` }) }} > - - - - - + {deleteDatasetSuccessDetails ? ( ) : null} diff --git a/ui/src/pages/data/download/[domain]/[dataset].tsx b/ui/src/pages/data/download/[layer]/[domain]/[dataset].tsx similarity index 95% rename from ui/src/pages/data/download/[domain]/[dataset].tsx rename to ui/src/pages/data/download/[layer]/[domain]/[dataset].tsx index 51251ee..040b7ef 100644 --- a/ui/src/pages/data/download/[domain]/[dataset].tsx +++ b/ui/src/pages/data/download/[layer]/[domain]/[dataset].tsx @@ -10,7 +10,7 @@ import { Alert } from '@/components' import ErrorCard from '@/components/ErrorCard/ErrorCard' -import { asVerticalTableList } from '@/utils' +import { asVerticalTableList } from '@/lib' import { getDatasetInfo, queryDataset } from '@/service' import { DataFormats } from '@/service/types' import { Typography, LinearProgress } from '@mui/material' @@ -20,7 +20,7 @@ import { useState } from 'react' function DownloadDataset() { const router = useRouter() - const { domain, dataset } = router.query + const { layer, domain, dataset } = router.query const version = router.query.version ? router.query.version : 0 const [dataFormat, setDataFormat] = useState('csv') const [queryBody, setQueryBody] = useState({ @@ -35,7 +35,7 @@ function DownloadDataset() { isLoading: isDatasetInfoLoading, data: datasetInfoData, error: datasetInfoError - } = useQuery(['datasetInfo', domain, dataset, version ? version : 0], getDatasetInfo) + } = useQuery(['datasetInfo', layer, domain, dataset, version ? version : 0], getDatasetInfo) const { isLoading, mutate, error } = useMutation< Response, @@ -49,7 +49,7 @@ function DownloadDataset() { const a = document.createElement('a') a.style.display = 'none' a.href = url - a.download = `${domain}_${dataset}_${version}.${dataFormat}` + a.download = `${layer}_${domain}_${dataset}_${version}.${dataFormat}` document.body.appendChild(a) a.click() window.URL.revokeObjectURL(url) @@ -97,7 +97,7 @@ function DownloadDataset() { color="primary" onClick={() => mutate({ - path: `${domain}/${dataset}/query?version=${version}`, + path: `${layer}/${domain}/${dataset}/query?version=${version}`, dataFormat, data: createQueryBodyData() }) diff --git a/ui/src/pages/data/download/file.tsx b/ui/src/pages/data/download/file.tsx index bd0fce3..fc29325 100644 --- a/ui/src/pages/data/download/file.tsx +++ b/ui/src/pages/data/download/file.tsx @@ -10,7 +10,7 @@ import { } from '@/components' import { Typography } from '@mui/material' import { useRouter } from 'next/router' -import { asVerticalTableList } from '@/utils' +import { asVerticalTableList } from '@/lib' function FilePage() { const router = useRouter() diff --git a/ui/src/pages/data/download/index.tsx b/ui/src/pages/data/download/index.tsx index 90f23a4..7fab3be 100644 --- a/ui/src/pages/data/download/index.tsx +++ b/ui/src/pages/data/download/index.tsx @@ -1,17 +1,19 @@ -import { Card, Row, Button, Select } from '@/components' +import { Card, Button } from '@/components' import ErrorCard from '@/components/ErrorCard/ErrorCard' import AccountLayout from '@/components/Layout/AccountLayout' import { getDatasetsUi } from '@/service' -import { FormControl, Typography, LinearProgress } from '@mui/material' +import { Typography, LinearProgress } from '@mui/material' import { useQuery } from '@tanstack/react-query' import { useRouter } from 'next/router' -import { useEffect, useState } from 'react' +import { useState 
} from 'react' +import DatasetSelector from '@/components/DatasetSelector/DatasetSelector' +import { Dataset } from '@/service/types' -function DownloadData() { + + +function DownloadData({ datasetInput = null }: { datasetInput?: Dataset }) { const router = useRouter() - const [dataset, setDataset] = useState('') - const [versions, setVersions] = useState(0) - const [versionSelected, setVersionSelected] = useState(0) + const [dataset, setDataset] = useState(datasetInput) const { isLoading: isDatasetsListLoading, @@ -19,28 +21,6 @@ function DownloadData() { error: datasetsError } = useQuery(['datasetsList', 'READ'], getDatasetsUi) - useEffect(() => { - if (datasetsList && Object.keys(datasetsList).length > 0) { - const firstKey = Object.keys(datasetsList)[0] - setDataset(`${firstKey}/${datasetsList[firstKey][0].dataset}`) - } - }, [datasetsList]) - - useEffect(() => { - let version = 0 - if (dataset) { - const splits = dataset.split('/') - const domain = splits[0] - const _dataset = splits[1] - version = parseInt( - datasetsList[domain].filter((item) => item.dataset === _dataset)[0].version - ) - } - setVersions(version) - setVersionSelected(version ? Array(version).length : 0) - // eslint-disable-next-line react-hooks/exhaustive-deps - }, [dataset]) - if (isDatasetsListLoading) { return } @@ -65,67 +45,22 @@ function DownloadData() { return ( - router.push(`/data/download/${dataset}?version=${versionSelected}`) - } - > - Next - - } + action={} > Download the contents of a datasource from rAPId. Select the relevant dataset you want to download and then the version to download from. Please note it might take some time for the API to query the dataset, especially if it is large. - - - - - - - - {versions && ( - <> - Select version - - - - - - )} + ) } diff --git a/ui/src/pages/data/upload/index.tsx b/ui/src/pages/data/upload/index.tsx index 3d1f6a4..4e31ea2 100644 --- a/ui/src/pages/data/upload/index.tsx +++ b/ui/src/pages/data/upload/index.tsx @@ -1,17 +1,19 @@ -import { Card, Row, Button, Select, Alert } from '@/components' +import { Card, Row, Button, Select, Alert, Link } from '@/components' import ErrorCard from '@/components/ErrorCard/ErrorCard' import AccountLayout from '@/components/Layout/AccountLayout' import UploadProgress from '@/components/UploadProgress/UploadProgress' +import DatasetSelector from '@/components/DatasetSelector/DatasetSelector' import { getDatasetsUi, uploadDataset } from '@/service' -import { UploadDatasetResponse, UploadDatasetResponseDetails } from '@/service/types' -import { FormControl, Typography, LinearProgress } from '@mui/material' +import { Dataset, UploadDatasetResponse, UploadDatasetResponseDetails } from '@/service/types' +import { Typography, LinearProgress } from '@mui/material' import { useMutation, useQuery } from '@tanstack/react-query' -import { useEffect, useState } from 'react' +import { useState } from 'react' -function UserModifyPage() { + +function UploadDataset({ datasetInput = null }: { datasetInput?: Dataset }) { const [file, setFile] = useState() + const [dataset, setDataset] = useState(datasetInput) const [disable, setDisable] = useState(false) - const [dataset, setDataset] = useState('') const [uploadSuccessDetails, setUploadSuccessDetails] = useState< UploadDatasetResponseDetails | undefined >() @@ -37,13 +39,6 @@ function UserModifyPage() { } }) - useEffect(() => { - if (datasetsList) { - const firstKey = Object.keys(datasetsList)[0] - setDataset(`${firstKey}/${datasetsList[firstKey][0].dataset}`) - } - }, [datasetsList]) - - if
(isDatasetsListLoading) { return } @@ -58,7 +53,7 @@ function UserModifyPage() { event.preventDefault() const formData = new FormData() formData.append('file', file) - await mutate({ path: dataset, data: formData }) + await mutate({ path: `${dataset.layer}/${dataset.domain}/${dataset.dataset}?version=${dataset.version}`, data: formData }) }} > - - Select dataset - - - - - - - + {!disable && @@ -122,10 +93,10 @@ function UserModifyPage() { )} - + ) } -export default UserModifyPage +export default UploadDataset -UserModifyPage.getLayout = (page) => {page} +UploadDataset.getLayout = (page) => {page} diff --git a/ui/src/pages/schema/create/index.tsx b/ui/src/pages/schema/create/index.tsx index b71b995..dc74e83 100644 --- a/ui/src/pages/schema/create/index.tsx +++ b/ui/src/pages/schema/create/index.tsx @@ -8,17 +8,25 @@ import { Alert, CreateSchema as CreateSchemaComponent } from '@/components' -import { generateSchema, schemaGenerateSchema } from '@/service' +import ErrorCard from '@/components/ErrorCard/ErrorCard' +import { generateSchema, schemaGenerateSchema, GlobalSensitivities, ProtectedSensitivity } from '@/service' +import { getLayers } from '@/service/fetch' import { GenerateSchemaResponse, SchemaGenerate } from '@/service/types' import { zodResolver } from '@hookform/resolvers/zod' -import { Typography } from '@mui/material' +import { LinearProgress, Typography } from '@mui/material' import { useMutation } from '@tanstack/react-query' import { useState } from 'react' import { useForm, Controller } from 'react-hook-form' +import { useQuery } from '@tanstack/react-query' function CreateSchema() { const [file, setFile] = useState() + const { isLoading: isLayersLoading, data: layersData, error: layersError } = useQuery( + ['layers'], + getLayers, + ) + const { control, handleSubmit } = useForm({ resolver: zodResolver(schemaGenerateSchema) }) @@ -33,15 +41,24 @@ function CreateSchema() { }) if (schemaData) { - return + return + } + + if (isLayersLoading) { + return } + if (layersError) { + return + } + + return (
    { const formData = new FormData() formData.append('file', file) - const path = `${data.sensitivity}/${data.domain}/${data.title}/generate` + const path = `${data.layer}/${data.sensitivity}/${data.domain}/${data.title}/generate` await mutate({ path, data: formData }) })} > @@ -74,7 +91,33 @@ function CreateSchema() { - {['PUBLIC', 'PRIVATE', 'PROTECTED'].map((value) => ( + {[...GlobalSensitivities, ProtectedSensitivity].map((value) => ( + + ))} + + + )} + /> + + + ( + <> + Dataset Layer + diff --git a/ui/src/pages/subject/create/index.tsx b/ui/src/pages/subject/create/index.tsx index c886739..56df960 100644 --- a/ui/src/pages/subject/create/index.tsx +++ b/ui/src/pages/subject/create/index.tsx @@ -1,11 +1,11 @@ -import { useState } from 'react' -import { Card, Row, Chip, Button, TextField, Select, Alert } from '@/components' +import { Card, Row, Button, TextField, Select, Alert } from '@/components' import AccountLayout from '@/components/Layout/AccountLayout' import { zodResolver } from '@hookform/resolvers/zod' -import { Stack, Typography, LinearProgress } from '@mui/material' -import { Controller, useForm } from 'react-hook-form' +import { Typography, LinearProgress } from '@mui/material' +import { Controller, useForm, useFieldArray } from 'react-hook-form' import { z } from 'zod' import { createClient, SubjectCreate } from '@/service' +import { extractPermissionNames } from '@/service/permissions' import { getPermissionsListUi } from '@/service/fetch' import { useMutation, useQuery } from '@tanstack/react-query' import { useRouter } from 'next/router' @@ -16,22 +16,15 @@ import { UserCreateResponse } from '@/service/types' import ErrorCard from '@/components/ErrorCard/ErrorCard' +import PermissionsTable from '@/components/PermissionsTable/PermissionsTable' + const userType = ['User', 'Client'] type UserCreate = z.infer -const permissionListKeyMapping = { - ADMIN: 'Management Permissions', - GLOBAL_READ: 'Global Read Permissions', - GLOBAL_WRITE: 'Global Write Permissions', - PROTECTED_READ: 'Read Protected Permissions', - PROTECTED_WRITE: 'Write Protected Permissions' -} - function CreateUserPage() { const router = useRouter() - const [selectedPermissions, setSelectedPermissions] = useState([]) const { isLoading: isPermissionsListLoading, @@ -43,6 +36,11 @@ function CreateUserPage() { resolver: zodResolver(SubjectCreate) }) + const fieldArrayReturn = useFieldArray({ + control, + name: 'permissions' + }); + const { isLoading, mutate, error } = useMutation< ClientCreateResponse | UserCreateResponse, Error, @@ -82,12 +80,12 @@ function CreateUserPage() { return ( { - const baseData = { permissions: selectedPermissions } + const permissions = data.permissions.map((permission) => extractPermissionNames(permission, permissionsListData)) if (data.type === 'User') { await mutate({ path: 'user', data: { - ...baseData, + permissions: permissions, username: data.name, email: data.email } @@ -96,7 +94,7 @@ function CreateUserPage() { await mutate({ path: 'client', data: { - ...baseData, + permissions: permissions, client_name: data.name } }) @@ -215,37 +213,9 @@ function CreateUserPage() { Select Permissions - - {Object.keys(permissionsListData).map((key, index) => { - return ( - - - {permissionListKeyMapping[key]} - - - {permissionsListData[key].map((item) => { - return ( - { - if (active) { - setSelectedPermissions([...selectedPermissions, item.name]) - } else { - setSelectedPermissions( - selectedPermissions.filter((_item) => _item !== item.name) - ) - } - }} - toggle - /> - ) - })} - - - ) 
- })} - + + + {error && ( {error?.message} diff --git a/ui/src/pages/subject/modify/[subjectId].tsx b/ui/src/pages/subject/modify/[subjectId].tsx index 1159ea5..a58aef1 100644 --- a/ui/src/pages/subject/modify/[subjectId].tsx +++ b/ui/src/pages/subject/modify/[subjectId].tsx @@ -1,4 +1,4 @@ -import { Button, Card, Chip, Row } from '@/components' +import { Button, Card } from '@/components' import ErrorCard from '@/components/ErrorCard/ErrorCard' import AccountLayout from '@/components/Layout/AccountLayout' import { @@ -6,28 +6,31 @@ import { getSubjectPermissions, updateSubjectPermissions } from '@/service' +import { extractPermissionNames } from '@/service/permissions' import { UpdateSubjectPermissionsBody, UpdateSubjectPermissionsResponse } from '@/service/types' -import { Alert, Stack, Typography, LinearProgress } from '@mui/material' +import { Alert, Typography, LinearProgress } from '@mui/material' import { useMutation, useQuery } from '@tanstack/react-query' import { useRouter } from 'next/router' -import { useEffect, useState } from 'react' +import { useEffect } from 'react' +import { useForm, useFieldArray } from 'react-hook-form' +import PermissionsTable from '@/components/PermissionsTable/PermissionsTable' -const permissionListKeyMapping = { - ADMIN: 'Management Permissions', - GLOBAL_READ: 'Global Read Permissions', - GLOBAL_WRITE: 'Global Write Permissions', - PROTECTED_READ: 'Read Protected Permissions', - PROTECTED_WRITE: 'Write Protected Permissions' -} function SubjectModifyPage() { const router = useRouter() const { subjectId, name } = router.query - const [selectedPermissions, setSelectedPermissions] = useState([]) + const { control, handleSubmit } = useForm() + + const fieldArrayReturn = useFieldArray({ + control, + name: 'permissions' + }); + + const { append } = fieldArrayReturn; const { isLoading: isPermissionsListDataLoading, @@ -43,8 +46,9 @@ function SubjectModifyPage() { useEffect(() => { if (subjectPermissionsData) { - setSelectedPermissions(subjectPermissionsData) + append(subjectPermissionsData) } + // eslint-disable-next-line react-hooks/exhaustive-deps }, [subjectPermissionsData]) const { isLoading, mutate, error } = useMutation< @@ -71,61 +75,38 @@ function SubjectModifyPage() { } return ( - { - mutate({ subject_id: subjectId as string, permissions: selectedPermissions }) - }} - > - Modify - - } - > - - Modify Subject - - Select permissions for {name} - - {Object.keys(permissionsListData).map((key, index) => { - return ( - - - {permissionListKeyMapping[key]} - - - {permissionsListData[key].map((item) => { - return ( - { - if (active) { - setSelectedPermissions([...selectedPermissions, item.name]) - } else { - setSelectedPermissions( - selectedPermissions.filter((_item) => _item !== item.name) - ) - } - }} - toggle - /> - ) - })} - - - ) + { + const permissions = data.permissions.map((permission) => extractPermissionNames(permission, permissionsListData)) + await mutate( + { subject_id: subjectId as string, permissions }) })} - - {error && ( - - {error?.message} - - )} - + noValidate + > + + Modify + + } + > + + Modify Subject + + Select permissions for {name} + + {error && ( + + {error?.message} + + )} + + ) } diff --git a/ui/src/pages/subject/modify/success/[subjectId].tsx b/ui/src/pages/subject/modify/success/[subjectId].tsx index 3220953..855f07b 100644 --- a/ui/src/pages/subject/modify/success/[subjectId].tsx +++ b/ui/src/pages/subject/modify/success/[subjectId].tsx @@ -34,7 +34,7 @@ function SubjectModifyPageSuccess() { 
{subjectPermissionsData.map((item) => ( - {item} + {item.name} ))}
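The subject create and modify pages above gather permissions as structured objects and flatten them back to permission names with `extractPermissionNames`, a helper introduced later in this patch in `ui/src/service/permissions.ts`. A minimal sketch of how that lookup walks the nested `permissions_ui` response, assuming hypothetical layer, domain and permission-name values (none of the concrete strings below come from the codebase):

import { extractPermissionNames } from '@/service/permissions'

// Assumed PermissionUiResponse shape: data permissions nest
// type -> layer -> sensitivity (-> domain), while admin permissions
// map directly to a name. Every concrete string here is hypothetical.
const permissionsListData = {
  READ: {
    raw: {
      PUBLIC: 'READ_RAW_PUBLIC',
      PRIVATE: { sales: 'READ_RAW_PRIVATE_SALES' }
    }
  },
  DATA_ADMIN: 'DATA_ADMIN'
}

// A sensitivity-level data permission resolves as [type][layer][sensitivity]
extractPermissionNames(
  { type: 'READ', layer: 'raw', sensitivity: 'PUBLIC', domain: undefined },
  permissionsListData
) // => 'READ_RAW_PUBLIC'

// A domain-scoped data permission resolves as [type][layer][sensitivity][domain]
extractPermissionNames(
  { type: 'READ', layer: 'raw', sensitivity: 'PRIVATE', domain: 'sales' },
  permissionsListData
) // => 'READ_RAW_PRIVATE_SALES'

// An admin permission resolves as [type]
extractPermissionNames(
  { type: 'DATA_ADMIN', layer: undefined, sensitivity: undefined, domain: undefined },
  permissionsListData
) // => 'DATA_ADMIN'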
    diff --git a/ui/src/pages/tasks/[jobId].tsx b/ui/src/pages/tasks/[jobId].tsx index 0fec21b..759c2f1 100644 --- a/ui/src/pages/tasks/[jobId].tsx +++ b/ui/src/pages/tasks/[jobId].tsx @@ -1,7 +1,7 @@ import ErrorCard from '@/components/ErrorCard/ErrorCard' import AccountLayout from '@/components/Layout/AccountLayout' import SimpleTable from '@/components/SimpleTable/SimpleTable' -import { asVerticalTableList } from '@/utils' +import { asVerticalTableList } from '@/lib' import { getJob } from '@/service' import { Card, Typography, LinearProgress } from '@mui/material' import { useQuery } from '@tanstack/react-query' @@ -56,6 +56,7 @@ function GetJob() { { name: 'Step', value: data.step as string }, { name: 'Filename', value: data.filename as string }, { name: 'Raw Filename ', value: data.raw_file_identifier as string }, + { name: 'Layer ', value: data.layer as string }, { name: 'Domain ', value: data.domain as string }, { name: 'Dataset ', value: data.dataset as string }, { name: 'Version ', value: data.version.toString() } diff --git a/ui/src/pages/tasks/index.tsx b/ui/src/pages/tasks/index.tsx index 705b360..09577ec 100644 --- a/ui/src/pages/tasks/index.tsx +++ b/ui/src/pages/tasks/index.tsx @@ -39,6 +39,7 @@ function StatusPage() { list={data.map((job) => { return [ { children: <>{job.type} }, + { children: <>{job.layer} }, { children: <>{job.domain} }, { children: <>{job.dataset} }, { children: <>{job.version} }, @@ -63,6 +64,7 @@ function StatusPage() { })} headers={[ { children: 'Type' }, + { children: 'Layer' }, { children: 'Domain' }, { children: 'Dataset' }, { children: 'Version' }, diff --git a/ui/src/service/fetch.ts b/ui/src/service/fetch.ts index 5b22843..f7f84ef 100644 --- a/ui/src/service/fetch.ts +++ b/ui/src/service/fetch.ts @@ -2,6 +2,7 @@ import { ClientCreateBody, DataFormats, DatasetInfoResponse, + Dataset, AllJobsResponse, UpdateSubjectPermissionsBody, UpdateSubjectPermissionsResponse, @@ -12,9 +13,11 @@ import { MetadataSearchResponse, AuthResponse, GetLoginResponse, - MethodsResponse + MethodsResponse, + PermissionUiResponse, + SubjectPermission } from './types' -import { api } from '@/utils/data-utils' +import { api } from '@/lib/data-utils' export const getAuthStatus = async (): Promise => { const res = await api(`/api/auth`, { method: 'GET' }) @@ -36,10 +39,12 @@ export const getMethods = async (): Promise => { return res.json() } -// TODO Move these to the types file -export const getPermissionsListUi = async (): Promise<{ - [key: string]: { [key: string]: string }[] -}> => { +export const getLayers = async (): Promise => { + const res = await api('/api/layers', { method: 'GET' }) + return res.json() +} + +export const getPermissionsListUi = async (): Promise => { const res = await api(`/api/permissions_ui`, { method: 'GET' }) @@ -58,9 +63,7 @@ export const getSubjectsListUi = async (): Promise< export const getDatasetsUi = async ({ queryKey -}): Promise<{ - [key: string]: { dataset: string; version: string }[] -}> => { +}): Promise => { const [, action] = queryKey const res = await api(`/api/datasets_ui/${action}`, { method: 'GET' @@ -83,7 +86,7 @@ export const getJob = async ({ queryKey }): Promise => { return res.json() } -export const getSubjectPermissions = async ({ queryKey }): Promise => { +export const getSubjectPermissions = async ({ queryKey }): Promise => { const [, subjectId] = queryKey const res = await api(`/api/permissions/${subjectId}`, { method: 'GET' @@ -143,8 +146,8 @@ export const deleteDataset = async ({ path }: { path: string }) => { } 
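// A minimal usage sketch for the fetchers in this module: TanStack Query
// passes the queryKey through to the query function, so the key built at the
// call site is a positional contract with the destructuring inside the
// fetcher. getDatasetInfo (just below) reads
//   const [, layer, domain, dataset, version] = queryKey
// so callers must build a five-part key in that order, as the download page
// does. The 'raw', 'sales', 'orders' and version 2 values here are
// illustrative assumptions only.
//
// import { useQuery } from '@tanstack/react-query'
// import { getDatasetInfo } from '@/service'
//
// const { data, isLoading } = useQuery(
//   ['datasetInfo', 'raw', 'sales', 'orders', 2],
//   getDatasetInfo
// )
// // Issues: GET /api/datasets/raw/sales/orders/info?version=2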
export const getDatasetInfo = async ({ queryKey }): Promise => { - const [, domain, dataset, version] = queryKey - const res = await api(`/api/datasets/${domain}/${dataset}/info?version=${version}`, { + const [, layer, domain, dataset, version] = queryKey + const res = await api(`/api/datasets/${layer}/${domain}/${dataset}/info?version=${version}`, { method: 'GET' }) return res.json() diff --git a/ui/src/service/permissions.ts b/ui/src/service/permissions.ts new file mode 100644 index 0000000..b5eda29 --- /dev/null +++ b/ui/src/service/permissions.ts @@ -0,0 +1,29 @@ +import { z } from 'zod' +import { PermissionUiResponse } from './types' +import { Permission, DataPermission } from '@/service' + +type DataPermissionType = z.infer +type PermissionType = z.infer + +export const isDataPermission = (permission: PermissionType): boolean => { + return permission.type === "READ" || permission.type === "WRITE"; +} + +export const isAdminPermission = (permission: PermissionType): boolean => { + return permission.type === "DATA_ADMIN" || permission.type === "USER_ADMIN"; +} + +export const extractPermissionNames = (permission: PermissionType, permissionsListData: PermissionUiResponse) => { + switch (true) { + case isDataPermission(permission): + permission = permission as DataPermissionType + if (permission.domain != undefined) { + return permissionsListData[permission.type][permission.layer][permission.sensitivity][permission.domain] + } + else if (permission.sensitivity != undefined) { + return permissionsListData[permission.type][permission.layer][permission.sensitivity] + } + case isAdminPermission(permission): + return permissionsListData[permission.type] + } +} \ No newline at end of file diff --git a/ui/src/service/schema.ts b/ui/src/service/schema.ts index 2fa488e..4276563 100644 --- a/ui/src/service/schema.ts +++ b/ui/src/service/schema.ts @@ -1,17 +1,51 @@ import { z } from 'zod' -export const SensitivityEnum = z.enum(['PUBLIC', 'PRIVATE', 'PROTECTED']) + +export const GlobalSensitivities = ['PUBLIC', 'PRIVATE'] as const; +export const ProtectedSensitivity = 'PROTECTED' + + +export const SensitivityEnum = z.enum([...GlobalSensitivities, ProtectedSensitivity, 'ALL']) const UserTypeEnum = z.enum(['User', 'Client']) +export const DataActionValues = ['READ', 'WRITE'] as const; +export const AdminActionValues = ['DATA_ADMIN', 'USER_ADMIN'] as const; + +const DataActionEnum = z.enum(DataActionValues) +const AdminActionEnum = z.enum(AdminActionValues) + +export const ActionEnum = z.enum([...DataActionValues, ...AdminActionValues]) + +export const DataPermission = z.object({ + type: DataActionEnum, + layer: z.string(), + sensitivity: SensitivityEnum, + domain: z.string().optional(), +}) + +export const AdminPermission = z.object({ + type: AdminActionEnum, + layer: z.literal(undefined), + sensitivity: z.literal(undefined), + domain: z.literal(undefined) +}) + +export const Permission = z.discriminatedUnion("type", [DataPermission, AdminPermission], { + errorMap: () => { + return { message: 'Required' }; + } +}) + export const SubjectCreate = z.object({ type: UserTypeEnum, email: z.string().email().optional(), name: z.string(), - permissions: z.array(z.string()).optional() + permissions: z.array(Permission) }) export const schemaCreateSchema = z.object({ sensitivity: SensitivityEnum, + layer: z.string(), domain: z.string(), title: z.string(), description: z.string().optional(), @@ -26,6 +60,7 @@ export const schemaCreateSchema = z.object({ export const schemaGenerateSchema = z.object({ sensitivity: 
SensitivityEnum, + layer: z.string(), domain: z.string(), title: z.string() }) diff --git a/ui/src/service/types.ts b/ui/src/service/types.ts index ecd9e87..825f25c 100644 --- a/ui/src/service/types.ts +++ b/ui/src/service/types.ts @@ -100,6 +100,7 @@ export type DatasetInfoResponse = { export type GenerateSchemaResponse = { metadata: { + layer: string domain: string dataset: string sensitivity: string @@ -152,3 +153,22 @@ export type MetadataItem = { } export type MetadataSearchResponse = MetadataItem[] + +export type Dataset = { + layer: string + domain: string + dataset: string + version: number +} + +export type SubjectPermission = { + name: string + type: string + layer: string | undefined + sensitivity: string | undefined + domain: string | undefined +} + +export type PermissionUiResponse = { + [key: string]: string | { [key: string]: { [key: string]: string | { [key: string]: string } } } +} \ No newline at end of file diff --git a/ui/src/utils/createEmotionCache.ts b/ui/src/utils/createEmotionCache.ts deleted file mode 100644 index 0de053b..0000000 --- a/ui/src/utils/createEmotionCache.ts +++ /dev/null @@ -1,5 +0,0 @@ -import createCache from '@emotion/cache' - -const createEmotionCache = () => createCache({ key: 'css', prepend: true }) - -export default createEmotionCache diff --git a/ui/src/utils/data-utils.test.ts b/ui/src/utils/data-utils.test.ts deleted file mode 100644 index fbe299e..0000000 --- a/ui/src/utils/data-utils.test.ts +++ /dev/null @@ -1,41 +0,0 @@ -import { api } from './data-utils' -import fetchMock from 'jest-fetch-mock' -import { defaultError } from '@/lang' - -const mockSuccess = { fruit: 'apples' } - -describe('api()', () => { - afterEach(() => { - fetchMock.resetMocks() - }) - - it('success', async () => { - fetchMock.mockResponseOnce(JSON.stringify(mockSuccess), { status: 200 }) - const data = await (await api('/api')).json() - expect(data).toEqual(expect.objectContaining(mockSuccess)) - }) - - it('default error', async () => { - fetchMock.mockResponseOnce(JSON.stringify(mockSuccess), { status: 401 }) - - try { - await api('/api') - } catch (e) { - expect(e.message).toEqual(defaultError) - } - }) - - it('custom error', async () => { - const errorMessage = 'my custom error' - - fetchMock.mockResponseOnce(JSON.stringify({ details: 'my custom error' }), { - status: 401 - }) - - try { - await api('/api') - } catch (e) { - expect(e.message).toEqual(errorMessage) - } - }) -}) diff --git a/ui/src/utils/data-utils.ts b/ui/src/utils/data-utils.ts deleted file mode 100644 index 1455bd0..0000000 --- a/ui/src/utils/data-utils.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { createUrl } from './url-utils' -import { defaultError } from '@/lang' - -export type ParamsType = Record - -export const api = async ( - path: RequestInfo | URL, - init: RequestInit = {}, - params?: ParamsType -): Promise => { - const API_URL = process.env.NEXT_PUBLIC_API_URL - const baseUrl = API_URL ? 
`${API_URL}${path}` : path - const url = createUrl(`${baseUrl}`, params) - let detailMessage - const res: Response = await fetch(url, { - credentials: 'include', - ...init - }) - if (res.ok) return res - try { - const { details } = await res.json() - detailMessage = details - } catch (e) {} - throw new Error(detailMessage || defaultError) -} diff --git a/ui/src/utils/index.ts b/ui/src/utils/index.ts deleted file mode 100644 index c5fb87f..0000000 --- a/ui/src/utils/index.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { TableCellProps } from '@mui/material' - -export const asVerticalTableList = ( - list: { - name: string - value: string - }[] -) => [ - ...list.map(({ name, value }) => [ - { children: name, component: 'th' }, - { children: value } - ]) -] diff --git a/ui/src/utils/test-utils.tsx b/ui/src/utils/test-utils.tsx deleted file mode 100644 index 49b68b6..0000000 --- a/ui/src/utils/test-utils.tsx +++ /dev/null @@ -1,92 +0,0 @@ -import { render, renderHook, RenderOptions, waitFor } from '@testing-library/react' -import { ThemeProvider } from '@/components' -import { ReactNode } from 'react' -import { QueryClient, QueryClientProvider } from '@tanstack/react-query' - -beforeAll(() => { - Object.defineProperty(global, 'sessionStorage', { value: mockStorage }) - Object.defineProperty(global, 'localStorage', { value: mockStorage }) - jest.spyOn(console, 'error').mockImplementation(jest.fn()) -}) - -afterEach(() => { - window.sessionStorage.clear() -}) - -const mockStorage = (() => { - let store = {} - return { - getItem: function (key) { - return store[key] || null - }, - setItem: function (key, value) { - store[key] = value.toString() - }, - removeItem: function (key) { - delete store[key] - }, - clear: function () { - store = {} - } - } -})() - -export const wrapper = (ui) => { - const queryClient = new QueryClient({ - defaultOptions: { - queries: { - retry: false - } - } - }) - return ( - - - <>{ui} - - - ) -} - -export const renderWithProviders = async ( - ui: ReactNode, - options: Omit = {} -) => { - const rendered = await render(wrapper(ui), options) - return { - ...rendered, - rerender: (ui, options: Omit = {}) => - renderWithProviders(ui, { container: rendered.container, ...options }) - } -} - -export const renderHookWithProviders: typeof renderHook = (...parameters) => - renderHook(parameters[0], { - wrapper: ({ children }) => wrapper(children), - ...parameters[1] - }) - -export const bugfixForTimeout = async () => - await waitFor(() => new Promise((resolve) => setTimeout(resolve, 0))) - -export * from '@testing-library/react' -export { renderWithProviders as render } - -export const mockDataSetsList: { [key: string]: { [key: string]: string }[] } = { - Pizza: [ - { - dataset: 'bit_complicated', - version: '3' - }, - { - dataset: 'again_complicated_high', - version: '3' - } - ], - Apples: [ - { - dataset: 'juicy', - version: '2' - } - ] -} diff --git a/ui/src/utils/url-utils.test.ts b/ui/src/utils/url-utils.test.ts deleted file mode 100644 index c4aa721..0000000 --- a/ui/src/utils/url-utils.test.ts +++ /dev/null @@ -1,65 +0,0 @@ -import { createUrl, isUrlInternal } from './url-utils' - -describe('createUrl()', () => { - it('returns url with querystring', () => { - expect(createUrl('/path', { food: 'pizza', fruit: 'apple' })).toEqual( - '/path?food=pizza&fruit=apple' - ) - - expect(createUrl('/path', { food: ['pizza', 'chips'], fruit: 'apple' })).toEqual( - '/path?food=pizza%2Cchips&fruit=apple' - ) - }) - - it('empty params', () => { - expect(createUrl('/path', {})).toEqual('/path') - 
expect(createUrl('/path')).toEqual('/path') - }) -}) - -describe('isUrlInternal()', () => { - const sitename = 'http://myapp/' - const { location } = window - - beforeAll(() => { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - delete (window as any).location - }) - - afterAll(() => { - window.location = location - }) - - beforeEach(() => { - window.location = { - ...location, - href: sitename - } - }) - - it('url is only path', () => { - expect(isUrlInternal('/someurl')).toBeTruthy() - expect(isUrlInternal('/someurl?param=1')).toBeTruthy() - }) - - it('url contains full site', () => { - expect(isUrlInternal(sitename)).toBeTruthy() - expect(isUrlInternal(sitename + 'product/')).toBeTruthy() - expect(isUrlInternal(sitename + '?param=1')).toBeTruthy() - }) - - it('throws error if invalid url', () => { - expect(() => isUrlInternal('')).toThrowError('Invalid URL:') - expect(() => isUrlInternal('*^&*YH')).toThrowError('Invalid URL:') - }) - - it('throws error if invalid currentUrl', () => { - expect(() => isUrlInternal(sitename, '')).toThrowError('Invalid URL:') - expect(() => isUrlInternal(sitename, '*^&*YH')).toThrowError('Invalid URL:') - }) - - it('url is external site', () => { - expect(isUrlInternal('http://externalapp/')).toBeFalsy() - expect(isUrlInternal('https://myapp/')).toBeFalsy() - }) -}) diff --git a/ui/src/utils/url-utils.ts b/ui/src/utils/url-utils.ts deleted file mode 100644 index 975c278..0000000 --- a/ui/src/utils/url-utils.ts +++ /dev/null @@ -1,21 +0,0 @@ -export const createUrl = ( - url: RequestInfo | URL, - // eslint-disable-next-line @typescript-eslint/no-explicit-any - params?: string | URLSearchParams | Record | string[][] -): string => { - const queryString = new URLSearchParams(params).toString() - return `${url}${queryString && `?${queryString}`}` -} - -export const isUrlInternal = ( - url: string, - currenSite = window.location.href -): boolean => { - if (url.charAt(0) === '/') return true - - const fullUrl = new URL(url).origin - const fullSite = new URL(currenSite).origin - - if (fullUrl === fullSite) return true - return false -} From 93254db4975e648844136c118fed3d81a13db381 Mon Sep 17 00:00:00 2001 From: Toby Drane Date: Mon, 21 Aug 2023 18:39:08 +0100 Subject: [PATCH 02/17] fix ui tests --- .pre-commit-config.yaml | 8 + ui/package-lock.json | 434 ++++++++++++++++++++++++++++++++++++++++ ui/package.json | 6 +- ui/playwright.config.ts | 37 ++-- 4 files changed, 465 insertions(+), 20 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 982178e..7276dfa 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -54,6 +54,14 @@ repos: - id: checkov args: [--quiet, --compact] exclude: '^(?!infrastructure/).*' +- repo: local + hooks: + - id: sdk_test + name: sdk_test + language: system + entry: bash -c 'make sdk-test' + files: sdk/*. 
+ pass_filenames: false - repo: local hooks: - id: ui_test diff --git a/ui/package-lock.json b/ui/package-lock.json index 44ec1c9..a5992ef 100644 --- a/ui/package-lock.json +++ b/ui/package-lock.json @@ -22,6 +22,7 @@ "eslint": "8.29.0", "eslint-config-next": "13.0.6", "next": "13.0.6", + "playwright": "^1.37.1", "react": "18.2.0", "react-dom": "18.2.0", "react-error-boundary": "^3.1.4", @@ -30,6 +31,7 @@ }, "devDependencies": { "@babel/core": "^7.20.5", + "@playwright/test": "^1.37.1", "@storybook/addon-a11y": "^6.5.14", "@storybook/addon-actions": "^6.5.14", "@storybook/addon-essentials": "^6.5.14", @@ -40,11 +42,13 @@ "@storybook/react": "^6.5.14", "@storybook/testing-library": "^0.0.13", "@svgr/webpack": "^6.5.1", + "@tanstack/react-query-devtools": "^4.33.0", "@testing-library/jest-dom": "^5.16.5", "@testing-library/react": "^13.4.0", "@types/node": "20.4.5", "@types/react": "18.2.17", "@typescript-eslint/eslint-plugin": "^5.46.0", + "aws-sdk": "^2.1440.0", "babel-loader": "^8.3.0", "eslint-config-prettier": "^8.5.0", "eslint-plugin-jest-dom": "^4.0.3", @@ -3838,6 +3842,25 @@ "url": "https://opencollective.com/unts" } }, + "node_modules/@playwright/test": { + "version": "1.37.1", + "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.37.1.tgz", + "integrity": "sha512-bq9zTli3vWJo8S3LwB91U0qDNQDpEXnw7knhxLM0nwDvexQAwx9tO8iKDZSqqneVq+URd/WIoz+BALMqUTgdSg==", + "dev": true, + "dependencies": { + "@types/node": "*", + "playwright-core": "1.37.1" + }, + "bin": { + "playwright": "cli.js" + }, + "engines": { + "node": ">=16" + }, + "optionalDependencies": { + "fsevents": "2.3.2" + } + }, "node_modules/@pmmmwh/react-refresh-webpack-plugin": { "version": "0.5.10", "resolved": "https://registry.npmjs.org/@pmmmwh/react-refresh-webpack-plugin/-/react-refresh-webpack-plugin-0.5.10.tgz", @@ -10016,6 +10039,22 @@ "tslib": "^2.4.0" } }, + "node_modules/@tanstack/match-sorter-utils": { + "version": "8.8.4", + "resolved": "https://registry.npmjs.org/@tanstack/match-sorter-utils/-/match-sorter-utils-8.8.4.tgz", + "integrity": "sha512-rKH8LjZiszWEvmi01NR72QWZ8m4xmXre0OOwlRGnjU01Eqz/QnN+cqpty2PJ0efHblq09+KilvyR7lsbzmXVEw==", + "dev": true, + "dependencies": { + "remove-accents": "0.4.2" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/kentcdodds" + } + }, "node_modules/@tanstack/query-core": { "version": "4.19.1", "resolved": "https://registry.npmjs.org/@tanstack/query-core/-/query-core-4.19.1.tgz", @@ -10051,6 +10090,26 @@ } } }, + "node_modules/@tanstack/react-query-devtools": { + "version": "4.33.0", + "resolved": "https://registry.npmjs.org/@tanstack/react-query-devtools/-/react-query-devtools-4.33.0.tgz", + "integrity": "sha512-6gegkuDmOoiY5e6ZKj1id48vlCXchjfE/6tIpYO8dFlVMQ7t1bYna/Ce6qQJ69+kfEHbYiTTn2lj+FDjIBH7Hg==", + "dev": true, + "dependencies": { + "@tanstack/match-sorter-utils": "^8.7.0", + "superjson": "^1.10.0", + "use-sync-external-store": "^1.2.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/tannerlinsley" + }, + "peerDependencies": { + "@tanstack/react-query": "^4.33.0", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0", + "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0" + } + }, "node_modules/@testing-library/dom": { "version": "8.19.0", "resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-8.19.0.tgz", @@ -11800,6 +11859,80 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/aws-sdk": { + "version": "2.1440.0", + "resolved": 
"https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.1440.0.tgz", + "integrity": "sha512-ijHaRFZIKLYUDqOGTNrwncXF5vzJPU6VJpbRr7eNBAvo+nFtEHY4BZkldWYuhELCbWz0U5/+qMtF2T/JgPjfWQ==", + "dev": true, + "dependencies": { + "buffer": "4.9.2", + "events": "1.1.1", + "ieee754": "1.1.13", + "jmespath": "0.16.0", + "querystring": "0.2.0", + "sax": "1.2.1", + "url": "0.10.3", + "util": "^0.12.4", + "uuid": "8.0.0", + "xml2js": "0.5.0" + }, + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/aws-sdk/node_modules/events": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/events/-/events-1.1.1.tgz", + "integrity": "sha512-kEcvvCBByWXGnZy6JUlgAp2gBIUjfCAV6P6TgT1/aaQKcmuAEC4OZTV1I4EWQLz2gxZw76atuVyvHhTxvi0Flw==", + "dev": true, + "engines": { + "node": ">=0.4.x" + } + }, + "node_modules/aws-sdk/node_modules/ieee754": { + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.1.13.tgz", + "integrity": "sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg==", + "dev": true + }, + "node_modules/aws-sdk/node_modules/punycode": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", + "integrity": "sha512-RofWgt/7fL5wP1Y7fxE7/EmTLzQVnB0ycyibJ0OOHIlJqTNzglYFxVwETOcIoJqJmpDXJ9xImDv+Fq34F/d4Dw==", + "dev": true + }, + "node_modules/aws-sdk/node_modules/url": { + "version": "0.10.3", + "resolved": "https://registry.npmjs.org/url/-/url-0.10.3.tgz", + "integrity": "sha512-hzSUW2q06EqL1gKM/a+obYHLIO6ct2hwPuviqTTOcfFVc61UbfJ2Q32+uGL/HCPxKqrdGB5QUwIe7UqlDgwsOQ==", + "dev": true, + "dependencies": { + "punycode": "1.3.2", + "querystring": "0.2.0" + } + }, + "node_modules/aws-sdk/node_modules/util": { + "version": "0.12.5", + "resolved": "https://registry.npmjs.org/util/-/util-0.12.5.tgz", + "integrity": "sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA==", + "dev": true, + "dependencies": { + "inherits": "^2.0.3", + "is-arguments": "^1.0.4", + "is-generator-function": "^1.0.7", + "is-typed-array": "^1.1.3", + "which-typed-array": "^1.1.2" + } + }, + "node_modules/aws-sdk/node_modules/uuid": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.0.0.tgz", + "integrity": "sha512-jOXGuXZAWdsTH7eZLtyXMqUb9EcWMGZNbL9YcGBJl4MH4nrxHmZJhEHvyLFrkxo+28uLb/NYRcStH48fnD0Vzw==", + "dev": true, + "bin": { + "uuid": "dist/bin/uuid" + } + }, "node_modules/axe-core": { "version": "4.5.2", "resolved": "https://registry.npmjs.org/axe-core/-/axe-core-4.5.2.tgz", @@ -13438,6 +13571,21 @@ "integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==", "dev": true }, + "node_modules/copy-anything": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/copy-anything/-/copy-anything-3.0.5.tgz", + "integrity": "sha512-yCEafptTtb4bk7GLEQoM8KVJpxAfdBJYaXyzQEgQQQgYrZiDp8SJmGKlYza6CYjEDNstAdNdKA3UuoULlEbS6w==", + "dev": true, + "dependencies": { + "is-what": "^4.1.8" + }, + "engines": { + "node": ">=12.13" + }, + "funding": { + "url": "https://github.com/sponsors/mesqueeb" + } + }, "node_modules/copy-concurrently": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/copy-concurrently/-/copy-concurrently-1.0.5.tgz", @@ -17966,6 +18114,21 @@ "node": ">=6" } }, + "node_modules/is-generator-function": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.0.10.tgz", + "integrity": 
"sha512-jsEjy9l3yiXEQ+PsXdmBwEPcOxaXWLspKdplFUVI9vq1iZgIekeC0L167qeu86czQaxed3q/Uzuw0swL0irL8A==", + "dev": true, + "dependencies": { + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/is-glob": { "version": "4.0.3", "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", @@ -18213,6 +18376,18 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/is-what": { + "version": "4.1.15", + "resolved": "https://registry.npmjs.org/is-what/-/is-what-4.1.15.tgz", + "integrity": "sha512-uKua1wfy3Yt+YqsD6mTUEa2zSi3G1oPlqTflgaPJ7z63vUGN5pxFpnQfeSLMFnJDEsdvOtkp1rUWkYjB4YfhgA==", + "dev": true, + "engines": { + "node": ">=12.13" + }, + "funding": { + "url": "https://github.com/sponsors/mesqueeb" + } + }, "node_modules/is-whitespace-character": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/is-whitespace-character/-/is-whitespace-character-1.0.4.tgz", @@ -18991,6 +19166,15 @@ "url": "https://github.com/chalk/supports-color?sponsor=1" } }, + "node_modules/jmespath": { + "version": "0.16.0", + "resolved": "https://registry.npmjs.org/jmespath/-/jmespath-0.16.0.tgz", + "integrity": "sha512-9FzQjJ7MATs1tSpnco1K6ayiYE3figslrXA72G2HQ/n76RzvYlofyi5QM+iX4YRs/pu3yzxlVQSST23+dMDknw==", + "dev": true, + "engines": { + "node": ">= 0.6.0" + } + }, "node_modules/js-sdsl": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/js-sdsl/-/js-sdsl-4.2.0.tgz", @@ -21467,6 +21651,32 @@ "node": ">=8" } }, + "node_modules/playwright": { + "version": "1.37.1", + "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.37.1.tgz", + "integrity": "sha512-bgUXRrQKhT48zHdxDYQTpf//0xDfDd5hLeEhjuSw8rXEGoT9YeElpfvs/izonTNY21IQZ7d3s22jLxYaAnubbQ==", + "hasInstallScript": true, + "dependencies": { + "playwright-core": "1.37.1" + }, + "bin": { + "playwright": "cli.js" + }, + "engines": { + "node": ">=16" + } + }, + "node_modules/playwright-core": { + "version": "1.37.1", + "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.37.1.tgz", + "integrity": "sha512-17EuQxlSIYCmEMwzMqusJ2ztDgJePjrbttaefgdsiqeLWidjYz9BxXaTaZWxH1J95SHGk6tjE+dwgWILJoUZfA==", + "bin": { + "playwright-core": "cli.js" + }, + "engines": { + "node": ">=16" + } + }, "node_modules/pnp-webpack-plugin": { "version": "1.6.4", "resolved": "https://registry.npmjs.org/pnp-webpack-plugin/-/pnp-webpack-plugin-1.6.4.tgz", @@ -22736,6 +22946,12 @@ "url": "https://opencollective.com/unified" } }, + "node_modules/remove-accents": { + "version": "0.4.2", + "resolved": "https://registry.npmjs.org/remove-accents/-/remove-accents-0.4.2.tgz", + "integrity": "sha512-7pXIJqJOq5tFgG1A2Zxti3Ht8jJF337m4sowbuHsW30ZnkQFnDzy9qBNhgzX8ZLW4+UBcXiiR7SwR6pokHsxiA==", + "dev": true + }, "node_modules/remove-trailing-separator": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz", @@ -23292,6 +23508,12 @@ "which": "bin/which" } }, + "node_modules/sax": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz", + "integrity": "sha512-8I2a3LovHTOpm7NV5yOyO8IHqgVsfK4+UuySrXU8YXkSRX7k6hCV9b3HrkKCr3nMpgj+0bmocaJJWpvp1oc7ZA==", + "dev": true + }, "node_modules/saxes": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/saxes/-/saxes-6.0.0.tgz", @@ -24423,6 +24645,18 @@ "resolved": "https://registry.npmjs.org/stylis/-/stylis-4.1.3.tgz", "integrity": 
"sha512-GP6WDNWf+o403jrEp9c5jibKavrtLW+/qYGhFxFrG8maXhwTBI7gLLhiBb0o7uFccWN+EOS9aMO6cGHWAO07OA==" }, + "node_modules/superjson": { + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/superjson/-/superjson-1.13.1.tgz", + "integrity": "sha512-AVH2eknm9DEd3qvxM4Sq+LTCkSXE2ssfh1t11MHMXyYXFQyQ1HLgVvV+guLTsaQnJU3gnaVo34TohHPulY/wLg==", + "dev": true, + "dependencies": { + "copy-anything": "^3.0.2" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/supports-color": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", @@ -26365,6 +26599,28 @@ "node": ">=12" } }, + "node_modules/xml2js": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.5.0.tgz", + "integrity": "sha512-drPFnkQJik/O+uPKpqSgr22mpuFHqKdbS835iAQrUC73L2F5WkboIRd63ai/2Yg6I1jzifPFKH2NTK+cfglkIA==", + "dev": true, + "dependencies": { + "sax": ">=0.6.0", + "xmlbuilder": "~11.0.0" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/xmlbuilder": { + "version": "11.0.1", + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz", + "integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==", + "dev": true, + "engines": { + "node": ">=4.0" + } + }, "node_modules/xmlchars": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/xmlchars/-/xmlchars-2.2.0.tgz", @@ -29081,6 +29337,17 @@ "tslib": "^2.4.0" } }, + "@playwright/test": { + "version": "1.37.1", + "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.37.1.tgz", + "integrity": "sha512-bq9zTli3vWJo8S3LwB91U0qDNQDpEXnw7knhxLM0nwDvexQAwx9tO8iKDZSqqneVq+URd/WIoz+BALMqUTgdSg==", + "dev": true, + "requires": { + "@types/node": "*", + "fsevents": "2.3.2", + "playwright-core": "1.37.1" + } + }, "@pmmmwh/react-refresh-webpack-plugin": { "version": "0.5.10", "resolved": "https://registry.npmjs.org/@pmmmwh/react-refresh-webpack-plugin/-/react-refresh-webpack-plugin-0.5.10.tgz", @@ -33814,6 +34081,15 @@ "tslib": "^2.4.0" } }, + "@tanstack/match-sorter-utils": { + "version": "8.8.4", + "resolved": "https://registry.npmjs.org/@tanstack/match-sorter-utils/-/match-sorter-utils-8.8.4.tgz", + "integrity": "sha512-rKH8LjZiszWEvmi01NR72QWZ8m4xmXre0OOwlRGnjU01Eqz/QnN+cqpty2PJ0efHblq09+KilvyR7lsbzmXVEw==", + "dev": true, + "requires": { + "remove-accents": "0.4.2" + } + }, "@tanstack/query-core": { "version": "4.19.1", "resolved": "https://registry.npmjs.org/@tanstack/query-core/-/query-core-4.19.1.tgz", @@ -33828,6 +34104,17 @@ "use-sync-external-store": "^1.2.0" } }, + "@tanstack/react-query-devtools": { + "version": "4.33.0", + "resolved": "https://registry.npmjs.org/@tanstack/react-query-devtools/-/react-query-devtools-4.33.0.tgz", + "integrity": "sha512-6gegkuDmOoiY5e6ZKj1id48vlCXchjfE/6tIpYO8dFlVMQ7t1bYna/Ce6qQJ69+kfEHbYiTTn2lj+FDjIBH7Hg==", + "dev": true, + "requires": { + "@tanstack/match-sorter-utils": "^8.7.0", + "superjson": "^1.10.0", + "use-sync-external-store": "^1.2.0" + } + }, "@testing-library/dom": { "version": "8.19.0", "resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-8.19.0.tgz", @@ -35273,6 +35560,73 @@ "integrity": "sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw==", "dev": true }, + "aws-sdk": { + "version": "2.1440.0", + "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.1440.0.tgz", + "integrity": "sha512-ijHaRFZIKLYUDqOGTNrwncXF5vzJPU6VJpbRr7eNBAvo+nFtEHY4BZkldWYuhELCbWz0U5/+qMtF2T/JgPjfWQ==", + "dev": 
true, + "requires": { + "buffer": "4.9.2", + "events": "1.1.1", + "ieee754": "1.1.13", + "jmespath": "0.16.0", + "querystring": "0.2.0", + "sax": "1.2.1", + "url": "0.10.3", + "util": "^0.12.4", + "uuid": "8.0.0", + "xml2js": "0.5.0" + }, + "dependencies": { + "events": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/events/-/events-1.1.1.tgz", + "integrity": "sha512-kEcvvCBByWXGnZy6JUlgAp2gBIUjfCAV6P6TgT1/aaQKcmuAEC4OZTV1I4EWQLz2gxZw76atuVyvHhTxvi0Flw==", + "dev": true + }, + "ieee754": { + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.1.13.tgz", + "integrity": "sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg==", + "dev": true + }, + "punycode": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", + "integrity": "sha512-RofWgt/7fL5wP1Y7fxE7/EmTLzQVnB0ycyibJ0OOHIlJqTNzglYFxVwETOcIoJqJmpDXJ9xImDv+Fq34F/d4Dw==", + "dev": true + }, + "url": { + "version": "0.10.3", + "resolved": "https://registry.npmjs.org/url/-/url-0.10.3.tgz", + "integrity": "sha512-hzSUW2q06EqL1gKM/a+obYHLIO6ct2hwPuviqTTOcfFVc61UbfJ2Q32+uGL/HCPxKqrdGB5QUwIe7UqlDgwsOQ==", + "dev": true, + "requires": { + "punycode": "1.3.2", + "querystring": "0.2.0" + } + }, + "util": { + "version": "0.12.5", + "resolved": "https://registry.npmjs.org/util/-/util-0.12.5.tgz", + "integrity": "sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA==", + "dev": true, + "requires": { + "inherits": "^2.0.3", + "is-arguments": "^1.0.4", + "is-generator-function": "^1.0.7", + "is-typed-array": "^1.1.3", + "which-typed-array": "^1.1.2" + } + }, + "uuid": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.0.0.tgz", + "integrity": "sha512-jOXGuXZAWdsTH7eZLtyXMqUb9EcWMGZNbL9YcGBJl4MH4nrxHmZJhEHvyLFrkxo+28uLb/NYRcStH48fnD0Vzw==", + "dev": true + } + } + }, "axe-core": { "version": "4.5.2", "resolved": "https://registry.npmjs.org/axe-core/-/axe-core-4.5.2.tgz", @@ -36563,6 +36917,15 @@ "integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==", "dev": true }, + "copy-anything": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/copy-anything/-/copy-anything-3.0.5.tgz", + "integrity": "sha512-yCEafptTtb4bk7GLEQoM8KVJpxAfdBJYaXyzQEgQQQgYrZiDp8SJmGKlYza6CYjEDNstAdNdKA3UuoULlEbS6w==", + "dev": true, + "requires": { + "is-what": "^4.1.8" + } + }, "copy-concurrently": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/copy-concurrently/-/copy-concurrently-1.0.5.tgz", @@ -40080,6 +40443,15 @@ "integrity": "sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==", "dev": true }, + "is-generator-function": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.0.10.tgz", + "integrity": "sha512-jsEjy9l3yiXEQ+PsXdmBwEPcOxaXWLspKdplFUVI9vq1iZgIekeC0L167qeu86czQaxed3q/Uzuw0swL0irL8A==", + "dev": true, + "requires": { + "has-tostringtag": "^1.0.0" + } + }, "is-glob": { "version": "4.0.3", "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", @@ -40245,6 +40617,12 @@ "get-intrinsic": "^1.1.1" } }, + "is-what": { + "version": "4.1.15", + "resolved": "https://registry.npmjs.org/is-what/-/is-what-4.1.15.tgz", + "integrity": "sha512-uKua1wfy3Yt+YqsD6mTUEa2zSi3G1oPlqTflgaPJ7z63vUGN5pxFpnQfeSLMFnJDEsdvOtkp1rUWkYjB4YfhgA==", + "dev": true + }, "is-whitespace-character": { 
"version": "1.0.4", "resolved": "https://registry.npmjs.org/is-whitespace-character/-/is-whitespace-character-1.0.4.tgz", @@ -40843,6 +41221,12 @@ } } }, + "jmespath": { + "version": "0.16.0", + "resolved": "https://registry.npmjs.org/jmespath/-/jmespath-0.16.0.tgz", + "integrity": "sha512-9FzQjJ7MATs1tSpnco1K6ayiYE3figslrXA72G2HQ/n76RzvYlofyi5QM+iX4YRs/pu3yzxlVQSST23+dMDknw==", + "dev": true + }, "js-sdsl": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/js-sdsl/-/js-sdsl-4.2.0.tgz", @@ -42793,6 +43177,19 @@ } } }, + "playwright": { + "version": "1.37.1", + "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.37.1.tgz", + "integrity": "sha512-bgUXRrQKhT48zHdxDYQTpf//0xDfDd5hLeEhjuSw8rXEGoT9YeElpfvs/izonTNY21IQZ7d3s22jLxYaAnubbQ==", + "requires": { + "playwright-core": "1.37.1" + } + }, + "playwright-core": { + "version": "1.37.1", + "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.37.1.tgz", + "integrity": "sha512-17EuQxlSIYCmEMwzMqusJ2ztDgJePjrbttaefgdsiqeLWidjYz9BxXaTaZWxH1J95SHGk6tjE+dwgWILJoUZfA==" + }, "pnp-webpack-plugin": { "version": "1.6.4", "resolved": "https://registry.npmjs.org/pnp-webpack-plugin/-/pnp-webpack-plugin-1.6.4.tgz", @@ -43744,6 +44141,12 @@ "mdast-squeeze-paragraphs": "^4.0.0" } }, + "remove-accents": { + "version": "0.4.2", + "resolved": "https://registry.npmjs.org/remove-accents/-/remove-accents-0.4.2.tgz", + "integrity": "sha512-7pXIJqJOq5tFgG1A2Zxti3Ht8jJF337m4sowbuHsW30ZnkQFnDzy9qBNhgzX8ZLW4+UBcXiiR7SwR6pokHsxiA==", + "dev": true + }, "remove-trailing-separator": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz", @@ -44178,6 +44581,12 @@ } } }, + "sax": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz", + "integrity": "sha512-8I2a3LovHTOpm7NV5yOyO8IHqgVsfK4+UuySrXU8YXkSRX7k6hCV9b3HrkKCr3nMpgj+0bmocaJJWpvp1oc7ZA==", + "dev": true + }, "saxes": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/saxes/-/saxes-6.0.0.tgz", @@ -45113,6 +45522,15 @@ "resolved": "https://registry.npmjs.org/stylis/-/stylis-4.1.3.tgz", "integrity": "sha512-GP6WDNWf+o403jrEp9c5jibKavrtLW+/qYGhFxFrG8maXhwTBI7gLLhiBb0o7uFccWN+EOS9aMO6cGHWAO07OA==" }, + "superjson": { + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/superjson/-/superjson-1.13.1.tgz", + "integrity": "sha512-AVH2eknm9DEd3qvxM4Sq+LTCkSXE2ssfh1t11MHMXyYXFQyQ1HLgVvV+guLTsaQnJU3gnaVo34TohHPulY/wLg==", + "dev": true, + "requires": { + "copy-anything": "^3.0.2" + } + }, "supports-color": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", @@ -46609,6 +47027,22 @@ "integrity": "sha512-ICP2e+jsHvAj2E2lIHxa5tjXRlKDJo4IdvPvCXbXQGdzSfmSpNVyIKMvoZHjDY9DP0zV17iI85o90vRFXNccRw==", "dev": true }, + "xml2js": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.5.0.tgz", + "integrity": "sha512-drPFnkQJik/O+uPKpqSgr22mpuFHqKdbS835iAQrUC73L2F5WkboIRd63ai/2Yg6I1jzifPFKH2NTK+cfglkIA==", + "dev": true, + "requires": { + "sax": ">=0.6.0", + "xmlbuilder": "~11.0.0" + } + }, + "xmlbuilder": { + "version": "11.0.1", + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz", + "integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==", + "dev": true + }, "xmlchars": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/xmlchars/-/xmlchars-2.2.0.tgz", diff --git a/ui/package.json b/ui/package.json 
index e4035e3..9f627f3 100644 --- a/ui/package.json +++ b/ui/package.json @@ -27,6 +27,7 @@ "eslint": "8.29.0", "eslint-config-next": "13.0.6", "next": "13.0.6", + "playwright": "^1.37.1", "react": "18.2.0", "react-dom": "18.2.0", "react-error-boundary": "^3.1.4", @@ -35,6 +36,7 @@ }, "devDependencies": { "@babel/core": "^7.20.5", + "@playwright/test": "^1.37.1", "@storybook/addon-a11y": "^6.5.14", "@storybook/addon-actions": "^6.5.14", "@storybook/addon-essentials": "^6.5.14", @@ -45,11 +47,13 @@ "@storybook/react": "^6.5.14", "@storybook/testing-library": "^0.0.13", "@svgr/webpack": "^6.5.1", + "@tanstack/react-query-devtools": "^4.33.0", "@testing-library/jest-dom": "^5.16.5", "@testing-library/react": "^13.4.0", "@types/node": "20.4.5", "@types/react": "18.2.17", "@typescript-eslint/eslint-plugin": "^5.46.0", + "aws-sdk": "^2.1440.0", "babel-loader": "^8.3.0", "eslint-config-prettier": "^8.5.0", "eslint-plugin-jest-dom": "^4.0.3", @@ -63,4 +67,4 @@ "storybook-addon-designs": "^6.3.1", "typescript": "5.1.6" } -} \ No newline at end of file +} diff --git a/ui/playwright.config.ts b/ui/playwright.config.ts index 8f5c959..4f31eb4 100644 --- a/ui/playwright.config.ts +++ b/ui/playwright.config.ts @@ -1,22 +1,21 @@ -import { defineConfig, devices } from '@playwright/test'; -import dotenv from 'dotenv'; -import path from 'path'; +import { defineConfig, devices } from '@playwright/test' +import dotenv from 'dotenv' +import path from 'path' -dotenv.config({ path: path.resolve(process.cwd(), '.env.local') }); +dotenv.config({ path: path.resolve(process.cwd(), '.env.local') }) export default defineConfig({ - projects: [ - // Setup project - { name: 'setup', testMatch: 'auth.setup.ts' }, - { - name: 'chromium', - use: { - ...devices['Desktop Chrome'], - // Use prepared auth state. - storageState: 'playwright/.auth/user.json', - }, - dependencies: ['setup'], - }, - ], -}); - + projects: [ + // Setup project + { name: 'setup', testMatch: 'auth.setup.ts' }, + { + name: 'chromium', + use: { + ...devices['Desktop Chrome'], + // Use prepared auth state. 
+ storageState: 'playwright/.auth/user.json' + }, + dependencies: ['setup'] + } + ] +}) From cac181ae4ade7e9f197ff5553f2fdb8c17cf3e41 Mon Sep 17 00:00:00 2001 From: lcard Date: Wed, 23 Aug 2023 13:42:52 +0100 Subject: [PATCH 03/17] First of many --- .github/.github.env | 2 + .github/workflows/dev.yml | 4 +- .github/workflows/main.yml | 6 +- Makefile | 4 +- api/Makefile | 4 +- docs/infrastructure/deployment.md | 5 -- infrastructure/blocks/pipeline-ami/data.tf | 9 +++ .../pipeline-ami/install.sh} | 22 ------- infrastructure/blocks/pipeline-ami/packer.tf | 25 ++++++++ .../blocks/pipeline-ami/provider.tf | 6 ++ .../blocks/pipeline-ami/template.json | 58 +++++++++++++++++++ .../blocks/pipeline-ami/variables.tf | 33 +++++++++++ infrastructure/blocks/pipeline/data.tf | 35 +++++++++++ infrastructure/blocks/pipeline/iam.tf | 29 ++-------- .../pipeline/initialisation-script.sh.tpl | 20 +++++++ infrastructure/blocks/pipeline/main.tf | 50 +++++----------- infrastructure/blocks/pipeline/provider.tf | 5 ++ infrastructure/blocks/pipeline/variables.tf | 10 ++++ infrastructure/scripts/env_setup.sh | 4 -- infrastructure/scripts/infra_make_helper.sh | 20 ------- 20 files changed, 231 insertions(+), 120 deletions(-) create mode 100644 infrastructure/blocks/pipeline-ami/data.tf rename infrastructure/{scripts/initialisation-script.sh.tpl => blocks/pipeline-ami/install.sh} (53%) create mode 100644 infrastructure/blocks/pipeline-ami/packer.tf create mode 100644 infrastructure/blocks/pipeline-ami/provider.tf create mode 100644 infrastructure/blocks/pipeline-ami/template.json create mode 100644 infrastructure/blocks/pipeline-ami/variables.tf create mode 100644 infrastructure/blocks/pipeline/data.tf create mode 100644 infrastructure/blocks/pipeline/initialisation-script.sh.tpl create mode 100644 infrastructure/blocks/pipeline/provider.tf delete mode 100644 infrastructure/scripts/env_setup.sh diff --git a/.github/.github.env b/.github/.github.env index 97289f3..ad8c623 100644 --- a/.github/.github.env +++ b/.github/.github.env @@ -2,3 +2,5 @@ COGNITO_USER_POOL_ID=rapid-pool RESOURCE_PREFIX=rapid ALLOWED_EMAIL_DOMAINS=example1.com,example2.com LAYERS=raw,layer +DOMAIN_NAME=example.com +DATA_BUCKET=the-bucket diff --git a/.github/workflows/dev.yml b/.github/workflows/dev.yml index 1278154..3835c6e 100644 --- a/.github/workflows/dev.yml +++ b/.github/workflows/dev.yml @@ -45,8 +45,6 @@ jobs: - name: Populate .env with additional vars run: | cp ./.github/.github.env .env - echo DOMAIN_NAME=${{ secrets.DOMAIN_NAME }} >> .env - echo DATA_BUCKET=${{ secrets.DATA_BUCKET }} >> .env echo AWS_ACCOUNT=${{ secrets.AWS_ACCOUNT }} >> .env echo AWS_REGION=${{ secrets.AWS_REGION }} >> .env echo AWS_DEFAULT_REGION=${{ secrets.AWS_REGION }} >> .env @@ -76,7 +74,7 @@ jobs: run: | echo "TWINE_USERNAME=${{ secrets.TWINE_USERNAME_TEST }}" >> .env echo "TWINE_PASSWORD=${{ secrets.TWINE_PASSWORD_TEST }}" >> .env - echo TWINE_NON_INTERACTIVE=${{ secrets.TWINE_NON_INTERACTIVE }} >> .env + echo "TWINE_NON_INTERACTIVE=true" >> .env - name: Setup Python uses: actions/setup-python@v4 diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 1f5bfe3..65a0807 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -44,8 +44,6 @@ jobs: - name: Populate .env with additional vars run: | cp ./.github/.github.env .env - echo DOMAIN_NAME=${{ secrets.DOMAIN_NAME }} >> .env - echo DATA_BUCKET=${{ secrets.DATA_BUCKET }} >> .env echo AWS_ACCOUNT=${{ secrets.AWS_ACCOUNT }} >> .env echo AWS_REGION=${{ secrets.AWS_REGION }} 
>> .env echo AWS_DEFAULT_REGION=${{ secrets.AWS_REGION }} >> .env @@ -69,7 +67,7 @@ jobs: run: make api-tag-prod-candidate - name: API Deploy Image to Prod - run: make api-tag-live-in-prod + run: make api-app-live-in-prod - name: API Allow for Application to Start run: sleep 120 @@ -81,6 +79,8 @@ jobs: - name: API E2E Tests id: e2e-tests env: + DOMAIN_NAME: ${{ secrets.DOMAIN_NAME }} + DATA_BUCKET: ${{ secrets.DATA_BUCKET }} COGNITO_USER_POOL_ID: ${{ secrets.COGNITO_USER_POOL_ID }} RESOURCE_PREFIX: ${{ secrets.RESOURCE_PREFIX }} ALLOWED_EMAIL_DOMAINS: ${{ secrets.ALLOWED_EMAIL_DOMAINS }} diff --git a/Makefile b/Makefile index 8b2d897..6e11259 100644 --- a/Makefile +++ b/Makefile @@ -88,8 +88,8 @@ api-tag-and-upload-release-image:## Tag and upload the api release image api-tag-prod-candidate: ## Tag the uploaded api image as a candidate for PROD deployment @cd api/; $(MAKE) tag-prod-candidate -api-tag-live-in-prod: ## Deploy the latest version of the api - @cd api/; $(MAKE) tag-live-in-prod +api-app-live-in-prod: ## Deploy the latest version of the api + @cd api/; $(MAKE) app-live-in-prod api-check-app-is-running: @cd api/; $(MAKE) check-app-is-running diff --git a/api/Makefile b/api/Makefile index 4aa6398..7b03249 100644 --- a/api/Makefile +++ b/api/Makefile @@ -1,5 +1,5 @@ -ECS_SERVICE=rapid-ecs-service -ECS_CLUSTER=rapid-cluster +ECS_SERVICE=rapid-preprod-ecs-service +ECS_CLUSTER=rapid-preprod-cluster LATEST_COMMIT_HASH=$(shell git rev-parse --short HEAD) ACCOUNT_ECR_URI=$(AWS_ACCOUNT).dkr.ecr.$(AWS_REGION).amazonaws.com IMAGE_NAME=data-f1-registry diff --git a/docs/infrastructure/deployment.md b/docs/infrastructure/deployment.md index 0266808..c2c67d7 100644 --- a/docs/infrastructure/deployment.md +++ b/docs/infrastructure/deployment.md @@ -74,9 +74,6 @@ Our infrastructure is built using AWS, so you'll need an AWS account, and access Follow these steps to set up the AWS profile: - [Install/Update AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html) -- [Set up a named profile](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-profiles.html) if you already have the AWS cli. - -After setting up the named profile, the current session can be checked by running ```aws sts get-caller-identity```. We have a file (`scripts/env_setup.sh) with the required exports to use the 'gov' profile. These exports have to be run when starting a new session. We use `jq` in our scripts to help the `make` targets work correctly, please [Install jq](https://stedolan.github.io/jq/download/) before running any make command. @@ -184,8 +181,6 @@ In order to gain the admin privileges necessary for infrastructure changes one n enabled only for user's defined in `input-params.tfvars`, only after logging into the AWS console for the first time as an IAM user and enabling MFA. -Then, to assume the role, set up the profile (`scripts/env_setup.sh`), run ```make infra-assume-role``` and follow the prompts. - ### Deploying remaining infra-blocks Once the state backend has been configured, provide/change the following inputs in `input-params.tfvars`. 
diff --git a/infrastructure/blocks/pipeline-ami/data.tf b/infrastructure/blocks/pipeline-ami/data.tf new file mode 100644 index 0000000..603ec83 --- /dev/null +++ b/infrastructure/blocks/pipeline-ami/data.tf @@ -0,0 +1,9 @@ +data "terraform_remote_state" "vpc-state" { + backend = "s3" + workspace = "prod" + + config = { + key = "vpc/terraform.tfstate" + bucket = var.state_bucket + } +} diff --git a/infrastructure/scripts/initialisation-script.sh.tpl b/infrastructure/blocks/pipeline-ami/install.sh similarity index 53% rename from infrastructure/scripts/initialisation-script.sh.tpl rename to infrastructure/blocks/pipeline-ami/install.sh index 73a1d86..020b8ed 100644 --- a/infrastructure/scripts/initialisation-script.sh.tpl +++ b/infrastructure/blocks/pipeline-ami/install.sh @@ -1,5 +1,3 @@ -#!/usr/bin/env bash - # Enable SSM sudo snap install amazon-ssm-agent --classic sudo snap start amazon-ssm-agent @@ -38,23 +36,3 @@ sudo apt install gh -y curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" unzip awscliv2.zip sudo ./aws/install - -# ---- Start docker service -sudo service docker start - -# ---- Allow ubuntu user to manage Docker service -sudo usermod -a -G docker ubuntu - -# Install GitHub Actions Runner -# Need to run these commands as the ubuntu user for correct permissions -sudo -u ubuntu mkdir /home/ubuntu/actions-runner -cd /home/ubuntu/actions-runner -sudo -u ubuntu curl -o actions-runner-linux-x64-2.307.1.tar.gz -L https://github.com/actions/runner/releases/download/v2.307.1/actions-runner-linux-x64-2.307.1.tar.gz -sudo -u ubuntu tar xzf ./actions-runner-linux-x64-2.307.1.tar.gz -sudo -u ubuntu ./config.sh --url https://github.com/no10ds --token "${runner-registration-token}" --name Data-F1-Pipeline-Runner --unattended --replace - -# Run the GitHub Actions Runner -sudo -u ubuntu ./run.sh & - -# # Configure the GitHub Actions Runner to start on reboot -sudo crontab -l -u ubuntu | echo "@reboot sudo -u ubuntu /home/ubuntu/actions-runner/run.sh &" | sudo crontab -u ubuntu - \ No newline at end of file diff --git a/infrastructure/blocks/pipeline-ami/packer.tf b/infrastructure/blocks/pipeline-ami/packer.tf new file mode 100644 index 0000000..3b24632 --- /dev/null +++ b/infrastructure/blocks/pipeline-ami/packer.tf @@ -0,0 +1,25 @@ +resource "null_resource" "packer_build" { + triggers = { + sha256_ami_config = filesha256("${path.module}/template.json") + sha256_ami_install = filesha256("${path.module}/install.sh") + version = var.pipeline_ami_version + } + + provisioner "local-exec" { + command = < Date: Wed, 23 Aug 2023 13:56:31 +0100 Subject: [PATCH 04/17] Fix --- .github/workflows/ui-tests.yml | 7 ++----- requirements.txt | 10 +++++----- 2 files changed, 7 insertions(+), 10 deletions(-) diff --git a/.github/workflows/ui-tests.yml b/.github/workflows/ui-tests.yml index dd280c9..e40849b 100644 --- a/.github/workflows/ui-tests.yml +++ b/.github/workflows/ui-tests.yml @@ -1,9 +1,6 @@ name: rAPId Integration Tests on: - push: - branches: - - "**" workflow_dispatch: @@ -29,11 +26,11 @@ jobs: npm install - name: Install playwright browsers - run: npx playwright install-deps && npx playwright install + run: npm install @playwright/test -D - name: run playwright tests run: npx playwright test ui/playwright env: - DOMAIN: ${{ secrets.DOMAIN }} + DOMAIN: "https://${{ secrets.DOMAIN }}" RESOURCE_PREFIX: ${{ secrets.RESOURCE_PREFIX }} AWS_REGION: ${{ secrets.AWS_REGION }} diff --git a/requirements.txt b/requirements.txt index a800c2c..1a91b58 100644 --- 
a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ -bandit==1.7.5 -detect-secrets==1.4.0 -mkdocs==1.5.1 -mkdocs-material==9.1.21 -mkdocstrings[python]==1.2.1 +bandit +detect-secrets +mkdocs +mkdocs-material +mkdocstrings[python] From 94d02c6033e0d6731f3fdfc10629cb1f9b7bf258 Mon Sep 17 00:00:00 2001 From: lcard Date: Wed, 23 Aug 2023 14:24:52 +0100 Subject: [PATCH 05/17] Fix --- .gitignore | 4 ++-- ui/src/__tests__/subject/create.test.tsx | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.gitignore b/.gitignore index 2b0728b..47550f1 100644 --- a/.gitignore +++ b/.gitignore @@ -182,5 +182,5 @@ docs/_build/ .terraform/ .terraform.lock.hcl -playwright/.auth -playwright/.downloads \ No newline at end of file +ui/playwright/.auth +ui/playwright/.downloads \ No newline at end of file diff --git a/ui/src/__tests__/subject/create.test.tsx b/ui/src/__tests__/subject/create.test.tsx index 111beb9..0661ab3 100644 --- a/ui/src/__tests__/subject/create.test.tsx +++ b/ui/src/__tests__/subject/create.test.tsx @@ -48,6 +48,7 @@ describe('Page: Subject Create', () => { const mockData = { client_name: 'James Bond', + // pragma: allowlist secret client_secret: 'secret-code-word', client_id: 'id-abc123', permissions: ['DATA_ADMIN', 'READ_PRIVATE'] From 4b61ab02b86c77f9ce7ed4f47273d65da93f27e5 Mon Sep 17 00:00:00 2001 From: lcard Date: Wed, 23 Aug 2023 14:32:06 +0100 Subject: [PATCH 06/17] remove file From 1ec7c66f8215e0fe0cf0d14609622e6cdbbf19d6 Mon Sep 17 00:00:00 2001 From: lcard Date: Wed, 23 Aug 2023 14:33:56 +0100 Subject: [PATCH 07/17] move pragma comment --- ui/src/__tests__/subject/create.test.tsx | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/ui/src/__tests__/subject/create.test.tsx b/ui/src/__tests__/subject/create.test.tsx index 0661ab3..a6aca10 100644 --- a/ui/src/__tests__/subject/create.test.tsx +++ b/ui/src/__tests__/subject/create.test.tsx @@ -48,8 +48,7 @@ describe('Page: Subject Create', () => { const mockData = { client_name: 'James Bond', - // pragma: allowlist secret - client_secret: 'secret-code-word', + client_secret: 'secret-code-word', // pragma: allowlist secret client_id: 'id-abc123', permissions: ['DATA_ADMIN', 'READ_PRIVATE'] } From c3732b729773b403588a089a03955b56ef4a22ab Mon Sep 17 00:00:00 2001 From: lcard Date: Wed, 23 Aug 2023 14:43:04 +0100 Subject: [PATCH 08/17] fixes --- Makefile | 4 +- api/api/controller/auth.py | 6 ++- api/api/controller/datasets.py | 7 +-- api/api/domain/metadata_search.py | 59 --------------------- api/test/api/controller/test_datasets.py | 66 ++++++++++++------------ 5 files changed, 39 insertions(+), 103 deletions(-) delete mode 100644 api/api/domain/metadata_search.py diff --git a/Makefile b/Makefile index 6e11259..29ed4ce 100644 --- a/Makefile +++ b/Makefile @@ -11,9 +11,7 @@ help: ## List targets and description precommit: pre-commit install -security-check: - @$(MAKE) detect-secrets - @$(MAKE) detect-vulnerabilities +security-check: detect-secrets detect-vulnerabilities detect-secrets: @git ls-files -z | xargs -0 detect-secrets-hook --baseline .secrets.baseline diff --git a/api/api/controller/auth.py b/api/api/controller/auth.py index fbcb64c..92a9815 100644 --- a/api/api/controller/auth.py +++ b/api/api/controller/auth.py @@ -65,7 +65,9 @@ async def redirect_oauth_token_request(request: Request): } payload = await _load_json_bytes_to_dict(request) - response = requests.post(IDENTITY_PROVIDER_TOKEN_URL, headers=headers, data=payload) + response = requests.post( + IDENTITY_PROVIDER_TOKEN_URL, 
headers=headers, data=payload, timeout=5 + ) return response.json() @@ -114,7 +116,7 @@ async def _get_access_token(auth, code, cognito_user_login_client_id): "code": code, } response = requests.post( - IDENTITY_PROVIDER_TOKEN_URL, auth=auth, headers=headers, data=payload + IDENTITY_PROVIDER_TOKEN_URL, auth=auth, headers=headers, data=payload, timeout=5 ) response_content = json.loads(response.content.decode(CONTENT_ENCODING)) access_token = response_content["access_token"] diff --git a/api/api/controller/datasets.py b/api/api/controller/datasets.py index 6c4e759..0e3c2e1 100644 --- a/api/api/controller/datasets.py +++ b/api/api/controller/datasets.py @@ -43,7 +43,6 @@ from api.domain.dataset_filters import DatasetFilters from api.domain.dataset_metadata import DatasetMetadata from api.domain.schema_metadata import SchemaMetadata -from api.domain.metadata_search import metadata_search_query from api.domain.mime_type import MimeType from api.domain.sql_query import SQLQuery from api.domain.Jobs.Job import generate_uuid @@ -128,11 +127,7 @@ class EnrichedMetadata(SchemaMetadata): include_in_schema=False, ) async def search_dataset_metadata(term: str): - sql_query = metadata_search_query(term) - df = athena_adapter.query_sql(sql_query) - df["version"] = df["version"].fillna(value="0") - df["data"] = df["data"].fillna(value="") - return df.to_dict("records") + return None @datasets_router.get( diff --git a/api/api/domain/metadata_search.py b/api/api/domain/metadata_search.py deleted file mode 100644 index c736725..0000000 --- a/api/api/domain/metadata_search.py +++ /dev/null @@ -1,59 +0,0 @@ -from jinja2 import Template -from typing import List - -from api.common.config.aws import GLUE_CATALOGUE_DB_NAME, METADATA_CATALOGUE_DB_NAME - - -DATASET_COLUMN = "dataset" -DOMAIN_COLUMN = "domain" -VERSION_COLUMN = "version" -DATA_COLUMN = "data" -DATA_TYPE_COLUMN = "data_type" - -# fmt: off -METADATA_QUERY = Template( # nosec - f""" -SELECT * FROM ( - SELECT - metadata.dataset as {DATASET_COLUMN}, - metadata.domain as {DOMAIN_COLUMN}, - metadata.version as {VERSION_COLUMN}, - "column".name as {DATA_COLUMN}, - 'column_name' as {DATA_TYPE_COLUMN} - FROM "{GLUE_CATALOGUE_DB_NAME}"."{METADATA_CATALOGUE_DB_NAME}" - CROSS JOIN UNNEST("columns") AS t ("column") - UNION ALL - SELECT - metadata.dataset as {DATASET_COLUMN}, - metadata.domain as {DOMAIN_COLUMN}, - metadata.version as {VERSION_COLUMN}, - metadata.description as {DATA_COLUMN}, - 'description' as {DATA_TYPE_COLUMN} - FROM "{GLUE_CATALOGUE_DB_NAME}"."{METADATA_CATALOGUE_DB_NAME}" - UNION ALL - SELECT - metadata.dataset as {DATASET_COLUMN}, - metadata.domain as {DOMAIN_COLUMN}, - metadata.version as {VERSION_COLUMN}, - metadata.dataset as {DATA_COLUMN}, - 'dataset_name' as {DATA_TYPE_COLUMN} - FROM "{GLUE_CATALOGUE_DB_NAME}"."{METADATA_CATALOGUE_DB_NAME}" -) -WHERE {{{{ where_clause }}}} -""" -) -# fmt: on - - -def generate_where_clause(search_term: str) -> List[str]: - return " OR ".join( - [ - f"lower({DATA_COLUMN}) LIKE '%{word.lower()}%'" - for word in search_term.split(" ") - ] - ) - - -def metadata_search_query(search_term: str) -> str: - where_clause = generate_where_clause(search_term) - return METADATA_QUERY.render(where_clause=where_clause) diff --git a/api/test/api/controller/test_datasets.py b/api/test/api/controller/test_datasets.py index 9e07410..e14a69c 100644 --- a/api/test/api/controller/test_datasets.py +++ b/api/test/api/controller/test_datasets.py @@ -634,39 +634,39 @@ def 
test_returns_enriched_metadata_for_datasets_with_certain_sensitivity( assert response.json() == expected_response -class TestSearchDatasets(BaseClientTest): - @patch.object(AthenaAdapter, "query_sql") - @patch("api.controller.datasets.metadata_search_query") - def test_search_dataset_metadata(self, mock_metadata_search_query, mock_query_sql): - mock_query = "SELECT * FROM table" - - mock_metadata_search_query.return_value = mock_query - - mock_data = [ - { - "dataset": "test", - "data": "foo", - "version": "1", - "data_type": "column", - }, - { - "dataset": "bar", - "data": "bar", - "version": "1", - "data_type": "table_name", - }, - ] - - mock_query_sql.return_value = pd.DataFrame(mock_data) - response = self.client.get( - f"{BASE_API_PATH}/datasets/search/foo bar", - headers={"Authorization": "Bearer test-token"}, - ) - - mock_metadata_search_query.assert_called_once_with("foo bar") - mock_query_sql.assert_called_once_with(mock_query) - assert response.status_code == 200 - assert response.json() == mock_data +# class TestSearchDatasets(BaseClientTest): +# @patch.object(AthenaAdapter, "query_sql") +# @patch("api.controller.datasets.metadata_search_query") +# def test_search_dataset_metadata(self, mock_metadata_search_query, mock_query_sql): +# mock_query = "SELECT * FROM table" + +# mock_metadata_search_query.return_value = mock_query + +# mock_data = [ +# { +# "dataset": "test", +# "data": "foo", +# "version": "1", +# "data_type": "column", +# }, +# { +# "dataset": "bar", +# "data": "bar", +# "version": "1", +# "data_type": "table_name", +# }, +# ] + +# mock_query_sql.return_value = pd.DataFrame(mock_data) +# response = self.client.get( +# f"{BASE_API_PATH}/datasets/search/foo bar", +# headers={"Authorization": "Bearer test-token"}, +# ) + +# mock_metadata_search_query.assert_called_once_with("foo bar") +# mock_query_sql.assert_called_once_with(mock_query) +# assert response.status_code == 200 +# assert response.json() == mock_data class TestDatasetInfo(BaseClientTest): From 2b0ed4cdc8a6d2316dc07f47d039584f31857fbe Mon Sep 17 00:00:00 2001 From: lcard Date: Wed, 23 Aug 2023 15:10:58 +0100 Subject: [PATCH 09/17] fix sdk and api --- .env.example | 13 +++--- api/test/api/controller/test_datasets.py | 1 - api/test/api/domain/test_metadata_search.py | 51 --------------------- sdk/tests/test_items/test_schema.py | 24 ++++++++-- 4 files changed, 28 insertions(+), 61 deletions(-) delete mode 100644 api/test/api/domain/test_metadata_search.py diff --git a/.env.example b/.env.example index 86dc1c0..b0399a9 100644 --- a/.env.example +++ b/.env.example @@ -1,12 +1,13 @@ -AWS_ACCOUNT= -AWS_REGION= +AWS_ACCOUNT=123456 +AWS_REGION=eu-west-2 # API Specific -COGNITO_USER_POOL_ID=rapid-pool -DATA_BUCKET=rapid-bucket -DOMAIN_NAME=rapid-domain -RESOURCE_PREFIX=rapid ALLOWED_EMAIL_DOMAINS=example1.com,example2.com +DATA_BUCKET=the-bucket +RESOURCE_PREFIX=rapid +DOMAIN_NAME=example.com +COGNITO_USER_POOL_ID=11111111 +LAYERS=raw,layer # SDK Specific RAPID_CLIENT_ID= diff --git a/api/test/api/controller/test_datasets.py b/api/test/api/controller/test_datasets.py index e14a69c..04cec01 100644 --- a/api/test/api/controller/test_datasets.py +++ b/api/test/api/controller/test_datasets.py @@ -4,7 +4,6 @@ import pandas as pd import pytest -from api.adapter.athena_adapter import AthenaAdapter from api.adapter.s3_adapter import S3Adapter from api.application.services.authorisation.dataset_access_evaluator import ( DatasetAccessEvaluator, diff --git a/api/test/api/domain/test_metadata_search.py 
b/api/test/api/domain/test_metadata_search.py deleted file mode 100644 index 0d8d513..0000000 --- a/api/test/api/domain/test_metadata_search.py +++ /dev/null @@ -1,51 +0,0 @@ -import pytest - -from api.domain.metadata_search import generate_where_clause, metadata_search_query - - -@pytest.mark.parametrize( - "term, expected", - [ - ("foo", "lower(data) LIKE '%foo%'"), - ("foo bar", "lower(data) LIKE '%foo%' OR lower(data) LIKE '%bar%'"), - ], -) -def test_generate_where_clause(term, expected): - res = generate_where_clause(term) - assert expected == res - - -def test_metadata_search_query(): - search_term = "foo bar" - expected = """ -SELECT * FROM ( - SELECT - metadata.dataset as dataset, - metadata.domain as domain, - metadata.version as version, - "column".name as data, - 'column_name' as data_type - FROM "rapid_catalogue_db"."rapid_metadata_table" - CROSS JOIN UNNEST("columns") AS t ("column") - UNION ALL - SELECT - metadata.dataset as dataset, - metadata.domain as domain, - metadata.version as version, - metadata.description as data, - 'description' as data_type - FROM "rapid_catalogue_db"."rapid_metadata_table" - UNION ALL - SELECT - metadata.dataset as dataset, - metadata.domain as domain, - metadata.version as version, - metadata.dataset as data, - 'dataset_name' as data_type - FROM "rapid_catalogue_db"."rapid_metadata_table" -) -WHERE lower(data) LIKE '%foo%' OR lower(data) LIKE '%bar%' - """ - res = metadata_search_query(search_term) - # Remove whitespace to compare - assert "".join(res.split()) == "".join(expected.split()) diff --git a/sdk/tests/test_items/test_schema.py b/sdk/tests/test_items/test_schema.py index 390f1c0..7dcee44 100644 --- a/sdk/tests/test_items/test_schema.py +++ b/sdk/tests/test_items/test_schema.py @@ -98,7 +98,18 @@ def test_create_columns_fails_name_none(self): Column(**_column) assert exc_info.value.errors() == [ - {"loc": ("name",), "msg": "field required", "type": "value_error.missing"} + { + "input": { + "partition_index": None, + "data_type": "object", + "allow_null": True, + "format": None, + }, + "loc": ("name",), + "msg": "Field required", + "type": "missing", + "url": "https://errors.pydantic.dev/2.2/v/missing", + } ] def test_create_columns_fails_data_type_none(self): @@ -114,9 +125,16 @@ def test_create_columns_fails_data_type_none(self): assert exc_info.value.errors() == [ { + "input": { + "name": "column_a", + "partition_index": None, + "allow_null": True, + "format": None, + }, "loc": ("data_type",), - "msg": "field required", - "type": "value_error.missing", + "msg": "Field required", + "type": "missing", + "url": "https://errors.pydantic.dev/2.2/v/missing", } ] From ba15cf1895f608a736f551893b96896722a0b3e0 Mon Sep 17 00:00:00 2001 From: lcard Date: Wed, 23 Aug 2023 15:22:24 +0100 Subject: [PATCH 10/17] fix UI and API --- api/test/api/controller/test_auth.py | 1 + ui/src/__tests__/app.test.tsx | 2 +- ui/src/__tests__/catalog.test.tsx | 2 +- ui/src/__tests__/data/delete.test.tsx | 2 +- ui/src/__tests__/data/download.test.tsx | 2 +- ui/src/__tests__/data/upload.test.tsx | 2 +- ui/src/__tests__/index.test.tsx | 2 +- ui/src/__tests__/login.test.tsx | 2 +- ui/src/__tests__/schema/create.test.tsx | 2 +- ui/src/__tests__/subject/create.test.tsx | 2 +- ui/src/__tests__/subject/modify.test.tsx | 2 +- ui/src/__tests__/tasks.test.tsx | 2 +- ui/src/components/Button/Button.test.tsx | 2 +- .../ConditionalWrapper.test.tsx | 2 +- .../SimpleTable/SimpleTable.test.tsx | 2 +- ui/src/pages/_app.tsx | 2 +- ui/src/pages/_document.tsx | 2 +- 
.../download/[layer]/[domain]/[dataset].tsx | 2 +- ui/src/pages/data/download/file.tsx | 2 +- ui/src/pages/tasks/[jobId].tsx | 2 +- ui/src/service/fetch.ts | 2 +- ui/src/utils/createEmotionCache.ts | 5 + ui/src/utils/data.test.ts | 41 +++++ ui/src/utils/data.ts | 25 ++++ ui/src/utils/index.ts | 13 ++ ui/src/utils/testing.tsx | 140 ++++++++++++++++++ ui/src/utils/url.test.ts | 65 ++++++++ ui/src/utils/url.ts | 21 +++ 28 files changed, 331 insertions(+), 20 deletions(-) create mode 100644 ui/src/utils/createEmotionCache.ts create mode 100644 ui/src/utils/data.test.ts create mode 100644 ui/src/utils/data.ts create mode 100644 ui/src/utils/index.ts create mode 100644 ui/src/utils/testing.tsx create mode 100644 ui/src/utils/url.test.ts create mode 100644 ui/src/utils/url.ts diff --git a/api/test/api/controller/test_auth.py b/api/test/api/controller/test_auth.py index c1d253b..c3a735c 100644 --- a/api/test/api/controller/test_auth.py +++ b/api/test/api/controller/test_auth.py @@ -42,5 +42,6 @@ def test_calls_cognito_for_access_token_when_callback_is_called_with_temporary_c "redirect_uri": COGNITO_REDIRECT_URI, "code": temporary_code, }, + timeout=5, ) mock_redirect.assert_called_once_with(url="/", status_code=HTTP_302_FOUND) diff --git a/ui/src/__tests__/app.test.tsx b/ui/src/__tests__/app.test.tsx index 1a5614b..200aa37 100644 --- a/ui/src/__tests__/app.test.tsx +++ b/ui/src/__tests__/app.test.tsx @@ -1,5 +1,5 @@ import fetchMock from 'jest-fetch-mock' -import { renderWithProviders } from '@/lib/test-utils' +import { renderWithProviders } from '@/utils/testing' import AppPage from '@/pages/_app' jest.useFakeTimers() diff --git a/ui/src/__tests__/catalog.test.tsx b/ui/src/__tests__/catalog.test.tsx index 40b7791..4a5fdf9 100644 --- a/ui/src/__tests__/catalog.test.tsx +++ b/ui/src/__tests__/catalog.test.tsx @@ -1,6 +1,6 @@ import { screen, waitForElementToBeRemoved } from '@testing-library/react' import fetchMock from 'jest-fetch-mock' -import { renderWithProviders } from '@/lib/test-utils' +import { renderWithProviders } from '@/utils/testing' import CatalogPage from '@/pages/catalog/[search]' import { MetadataSearchResponse } from '@/service/types' diff --git a/ui/src/__tests__/data/delete.test.tsx b/ui/src/__tests__/data/delete.test.tsx index 30e1a51..ba9babb 100644 --- a/ui/src/__tests__/data/delete.test.tsx +++ b/ui/src/__tests__/data/delete.test.tsx @@ -6,7 +6,7 @@ import { import userEvent from '@testing-library/user-event' import fetchMock from 'jest-fetch-mock' import DeletePage from '@/pages/data/delete' -import { mockDataset, mockDataSetsList, renderWithProviders, selectAutocompleteOption } from '@/lib/test-utils' +import { mockDataset, mockDataSetsList, renderWithProviders, selectAutocompleteOption } from '@/utils/testing' import { DeleteDatasetResponse } from '@/service/types' describe('Page: Delete page', () => { diff --git a/ui/src/__tests__/data/download.test.tsx b/ui/src/__tests__/data/download.test.tsx index b16f4d6..1163b22 100644 --- a/ui/src/__tests__/data/download.test.tsx +++ b/ui/src/__tests__/data/download.test.tsx @@ -5,7 +5,7 @@ import { } from '@testing-library/react' import userEvent from '@testing-library/user-event' import fetchMock from 'jest-fetch-mock' -import { mockDataset, mockDataSetsList, renderWithProviders } from '@/lib/test-utils' +import { mockDataset, mockDataSetsList, renderWithProviders } from '@/utils/testing' import DownloadPage from '@/pages/data/download/' const pushSpy = jest.fn() diff --git a/ui/src/__tests__/data/upload.test.tsx 
b/ui/src/__tests__/data/upload.test.tsx index 0ae028e..fc83e0b 100644 --- a/ui/src/__tests__/data/upload.test.tsx +++ b/ui/src/__tests__/data/upload.test.tsx @@ -6,7 +6,7 @@ import { } from '@testing-library/react' import userEvent from '@testing-library/user-event' import fetchMock from 'jest-fetch-mock' -import { mockDataset, mockDataSetsList, renderWithProviders, selectAutocompleteOption } from '@/lib/test-utils' +import { mockDataset, mockDataSetsList, renderWithProviders, selectAutocompleteOption } from '@/utils/testing' import UploadPage from '@/pages/data/upload' import { UploadDatasetResponse } from '@/service/types' diff --git a/ui/src/__tests__/index.test.tsx b/ui/src/__tests__/index.test.tsx index b5347de..cef5b8b 100644 --- a/ui/src/__tests__/index.test.tsx +++ b/ui/src/__tests__/index.test.tsx @@ -1,6 +1,6 @@ import { screen, waitFor } from '@testing-library/react' import fetchMock from 'jest-fetch-mock' -import { renderWithProviders } from '@/lib/test-utils' +import { renderWithProviders } from '@/utils/testing' import IndexPage from '@/pages/index' import { MethodsResponse } from '@/service/types' diff --git a/ui/src/__tests__/login.test.tsx b/ui/src/__tests__/login.test.tsx index 097c1f2..741784f 100644 --- a/ui/src/__tests__/login.test.tsx +++ b/ui/src/__tests__/login.test.tsx @@ -1,6 +1,6 @@ import { screen, waitFor, waitForElementToBeRemoved } from '@testing-library/react' import fetchMock from 'jest-fetch-mock' -import { renderWithProviders } from '@/lib/test-utils' +import { renderWithProviders } from '@/utils/testing' import LoginPage from '@/pages/login' import { AuthResponse, GetLoginResponse } from '@/service/types' diff --git a/ui/src/__tests__/schema/create.test.tsx b/ui/src/__tests__/schema/create.test.tsx index 73ae851..c6975c7 100644 --- a/ui/src/__tests__/schema/create.test.tsx +++ b/ui/src/__tests__/schema/create.test.tsx @@ -1,7 +1,7 @@ import { fireEvent, screen, waitFor, waitForElementToBeRemoved } from '@testing-library/react' import userEvent from '@testing-library/user-event' import fetchMock from 'jest-fetch-mock' -import { renderWithProviders } from '@/lib/test-utils' +import { renderWithProviders } from '@/utils/testing' import SchemaCreatePage from '@/pages/schema/create' const mockProps = jest.fn() diff --git a/ui/src/__tests__/subject/create.test.tsx b/ui/src/__tests__/subject/create.test.tsx index a6aca10..7957b42 100644 --- a/ui/src/__tests__/subject/create.test.tsx +++ b/ui/src/__tests__/subject/create.test.tsx @@ -1,7 +1,7 @@ import { screen, waitFor, waitForElementToBeRemoved } from '@testing-library/react' import userEvent from '@testing-library/user-event' import fetchMock from 'jest-fetch-mock' -import { renderWithProviders, mockPermissionUiResponse } from '@/lib/test-utils' +import { renderWithProviders, mockPermissionUiResponse } from '@/utils/testing' import SubjectCreatePage from '@/pages/subject/create/index' diff --git a/ui/src/__tests__/subject/modify.test.tsx b/ui/src/__tests__/subject/modify.test.tsx index 7f12c2f..0cbb6a2 100644 --- a/ui/src/__tests__/subject/modify.test.tsx +++ b/ui/src/__tests__/subject/modify.test.tsx @@ -6,7 +6,7 @@ import { } from '@testing-library/react' import userEvent from '@testing-library/user-event' import fetchMock from 'jest-fetch-mock' -import { renderWithProviders } from '@/lib/test-utils' +import { renderWithProviders } from '@/utils/testing' import SubjectModifyPage from '@/pages/subject/modify/index' const mockData: Array> = [ diff --git a/ui/src/__tests__/tasks.test.tsx 
b/ui/src/__tests__/tasks.test.tsx index 23a880c..5c5ecad 100644 --- a/ui/src/__tests__/tasks.test.tsx +++ b/ui/src/__tests__/tasks.test.tsx @@ -1,6 +1,6 @@ import { screen, waitForElementToBeRemoved } from '@testing-library/react' import fetchMock from 'jest-fetch-mock' -import { renderWithProviders } from '@/lib/test-utils' +import { renderWithProviders } from '@/utils/testing' import TasksPage from '@/pages/tasks/index' import { AllJobsResponse } from '@/service/types' diff --git a/ui/src/components/Button/Button.test.tsx b/ui/src/components/Button/Button.test.tsx index 8297303..77bc937 100644 --- a/ui/src/components/Button/Button.test.tsx +++ b/ui/src/components/Button/Button.test.tsx @@ -1,5 +1,5 @@ import { screen } from '@testing-library/react' -import { renderWithProviders } from '@/lib/test-utils' +import { renderWithProviders } from '@/utils/testing' import Button from './Button' describe('Button', () => { diff --git a/ui/src/components/ConditionalWrapper/ConditionalWrapper.test.tsx b/ui/src/components/ConditionalWrapper/ConditionalWrapper.test.tsx index d8876b0..cf6fac5 100644 --- a/ui/src/components/ConditionalWrapper/ConditionalWrapper.test.tsx +++ b/ui/src/components/ConditionalWrapper/ConditionalWrapper.test.tsx @@ -1,6 +1,6 @@ import { screen } from '@testing-library/react' import ConditionalWrapper from './ConditionalWrapper' -import { renderWithProviders } from '@/lib/test-utils' +import { renderWithProviders } from '@/utils/testing' import { FC } from 'react' const Content: FC = () =>
<div>test</div>
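The `@/lib/test-utils` to `@/utils/testing` moves in the hunks above and below all follow the same shape; a minimal sketch of a consuming test under the new path (`Button` stands in for any component under test, and the assertion is illustrative, assuming the repo's jest-dom setup):

```ts
import { screen } from '@testing-library/react'
import { renderWithProviders } from '@/utils/testing'
import Button from './Button'

// renderWithProviders wraps the component in the shared QueryClient and theme
// providers defined in utils/testing.tsx, so individual tests don't re-declare them.
describe('Button', () => {
  it('renders its children', async () => {
    await renderWithProviders(<Button>Save</Button>)
    expect(screen.getByText('Save')).toBeInTheDocument()
  })
})
```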
    diff --git a/ui/src/components/SimpleTable/SimpleTable.test.tsx b/ui/src/components/SimpleTable/SimpleTable.test.tsx index 53324ee..be3fdf0 100644 --- a/ui/src/components/SimpleTable/SimpleTable.test.tsx +++ b/ui/src/components/SimpleTable/SimpleTable.test.tsx @@ -1,6 +1,6 @@ /* eslint-disable @typescript-eslint/no-explicit-any */ import SimpleTable from './SimpleTable' -import { renderWithProviders, screen } from '@/lib/test-utils' +import { renderWithProviders, screen } from '@/utils/testing' describe('SimpleTable', () => { it('empty row', async () => { diff --git a/ui/src/pages/_app.tsx b/ui/src/pages/_app.tsx index 0f7e0af..b82a79a 100644 --- a/ui/src/pages/_app.tsx +++ b/ui/src/pages/_app.tsx @@ -1,6 +1,6 @@ import { ThemeProvider } from '@/components' import { CacheProvider, EmotionCache } from '@emotion/react' -import createEmotionCache from '@/lib/createEmotionCache' +import createEmotionCache from '@/utils/createEmotionCache' import { ErrorBoundary } from 'react-error-boundary' import { ReactNode, useEffect } from 'react' import { AppProps } from 'next/app' diff --git a/ui/src/pages/_document.tsx b/ui/src/pages/_document.tsx index 11e6451..0b7dd95 100644 --- a/ui/src/pages/_document.tsx +++ b/ui/src/pages/_document.tsx @@ -7,7 +7,7 @@ import Document, { } from 'next/document' import createEmotionServer from '@emotion/server/create-instance' import theme from '@/style/theme' -import createEmotionCache from '@/lib/createEmotionCache' +import createEmotionCache from '@/utils/createEmotionCache' import { ReactNode } from 'react' type DocumentProps = { diff --git a/ui/src/pages/data/download/[layer]/[domain]/[dataset].tsx b/ui/src/pages/data/download/[layer]/[domain]/[dataset].tsx index 040b7ef..e74cf96 100644 --- a/ui/src/pages/data/download/[layer]/[domain]/[dataset].tsx +++ b/ui/src/pages/data/download/[layer]/[domain]/[dataset].tsx @@ -10,7 +10,7 @@ import { Alert } from '@/components' import ErrorCard from '@/components/ErrorCard/ErrorCard' -import { asVerticalTableList } from '@/lib' +import { asVerticalTableList } from '@/utils' import { getDatasetInfo, queryDataset } from '@/service' import { DataFormats } from '@/service/types' import { Typography, LinearProgress } from '@mui/material' diff --git a/ui/src/pages/data/download/file.tsx b/ui/src/pages/data/download/file.tsx index fc29325..bd0fce3 100644 --- a/ui/src/pages/data/download/file.tsx +++ b/ui/src/pages/data/download/file.tsx @@ -10,7 +10,7 @@ import { } from '@/components' import { Typography } from '@mui/material' import { useRouter } from 'next/router' -import { asVerticalTableList } from '@/lib' +import { asVerticalTableList } from '@/utils' function FilePage() { const router = useRouter() diff --git a/ui/src/pages/tasks/[jobId].tsx b/ui/src/pages/tasks/[jobId].tsx index 759c2f1..5efeb22 100644 --- a/ui/src/pages/tasks/[jobId].tsx +++ b/ui/src/pages/tasks/[jobId].tsx @@ -1,7 +1,7 @@ import ErrorCard from '@/components/ErrorCard/ErrorCard' import AccountLayout from '@/components/Layout/AccountLayout' import SimpleTable from '@/components/SimpleTable/SimpleTable' -import { asVerticalTableList } from '@/lib' +import { asVerticalTableList } from '@/utils' import { getJob } from '@/service' import { Card, Typography, LinearProgress } from '@mui/material' import { useQuery } from '@tanstack/react-query' diff --git a/ui/src/service/fetch.ts b/ui/src/service/fetch.ts index f7f84ef..3828c40 100644 --- a/ui/src/service/fetch.ts +++ b/ui/src/service/fetch.ts @@ -17,7 +17,7 @@ import { PermissionUiResponse, 
SubjectPermission } from './types' -import { api } from '@/lib/data-utils' +import { api } from '@/utils/data' export const getAuthStatus = async (): Promise<AuthResponse> => { const res = await api(`/api/auth`, { method: 'GET' }) diff --git a/ui/src/utils/createEmotionCache.ts b/ui/src/utils/createEmotionCache.ts new file mode 100644 index 0000000..0de053b --- /dev/null +++ b/ui/src/utils/createEmotionCache.ts @@ -0,0 +1,5 @@ +import createCache from '@emotion/cache' + +const createEmotionCache = () => createCache({ key: 'css', prepend: true }) + +export default createEmotionCache diff --git a/ui/src/utils/data.test.ts b/ui/src/utils/data.test.ts new file mode 100644 index 0000000..240f91a --- /dev/null +++ b/ui/src/utils/data.test.ts @@ -0,0 +1,41 @@ +import { api } from './data' +import fetchMock from 'jest-fetch-mock' +import { defaultError } from '@/lang' + +const mockSuccess = { fruit: 'apples' } + +describe('api()', () => { + afterEach(() => { + fetchMock.resetMocks() + }) + + it('success', async () => { + fetchMock.mockResponseOnce(JSON.stringify(mockSuccess), { status: 200 }) + const data = await (await api('/api')).json() + expect(data).toEqual(expect.objectContaining(mockSuccess)) + }) + + it('default error', async () => { + fetchMock.mockResponseOnce(JSON.stringify(mockSuccess), { status: 401 }) + + try { + await api('/api') + } catch (e) { + expect(e.message).toEqual(defaultError) + } + }) + + it('custom error', async () => { + const errorMessage = 'my custom error' + + fetchMock.mockResponseOnce(JSON.stringify({ details: 'my custom error' }), { + status: 401 + }) + + try { + await api('/api') + } catch (e) { + expect(e.message).toEqual(errorMessage) + } + }) +}) diff --git a/ui/src/utils/data.ts b/ui/src/utils/data.ts new file mode 100644 index 0000000..d7e44e4 --- /dev/null +++ b/ui/src/utils/data.ts @@ -0,0 +1,25 @@ +import { createUrl } from './url' +import { defaultError } from '@/lang' + +export type ParamsType = Record<string, string> + +export const api = async ( + path: RequestInfo | URL, + init: RequestInit = {}, + params?: ParamsType +): Promise<Response> => { + const API_URL = process.env.NEXT_PUBLIC_API_URL + const baseUrl = API_URL ?
`${API_URL}${path}` : path + const url = createUrl(`${baseUrl}`, params) + let detailMessage + const res: Response = await fetch(url, { + credentials: 'include', + ...init + }) + if (res.ok) return res + try { + const { details } = await res.json() + detailMessage = details + } catch (e) { } + throw new Error(detailMessage || defaultError) +} diff --git a/ui/src/utils/index.ts b/ui/src/utils/index.ts new file mode 100644 index 0000000..c5fb87f --- /dev/null +++ b/ui/src/utils/index.ts @@ -0,0 +1,13 @@ +import { TableCellProps } from '@mui/material' + +export const asVerticalTableList = ( + list: { + name: string + value: string + }[] +) => [ + ...list.map(({ name, value }) => [ + { children: name, component: 'th' }, + { children: value } + ]) +] diff --git a/ui/src/utils/testing.tsx b/ui/src/utils/testing.tsx new file mode 100644 index 0000000..3356466 --- /dev/null +++ b/ui/src/utils/testing.tsx @@ -0,0 +1,140 @@ +import { fireEvent, render, renderHook, RenderOptions, screen, waitFor } from '@testing-library/react' +import { ThemeProvider } from '@/components' +import { ReactNode } from 'react' +import { QueryClient, QueryClientProvider } from '@tanstack/react-query' +import { Dataset, PermissionUiResponse } from '@/service/types' + +beforeAll(() => { + Object.defineProperty(global, 'sessionStorage', { value: mockStorage }) + Object.defineProperty(global, 'localStorage', { value: mockStorage }) + jest.spyOn(console, 'error').mockImplementation(jest.fn()) +}) + +afterEach(() => { + window.sessionStorage.clear() +}) + +const mockStorage = (() => { + let store = {} + return { + getItem: function (key) { + return store[key] || null + }, + setItem: function (key, value) { + store[key] = value.toString() + }, + removeItem: function (key) { + delete store[key] + }, + clear: function () { + store = {} + } + } +})() + +export const wrapper = (ui) => { + const queryClient = new QueryClient({ + defaultOptions: { + queries: { + retry: false + } + } + }) + return ( + <QueryClientProvider client={queryClient}> + <ThemeProvider> + <>{ui}</> + </ThemeProvider> + </QueryClientProvider> + ) +} + +export const renderWithProviders = async ( + ui: ReactNode, + options: Omit<RenderOptions, 'wrapper'> = {} +) => { + const view = await render(wrapper(ui), options) + return { + ...view, + rerender: (ui, options: Omit<RenderOptions, 'wrapper'> = {}) => + renderWithProviders(ui, { container: view.container, ...options }) + } +} + +export const renderHookWithProviders: typeof renderHook = (...parameters) => + renderHook(parameters[0], { + wrapper: ({ children }) => wrapper(children), + ...parameters[1] + }) + +export const bugfixForTimeout = async () => + await waitFor(() => new Promise((resolve) => setTimeout(resolve, 0))) + +export * from '@testing-library/react' +export { renderWithProviders as render } + +export const mockDataset: Dataset = { + layer: 'layer', + domain: 'domain', + dataset: 'dataset', + version: 1 +} + +export const mockDataSetsList: Dataset[] = [ + { + layer: 'layer', + domain: 'Pizza', + dataset: 'bit_complicated', + version: 3 + }, + { + layer: 'layer', + domain: 'Pizza', + dataset: 'again_complicated_high', + version: 3 + }, + { + layer: 'layer', + domain: 'Apple', + dataset: 'juicy', + version: 2 + } +] + +export const mockPermissionUiResponse: PermissionUiResponse = { + "DATA_ADMIN": "DATA_ADMIN", + "USER_ADMIN": "USER_ADMIN", + "READ": { + "ALL": { + "ALL": "READ_ALL", + "PROTECTED": { + "TEST": "READ_ALL_PROTECTED_TEST", + }, + }, + }, + "WRITE": { + "ALL": { + "ALL": "WRITE_ALL", + "PROTECTED": { + "TEST":
"WRITE_DEFAULT_PROTECTED_TEST", + }, + }, + } +} + + +export const selectAutocompleteOption = (id, value) => { + const autocomplete = screen.getByTestId(id); + const input = autocomplete.querySelector('input') + autocomplete.focus() + fireEvent.change(input, { target: { value: value } }) + fireEvent.keyDown(autocomplete, { key: 'ArrowDown' }) + fireEvent.keyDown(autocomplete, { key: 'Enter' }) + expect(input).toHaveValue(value) +} diff --git a/ui/src/utils/url.test.ts b/ui/src/utils/url.test.ts new file mode 100644 index 0000000..39894b1 --- /dev/null +++ b/ui/src/utils/url.test.ts @@ -0,0 +1,65 @@ +import { createUrl, isUrlInternal } from './url' + +describe('createUrl()', () => { + it('returns url with querystring', () => { + expect(createUrl('/path', { food: 'pizza', fruit: 'apple' })).toEqual( + '/path?food=pizza&fruit=apple' + ) + + expect(createUrl('/path', { food: ['pizza', 'chips'], fruit: 'apple' })).toEqual( + '/path?food=pizza%2Cchips&fruit=apple' + ) + }) + + it('empty params', () => { + expect(createUrl('/path', {})).toEqual('/path') + expect(createUrl('/path')).toEqual('/path') + }) +}) + +describe('isUrlInternal()', () => { + const sitename = 'http://myapp/' + const { location } = window + + beforeAll(() => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + delete (window as any).location + }) + + afterAll(() => { + window.location = location + }) + + beforeEach(() => { + window.location = { + ...location, + href: sitename + } + }) + + it('url is only path', () => { + expect(isUrlInternal('/someurl')).toBeTruthy() + expect(isUrlInternal('/someurl?param=1')).toBeTruthy() + }) + + it('url contains full site', () => { + expect(isUrlInternal(sitename)).toBeTruthy() + expect(isUrlInternal(sitename + 'product/')).toBeTruthy() + expect(isUrlInternal(sitename + '?param=1')).toBeTruthy() + }) + + it('throws error if invalid url', () => { + expect(() => isUrlInternal('')).toThrowError('Invalid URL:') + expect(() => isUrlInternal('*^&*YH')).toThrowError('Invalid URL:') + }) + + it('throws error if invalid currentUrl', () => { + expect(() => isUrlInternal(sitename, '')).toThrowError('Invalid URL:') + expect(() => isUrlInternal(sitename, '*^&*YH')).toThrowError('Invalid URL:') + }) + + it('url is external site', () => { + expect(isUrlInternal('http://externalapp/')).toBeFalsy() + expect(isUrlInternal('https://myapp/')).toBeFalsy() + }) +}) diff --git a/ui/src/utils/url.ts b/ui/src/utils/url.ts new file mode 100644 index 0000000..975c278 --- /dev/null +++ b/ui/src/utils/url.ts @@ -0,0 +1,21 @@ +export const createUrl = ( + url: RequestInfo | URL, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + params?: string | URLSearchParams | Record | string[][] +): string => { + const queryString = new URLSearchParams(params).toString() + return `${url}${queryString && `?${queryString}`}` +} + +export const isUrlInternal = ( + url: string, + currenSite = window.location.href +): boolean => { + if (url.charAt(0) === '/') return true + + const fullUrl = new URL(url).origin + const fullSite = new URL(currenSite).origin + + if (fullUrl === fullSite) return true + return false +} From 936d71bba2728a3768ee614baca272299902abc3 Mon Sep 17 00:00:00 2001 From: lcard Date: Wed, 23 Aug 2023 15:44:04 +0100 Subject: [PATCH 11/17] fix tagging --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 29ed4ce..555cdee 100644 --- a/Makefile +++ b/Makefile @@ -64,7 +64,7 @@ api-create-local-venv: ## Create the api local venv for 
deployment @cd api/; ./local-venv-setup.sh api-create-image: ## Manually (re)create the api environment image - @cd api/; ./batect runtime-environment + @cd api/; ./batect --tag-image service-image=rapid-api-service-image runtime-environment api-shell: ## Run the api application and drop me into a shell @cd api/; ./batect shell From 016a0e6c7a7b45fabce8ebd09cff22ac3507bd31 Mon Sep 17 00:00:00 2001 From: lcard Date: Wed, 23 Aug 2023 17:30:04 +0100 Subject: [PATCH 12/17] fix --- ui/src/pages/_app.tsx | 2 +- ui/src/pages/_document.tsx | 2 +- ui/src/utils/createEmotionCache.ts | 5 +++++ 3 files changed, 7 insertions(+), 2 deletions(-) create mode 100644 ui/src/utils/createEmotionCache.ts diff --git a/ui/src/pages/_app.tsx b/ui/src/pages/_app.tsx index 0f7e0af..b82a79a 100644 --- a/ui/src/pages/_app.tsx +++ b/ui/src/pages/_app.tsx @@ -1,6 +1,6 @@ import { ThemeProvider } from '@/components' import { CacheProvider, EmotionCache } from '@emotion/react' -import createEmotionCache from '@/lib/createEmotionCache' +import createEmotionCache from '@/utils/createEmotionCache' import { ErrorBoundary } from 'react-error-boundary' import { ReactNode, useEffect } from 'react' import { AppProps } from 'next/app' diff --git a/ui/src/pages/_document.tsx b/ui/src/pages/_document.tsx index 11e6451..0b7dd95 100644 --- a/ui/src/pages/_document.tsx +++ b/ui/src/pages/_document.tsx @@ -7,7 +7,7 @@ import Document, { } from 'next/document' import createEmotionServer from '@emotion/server/create-instance' import theme from '@/style/theme' -import createEmotionCache from '@/lib/createEmotionCache' +import createEmotionCache from '@/utils/createEmotionCache' import { ReactNode } from 'react' type DocumentProps = { diff --git a/ui/src/utils/createEmotionCache.ts b/ui/src/utils/createEmotionCache.ts new file mode 100644 index 0000000..0de053b --- /dev/null +++ b/ui/src/utils/createEmotionCache.ts @@ -0,0 +1,5 @@ +import createCache from '@emotion/cache' + +const createEmotionCache = () => createCache({ key: 'css', prepend: true }) + +export default createEmotionCache From 4e5b5796688331e8949453f370c025349de42f42 Mon Sep 17 00:00:00 2001 From: lcard Date: Wed, 23 Aug 2023 17:42:47 +0100 Subject: [PATCH 13/17] fix imports --- .../data/download/[layer]/[domain]/[dataset].tsx | 2 +- ui/src/pages/data/download/file.tsx | 2 +- ui/src/pages/tasks/[jobId].tsx | 2 +- ui/src/utils/index.ts | 13 +++++++++++++ 4 files changed, 16 insertions(+), 3 deletions(-) create mode 100644 ui/src/utils/index.ts diff --git a/ui/src/pages/data/download/[layer]/[domain]/[dataset].tsx b/ui/src/pages/data/download/[layer]/[domain]/[dataset].tsx index 040b7ef..e74cf96 100644 --- a/ui/src/pages/data/download/[layer]/[domain]/[dataset].tsx +++ b/ui/src/pages/data/download/[layer]/[domain]/[dataset].tsx @@ -10,7 +10,7 @@ import { Alert } from '@/components' import ErrorCard from '@/components/ErrorCard/ErrorCard' -import { asVerticalTableList } from '@/lib' +import { asVerticalTableList } from '@/utils' import { getDatasetInfo, queryDataset } from '@/service' import { DataFormats } from '@/service/types' import { Typography, LinearProgress } from '@mui/material' diff --git a/ui/src/pages/data/download/file.tsx b/ui/src/pages/data/download/file.tsx index fc29325..bd0fce3 100644 --- a/ui/src/pages/data/download/file.tsx +++ b/ui/src/pages/data/download/file.tsx @@ -10,7 +10,7 @@ import { } from '@/components' import { Typography } from '@mui/material' import { useRouter } from 'next/router' -import { asVerticalTableList } from '@/lib' +import { 
asVerticalTableList } from '@/utils' function FilePage() { const router = useRouter() diff --git a/ui/src/pages/tasks/[jobId].tsx b/ui/src/pages/tasks/[jobId].tsx index 759c2f1..5efeb22 100644 --- a/ui/src/pages/tasks/[jobId].tsx +++ b/ui/src/pages/tasks/[jobId].tsx @@ -1,7 +1,7 @@ import ErrorCard from '@/components/ErrorCard/ErrorCard' import AccountLayout from '@/components/Layout/AccountLayout' import SimpleTable from '@/components/SimpleTable/SimpleTable' -import { asVerticalTableList } from '@/lib' +import { asVerticalTableList } from '@/utils' import { getJob } from '@/service' import { Card, Typography, LinearProgress } from '@mui/material' import { useQuery } from '@tanstack/react-query' diff --git a/ui/src/utils/index.ts b/ui/src/utils/index.ts new file mode 100644 index 0000000..c5fb87f --- /dev/null +++ b/ui/src/utils/index.ts @@ -0,0 +1,13 @@ +import { TableCellProps } from '@mui/material' + +export const asVerticalTableList = ( + list: { + name: string + value: string + }[] +) => [ + ...list.map(({ name, value }) => [ + { children: name, component: 'th' }, + { children: value } + ]) +] From f2d7f7cc0a279fcabc71e20f46cc5a20c1489450 Mon Sep 17 00:00:00 2001 From: lcard Date: Wed, 23 Aug 2023 17:51:04 +0100 Subject: [PATCH 14/17] fix ui-test --- .github/workflows/ui-tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ui-tests.yml b/.github/workflows/ui-tests.yml index e40849b..9dd149d 100644 --- a/.github/workflows/ui-tests.yml +++ b/.github/workflows/ui-tests.yml @@ -31,6 +31,6 @@ jobs: - name: run playwright tests run: npx playwright test ui/playwright env: - DOMAIN: "https://${{ secrets.DOMAIN }}" + DOMAIN: "https://${{ secrets.DOMAIN_NAME }}" RESOURCE_PREFIX: ${{ secrets.RESOURCE_PREFIX }} AWS_REGION: ${{ secrets.AWS_REGION }} From fcbf75530c5d05644936162c32fcd4ad8ffb5c9b Mon Sep 17 00:00:00 2001 From: lcard Date: Wed, 23 Aug 2023 18:09:19 +0100 Subject: [PATCH 15/17] fix UI tests --- .github/workflows/ui-tests.yml | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ui-tests.yml b/.github/workflows/ui-tests.yml index 9dd149d..444a4ae 100644 --- a/.github/workflows/ui-tests.yml +++ b/.github/workflows/ui-tests.yml @@ -26,10 +26,14 @@ jobs: npm install - name: Install playwright browsers - run: npm install @playwright/test -D + run: | + cd ui + npm install @playwright/test -D - name: run playwright tests - run: npx playwright test ui/playwright + run: | + cd ui + npx playwright test ui/playwright env: DOMAIN: "https://${{ secrets.DOMAIN_NAME }}" RESOURCE_PREFIX: ${{ secrets.RESOURCE_PREFIX }} From a6e443fb3a68cbb7e713559784c61c5de57c8f99 Mon Sep 17 00:00:00 2001 From: Toby Drane Date: Wed, 30 Aug 2023 19:00:13 +0100 Subject: [PATCH 16/17] fix v7 ui failing on unique modify users error --- .github/workflows/ui-tests.yml | 7 +- .gitignore | 5 +- .pre-commit-config.yaml | 69 +-- Makefile | 6 + ui/playwright/.auth/user.json | 69 --- ui/playwright/auth.setup.ts | 52 +-- ui/playwright/test-data-flow.spec.ts | 148 +++--- ui/playwright/test-homepage.spec.ts | 31 +- ui/playwright/test-user-flow.spec.ts | 64 ++- ui/playwright/utils.ts | 58 +++ .../PermissionsTable/PermissionsTable.tsx | 436 ++++++++++-------- ui/src/pages/subject/modify/[subjectId].tsx | 37 +- 12 files changed, 527 insertions(+), 455 deletions(-) delete mode 100644 ui/playwright/.auth/user.json create mode 100644 ui/playwright/utils.ts diff --git a/.github/workflows/ui-tests.yml b/.github/workflows/ui-tests.yml index dd280c9..2d83f3c 
100644 --- a/.github/workflows/ui-tests.yml +++ b/.github/workflows/ui-tests.yml @@ -3,7 +3,7 @@ name: rAPId Integration Tests on: push: branches: - - "**" + - '**' workflow_dispatch: @@ -12,7 +12,6 @@ on: - opened jobs: - run-ui-test: runs-on: self-hosted @@ -32,8 +31,8 @@ jobs: run: npx playwright install-deps && npx playwright install - name: run playwright tests - run: npx playwright test ui/playwright + run: make ui-test-e2e env: - DOMAIN: ${{ secrets.DOMAIN }} + DOMAIN_NAME: ${{ secrets.DOMAIN_NAME }} RESOURCE_PREFIX: ${{ secrets.RESOURCE_PREFIX }} AWS_REGION: ${{ secrets.AWS_REGION }} diff --git a/.gitignore b/.gitignore index 2b0728b..b8e63d4 100644 --- a/.gitignore +++ b/.gitignore @@ -182,5 +182,6 @@ docs/_build/ .terraform/ .terraform.lock.hcl -playwright/.auth -playwright/.downloads \ No newline at end of file +ui/playwright/.auth +ui/playwright/.downloads +ui/test-results/ \ No newline at end of file diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7276dfa..299c5ee 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,70 +1,75 @@ exclude: '^ui' repos: -- repo: https://github.com/pre-commit/pre-commit-hooks + - repo: https://github.com/pre-commit/pre-commit-hooks rev: v2.3.0 hooks: - - id: check-yaml - - id: check-json - - id: check-merge-conflict - - id: end-of-file-fixer - - id: trailing-whitespace -- repo: https://github.com/PyCQA/bandit + - id: check-yaml + - id: check-json + - id: check-merge-conflict + - id: end-of-file-fixer + - id: trailing-whitespace + - repo: https://github.com/PyCQA/bandit rev: 1.7.5 hooks: - - id: bandit + - id: bandit exclude: '(test|docs)/*' -- repo: https://github.com/psf/black + - repo: https://github.com/psf/black rev: 22.6.0 hooks: - - id: black -- repo: https://github.com/Yelp/detect-secrets + - id: black + - repo: https://github.com/Yelp/detect-secrets rev: v1.3.0 hooks: - - id: detect-secrets + - id: detect-secrets exclude: docs/ -- repo: https://github.com/asottile/blacken-docs + - repo: https://github.com/asottile/blacken-docs rev: v1.12.1 hooks: - - id: blacken-docs -- repo: https://github.com/PyCQA/flake8 + - id: blacken-docs + - repo: https://github.com/PyCQA/flake8 rev: 4.0.1 hooks: - - id: flake8 + - id: flake8 args: ['--config', 'api/.flake8'] exclude: (docs/|get_latest_release_changelog.py) -# - repo: https://github.com/PyCQA/pylint -# rev: v2.15.5 -# hooks: -# - id: pylint -# exclude: (docs/|get_latest_release_changelog.py) -- repo: https://github.com/antonbabenko/pre-commit-terraform + # - repo: https://github.com/PyCQA/pylint + # rev: v2.15.5 + # hooks: + # - id: pylint + # exclude: (docs/|get_latest_release_changelog.py) + - repo: https://github.com/antonbabenko/pre-commit-terraform rev: v1.81.0 hooks: - - id: terraform_fmt + - id: terraform_fmt exclude: '^(?!infrastructure/).*' - - id: terraform_validate + - id: terraform_validate exclude: '^(?!infrastructure/).*' - - id: terraform_docs + - id: terraform_docs args: - - markdown table --recursive --output-file README.md . + - markdown table --recursive --output-file README.md . exclude: '^(?!infrastructure/).*' -- repo: https://github.com/bridgecrewio/checkov.git + - repo: https://github.com/bridgecrewio/checkov.git rev: 2.3.261 hooks: - - id: checkov + - id: checkov args: [--quiet, --compact] exclude: '^(?!infrastructure/).*' -- repo: local + - repo: local hooks: - - id: sdk_test + - id: sdk_test name: sdk_test language: system entry: bash -c 'make sdk-test' files: sdk/*. 
pass_filenames: false -- repo: local + - repo: https://github.com/pre-commit/mirrors-prettier + rev: v3.0.3 hooks: - - id: ui_test + - id: prettier + args: ['--config', 'ui/.prettierrc.json', './ui'] + - repo: local + hooks: + - id: ui_test name: ui_test language: system entry: bash -c 'cd ./ui; npm run test:all' diff --git a/Makefile b/Makefile index 8b2d897..26bdf13 100644 --- a/Makefile +++ b/Makefile @@ -163,6 +163,12 @@ ui-run-dev: ## Run the ui application with hot reload ui-test: ## Test ui site @cd ui/; npm run test:all +ui-test-e2e: + @cd ui/; npx playwright test ui/playwright + +ui-test-e2e-headed: + @cd ui/; npx playwright test ui/playwright --ui + # UI Release -------------------- ## ui-create-static-out: diff --git a/ui/playwright/.auth/user.json b/ui/playwright/.auth/user.json deleted file mode 100644 index 69f3391..0000000 --- a/ui/playwright/.auth/user.json +++ /dev/null @@ -1,69 +0,0 @@ -{ - "cookies": [ - { - "name": "csrf-state", - "value": "\"\"", - "domain": "rapid-preprod-auth.auth.eu-west-2.amazoncognito.com", - "path": "/", - "expires": 1692287927.336736, - "httpOnly": true, - "secure": true, - "sameSite": "None" - }, - { - "name": "csrf-state-legacy", - "value": "\"\"", - "domain": "rapid-preprod-auth.auth.eu-west-2.amazoncognito.com", - "path": "/", - "expires": 1692287927.336913, - "httpOnly": true, - "secure": true, - "sameSite": "Lax" - }, - { - "name": "XSRF-TOKEN", - "value": "58e5152c-5518-4a15-8f44-53aab80923e3", - "domain": "rapid-preprod-auth.auth.eu-west-2.amazoncognito.com", - "path": "/", - "expires": -1, - "httpOnly": true, - "secure": true, - "sameSite": "Lax" - }, - { - "name": "cognito", - "value": "\"H4sIAAAAAAAAAAHjABz/0cUswHEu+wt4sFtgX/MXtjfEuCG5J9+QOoDte5Xo+8fhLVV+z9tVnIc3CZMT9dor+RIIuLiP9D/+lidJbwe86EF6UaDpHjGE4G2pudZQsByLI9ltdUis+cyI7dAnBFmLao1OXbcaCG6IDKqwtF3QQ2zuO0Z8l3PlMCwkR3Cx28fhs2EqOSo9AZtWfvcrThpi2KbwI1Zub32pcqOMkVgYIyiCveNXqpjvR2zVNGrYatzd0DYka5DpLq8EyZVXK3HL2mWbalfT4KqdoMC9CYxWgMSDKGxw3H/EU5nk23uUIo8V44wxzDVT4wAAAA==.H4sIAAAAAAAAAAEgAN//jAzRXN0pqpqP9AxAks2+ygr+2fMuLXRS1sEKXC9B/YAo9zXxIAAAAA==.3\"", - "domain": ".rapid-preprod-auth.auth.eu-west-2.amazoncognito.com", - "path": "/", - "expires": 1692291228.147482, - "httpOnly": true, - "secure": true, - "sameSite": "Lax" - }, - { - "name": "rat", - "value": "eyJraWQiOiI4NXBka2R2N0VlWHp5Qnk0aUcrRzkrVm03SWhyVFlqalhDZlwvTlNUMGNJUT0iLCJhbGciOiJSUzI1NiJ9.eyJzdWIiOiJiMTBkZWQ4OC00ZTEwLTQ2ZDMtYjljNy1mZjZjZjA1MjZjMDkiLCJpc3MiOiJodHRwczpcL1wvY29nbml0by1pZHAuZXUtd2VzdC0yLmFtYXpvbmF3cy5jb21cL2V1LXdlc3QtMl9jWkRjYmViRm8iLCJ2ZXJzaW9uIjoyLCJjbGllbnRfaWQiOiI3YzRwOWdldWh2dG12MDFvMHRycDRyNWYxIiwib3JpZ2luX2p0aSI6IjgzMmI1ZjdkLTgwNGYtNDYwZS1hN2YyLTM5ODYyY2NlYThjZCIsImV2ZW50X2lkIjoiNDk4ODAwM2ItMGQyZS00ZGUzLTg5YTItNDRhOGYwYjdmNmZmIiwidG9rZW5fdXNlIjoiYWNjZXNzIiwic2NvcGUiOiJwaG9uZSBvcGVuaWQgZW1haWwiLCJhdXRoX3RpbWUiOjE2OTIyODc2MjgsImV4cCI6MTY5MjI5MTIyOCwiaWF0IjoxNjkyMjg3NjI4LCJqdGkiOiJkZTJlM2I0Zi05NzNjLTQ0NjItOWM1Yy1kZGNhZGY5ZDlkYjIiLCJ1c2VybmFtZSI6InJhcGlkLXByZXByb2RfdWlfdGVzdF91c2VyIn0.Xf-R3WBmlGQ_RgF5R0q8buwNRq4_SQqyFJ2oBQkM33fURlZ8oPUIoibne8VyIoNH4EsrFpn-9m0q7jfojNVPr9gUbZbTNjusO1aPDmpa95_wWE7ltWoFu_DRfiJmph9915cmWPZrvhE4cDfQBpnbSEXB8Qd-NveYPbX1dzcJqEsZC3MIUAhaEyynjAmKoPtzIPRCivcprjdU1GzYTVz_bidtMKv0f_JdLH-fsZKvHYp8ybd_8Yd_k-hD_rgmAb5pe2GQAgLWch1Z4Gw3vXcpXdNmWQZPuU4peS6BLnZ9hVPjBN7dse7JZL-bu-Z7pHfXcmB1BXt-xdt1j8shR4-jHw", - "domain": "preprod.getrapid.link", - "path": "/", - "expires": 1692291228.637131, - "httpOnly": true, - "secure": true, - "sameSite": "Lax" - } - ], - "origins": [ - { - "origin": 
"https://rapid-preprod-auth.auth.eu-west-2.amazoncognito.com", - "localStorage": [ - { - "name": "Amazon.AWS.Cognito.ContextData.LS_UBID", - "value": "hiq9hu40hfac59ul73l7:1692287627856" - }, - { - "name": "amznfbgid", - "value": "X59-7904128-7171304:1692287627" - } - ] - } - ] -} \ No newline at end of file diff --git a/ui/playwright/auth.setup.ts b/ui/playwright/auth.setup.ts index e8f7a23..d4ede69 100644 --- a/ui/playwright/auth.setup.ts +++ b/ui/playwright/auth.setup.ts @@ -1,48 +1,30 @@ -import { test as setup, expect } from '@playwright/test'; -import { SecretsManager } from 'aws-sdk' +import { test as setup, expect } from '@playwright/test' +import { domain, getSecretValue } from './utils' -const client = new SecretsManager({ region: process.env.AWS_REGION }) -const authFile = 'playwright/.auth/user.json'; -const domain = process.env.DOMAIN; +const authFile = 'playwright/.auth/user.json' const secretName = `${process.env.RESOURCE_PREFIX}_UI_TEST_USER` - -export async function getSecretValue( - secretName: string -): Promise { - return new Promise((resolve, reject) => { - client.getSecretValue({ SecretId: secretName }, function (err, data) { - if (err) { - reject(err) - } else { - resolve(data.SecretString) - } - }) - }) -} - - setup('authenticate', async ({ page }) => { - const secret = JSON.parse(await getSecretValue(secretName) as string) - await page.goto(domain); - await page.goto(`${domain}/login`); + const secret = JSON.parse((await getSecretValue(secretName)) as string) + await page.goto(domain) + await page.goto(`${domain}/login`) - await page.locator('[data-testid="login-link"]').click(); + await page.locator('[data-testid="login-link"]').click() - await page.locator('[placeholder="Username"]').nth(1).click(); + await page.locator('[placeholder="Username"]').nth(1).click() - await page.locator('[placeholder="Password"]').nth(1).click(); + await page.locator('[placeholder="Password"]').nth(1).click() - await page.locator('[placeholder="Password"]').nth(1).fill(`${secret['password']}`); + await page.locator('[placeholder="Password"]').nth(1).fill(`${secret['password']}`) - await page.locator('[placeholder="Username"]').nth(1).click(); + await page.locator('[placeholder="Username"]').nth(1).click() - await page.locator('[placeholder="Username"]').nth(1).click(); + await page.locator('[placeholder="Username"]').nth(1).click() - await page.locator('[placeholder="Username"]').nth(1).fill(`${secret['username']}`); + await page.locator('[placeholder="Username"]').nth(1).fill(`${secret['username']}`) - await page.locator('text=Sign in').nth(3).click(); - await expect(page).toHaveURL(domain); + await page.locator('text=Sign in').nth(3).click() + await expect(page).toHaveURL(domain) - await page.context().storageState({ path: authFile }); -}); \ No newline at end of file + await page.context().storageState({ path: authFile }) +}) diff --git a/ui/playwright/test-data-flow.spec.ts b/ui/playwright/test-data-flow.spec.ts index 9c8c3cb..d0926e3 100644 --- a/ui/playwright/test-data-flow.spec.ts +++ b/ui/playwright/test-data-flow.spec.ts @@ -1,87 +1,91 @@ -import { test, expect } from '@playwright/test'; -import { v4 } from 'uuid'; -import fs from 'fs'; +/* eslint-disable testing-library/prefer-screen-queries */ +import { test, expect } from '@playwright/test' +import { v4 } from 'uuid' +import fs from 'fs' + +import { domain } from './utils' -const domain = process.env.DOMAIN; const datasetName = `ui_test_dataset_${v4().replace('-', '_').slice(0, 8)}` const filePath = 
'playwright/gapminder.csv' const downloadPath = `playwright/.downloads/${datasetName}` test('test', async ({ page }) => { - await page.goto(domain); - - // Create a schema - await page.locator('div[role="button"]:has-text("Create Schema")').click(); - await expect(page).toHaveURL(`${domain}/schema/create`); - await page.locator('[data-testid="field-level"]').selectOption('PUBLIC'); - await page.locator('[data-testid="field-layer"]').selectOption('default'); - await page.locator('[data-testid="field-domain"]').click(); - await page.locator('[data-testid="field-domain"]').fill('ui_test_domain'); - await page.locator('[data-testid="field-title"]').click(); - await page.locator('[data-testid="field-title"]').fill(datasetName); - await page.locator('[data-testid="field-file"]').click(); - await page.locator('[data-testid="field-file"]').setInputFiles(filePath); - await page.locator('[data-testid="submit"]').click(); - await page.locator('input[name="ownerEmail"]').click(); - await page.locator('input[name="ownerEmail"]').fill('ui_test@email.com'); - await page.locator('input[name="ownerName"]').click(); - await page.locator('input[name="ownerName"]').fill('ui_test'); - await page.locator('button:has-text("Create Schema")').click(); - // @ts-ignore - const schemaCreatedElement = await page.waitForSelector('.MuiAlertTitle-root', { text: 'Schema Created' }); - - expect(await schemaCreatedElement.innerText()).toEqual('Schema Created'); + await page.goto(domain) - // Upload a dataset - await page.getByRole('button', { name: 'Upload data' }).click(); - await page.getByTestId('select-layer').getByRole('combobox').click(); - await page.getByRole('option', { name: 'default' }).click(); - await page.getByTestId('select-domain').getByRole('combobox').click(); - await page.getByRole('option', { name: 'ui_test_domain' }).click(); - await page.getByTestId('select-dataset').getByRole('combobox').click(); - await page.getByRole('option', { name: datasetName }).click(); - await page.getByTestId('upload').click(); - await page.getByTestId('upload').setInputFiles(filePath); - await page.getByTestId('submit').click(); + // Create a schema + await page.locator('div[role="button"]:has-text("Create Schema")').click() + await expect(page).toHaveURL(`${domain}/schema/create`) + await page.locator('[data-testid="field-level"]').selectOption('PUBLIC') + await page.locator('[data-testid="field-layer"]').selectOption('default') + await page.locator('[data-testid="field-domain"]').click() + await page.locator('[data-testid="field-domain"]').fill('ui_test_domain') + await page.locator('[data-testid="field-title"]').click() + await page.locator('[data-testid="field-title"]').fill(datasetName) + await page.locator('[data-testid="field-file"]').click() + await page.locator('[data-testid="field-file"]').setInputFiles(filePath) + await page.locator('[data-testid="submit"]').click() + await page.locator('input[name="ownerEmail"]').click() + await page.locator('input[name="ownerEmail"]').fill('ui_test@email.com') + await page.locator('input[name="ownerName"]').click() + await page.locator('input[name="ownerName"]').fill('ui_test') + await page.locator('button:has-text("Create Schema")').click() + const schemaCreatedElement = await page.waitForSelector('.MuiAlertTitle-root') + expect(await schemaCreatedElement.innerText()).toEqual('Schema Created') - expect(await page.getByText('Data uploaded successfully').textContent()).toEqual('Status: Data uploaded successfully') + // Upload a dataset + await page.getByRole('button', { name: 
'Upload data' }).click() + await page.getByTestId('select-layer').getByRole('combobox').click() + await page.getByRole('option', { name: 'default' }).click() + await page.getByTestId('select-domain').getByRole('combobox').click() + await page.getByRole('option', { name: 'ui_test_domain' }).click() + await page.getByTestId('select-dataset').getByRole('combobox').click() + await page.getByRole('option', { name: datasetName }).click() + await page.getByTestId('upload').click() + await page.getByTestId('upload').setInputFiles(filePath) + await page.getByTestId('submit').click() - // Download the dataset - await page.getByRole('button', { name: 'Download data' }).click(); - await page.getByTestId('select-layer').getByRole('combobox').click(); - await page.getByRole('option', { name: 'default' }).click(); - await page.getByTestId('select-domain').getByRole('combobox').click(); - await page.getByRole('option', { name: 'ui_test_domain' }).click(); - await page.getByTestId('select-dataset').getByRole('combobox').click(); - await page.getByRole('option', { name: datasetName }).click(); - await page.getByTestId('submit').click(); - await page.locator('div').filter({ hasText: 'Row Limit' }).locator('div').nth(1).click(); - await page.getByPlaceholder('30').fill('200'); - const downloadPromise = page.waitForEvent('download'); - await page.getByRole('button', { name: 'Download', exact: true }).click(); - const download = await downloadPromise; - await download.saveAs(downloadPath) + expect(await page.getByText('Data uploaded successfully').textContent()).toEqual( + 'Status: Data uploaded successfully', + ) - expect(fs.existsSync(downloadPath)).toBeTruthy() + // Download the dataset + await page.getByRole('button', { name: 'Download data' }).click() + await page.getByTestId('select-layer').getByRole('combobox').click() + await page.getByRole('option', { name: 'default' }).click() + await page.getByTestId('select-domain').getByRole('combobox').click() + await page.getByRole('option', { name: 'ui_test_domain' }).click() + await page.getByTestId('select-dataset').getByRole('combobox').click() + await page.getByRole('option', { name: datasetName }).click() + await page.getByTestId('submit').click() + await page.locator('div').filter({ hasText: 'Row Limit' }).locator('div').nth(1).click() + await page.getByPlaceholder('30').fill('200') + const downloadPromise = page.waitForEvent('download') + await page.getByRole('button', { name: 'Download', exact: true }).click() + const download = await downloadPromise + await download.saveAs(downloadPath) - fs.rm(downloadPath, (err) => { - err ? console.error(err) : console.log("Download deleted") - }) + expect(fs.existsSync(downloadPath)).toBeTruthy() - // Delete the dataset - await page.getByRole('button', { name: 'Delete data' }).click(); - await page.locator('div[role="button"]:has-text("Delete data")').click(); - await page.getByTestId('select-layer').getByRole('combobox').click(); - await page.getByRole('option', { name: 'default' }).click(); - await page.getByTestId('select-domain').getByRole('combobox').click(); - await page.getByRole('option', { name: 'ui_test_domain' }).click(); - await page.getByTestId('select-dataset').getByRole('combobox').click(); - await page.getByRole('option', { name: datasetName }).click(); - await page.getByTestId('submit').click(); + fs.rm(downloadPath, (err) => { + err ? 
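/*
 * Note: fs.rm here is fire-and-forget, so the spec can finish before the
 * downloaded file is actually removed. A promise-based sketch using Node's
 * built-in fs/promises keeps the cleanup inside the test's async flow:
 *
 *   import { rm } from 'fs/promises'
 *   await rm(downloadPath, { force: true })
 */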
console.error(err) : console.log('Download deleted') + }) - // @ts-ignore - const datasetDeletedElement = await page.waitForSelector('.MuiAlertTitle-root', { text: `Dataset deleted: default/ui_test_domain/${datasetName}` }); + // Delete the dataset + await page.getByRole('button', { name: 'Delete data' }).click() + await page.locator('div[role="button"]:has-text("Delete data")').click() + await page.getByTestId('select-layer').getByRole('combobox').click() + await page.getByRole('option', { name: 'default' }).click() + await page.getByTestId('select-domain').getByRole('combobox').click() + await page.getByRole('option', { name: 'ui_test_domain' }).click() + await page.getByTestId('select-dataset').getByRole('combobox').click() + await page.getByRole('option', { name: datasetName }).click() + await page.getByTestId('submit').click() - expect(await datasetDeletedElement.innerText()).toEqual(`Dataset deleted: default/ui_test_domain/${datasetName}`); + const datasetDeletedElement = await page.waitForSelector('.MuiAlertTitle-root', { + text: `Dataset deleted: default/ui_test_domain/${datasetName}`, + }) -}); \ No newline at end of file + expect(await datasetDeletedElement.innerText()).toEqual( + `Dataset deleted: default/ui_test_domain/${datasetName}`, + ) +}) diff --git a/ui/playwright/test-homepage.spec.ts b/ui/playwright/test-homepage.spec.ts index eb4bedf..edfa8c8 100644 --- a/ui/playwright/test-homepage.spec.ts +++ b/ui/playwright/test-homepage.spec.ts @@ -1,18 +1,19 @@ -import { test, expect } from '@playwright/test'; +/* eslint-disable testing-library/prefer-screen-queries */ +import { test } from '@playwright/test' -const domain = process.env.DOMAIN +import { domain } from './utils' test('test', async ({ page }) => { - await page.goto(domain); - await page.getByRole('button', { name: 'Create User' }).click(); - await page.getByRole('button', { name: 'Modify User' }).click(); - await page.getByRole('button', { name: 'Download data' }).click(); - await page.getByRole('button', { name: 'Upload data' }).click(); - await page.getByRole('button', { name: 'Create Schema' }).click(); - await page.getByRole('button', { name: 'Task Status' }).click(); - await page.getByRole('link', { name: 'Home' }).click(); - await page.getByRole('link', { name: 'Create User' }).nth(1).click(); - await page.getByRole('link', { name: 'Home' }).click(); - await page.getByRole('button', { name: 'account of current user' }).click(); - await page.getByText('Logout').click(); -}); \ No newline at end of file + await page.goto(domain) + await page.getByRole('button', { name: 'Create User' }).click() + await page.getByRole('button', { name: 'Modify User' }).click() + await page.getByRole('button', { name: 'Download data' }).click() + await page.getByRole('button', { name: 'Upload data' }).click() + await page.getByRole('button', { name: 'Create Schema' }).click() + await page.getByRole('button', { name: 'Task Status' }).click() + await page.getByRole('link', { name: 'Home' }).click() + await page.getByRole('link', { name: 'Create User' }).nth(1).click() + await page.getByRole('link', { name: 'Home' }).click() + await page.getByRole('button', { name: 'account of current user' }).click() + await page.getByText('Logout').click() +}) diff --git a/ui/playwright/test-user-flow.spec.ts b/ui/playwright/test-user-flow.spec.ts index 03bdc16..dafa96d 100644 --- a/ui/playwright/test-user-flow.spec.ts +++ b/ui/playwright/test-user-flow.spec.ts @@ -1,21 +1,55 @@ -import { test, expect } from '@playwright/test'; +/* eslint-disable 
testing-library/prefer-screen-queries */ +import { test, expect } from '@playwright/test' + +import { makeAPIRequest, generateRapidAuthToken } from './utils' + +const domain = 'http://localhost:3000' -const domain = process.env.DOMAIN; const user = `${process.env.RESOURCE_PREFIX}_ui_test_user` test('test', async ({ page }) => { - await page.goto(domain); - - // Click div[role="button"]:has-text("Modify User") - await page.locator('div[role="button"]:has-text("Modify User")').click(); - await expect(page).toHaveURL(`${domain}/subject/modify`); + await page.goto(domain) - await page.locator('[data-testid="field-user"]').selectOption({ 'label': user }) - await page.locator('[data-testid="submit-button"]').click(); + // Modify user to have data admin permissions + await page.locator('div[role="button"]:has-text("Modify User")').click() + await expect(page).toHaveURL(`${domain}/subject/modify`) + await page.locator('[data-testid="field-user"]').selectOption({ label: user }) + await page.locator('[data-testid="submit-button"]').click() + await page.getByRole('row', { name: 'DATA_ADMIN' }).getByRole('button').click() + await page.getByTestId('select-type').selectOption('DATA_ADMIN') + await page + .getByRole('row') + .filter({ hasText: 'ActionDATA_ADMIN' }) + .getByRole('button') + .click() + await page.getByTestId('submit').click() + // await expect(page).toHaveURL(/success/) - await page.getByRole('row', { name: 'DATA_ADMIN' }).getByRole('button').click(); - await page.getByTestId('select-type').selectOption('DATA_ADMIN'); - await page.getByRole('row').filter({ hasText: 'ActionDATA_ADMIN' }).getByRole('button').click(); - await page.getByTestId('submit').click(); - await expect(page).toHaveURL(/success/); -}); \ No newline at end of file + // Test unique condition where we correctly display permissions when modifying a user + // even though they might have conflicting permissions within the filtering logic + const { access_token } = await generateRapidAuthToken() + await makeAPIRequest( + 'subjects/permissions', + 'PUT', + { + subject_id: 'b10ded88-4e10-46d3-b9c7-ff6cf0526c09', + permissions: [ + 'DATA_ADMIN', + 'READ_ALL', + 'USER_ADMIN', + 'WRITE_ALL', + 'READ_DEFAULT_PROTECTED_TEST_E2E_PROTECTED', + ], + }, + `Bearer ${access_token}`, + ) + await page.locator('div[role="button"]:has-text("Modify User")').click() + await expect(page).toHaveURL(`${domain}/subject/modify`) + await page.locator('[data-testid="field-user"]').selectOption({ label: user }) + await page.locator('[data-testid="submit-button"]').click() + await page + .getByRole('row', { name: 'READ DEFAULT PROTECTED TEST_E2E_PROTECTED' }) + .getByRole('button') + .click() + await page.getByTestId('submit').click() +}) diff --git a/ui/playwright/utils.ts b/ui/playwright/utils.ts new file mode 100644 index 0000000..ef80c45 --- /dev/null +++ b/ui/playwright/utils.ts @@ -0,0 +1,58 @@ +import { SecretsManager } from 'aws-sdk' + +const client = new SecretsManager({ region: process.env.AWS_REGION }) + +const baseDomain = process.env.DOMAIN_NAME +export const domain = `https://${baseDomain.replace('/api', '')}` + +export async function makeAPIRequest( + path: string, + method: string, + body?: any, + authToken?: string, + optionalHeaders = {}, +): Promise { + const response = await fetch(`https://${baseDomain}/${path}`, { + method, + headers: { + 'Content-Type': 'application/json', + ...optionalHeaders, + ...(authToken ? 
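/*
 * Note: makeAPIRequest always sends JSON.stringify(body), while
 * generateRapidAuthToken (below) overrides Content-Type to
 * application/x-www-form-urlencoded. A standard Cognito /oauth2/token
 * endpoint expects the body itself to be form-encoded, so if the token call
 * ever fails, a form-encoded body is the likely fix (an assumption about the
 * endpoint, not a confirmed change); a sketch:
 *
 *   body: new URLSearchParams({ grant_type: 'client_credentials', client_id: clientId }).toString()
 */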
{ Authorization: authToken } : {}), + }, + body: JSON.stringify(body), + }) + return response.json() +} + +export async function getSecretValue(secretName: string): Promise { + return new Promise((resolve, reject) => { + client.getSecretValue({ SecretId: secretName }, function (err, data) { + if (err) { + reject(err) + } else { + resolve(data.SecretString) + } + }) + }) +} + +export async function generateRapidAuthToken(): Promise { + const secretName = `${process.env.RESOURCE_PREFIX}_E2E_TEST_CLIENT_USER_ADMIN` + const clientId = JSON.parse((await getSecretValue(secretName)) as string)['CLIENT_ID'] + const clientSecret = JSON.parse((await getSecretValue(secretName)) as string)[ + 'CLIENT_SECRET' + ] + const credentialsSecret = btoa(`${clientId}:${clientSecret}`) + return makeAPIRequest( + 'oauth2/token', + 'POST', + { + grant_type: 'client_credentials', + client_id: clientId, + }, + `Basic ${credentialsSecret}`, + { + 'Content-Type': 'application/x-www-form-urlencoded', + }, + ) +} diff --git a/ui/src/components/PermissionsTable/PermissionsTable.tsx b/ui/src/components/PermissionsTable/PermissionsTable.tsx index 7aa25ae..3024151 100644 --- a/ui/src/components/PermissionsTable/PermissionsTable.tsx +++ b/ui/src/components/PermissionsTable/PermissionsTable.tsx @@ -8,108 +8,126 @@ import { isDataPermission } from '@/service/permissions' import { PermissionUiResponse } from '@/service/types' import { useEffect, useState } from 'react' import { cloneDeep } from 'lodash' -import IconButton from '@mui/material/IconButton'; -import AddIcon from '@mui/icons-material/Add'; -import RemoveIcon from '@mui/icons-material/Remove'; -import Table from '@mui/material/Table'; -import TableBody from '@mui/material/TableBody'; -import TableCell from '@mui/material/TableCell'; -import TableContainer from '@mui/material/TableContainer'; -import TableHead from '@mui/material/TableHead'; -import TableRow from '@mui/material/TableRow'; - +import IconButton from '@mui/material/IconButton' +import AddIcon from '@mui/icons-material/Add' +import RemoveIcon from '@mui/icons-material/Remove' +import Table from '@mui/material/Table' +import TableBody from '@mui/material/TableBody' +import TableCell from '@mui/material/TableCell' +import TableContainer from '@mui/material/TableContainer' +import TableHead from '@mui/material/TableHead' +import TableRow from '@mui/material/TableRow' type ActionType = z.infer type PermissionType = z.infer type SensitivityType = z.infer - -const PermissionsTable = ({ permissionsListData, fieldArrayReturn }: { permissionsListData: PermissionUiResponse, fieldArrayReturn: FieldValues }) => { - +const PermissionsTable = ({ + permissionsListData, + fieldArrayReturn, + isModifyPage = false, +}: { + permissionsListData: PermissionUiResponse + fieldArrayReturn: FieldValues + isModifyPage?: boolean +}) => { const [filteredPermissionsListData, setFilteredPermissionsListData] = useState({}) const [permissionsAtMax, setPermissionsAtMax] = useState(false) - const removePermissionAsAnOption = (permission: PermissionType, permissionsList: PermissionUiResponse) => { - const { type, layer, sensitivity, domain } = permission; - const typeList = permissionsList[type]; - const layerList = typeList?.[layer]; - const sensitivityList = layerList?.[sensitivity]; + const removePermissionAsAnOption = ( + permission: PermissionType, + permissionsList: PermissionUiResponse, + ) => { + const { type, layer, sensitivity, domain } = permission + const typeList = permissionsList[type] + const layerList = typeList?.[layer] + 
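/*
 * Note: an illustrative shape of the permissionsList being pruned here (the
 * real PermissionUiResponse payload comes from the API, so these keys are
 * examples only):
 *
 *   {
 *     READ: { default: { PROTECTED: { TEST_E2E_PROTECTED: {} } } },
 *     DATA_ADMIN: {},
 *   }
 *
 * Removing a protected domain deletes that domain key, then collapses empty
 * parents upwards (sensitivity, then layer); the 'ALL' checks below
 * short-circuit that walk by dropping the whole layer or type in one step.
 */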
const sensitivityList = layerList?.[sensitivity] switch (true) { // Scenario for protected permission case Boolean(domain): // Remove the domain if (domain in sensitivityList) { - delete sensitivityList[domain]; + delete sensitivityList[domain] } // Remove the sensitivity if there are no domains left if (!Object.keys(sensitivityList)?.length) { - delete layerList[sensitivity]; + delete layerList[sensitivity] // Remove the layer if there are no sensitivities left if (!Object.keys(layerList)?.length) { - delete typeList[layer]; + delete typeList[layer] } } - break; + break case Boolean(sensitivity): // Remove the sensitivity if (sensitivity in layerList) { - delete layerList[sensitivity]; + delete layerList[sensitivity] } // Remove the layer if there are no sensitivities left - if (!Object.keys(layerList)?.length || sensitivity === "ALL") { - delete typeList[layer]; + if (!Object.keys(layerList)?.length || sensitivity === 'ALL') { + delete typeList[layer] // Remove the type if there are no layers left - if (!Object.keys(typeList)?.length || layer === "ALL") { - delete permissionsList[type]; + if (!Object.keys(typeList)?.length || layer === 'ALL') { + delete permissionsList[type] } } - break; + break - // Scenario for admin permissions + // Scenario for admin permissions default: - delete permissionsList[type]; - break; + delete permissionsList[type] + break } - return permissionsList; - }; - + return permissionsList + } const { fields, append, remove } = fieldArrayReturn const { control, trigger, watch, reset, setError, setValue } = useForm({ - resolver: zodResolver(Permission) + resolver: zodResolver(Permission), }) // Remove any of the selected permissions from being an option useEffect(() => { let amendedPermissions = cloneDeep(permissionsListData) - fields.forEach((permission) => { - amendedPermissions = removePermissionAsAnOption(permission, amendedPermissions) - }) + // Handle the unique case where if the user has conflicting permissions that would not be allowed + // by the removePermissionAsAnOption function, we gracefully handle this error + try { + fields.forEach((permission) => { + amendedPermissions = removePermissionAsAnOption(permission, amendedPermissions) + }) + } catch (error) { + // Do not allow for this case when creating a user + if (!isModifyPage) { + throw error + } + } setFilteredPermissionsListData(amendedPermissions) - - }, [fields, permissionsListData]); + }, [fields, permissionsListData]) // Set Permissions at max useEffect(() => { if (Object.keys(filteredPermissionsListData).length === 0) { setPermissionsAtMax(true) - } - else { + } else { setPermissionsAtMax(false) } }, [filteredPermissionsListData]) - - const generateOptions = (items) => items.map((item) => { - return - }) + const generateOptions = (items) => + items.map((item) => { + return ( + + ) + }) return ( @@ -124,157 +142,191 @@ const PermissionsTable = ({ permissionsListData, fieldArrayReturn }: { permissio - {(fields || []).map((item, index) => - ( - ( + - remove(index)} - > - - - - - {item.type} - - - {item.layer} - - - {item.sensitivity} - - - {item.domain} - - ) - )} - {!permissionsAtMax && - - { - const result = trigger(undefined, { shouldFocus: true }); - if (result) { - const permissionToAdd = watch() - // Triggers an error if the domain is not set for protected sensitivity - if (isDataPermission(permissionToAdd) && permissionToAdd.sensitivity === "PROTECTED" && permissionToAdd.domain === undefined) { - setError("domain", { type: "custom", message: "Required" }); - } - else { - 
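/*
 * Note on the try/catch in the effect above: if one stored permission (e.g.
 * READ_ALL) has already deleted the entire READ branch from the list, a later
 * READ_... permission finds typeList undefined, the optional chains pass
 * undefined along, and the `domain in sensitivityList` / `sensitivity in
 * layerList` checks throw a TypeError. That conflicting state can only come
 * from the API on the modify page, which is why the error is swallowed there
 * and rethrown when creating a user.
 */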
append(permissionToAdd) - reset({ - type: undefined, - layer: undefined, - sensitivity: undefined, - domain: undefined, - }) + + remove(index)}> + + + + + {item.type} + + + {item.layer} + + + + {item.sensitivity} + + + + {item.domain} + + + ))} + {!permissionsAtMax && ( + + + { + const result = trigger(undefined, { shouldFocus: true }) + if (result) { + const permissionToAdd = watch() + // Triggers an error if the domain is not set for protected sensitivity + if ( + isDataPermission(permissionToAdd) && + permissionToAdd.sensitivity === 'PROTECTED' && + permissionToAdd.domain === undefined + ) { + setError('domain', { type: 'custom', message: 'Required' }) + } else { + append(permissionToAdd) + reset({ + type: undefined, + layer: undefined, + sensitivity: undefined, + domain: undefined, + }) + } } + }} + > + + + + + ( + + )} + /> + + + + isDataPermission(watch()) && ( + + ) + } + /> + + + + isDataPermission(watch()) && + watch('layer') && ( + + ) } - }} - > - - - - - ( - - )} - /> - - - ( - isDataPermission(watch()) && - - )} - /> - - - ( - isDataPermission(watch()) && watch('layer') && - - ) - } - /> - - - ( - isDataPermission(watch()) && watch('sensitivity') === 'PROTECTED' && - ) - } - /> - - } + /> + + + + isDataPermission(watch()) && + watch('sensitivity') === 'PROTECTED' && ( + + ) + } + /> + + + )} - + ) } diff --git a/ui/src/pages/subject/modify/[subjectId].tsx b/ui/src/pages/subject/modify/[subjectId].tsx index a58aef1..5d64aef 100644 --- a/ui/src/pages/subject/modify/[subjectId].tsx +++ b/ui/src/pages/subject/modify/[subjectId].tsx @@ -4,12 +4,12 @@ import AccountLayout from '@/components/Layout/AccountLayout' import { getPermissionsListUi, getSubjectPermissions, - updateSubjectPermissions + updateSubjectPermissions, } from '@/service' import { extractPermissionNames } from '@/service/permissions' import { UpdateSubjectPermissionsBody, - UpdateSubjectPermissionsResponse + UpdateSubjectPermissionsResponse, } from '@/service/types' import { Alert, Typography, LinearProgress } from '@mui/material' import { useMutation, useQuery } from '@tanstack/react-query' @@ -18,7 +18,6 @@ import { useEffect } from 'react' import { useForm, useFieldArray } from 'react-hook-form' import PermissionsTable from '@/components/PermissionsTable/PermissionsTable' - function SubjectModifyPage() { const router = useRouter() const { subjectId, name } = router.query @@ -27,21 +26,21 @@ function SubjectModifyPage() { const fieldArrayReturn = useFieldArray({ control, - name: 'permissions' - }); + name: 'permissions', + }) - const { append } = fieldArrayReturn; + const { append } = fieldArrayReturn const { isLoading: isPermissionsListDataLoading, data: permissionsListData, - error: permissionsListDataError + error: permissionsListDataError, } = useQuery(['permissionsList'], getPermissionsListUi) const { isLoading: isSubjectPermissionsLoading, data: subjectPermissionsData, - error: subjectPermissionsError + error: subjectPermissionsError, } = useQuery(['subjectPermissions', subjectId], getSubjectPermissions) useEffect(() => { @@ -59,7 +58,7 @@ function SubjectModifyPage() { mutationFn: updateSubjectPermissions, onSuccess: () => { router.push({ pathname: `/subject/modify/success/${subjectId}`, query: { name } }) - } + }, }) if (isPermissionsListDataLoading || isSubjectPermissionsLoading) { @@ -77,20 +76,16 @@ function SubjectModifyPage() { return (
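/*
 * Note: in @tanstack/react-query, mutate() returns void, so the
 * `await mutate(...)` in the submit handler just below awaits nothing. If the
 * handler should actually block until the request settles, mutateAsync is the
 * awaitable form; a sketch against the same mutation:
 *
 *   const { mutateAsync, error } = useMutation({ mutationFn: updateSubjectPermissions })
 *   await mutateAsync({ subject_id: subjectId as string, permissions })
 */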
    {
-             const permissions = data.permissions.map((permission) => extractPermissionNames(permission, permissionsListData))
-             await mutate(
-               { subject_id: subjectId as string, permissions })
+             const permissions = data.permissions.map((permission) =>
+               extractPermissionNames(permission, permissionsListData),
+             )
+             await mutate({ subject_id: subjectId as string, permissions })
            })}
            noValidate
          >
+         }
@@ -99,7 +94,11 @@ function SubjectModifyPage()
            Modify Subject
           Select permissions for {name}
-
+
          {error && (
            {error?.message}

From b07ff7516d0e63584dfb4046259d37ad511fedea Mon Sep 17 00:00:00 2001
From: Toby Drane
Date: Fri, 1 Sep 2023 16:53:34 +0100
Subject: [PATCH 17/17] remove hardcoded subject id

---
 ui/playwright/test-user-flow.spec.ts | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/ui/playwright/test-user-flow.spec.ts b/ui/playwright/test-user-flow.spec.ts
index dafa96d..2c79ed4 100644
--- a/ui/playwright/test-user-flow.spec.ts
+++ b/ui/playwright/test-user-flow.spec.ts
@@ -1,9 +1,7 @@
 /* eslint-disable testing-library/prefer-screen-queries */
 import { test, expect } from '@playwright/test'

-import { makeAPIRequest, generateRapidAuthToken } from './utils'
-
-const domain = 'http://localhost:3000'
+import { makeAPIRequest, generateRapidAuthToken, domain } from './utils'

 const user = `${process.env.RESOURCE_PREFIX}_ui_test_user`

@@ -11,7 +9,7 @@ test('test', async ({ page }) => {
   await page.goto(domain)

   // Modify user to have data admin permissions
   await page.locator('div[role="button"]:has-text("Modify User")').click()
   await expect(page).toHaveURL(`${domain}/subject/modify`)
   await page.locator('[data-testid="field-user"]').selectOption({ label: user })
   await page.locator('[data-testid="submit-button"]').click()
@@ -28,20 +26,22 @@ test('test', async ({ page }) => {
   // Test unique condition where we correctly display permissions when modifying a user
   // even though they might have conflicting permissions within the filtering logic
   const { access_token } = await generateRapidAuthToken()
+  const url = page.url()
+  const subjectId = url.split('/').pop()
   await makeAPIRequest(
     'subjects/permissions',
     'PUT',
     {
-      subject_id: 'b10ded88-4e10-46d3-b9c7-ff6cf0526c09',
+      subject_id: subjectId,
       permissions: [
         'DATA_ADMIN',
         'READ_ALL',
         'USER_ADMIN',
         'WRITE_ALL',
-        'READ_DEFAULT_PROTECTED_TEST_E2E_PROTECTED',
-      ],
+        'READ_DEFAULT_PROTECTED_TEST_E2E_PROTECTED'
+      ]
     },
-    `Bearer ${access_token}`,
+    `Bearer ${access_token}`
   )
   await page.locator('div[role="button"]:has-text("Modify User")').click()
   await expect(page).toHaveURL(`${domain}/subject/modify`)
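/*
 * Note: deriving subjectId with url.split('/').pop() relies on the submit
 * redirect landing on /subject/modify/success/<id> with nothing after the id.
 * If that route ever gains a query string, a URL-parsing sketch (an
 * assumption about the route shape) is more robust:
 *
 *   const subjectId = new URL(page.url()).pathname.split('/').pop()
 */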