diff --git a/data-serving/reusable-data-service/data_service/controller/case_controller.py b/data-serving/reusable-data-service/data_service/controller/case_controller.py index c76a38d92..da579bb9c 100644 --- a/data-serving/reusable-data-service/data_service/controller/case_controller.py +++ b/data-serving/reusable-data-service/data_service/controller/case_controller.py @@ -163,6 +163,9 @@ def generate_output(): return generate_output + def valid_statuses(self): + return [f.values for f in Case.custom_fields if f.key == "caseStatus"][0] + def batch_status_change( self, status: str, @@ -172,18 +175,18 @@ def batch_status_change( ): """Update all of the cases identified in case_ids to have the supplied curation status. Raises PreconditionUnsatisfiedError or ValidationError on invalid input.""" - statuses = CaseReference.valid_statuses() + statuses = self.valid_statuses() if not status in statuses: raise PreconditionUnsatisfiedError(f"status {status} not one of {statuses}") if filter is not None and case_ids is not None: raise PreconditionUnsatisfiedError( "Do not supply both a filter and a list of IDs" ) - if status == "EXCLUDED" and note is None: + if status == "omit_error" and note is None: raise ValidationError(f"Excluding cases must be documented in a note") def update_status(id: str, status: str, note: str): - if status == "EXCLUDED": + if status == "omit_error": caseExclusion = CaseExclusionMetadata() caseExclusion.note = note else: diff --git a/data-serving/reusable-data-service/data_service/controller/schema_controller.py b/data-serving/reusable-data-service/data_service/controller/schema_controller.py index 77e308865..98945fd5f 100644 --- a/data-serving/reusable-data-service/data_service/controller/schema_controller.py +++ b/data-serving/reusable-data-service/data_service/controller/schema_controller.py @@ -1,7 +1,7 @@ import dataclasses from datetime import date -from typing import Optional, Union +from typing import Any, List, Optional, Union from 
data_service.model.case import add_field_to_case_class, observe_case_class from data_service.model.field import Field @@ -33,6 +33,7 @@ def restore_saved_fields(self) -> None: field.data_dictionary_text, field.required, field.default, + field.values, False, ) @@ -43,6 +44,7 @@ def add_field( description: str, required: bool = False, default: Optional[Union[bool, str, int, date]] = None, + values: Optional[List[Any]] = None, store_field: bool = True, ): global Case @@ -61,7 +63,7 @@ def add_field( If a field is required, set required = True. You must also set a default value so that existing cases have an initial setting for the field.""" required = required if required is not None else False - field_model = Field(name, type_name, description, required, default) + field_model = Field(name, type_name, description, required, default, values) add_field_to_case_class(field_model) if store_field: self.store.add_field(field_model) diff --git a/data-serving/reusable-data-service/data_service/day_zero_fields.json b/data-serving/reusable-data-service/data_service/day_zero_fields.json index f6af25486..20e95d6bf 100644 --- a/data-serving/reusable-data-service/data_service/day_zero_fields.json +++ b/data-serving/reusable-data-service/data_service/day_zero_fields.json @@ -5,6 +5,30 @@ "data_dictionary_text": "A unique identifier for this case, in the form of a mongoDB object identifier (24 characters 0-9a-f).", "required": false }, + { + "key": "caseStatus", + "type": "string", + "data_dictionary_text": "Status of a case. Cases which are discarded were previously suspected but have now been confirmed negative, and should be excluded from case counts. 
Cases which are omit_error were incorrectly added and should be dismissed from any data interpretation.", + "required": true, + "values": [ + "confirmed", + "probable", + "suspected", + "discarded", + "omit_error" + ] + }, + { + "key": "pathogenStatus", + "type": "string", + "data_dictionary_text": "Whether the infection occurred in an endemic, or non-endemic region.", + "required": true, + "values": [ + "endemic", + "emerging", + "unknown" + ] + }, { "key": "confirmationDate", "type": "date", diff --git a/data-serving/reusable-data-service/data_service/main.py b/data-serving/reusable-data-service/data_service/main.py index 09a4fc103..776dd5cf9 100644 --- a/data-serving/reusable-data-service/data_service/main.py +++ b/data-serving/reusable-data-service/data_service/main.py @@ -163,6 +163,7 @@ def add_field_to_case_schema(): req["description"], req.get("required"), req.get("default"), + req.get("values"), ) return "", 201 except WebApplicationError as e: diff --git a/data-serving/reusable-data-service/data_service/model/case_reference.py b/data-serving/reusable-data-service/data_service/model/case_reference.py index 525ba6202..45336870c 100644 --- a/data-serving/reusable-data-service/data_service/model/case_reference.py +++ b/data-serving/reusable-data-service/data_service/model/case_reference.py @@ -10,7 +10,6 @@ class CaseReference(Document): _: dataclasses.KW_ONLY sourceId: str = dataclasses.field(init=False, default=None) - status: str = dataclasses.field(init=False, default="UNVERIFIED") def validate(self): """Check whether I am consistent. 
Raise ValueError if not.""" @@ -19,18 +18,10 @@ def validate(self): raise ValueError("Source ID is mandatory") elif self.sourceId is None: raise ValueError("Source ID must have a value") - if self.status not in self.valid_statuses(): - raise ValueError(f"Status {self.status} is not acceptable") - - @staticmethod - def valid_statuses(): - """A case reference must have one of these statuses.""" - return ["EXCLUDED", "UNVERIFIED", "VERIFIED"] @staticmethod def from_dict(d: dict[str, str]): """Create a CaseReference from a dictionary representation.""" ref = CaseReference() ref.sourceId = d.get("sourceId") - ref.status = d.get("status", "UNVERIFIED") return ref diff --git a/data-serving/reusable-data-service/data_service/model/document.py b/data-serving/reusable-data-service/data_service/model/document.py index 7434b1bc6..4617e3fec 100644 --- a/data-serving/reusable-data-service/data_service/model/document.py +++ b/data-serving/reusable-data-service/data_service/model/document.py @@ -193,10 +193,16 @@ def validate(self): """Check whether I am consistent. 
Raise ValidationError if not.""" for field in self.custom_fields: getter = operator.attrgetter(field.key) - if field.required is True and getter(self) is None: + value = getter(self) + if field.required is True and value is None: raise ValidationError(f"{field.key} must have a value") - if field.key in self.document_fields() and getter(self) is not None: + if field.key in self.document_fields() and value is not None: getter(self).validate() + if field.values is not None: + if value is not None and value not in field.values: + raise ValidationError( + f"{field.key} value {value} not in permissible values {field.values}" + ) def _internal_set_value(self, key, value): self._internal_ensure_containers_exist(key) diff --git a/data-serving/reusable-data-service/data_service/model/field.py b/data-serving/reusable-data-service/data_service/model/field.py index c76c74dc6..c33595b2c 100644 --- a/data-serving/reusable-data-service/data_service/model/field.py +++ b/data-serving/reusable-data-service/data_service/model/field.py @@ -1,6 +1,6 @@ import dataclasses from datetime import date -from typing import Optional, Union +from typing import Any, List, Optional, Union from data_service.model.case_exclusion_metadata import CaseExclusionMetadata from data_service.model.case_reference import CaseReference @@ -20,6 +20,8 @@ class Field(Document): default: Optional[Union[bool, str, int, date]] = dataclasses.field( init=True, default=None ) + values: Optional[List[Any]] = dataclasses.field(init=True, default=None) + STRING = "string" DATE = "date" INTEGER = "integer" @@ -49,6 +51,7 @@ def from_dict(cls, dictionary): dictionary.get("data_dictionary_text"), dictionary.get("required"), dictionary.get("default", None), + dictionary.get("values", None), ) def python_type(self) -> type: diff --git a/data-serving/reusable-data-service/data_service/stores/memory_store.py b/data-serving/reusable-data-service/data_service/stores/memory_store.py index 5915768ac..23533119b 100644 --- 
a/data-serving/reusable-data-service/data_service/stores/memory_store.py +++ b/data-serving/reusable-data-service/data_service/stores/memory_store.py @@ -53,7 +53,7 @@ def update_case_status( self, id: str, status: str, exclusion: CaseExclusionMetadata ): case = self.case_by_id(id) - case.caseReference.status = status + case.caseStatus = status case.caseExclusion = exclusion def fetch_cases(self, page: int, limit: int, predicate: Filter): @@ -71,8 +71,7 @@ def excluded_cases(self, source_id: str, filter: Filter): return [ c for c in self.cases.values() - if c.caseReference.sourceId == source_id - and c.caseReference.status == "EXCLUDED" + if c.caseReference.sourceId == source_id and c.caseStatus == "omit_error" ] def delete_case(self, case_id: str): diff --git a/data-serving/reusable-data-service/data_service/stores/mongo_store.py b/data-serving/reusable-data-service/data_service/stores/mongo_store.py index 7daed1048..ae78410c6 100644 --- a/data-serving/reusable-data-service/data_service/stores/mongo_store.py +++ b/data-serving/reusable-data-service/data_service/stores/mongo_store.py @@ -91,7 +91,7 @@ def replace_case(self, id: str, case: Case): def update_case_status( self, id: str, status: str, exclusion: CaseExclusionMetadata ): - update = {"$set": {"caseReference.status": status}} + update = {"$set": {"caseStatus": status}} if exclusion: update["$set"][ "caseExclusion" @@ -124,7 +124,7 @@ def excluded_cases(self, source_id: str, filter: Filter) -> List[Case]: "$and": [ { "caseReference.sourceId": ObjectId(source_id), - "caseReference.status": "EXCLUDED", + "caseStatus": "omit_error", }, query, ] diff --git a/data-serving/reusable-data-service/tests/data/case.excluded.json b/data-serving/reusable-data-service/tests/data/case.excluded.json new file mode 100644 index 000000000..bf54aba34 --- /dev/null +++ b/data-serving/reusable-data-service/tests/data/case.excluded.json @@ -0,0 +1,12 @@ +{ + "confirmationDate": "2022-05-01T01:23:45.678Z", + "caseReference": { + 
"sourceId": "fedc12345678901234567890" + }, + "caseExclusion": { + "date": "2022-06-01T01:23:45.678Z", + "note": "Excluded upon this day, for reasons" + }, + "caseStatus": "omit_error", + "pathogenStatus": "endemic" +} \ No newline at end of file diff --git a/data-serving/reusable-data-service/tests/data/case.minimal.json b/data-serving/reusable-data-service/tests/data/case.minimal.json index f8c91cb10..1c21bfbb0 100644 --- a/data-serving/reusable-data-service/tests/data/case.minimal.json +++ b/data-serving/reusable-data-service/tests/data/case.minimal.json @@ -2,5 +2,7 @@ "confirmationDate": "2021-12-31T01:23:45.678Z", "caseReference": { "sourceId": "fedc09876543210987654321" - } + }, + "caseStatus": "probable", + "pathogenStatus": "emerging" } \ No newline at end of file diff --git a/data-serving/reusable-data-service/tests/data/case.with_location.json b/data-serving/reusable-data-service/tests/data/case.with_location.json index 139facc78..7f9dbcfe5 100644 --- a/data-serving/reusable-data-service/tests/data/case.with_location.json +++ b/data-serving/reusable-data-service/tests/data/case.with_location.json @@ -15,5 +15,7 @@ "properties": { "country": "IND" } - } + }, + "caseStatus": "probable", + "pathogenStatus": "unknown" } \ No newline at end of file diff --git a/data-serving/reusable-data-service/tests/test_case_controller_crud_actions.py b/data-serving/reusable-data-service/tests/test_case_controller_crud_actions.py index c1a1e15e7..2c1ce6b99 100644 --- a/data-serving/reusable-data-service/tests/test_case_controller_crud_actions.py +++ b/data-serving/reusable-data-service/tests/test_case_controller_crud_actions.py @@ -95,42 +95,35 @@ def test_create_case_with_missing_properties_raises(case_controller): def test_create_case_with_invalid_data_raises(case_controller): + with open("./tests/data/case.minimal.json", "r") as minimal_file: + case_doc = json.load(minimal_file) + del case_doc["caseStatus"] with pytest.raises(ValidationError): - 
case_controller.create_case( - { - "confirmationDate": date(2001, 3, 17), - "caseReference": {"sourceId": "123ab4567890123ef4567890"}, - } - ) + case_controller.create_case(case_doc) def test_create_valid_case_adds_to_collection(case_controller): - case_controller.create_case( - { - "confirmationDate": date(2021, 6, 3), - "caseReference": {"sourceId": "123ab4567890123ef4567890"}, - } - ) + with open("./tests/data/case.minimal.json", "r") as minimal_file: + case_doc = json.load(minimal_file) + case_controller.create_case(case_doc) assert case_controller.store.count_cases() == 1 def test_create_valid_case_with_negative_count_raises(case_controller): + with open("./tests/data/case.minimal.json", "r") as minimal_file: + case_doc = json.load(minimal_file) with pytest.raises(PreconditionUnsatisfiedError): case_controller.create_case( - { - "confirmationDate": date(2021, 6, 3), - "caseReference": {"sourceId": "123ab4567890123ef4567890"}, - }, + case_doc, num_cases=-7, ) def test_create_valid_case_with_positive_count_adds_to_collection(case_controller): + with open("./tests/data/case.minimal.json", "r") as minimal_file: + case_doc = json.load(minimal_file) case_controller.create_case( - { - "confirmationDate": date(2021, 6, 3), - "caseReference": {"sourceId": "123ab4567890123ef4567890"}, - }, + case_doc, num_cases=7, ) assert case_controller.store.count_cases() == 7 @@ -144,12 +137,9 @@ def test_validate_case_with_invalid_case_raises(case_controller): def test_validate_case_with_valid_case_does_not_add_case( case_controller, ): - case_controller.validate_case_dictionary( - { - "confirmationDate": date(2021, 6, 3), - "caseReference": {"sourceId": "123ab4567890123ef4567890"}, - } - ) + with open("./tests/data/case.minimal.json", "r") as minimal_file: + case_doc = json.load(minimal_file) + case_controller.validate_case_dictionary(case_doc) assert case_controller.store.count_cases() == 0 @@ -182,15 +172,15 @@ def test_batch_upsert_reports_errors(case_controller): response = 
case_controller.batch_upsert({"cases": [{}]}) assert response.numCreated == 0 assert response.numUpdated == 0 - assert response.errors == {"0": "confirmationDate must have a value"} + assert response.errors == {"0": "caseStatus must have a value"} def test_download_with_no_query_is_ok(case_controller): + with open("./tests/data/case.minimal.json", "r") as minimal_file: + case_doc = json.load(minimal_file) + _ = case_controller.create_case( - { - "confirmationDate": date(2021, 6, 3), - "caseReference": {"sourceId": "123ab4567890123ef4567890"}, - }, + case_doc, num_cases=2, ) generator = case_controller.download(format="csv") @@ -218,13 +208,12 @@ def test_download_with_query_and_case_ids_throws(case_controller): def test_download_cases_by_id(case_controller): + with open("./tests/data/case.minimal.json", "r") as minimal_file: + case_doc = json.load(minimal_file) for i in range(4): - _ = case_controller.create_case( - { - "confirmationDate": date(2021, 6, i + 1), - "caseReference": {"sourceId": "123ab4567890123ef4567890"}, - }, - ) + this_case = dict(case_doc) + this_case["confirmationDate"] = date(2021, 6, i + 1) + _ = case_controller.create_case(this_case) generator = case_controller.download("csv", case_ids=["1", "3"]) result = "" for chunk in generator(): @@ -235,13 +224,12 @@ def test_download_cases_by_id(case_controller): def test_filter_cases_by_query(case_controller): + with open("./tests/data/case.minimal.json", "r") as minimal_file: + case_doc = json.load(minimal_file) for i in range(4): - _ = case_controller.create_case( - { - "confirmationDate": date(2021, 6, i + 1), - "caseReference": {"sourceId": "123ab4567890123ef4567890"}, - }, - ) + this_case = dict(case_doc) + this_case["confirmationDate"] = date(2021, 6, i + 1) + _ = case_controller.create_case(this_case) generator = case_controller.download("csv", filter="dateconfirmedbefore:2021-06-03") result = "" for chunk in generator(): @@ -251,11 +239,10 @@ def test_filter_cases_by_query(case_controller): 
def test_download_supports_tsv(case_controller): + with open("./tests/data/case.minimal.json", "r") as minimal_file: + case_doc = json.load(minimal_file) _ = case_controller.create_case( - { - "confirmationDate": date(2021, 6, 3), - "caseReference": {"sourceId": "123ab4567890123ef4567890"}, - }, + case_doc, num_cases=2, ) generator = case_controller.download(format="tsv") @@ -268,11 +255,10 @@ def test_download_supports_tsv(case_controller): def test_download_supports_json(case_controller): + with open("./tests/data/case.minimal.json", "r") as minimal_file: + case_doc = json.load(minimal_file) _ = case_controller.create_case( - { - "confirmationDate": date(2021, 6, 3), - "caseReference": {"sourceId": "123ab4567890123ef4567890"}, - }, + case_doc, num_cases=2, ) generator = case_controller.download(format="json") @@ -281,8 +267,8 @@ def test_download_supports_json(case_controller): output += chunk result = json.loads(output) assert len(result) == 2 - assert result[0]["confirmationDate"] == "2021-06-03" - assert result[1]["caseReference"]["sourceId"] == "123ab4567890123ef4567890" + assert result[0]["confirmationDate"] == "2021-12-31" + assert result[1]["caseReference"]["sourceId"] == "fedc09876543210987654321" def test_batch_status_change_rejects_invalid_status(case_controller): @@ -292,45 +278,39 @@ def test_batch_status_change_rejects_invalid_status(case_controller): def test_batch_status_change_rejects_exclusion_with_no_note(case_controller): with pytest.raises(ValidationError): - case_controller.batch_status_change("EXCLUDED", case_ids=[]) + case_controller.batch_status_change("omit_error", case_ids=[]) def test_batch_status_change_excludes_cases_with_note(case_controller): + with open("./tests/data/case.minimal.json", "r") as minimal_file: + case_doc = json.load(minimal_file) for i in range(4): - _ = case_controller.create_case( - { - "confirmationDate": date(2021, 6, i + 1), - "caseReference": {"sourceId": "123ab4567890123ef4567890"}, - }, - ) + this_case = 
dict(case_doc) + this_case["confirmationDate"] = date(2021, 6, i + 1) + _ = case_controller.create_case(this_case) case_controller.batch_status_change( - "EXCLUDED", "I dislike this case", case_ids=["1", "2"] + "omit_error", "I dislike this case", case_ids=["1", "2"] ) an_excluded_case = case_controller.store.case_by_id("1") - assert an_excluded_case.caseReference.status == "EXCLUDED" + assert an_excluded_case.caseStatus == "omit_error" assert an_excluded_case.caseExclusion.note == "I dislike this case" another_case = case_controller.store.case_by_id("3") - assert another_case.caseReference.status == "UNVERIFIED" + assert another_case.caseStatus == "probable" assert another_case.caseExclusion is None @freezegun.freeze_time("Aug 13th, 2021") def test_batch_status_change_records_date_of_exclusion(case_controller): - case_controller.create_case( - { - "confirmationDate": date(2021, 6, 23), - "caseReference": { - "sourceId": "123ab4567890123ef4567890", - }, - } - ) + with open("./tests/data/case.minimal.json", "r") as minimal_file: + case_doc = json.load(minimal_file) + case_controller.create_case(case_doc) case_controller.batch_status_change( - "EXCLUDED", "Mistakes have been made", case_ids=["1"] + "omit_error", "Mistakes have been made", case_ids=["1"] ) case = case_controller.store.case_by_id("1") - assert case.caseReference.status == "EXCLUDED" + assert case.caseStatus == "omit_error" assert case.caseExclusion.note == "Mistakes have been made" assert case.caseExclusion.date == date(2021, 8, 13) @@ -338,41 +318,31 @@ def test_batch_status_change_records_date_of_exclusion(case_controller): def test_batch_status_change_removes_exclusion_data_on_unexcluding_case( case_controller, ): - case_controller.create_case( - { - "confirmationDate": date(2021, 6, 23), - "caseReference": { - "sourceId": "123ab4567890123ef4567890", - }, - } - ) + with open("./tests/data/case.minimal.json", "r") as minimal_file: + case_doc = json.load(minimal_file) + 
case_controller.create_case(case_doc) case_controller.batch_status_change( - "EXCLUDED", "Mistakes have been made", case_ids=["1"] + "omit_error", "Mistakes have been made", case_ids=["1"] ) - case_controller.batch_status_change("UNVERIFIED", case_ids=["1"]) + case_controller.batch_status_change("suspected", case_ids=["1"]) case = case_controller.store.case_by_id("1") - assert case.caseReference.status == "UNVERIFIED" + assert case.caseStatus == "suspected" assert case.caseExclusion is None def test_batch_status_change_by_query(case_controller): - case_controller.create_case( - { - "confirmationDate": date(2021, 6, 23), - "caseReference": { - "sourceId": "123ab4567890123ef4567890", - }, - } - ) + with open("./tests/data/case.minimal.json", "r") as minimal_file: + case_doc = json.load(minimal_file) + case_controller.create_case(case_doc) case_controller.batch_status_change( - "EXCLUDED", "Mistakes have been made", filter="dateconfirmedafter:2021-06-01" + "omit_error", "Mistakes have been made", filter="dateconfirmedafter:2021-06-01" ) case = case_controller.store.case_by_id("1") - assert case.caseReference.status == "EXCLUDED" + assert case.caseStatus == "omit_error" assert case.caseExclusion is not None @@ -382,115 +352,87 @@ def test_excluded_case_ids_raises_if_no_source_id(case_controller): def test_excluded_case_ids_returns_empty_if_no_matching_cases(case_controller): - case_controller.create_case( - { - "confirmationDate": date(2021, 6, 23), - "caseReference": { - "sourceId": "123ab4567890123ef4567890", - "status": "VERIFIED", - }, - } - ) - ids = case_controller.excluded_case_ids("123ab4567890123ef4567890") + with open("./tests/data/case.minimal.json", "r") as minimal_file: + case_doc = json.load(minimal_file) + case_controller.create_case(case_doc) + ids = case_controller.excluded_case_ids("fedc09876543210987654321") assert len(ids) == 0 def test_excluded_case_ids_returns_ids_of_matching_cases(case_controller): - case_controller.create_case( - { - 
"confirmationDate": date(2021, 6, 23), - "caseReference": { - "sourceId": "123ab4567890123ef4567890", - "status": "EXCLUDED", - }, - "caseExclusion": { - "date": date(2022, 5, 17), - "note": "I told him we already have one", - }, - } - ) - ids = case_controller.excluded_case_ids("123ab4567890123ef4567890") + with open("./tests/data/case.minimal.json", "r") as minimal_file: + case_doc = json.load(minimal_file) + case_doc["caseStatus"] = "omit_error" + case_doc["caseExclusion"] = { + "date": date(2022, 5, 17), + "note": "I told him we already have one", + } + case_controller.create_case(case_doc) + ids = case_controller.excluded_case_ids("fedc09876543210987654321") assert len(ids) == 1 assert ids[0] == "1" def test_updating_missing_case_should_throw_NotFoundError(case_controller): - case_controller.create_case( - { - "confirmationDate": date(2021, 6, 23), - "caseReference": { - "sourceId": "123ab4567890123ef4567890", - "status": "EXCLUDED", - }, - "caseExclusion": { - "date": date(2022, 5, 17), - "note": "I told him we already have one", - }, - } - ) + with open("./tests/data/case.minimal.json", "r") as minimal_file: + case_doc = json.load(minimal_file) + case_doc["caseStatus"] = "omit_error" + case_doc["caseExclusion"] = { + "date": date(2022, 5, 17), + "note": "I told him we already have one", + } + case_controller.create_case(case_doc) with pytest.raises(NotFoundError): case_controller.update_case("2", {"caseExclusion": {"note": "Duplicate"}}) def test_updating_case_to_invalid_state_should_throw_ValidationError(case_controller): - case_controller.create_case( - { - "confirmationDate": date(2021, 6, 23), - "caseReference": { - "sourceId": "123ab4567890123ef4567890", - "status": "EXCLUDED", - }, - "caseExclusion": { - "date": date(2022, 5, 17), - "note": "I told him we already have one", - }, - } - ) + with open("./tests/data/case.minimal.json", "r") as minimal_file: + case_doc = json.load(minimal_file) + case_doc["caseStatus"] = "omit_error" + 
case_doc["caseExclusion"] = { + "date": date(2022, 5, 17), + "note": "I told him we already have one", + } + case_controller.create_case(case_doc) with pytest.raises(ValidationError): case_controller.update_case("1", {"confirmationDate": None}) def test_updating_case_to_valid_state_returns_updated_case(case_controller): - case_controller.create_case( - { - "confirmationDate": date(2021, 6, 23), - "caseReference": { - "sourceId": "123ab4567890123ef4567890", - "status": "EXCLUDED", - }, - "caseExclusion": { - "date": date(2022, 5, 17), - "note": "I told him we already have one", - }, - } - ) - + with open("./tests/data/case.minimal.json", "r") as minimal_file: + case_doc = json.load(minimal_file) + case_doc["caseStatus"] = "omit_error" + case_doc["caseExclusion"] = { + "date": date(2022, 5, 17), + "note": "I told him we already have one", + } + case_controller.create_case(case_doc) new_case = case_controller.update_case("1", {"confirmationDate": date(2021, 6, 24)}) assert new_case.confirmationDate == date(2021, 6, 24) def test_batch_update_cases_returns_number_of_modified_cases(case_controller): + with open("./tests/data/case.minimal.json", "r") as minimal_file: + case_doc = json.load(minimal_file) for i in range(4): - _ = case_controller.create_case( - { - "confirmationDate": date(2021, 6, i + 1), - "caseReference": {"sourceId": "123ab4567890123ef4567890"}, - }, - ) + this_case = dict(case_doc) + this_case["confirmationDate"] = date(2021, 6, i + 1) + _ = case_controller.create_case(this_case) update_one = { "_id": "1", - "caseReference": {"status": "EXCLUDED"}, + "caseStatus": "omit_error", "caseExclusion": {"date": date(2022, 2, 2), "note": "Bad case no likey"}, } - update_two = {"_id": "2", "caseReference": {"status": "VERIFIED"}} + update_two = {"_id": "2", "caseStatus": "confirmed"} num_modified = case_controller.batch_update([update_one, update_two]) assert num_modified == 2 case_one = case_controller.get_case("1") - assert case_one.caseReference.status == 
"EXCLUDED" + assert case_one.caseStatus == "omit_error" case_two = case_controller.get_case("2") - assert case_two.caseReference.status == "VERIFIED" + assert case_two.caseStatus == "confirmed" case_three = case_controller.get_case("3") - assert case_three.caseReference.status == "UNVERIFIED" + assert case_three.caseStatus == "probable" def test_batch_update_raises_if_id_not_supplied(case_controller): @@ -500,15 +442,9 @@ def test_batch_update_raises_if_id_not_supplied(case_controller): def test_batch_update_raises_if_case_would_be_invalid(case_controller): - case_controller.create_case( - { - "confirmationDate": date(2021, 6, 23), - "caseReference": { - "sourceId": "123ab4567890123ef4567890", - "status": "VERIFIED", - }, - } - ) + with open("./tests/data/case.minimal.json", "r") as minimal_file: + case_doc = json.load(minimal_file) + case_controller.create_case(case_doc) update = {"_id": "1", "confirmationDate": None} with pytest.raises(ValidationError): case_controller.batch_update([update]) @@ -521,13 +457,12 @@ def test_batch_update_raises_if_case_not_found(case_controller): def test_batch_update_query_returns_modified_count(case_controller): + with open("./tests/data/case.minimal.json", "r") as minimal_file: + case_doc = json.load(minimal_file) for i in range(4): - _ = case_controller.create_case( - { - "confirmationDate": date(2021, 6, i + 1), - "caseReference": {"sourceId": "123ab4567890123ef4567890"}, - }, - ) + this_case = dict(case_doc) + this_case["confirmationDate"] = date(2021, 6, i + 1) + _ = case_controller.create_case(this_case) update = {"confirmationDate": date(2022, 5, 13)} query = None # didn't implement rich queries on the test store modified = case_controller.batch_update_query(query, update) @@ -535,13 +470,12 @@ def test_batch_update_query_returns_modified_count(case_controller): def test_delete_present_case_deletes_case(case_controller): + with open("./tests/data/case.minimal.json", "r") as minimal_file: + case_doc = json.load(minimal_file) 
for i in range(4): - _ = case_controller.create_case( - { - "confirmationDate": date(2021, 6, i + 1), - "caseReference": {"sourceId": "123ab4567890123ef4567890"}, - }, - ) + this_case = dict(case_doc) + this_case["confirmationDate"] = date(2021, 6, i + 1) + _ = case_controller.create_case(this_case) case_controller.delete_case("1") assert case_controller.store.count_cases() == 3 assert case_controller.store.case_by_id("1") is None @@ -573,26 +507,24 @@ def test_cannot_batch_delete_with_malformed_query(case_controller): def test_cannot_batch_delete_more_cases_than_threshold(case_controller): + with open("./tests/data/case.minimal.json", "r") as minimal_file: + case_doc = json.load(minimal_file) for i in range(4): - _ = case_controller.create_case( - { - "confirmationDate": date(2021, 6, i + 1), - "caseReference": {"sourceId": "123ab4567890123ef4567890"}, - }, - ) + this_case = dict(case_doc) + this_case["confirmationDate"] = date(2021, 6, i + 1) + _ = case_controller.create_case(this_case) with pytest.raises(ValidationError): case_controller.batch_delete("dateconfirmedafter:2021-05-02", None, 1) assert case_controller.store.count_cases() == 4 def test_batch_delete_with_case_ids(case_controller): + with open("./tests/data/case.minimal.json", "r") as minimal_file: + case_doc = json.load(minimal_file) for i in range(4): - _ = case_controller.create_case( - { - "confirmationDate": date(2021, 6, i + 1), - "caseReference": {"sourceId": "123ab4567890123ef4567890"}, - }, - ) + this_case = dict(case_doc) + this_case["confirmationDate"] = date(2021, 6, i + 1) + _ = case_controller.create_case(this_case) case_controller.batch_delete(None, ["1", "2"]) assert case_controller.store.count_cases() == 2 @@ -603,12 +535,11 @@ def test_batch_delete_with_query(case_controller): which is a lot of complexity for little value. 
Look to the end-to-end tests for better tests of the filtering logic, because the filters should definitely work in production data stores!""" + with open("./tests/data/case.minimal.json", "r") as minimal_file: + case_doc = json.load(minimal_file) for i in range(4): - _ = case_controller.create_case( - { - "confirmationDate": date(2021, 6, i + 1), - "caseReference": {"sourceId": "123ab4567890123ef4567890"}, - }, - ) + this_case = dict(case_doc) + this_case["confirmationDate"] = date(2021, 6, i + 1) + _ = case_controller.create_case(this_case) case_controller.batch_delete("dateconfirmedafter:2021-05-02", None) assert case_controller.store.count_cases() == 0 diff --git a/data-serving/reusable-data-service/tests/test_case_end_to_end.py b/data-serving/reusable-data-service/tests/test_case_end_to_end.py index 55c3ff32f..93ebcca25 100644 --- a/data-serving/reusable-data-service/tests/test_case_end_to_end.py +++ b/data-serving/reusable-data-service/tests/test_case_end_to_end.py @@ -10,19 +10,10 @@ def test_get_case_with_known_id(client_with_patched_mongo): # insert a case + with open("./tests/data/case.minimal.json") as case_file: + case_doc = json.load(case_file) db = pymongo.MongoClient("mongodb://localhost:27017/outbreak") - case_id = ( - db["outbreak"]["cases"] - .insert_one( - { - "confirmationDate": datetime(2021, 12, 31, 1, 23, 45, 678), - "caseReference": { - "sourceId": bson.ObjectId("fedc12345678901234567890") - }, - } - ) - .inserted_id - ) + case_id = db["outbreak"]["cases"].insert_one(case_doc).inserted_id response = client_with_patched_mongo.get(f"/api/cases/{str(case_id)}") result = response.get_json() assert response.status_code == 200 @@ -48,17 +39,9 @@ def test_list_cases_when_none_present_is_empty_list(client_with_patched_mongo): def test_list_cases_with_pagination_query(client_with_patched_mongo): db = pymongo.MongoClient("mongodb://localhost:27017/outbreak") - db["outbreak"]["cases"].insert_many( - [ - { - "confirmationDate": datetime(2020, 12, 24), - 
"caseReference": { - "sourceId": bson.ObjectId("fedc12345678901234567890") - }, - } - for i in range(25) - ] - ) + with open("./tests/data/case.minimal.json") as case_file: + case_doc = json.load(case_file) + db["outbreak"]["cases"].insert_many([dict(case_doc) for i in range(25)]) response = client_with_patched_mongo.get(f"/api/cases?page=2&limit=10") assert response.status_code == 200 assert len(response.json["cases"]) == 10 @@ -77,18 +60,13 @@ def test_list_cases_with_negative_page_rejected(client_with_patched_mongo): def test_list_cases_filter_confirmation_date_before(client_with_patched_mongo): + with open("./tests/data/case.minimal.json") as case_file: + case_doc = json.load(case_file) db = pymongo.MongoClient("mongodb://localhost:27017/outbreak") - db["outbreak"]["cases"].insert_many( - [ - { - "confirmationDate": datetime(2022, 5, i), - "caseReference": { - "sourceId": bson.ObjectId("fedc12345678901234567890") - }, - } - for i in range(1, 32) - ] - ) + cases = [dict(case_doc) for i in range(1, 32)] + for i in range(1, 32): + cases[i - 1]["confirmationDate"] = datetime(2022, 5, i) + db["outbreak"]["cases"].insert_many(cases) response = client_with_patched_mongo.get( f"/api/cases?q=dateconfirmedbefore%3a2022-05-10" ) @@ -102,18 +80,13 @@ def test_list_cases_filter_confirmation_date_before(client_with_patched_mongo): def test_list_cases_filter_confirmation_date_after(client_with_patched_mongo): + with open("./tests/data/case.minimal.json") as case_file: + case_doc = json.load(case_file) db = pymongo.MongoClient("mongodb://localhost:27017/outbreak") - db["outbreak"]["cases"].insert_many( - [ - { - "confirmationDate": datetime(2022, 5, i), - "caseReference": { - "sourceId": bson.ObjectId("fedc12345678901234567890") - }, - } - for i in range(1, 32) - ] - ) + cases = [dict(case_doc) for i in range(1, 32)] + for i in range(1, 32): + cases[i - 1]["confirmationDate"] = datetime(2022, 5, i) + db["outbreak"]["cases"].insert_many(cases) response = 
client_with_patched_mongo.get( f"/api/cases?q=dateconfirmedafter%3a2022-05-10" ) @@ -129,18 +102,13 @@ def test_list_cases_filter_confirmation_date_after(client_with_patched_mongo): def test_list_cases_filter_confirmation_date_before_and_after( client_with_patched_mongo, ): + with open("./tests/data/case.minimal.json") as case_file: + case_doc = json.load(case_file) db = pymongo.MongoClient("mongodb://localhost:27017/outbreak") - db["outbreak"]["cases"].insert_many( - [ - { - "confirmationDate": datetime(2022, 5, i), - "caseReference": { - "sourceId": bson.ObjectId("fedc12345678901234567890") - }, - } - for i in range(1, 32) - ] - ) + cases = [dict(case_doc) for i in range(1, 32)] + for i in range(1, 32): + cases[i - 1]["confirmationDate"] = datetime(2022, 5, i) + db["outbreak"]["cases"].insert_many(cases) response = client_with_patched_mongo.get( f"/api/cases?q=dateconfirmedafter%3a2022-05-10%20dateconfirmedbefore%3a2022-05-13" ) @@ -155,18 +123,13 @@ def test_list_cases_filter_confirmation_date_before_and_after( def test_list_cases_no_matching_results(client_with_patched_mongo): + with open("./tests/data/case.minimal.json") as case_file: + case_doc = json.load(case_file) db = pymongo.MongoClient("mongodb://localhost:27017/outbreak") - db["outbreak"]["cases"].insert_many( - [ - { - "confirmationDate": datetime(2022, 5, i), - "caseReference": { - "sourceId": bson.ObjectId("fedc12345678901234567890") - }, - } - for i in range(1, 32) - ] - ) + cases = [dict(case_doc) for i in range(1, 32)] + for i in range(1, 32): + cases[i - 1]["confirmationDate"] = datetime(2022, 5, i) + db["outbreak"]["cases"].insert_many(cases) response = client_with_patched_mongo.get( f"/api/cases?q=dateconfirmedafter%3a2023-05-10" ) @@ -181,18 +144,17 @@ def test_list_cases_with_bad_filter_rejected(client_with_patched_mongo): def test_post_case_list_cases_round_trip(client_with_patched_mongo): + with open("./tests/data/case.minimal.json") as case_file: + case_doc = json.load(case_file) 
post_response = client_with_patched_mongo.post( "/api/cases", - json={ - "confirmationDate": "2022-01-23T13:45:01.234Z", - "caseReference": {"sourceId": bson.ObjectId("fedc12345678901234567890")}, - }, + json=case_doc, ) assert post_response.status_code == 201 get_response = client_with_patched_mongo.get("/api/cases") assert get_response.status_code == 200 assert len(get_response.json["cases"]) == 1 - assert get_response.json["cases"][0]["confirmationDate"] == "2022-01-23" + assert get_response.json["cases"][0]["confirmationDate"] == "2021-12-31" def test_post_case_list_cases_geojson_round_trip(client_with_patched_mongo): @@ -210,27 +172,25 @@ def test_post_case_list_cases_geojson_round_trip(client_with_patched_mongo): def test_post_multiple_case_list_cases_round_trip(client_with_patched_mongo): + with open("./tests/data/case.minimal.json") as case_file: + case_doc = json.load(case_file) post_response = client_with_patched_mongo.post( "/api/cases?num_cases=3", - json={ - "confirmationDate": "2022-01-23T13:45:01.234Z", - "caseReference": {"sourceId": bson.ObjectId("fedc12345678901234567890")}, - }, + json=case_doc, ) assert post_response.status_code == 201 get_response = client_with_patched_mongo.get("/api/cases") assert get_response.status_code == 200 assert len(get_response.json["cases"]) == 3 - assert get_response.json["cases"][0]["confirmationDate"] == "2022-01-23" + assert get_response.json["cases"][0]["confirmationDate"] == "2021-12-31" def test_post_case_validate_only(client_with_patched_mongo): + with open("./tests/data/case.minimal.json") as case_file: + case_doc = json.load(case_file) post_response = client_with_patched_mongo.post( "/api/cases?validate_only=true", - json={ - "confirmationDate": "2022-01-23T13:45:01.234Z", - "caseReference": {"sourceId": bson.ObjectId("fedc12345678901234567890")}, - }, + json=case_doc, ) assert post_response.status_code == 204 get_response = client_with_patched_mongo.get("/api/cases") @@ -239,18 +199,11 @@ def 
test_post_case_validate_only(client_with_patched_mongo): def test_batch_upsert_case(client_with_patched_mongo): + with open("./tests/data/case.minimal.json") as case_file: + case_doc = json.load(case_file) post_response = client_with_patched_mongo.post( "/api/cases/batchUpsert", - json={ - "cases": [ - { - "confirmationDate": "2022-01-23T13:45:01.234Z", - "caseReference": { - "sourceId": "abcd12345678901234567890", - }, - } - ] - }, + json={"cases": [case_doc]}, ) assert post_response.status_code == 200 assert post_response.json["errors"] == {} @@ -261,18 +214,13 @@ def test_batch_upsert_case(client_with_patched_mongo): def test_download_all_cases_csv(client_with_patched_mongo): + with open("./tests/data/case.minimal.json") as case_file: + case_doc = json.load(case_file) db = pymongo.MongoClient("mongodb://localhost:27017/outbreak") - db["outbreak"]["cases"].insert_many( - [ - { - "confirmationDate": datetime(2022, 5, i), - "caseReference": { - "sourceId": bson.ObjectId("fedc12345678901234567890") - }, - } - for i in range(1, 4) - ] - ) + cases = [dict(case_doc) for i in range(1, 4)] + for i in range(1, 4): + cases[i - 1]["confirmationDate"] = datetime(2022, 5, i) + db["outbreak"]["cases"].insert_many(cases) post_response = client_with_patched_mongo.post( "/api/cases/download", json={"format": "csv"} ) @@ -282,18 +230,13 @@ def test_download_all_cases_csv(client_with_patched_mongo): def test_download_selected_cases_tsv(client_with_patched_mongo): + with open("./tests/data/case.minimal.json") as case_file: + case_doc = json.load(case_file) db = pymongo.MongoClient("mongodb://localhost:27017/outbreak") - db["outbreak"]["cases"].insert_many( - [ - { - "confirmationDate": datetime(2022, 5, i), - "caseReference": { - "sourceId": bson.ObjectId("fedc12345678901234567890") - }, - } - for i in range(1, 4) - ] - ) + cases = [dict(case_doc) for i in range(1, 4)] + for i in range(1, 4): + cases[i - 1]["confirmationDate"] = datetime(2022, 5, i) + 
db["outbreak"]["cases"].insert_many(cases) post_response = client_with_patched_mongo.post( "/api/cases/download", json={"format": "tsv", "query": "dateconfirmedbefore:2022-05-02"}, @@ -306,18 +249,13 @@ def test_download_selected_cases_tsv(client_with_patched_mongo): def test_download_selected_cases_tsv(client_with_patched_mongo): + with open("./tests/data/case.minimal.json") as case_file: + case_doc = json.load(case_file) db = pymongo.MongoClient("mongodb://localhost:27017/outbreak") - inserted = db["outbreak"]["cases"].insert_many( - [ - { - "confirmationDate": datetime(2022, 5, i), - "caseReference": { - "sourceId": bson.ObjectId("fedc12345678901234567890") - }, - } - for i in range(1, 4) - ] - ) + cases = [dict(case_doc) for i in range(1, 4)] + for i in range(1, 4): + cases[i - 1]["confirmationDate"] = datetime(2022, 5, i) + inserted = db["outbreak"]["cases"].insert_many(cases) ids = [str(anId) for anId in inserted.inserted_ids] post_response = client_with_patched_mongo.post( "/api/cases/download", @@ -331,52 +269,37 @@ def test_download_selected_cases_tsv(client_with_patched_mongo): def test_exclude_selected_cases(client_with_patched_mongo): + with open("./tests/data/case.minimal.json") as case_file: + case_doc = json.load(case_file) db = pymongo.MongoClient("mongodb://localhost:27017/outbreak") - inserted = ( - db["outbreak"]["cases"] - .insert_one( - { - "confirmationDate": datetime(2022, 5, 10), - "caseReference": { - "sourceId": bson.ObjectId("fedc12345678901234567890") - }, - } - ) - .inserted_id - ) + inserted = db["outbreak"]["cases"].insert_one(case_doc).inserted_id post_response = client_with_patched_mongo.post( "/api/cases/batchStatusChange", - json={"status": "EXCLUDED", "caseIds": [str(inserted)], "note": "Duplicate"}, + json={"status": "omit_error", "caseIds": [str(inserted)], "note": "Duplicate"}, ) assert post_response.status_code == 204 get_response = client_with_patched_mongo.get(f"/api/cases/{str(inserted)}") assert get_response.status_code 
== 200 document = get_response.get_json() - assert document["caseReference"]["status"] == "EXCLUDED" + assert document["caseStatus"] == "omit_error" assert document["caseExclusion"]["note"] == "Duplicate" def test_excluded_case_ids(client_with_patched_mongo): - db = pymongo.MongoClient("mongodb://localhost:27017/outbreak") - inserted = ( - db["outbreak"]["cases"] - .insert_one( - { - "confirmationDate": datetime(2022, 5, 10), - "caseReference": { - "sourceId": bson.ObjectId("fedc12345678901234567890") - }, - } - ) - .inserted_id + with open("./tests/data/case.minimal.json") as case_file: + case_doc = json.load(case_file) + case_doc["caseReference"]["sourceId"] = bson.ObjectId( + case_doc["caseReference"]["sourceId"] ) + db = pymongo.MongoClient("mongodb://localhost:27017/outbreak") + inserted = db["outbreak"]["cases"].insert_one(case_doc).inserted_id post_response = client_with_patched_mongo.post( "/api/cases/batchStatusChange", - json={"status": "EXCLUDED", "caseIds": [str(inserted)], "note": "Duplicate"}, + json={"status": "omit_error", "caseIds": [str(inserted)], "note": "Duplicate"}, ) assert post_response.status_code == 204 get_response = client_with_patched_mongo.get( - f"/api/excludedCaseIds?sourceId=fedc12345678901234567890" + f"/api/excludedCaseIds?sourceId=fedc09876543210987654321" ) assert get_response.status_code == 200 ids = get_response.get_json() @@ -385,23 +308,18 @@ def test_excluded_case_ids(client_with_patched_mongo): def test_filter_excluded_case_ids(client_with_patched_mongo): + with open("./tests/data/case.excluded.json") as case_file: + case_doc = json.load(case_file) + case_doc["caseReference"]["sourceId"] = bson.ObjectId( + case_doc["caseReference"]["sourceId"] + ) + cases = [] + for i in range(1, 4): + this_case = dict(case_doc) + this_case["confirmationDate"] = datetime(2022, 5, i) + cases.append(this_case) db = pymongo.MongoClient("mongodb://localhost:27017/outbreak") - inserted = db["outbreak"]["cases"].insert_many( - [ - { - 
"confirmationDate": datetime(2022, 5, i), - "caseReference": { - "sourceId": bson.ObjectId("fedc12345678901234567890"), - "status": "EXCLUDED", - }, - "caseExclusion": { - "date": datetime(2022, 6, i), - "note": f"Excluded upon this day, the {i}th of June", - }, - } - for i in range(1, 4) - ] - ) + inserted = db["outbreak"]["cases"].insert_many(cases) inserted_ids = [str(anId) for anId in inserted.inserted_ids] get_response = client_with_patched_mongo.get( f"/api/excludedCaseIds?sourceId=fedc12345678901234567890&query=dateconfirmedbefore%3a2022-05-03" @@ -415,19 +333,10 @@ def test_filter_excluded_case_ids(client_with_patched_mongo): def test_update_case(client_with_patched_mongo): + with open("./tests/data/case.minimal.json") as case_file: + case_doc = json.load(case_file) db = pymongo.MongoClient("mongodb://localhost:27017/outbreak") - inserted = ( - db["outbreak"]["cases"] - .insert_one( - { - "confirmationDate": datetime(2022, 5, 10), - "caseReference": { - "sourceId": bson.ObjectId("fedc12345678901234567890") - }, - } - ) - .inserted_id - ) + inserted = db["outbreak"]["cases"].insert_one(case_doc).inserted_id put_response = client_with_patched_mongo.put( f"/api/cases/{str(inserted)}", json={"confirmationDate": "2022-05-11"} ) @@ -436,19 +345,10 @@ def test_update_case(client_with_patched_mongo): def test_update_object_id_on_case(client_with_patched_mongo): + with open("./tests/data/case.excluded.json") as case_file: + case_doc = json.load(case_file) db = pymongo.MongoClient("mongodb://localhost:27017/outbreak") - inserted = ( - db["outbreak"]["cases"] - .insert_one( - { - "confirmationDate": datetime(2022, 5, 10), - "caseReference": { - "sourceId": bson.ObjectId("fedc12345678901234567890") - }, - } - ) - .inserted_id - ) + inserted = db["outbreak"]["cases"].insert_one(case_doc).inserted_id put_response = client_with_patched_mongo.put( f"/api/cases/{str(inserted)}", json={"caseReference": {"sourceId": "fedc1234567890123456789a"}}, @@ -461,23 +361,18 @@ def 
test_update_object_id_on_case(client_with_patched_mongo): def test_batch_update(client_with_patched_mongo): + with open("./tests/data/case.excluded.json") as case_file: + case_doc = json.load(case_file) + case_doc["caseReference"]["sourceId"] = bson.ObjectId( + case_doc["caseReference"]["sourceId"] + ) + cases = [] + for i in range(1, 4): + this_case = dict(case_doc) + this_case["confirmationDate"] = datetime(2022, 5, i) + cases.append(this_case) db = pymongo.MongoClient("mongodb://localhost:27017/outbreak") - inserted = db["outbreak"]["cases"].insert_many( - [ - { - "confirmationDate": datetime(2022, 5, i), - "caseReference": { - "sourceId": bson.ObjectId("fedc12345678901234567890"), - "status": "EXCLUDED", - }, - "caseExclusion": { - "date": datetime(2022, 6, i), - "note": f"Excluded upon this day, the {i}th of June", - }, - } - for i in range(1, 4) - ] - ) + inserted = db["outbreak"]["cases"].insert_many(cases) inserted_ids = [str(anId) for anId in inserted.inserted_ids] updates = [ {"_id": inserted_ids[i - 1], "confirmationDate": f"2022-04-0{i}"} @@ -492,23 +387,18 @@ def test_batch_update(client_with_patched_mongo): def test_batch_update_query(client_with_patched_mongo): + with open("./tests/data/case.excluded.json") as case_file: + case_doc = json.load(case_file) + case_doc["caseReference"]["sourceId"] = bson.ObjectId( + case_doc["caseReference"]["sourceId"] + ) + cases = [] + for i in range(1, 4): + this_case = dict(case_doc) + this_case["confirmationDate"] = datetime(2022, 5, i) + cases.append(this_case) db = pymongo.MongoClient("mongodb://localhost:27017/outbreak") - inserted = db["outbreak"]["cases"].insert_many( - [ - { - "confirmationDate": datetime(2022, 5, i), - "caseReference": { - "sourceId": bson.ObjectId("fedc12345678901234567890"), - "status": "EXCLUDED", - }, - "caseExclusion": { - "date": datetime(2022, 6, i), - "note": f"Excluded upon this day, the {i}th of June", - }, - } - for i in range(1, 4) - ] - ) + inserted = 
db["outbreak"]["cases"].insert_many(cases) update = {"confirmationDate": f"2022-04-01"} post_result = client_with_patched_mongo.post( @@ -520,14 +410,11 @@ def test_batch_update_query(client_with_patched_mongo): def test_delete_case(client_with_patched_mongo): + with open("./tests/data/case.minimal.json") as case_file: + case_doc = json.load(case_file) db = pymongo.MongoClient("mongodb://localhost:27017/outbreak") collection = db["outbreak"]["cases"] - inserted = collection.insert_one( - { - "confirmationDate": datetime(2022, 5, 10), - "caseReference": {"sourceId": bson.ObjectId("fedc12345678901234567890")}, - } - ).inserted_id + inserted = collection.insert_one(case_doc).inserted_id delete_result = client_with_patched_mongo.delete(f"/api/cases/{str(inserted)}") assert delete_result.status_code == 204 assert collection.count_documents({}) == 0 @@ -540,23 +427,18 @@ def test_delete_case_404_on_wrong_id(client_with_patched_mongo): def test_batch_delete_with_ids(client_with_patched_mongo): + with open("./tests/data/case.excluded.json") as case_file: + case_doc = json.load(case_file) + case_doc["caseReference"]["sourceId"] = bson.ObjectId( + case_doc["caseReference"]["sourceId"] + ) + cases = [] + for i in range(1, 4): + this_case = dict(case_doc) + this_case["confirmationDate"] = datetime(2022, 5, i) + cases.append(this_case) db = pymongo.MongoClient("mongodb://localhost:27017/outbreak") - inserted = db["outbreak"]["cases"].insert_many( - [ - { - "confirmationDate": datetime(2022, 5, i), - "caseReference": { - "sourceId": bson.ObjectId("fedc12345678901234567890"), - "status": "EXCLUDED", - }, - "caseExclusion": { - "date": datetime(2022, 6, i), - "note": f"Excluded upon this day, the {i}th of June", - }, - } - for i in range(1, 4) - ] - ) + inserted = db["outbreak"]["cases"].insert_many(cases) inserted_ids = [str(anId) for anId in inserted.inserted_ids] delete_result = client_with_patched_mongo.delete( "/api/cases", json={"caseIds": [inserted_ids[1], inserted_ids[2]]} 
@@ -568,23 +450,18 @@ def test_batch_delete_with_ids(client_with_patched_mongo): def test_batch_delete_with_query(client_with_patched_mongo): + with open("./tests/data/case.excluded.json") as case_file: + case_doc = json.load(case_file) + case_doc["caseReference"]["sourceId"] = bson.ObjectId( + case_doc["caseReference"]["sourceId"] + ) + cases = [] + for i in range(1, 4): + this_case = dict(case_doc) + this_case["confirmationDate"] = datetime(2022, 5, i) + cases.append(this_case) db = pymongo.MongoClient("mongodb://localhost:27017/outbreak") - inserted = db["outbreak"]["cases"].insert_many( - [ - { - "confirmationDate": datetime(2022, 5, i), - "caseReference": { - "sourceId": bson.ObjectId("fedc12345678901234567890"), - "status": "EXCLUDED", - }, - "caseExclusion": { - "date": datetime(2022, 6, i), - "note": f"Excluded upon this day, the {i}th of June", - }, - } - for i in range(1, 4) - ] - ) + inserted = db["outbreak"]["cases"].insert_many(cases) inserted_ids = [str(anId) for anId in inserted.inserted_ids] delete_result = client_with_patched_mongo.delete( "/api/cases", json={"query": "dateconfirmedafter:2022-05-01"} diff --git a/data-serving/reusable-data-service/tests/test_case_model.py b/data-serving/reusable-data-service/tests/test_case_model.py index a9e9d2ffa..f17a20605 100644 --- a/data-serving/reusable-data-service/tests/test_case_model.py +++ b/data-serving/reusable-data-service/tests/test_case_model.py @@ -31,7 +31,7 @@ def test_csv_header(): header_line = Case.csv_header() assert ( header_line - == "_id,confirmationDate,caseReference.sourceId,caseReference.status,location.country,location.latitude,location.longitude,location.admin1,location.admin2,location.admin3\r\n" + == "_id,caseStatus,pathogenStatus,confirmationDate,caseReference.sourceId,location.country,location.latitude,location.longitude,location.admin1,location.admin2,location.admin3\r\n" ) @@ -43,8 +43,10 @@ def test_csv_row_with_no_id(): case = Case() case.confirmationDate = date(2022, 6, 13) 
case.caseReference = ref + case.caseStatus = "probable" + case.pathogenStatus = "emerging" csv = case.to_csv() - assert csv == ",2022-06-13,abcd12903478565647382910,UNVERIFIED,,,,,,\r\n" + assert csv == ",probable,emerging,2022-06-13,abcd12903478565647382910,,,,,,\r\n" def test_csv_row_with_id(): @@ -57,8 +59,10 @@ def test_csv_row_with_id(): case._id = id1 case.confirmationDate = date(2022, 6, 13) case.caseReference = ref + case.caseStatus = "probable" + case.pathogenStatus = "unknown" csv = case.to_csv() - assert csv == f"{id1},2022-06-13,{id2},UNVERIFIED,,,,,,\r\n" + assert csv == f"{id1},probable,unknown,2022-06-13,{id2},,,,,,\r\n" def test_apply_update_to_case(): @@ -78,11 +82,3 @@ def test_apply_update_that_unsets_value(): update = DocumentUpdate.from_dict({"confirmationDate": None}) case.apply_update(update) assert case.confirmationDate is None - - -def test_apply_nested_update(): - with open("./tests/data/case.minimal.json", "r") as minimal_file: - case = Case.from_json(minimal_file.read()) - update = DocumentUpdate.from_dict({"caseReference": {"status": "VERIFIED"}}) - case.apply_update(update) - assert case.caseReference.status == "VERIFIED" diff --git a/data-serving/reusable-data-service/tests/test_case_reference.py b/data-serving/reusable-data-service/tests/test_case_reference.py index 39c4e0f5a..0efbdb6b1 100644 --- a/data-serving/reusable-data-service/tests/test_case_reference.py +++ b/data-serving/reusable-data-service/tests/test_case_reference.py @@ -4,30 +4,10 @@ from data_service.model.case_reference import CaseReference -def test_csv_row_unexcluded(): +def test_csv_row(): identifier = "abcd12903478565647382910" oid = bson.ObjectId(identifier) ref = CaseReference() ref.sourceId = oid csv = ref.to_csv() - assert csv == "abcd12903478565647382910,UNVERIFIED\r\n" - - -def test_csv_row_excluded(): - identifier = "abcd12903478565647382910" - oid = bson.ObjectId(identifier) - ref = CaseReference() - ref.sourceId = oid - ref.status = "EXCLUDED" - csv = 
ref.to_csv() - assert csv == "abcd12903478565647382910,EXCLUDED\r\n" - - -def test_reference_must_have_valid_status(): - identifier = "abcd12903478565647382910" - oid = bson.ObjectId(identifier) - ref = CaseReference() - ref.sourceId = oid - ref.status = "BANANA" - with pytest.raises(ValueError): - ref.validate() + assert csv == "abcd12903478565647382910\r\n" diff --git a/data-serving/reusable-data-service/tests/test_case_schema_integration.py b/data-serving/reusable-data-service/tests/test_case_schema_integration.py index c8e25d89f..5641f3e97 100644 --- a/data-serving/reusable-data-service/tests/test_case_schema_integration.py +++ b/data-serving/reusable-data-service/tests/test_case_schema_integration.py @@ -22,6 +22,7 @@ def test_adding_field_then_downloading_case(client_with_patched_mongo): "status": "UNVERIFIED", "sourceId": "24680135792468013579fedc", }, + "caseStatus": "probable", "mySymptoms": "coughs, sneezles", }, ) @@ -52,6 +53,7 @@ def test_adding_field_then_downloading_csv(client_with_patched_mongo): "status": "UNVERIFIED", "sourceId": "24680135792468013579fedc", }, + "caseStatus": "probable", "someField": "well, what have we here", }, ) @@ -79,6 +81,7 @@ def test_required_field_default_value_spread_to_existing_cases( "status": "UNVERIFIED", "sourceId": "24680135792468013579fedc", }, + "caseStatus": "probable", }, ) assert response.status_code == 201 @@ -121,6 +124,36 @@ def test_required_field_becomes_required_in_validation(client_with_patched_mongo "status": "UNVERIFIED", "sourceId": "24680135792468013579fedc", }, + "caseStatus": "probable", + }, + ) + assert response.status_code == 422 + + +def test_field_enumerating_allowed_values_forbids_other_value( + client_with_patched_mongo, +): + response = client_with_patched_mongo.post( + "/api/schema", + json={ + "name": "customPathogenStatus", + "type": "string", + "description": "Whether the infection is associated with an endemic or emerging incidence", + "values": ["Endemic", "Emerging", "Unknown"], + 
"required": False, + }, + ) + assert response.status_code == 201 + response = client_with_patched_mongo.post( + "/api/cases", + json={ + "confirmationDate": "2022-06-01T00:00:00.000Z", + "caseReference": { + "status": "UNVERIFIED", + "sourceId": "24680135792468013579fedc", + }, + "caseStatus": "probable", + "customPathogenStatus": "Something Else", }, ) assert response.status_code == 422 diff --git a/data-serving/reusable-data-service/tests/test_reusable_data_service.py b/data-serving/reusable-data-service/tests/test_reusable_data_service.py deleted file mode 100644 index 3dbdaacbf..000000000 --- a/data-serving/reusable-data-service/tests/test_reusable_data_service.py +++ /dev/null @@ -1,5 +0,0 @@ -from data_service import __version__ - - -def test_version(): - assert __version__ == "0.1.0"