
Commit

fix tests
ekneg54 committed Jul 11, 2024
1 parent 5cfb156 commit 201d769
Showing 27 changed files with 93 additions and 94 deletions.
5 changes: 5 additions & 0 deletions logprep/framework/pipeline.py
@@ -89,6 +89,11 @@ def warnings(self) -> List[ProcessingWarning]:
"""Return all processing warnings."""
return itertools.chain(*[result.warnings for result in self])

@property
def data(self) -> List[Tuple[dict, dict]]:
"""Return all extra data."""
return itertools.chain(*[result.data for result in self])

def __attrs_post_init__(self):
self.results = list(
(processor.process(self.event) for processor in self.pipeline if self.event)
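The new data property mirrors the existing warnings property: both chain the per-processor results into a single iterable. A minimal consumer sketch, assuming an already configured Pipeline instance (its construction is not part of this diff); the chained properties are generators, so list() is needed to materialize them:

    result: PipelineResult = pipeline.process_pipeline()
    parsed_event = result.event            # the processed event
    extra_outputs = list(result.data)      # extra data from all processor results, chained
    warnings = list(result.warnings)       # processing warnings from all processor results, chained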
21 changes: 7 additions & 14 deletions logprep/util/auto_rule_tester/auto_rule_corpus_tester.py
@@ -99,7 +99,7 @@
from colorama import Fore, Style
from deepdiff import DeepDiff, grep

from logprep.framework.pipeline import Pipeline
from logprep.framework.pipeline import Pipeline, PipelineResult
from logprep.util.configuration import Configuration
from logprep.util.helper import get_dotted_field_value
from logprep.util.json_handling import parse_json
@@ -113,9 +113,8 @@ def convert_extra_data_format(extra_outputs) -> List[Dict]:
output target is the key and the values are the actual outputs.
"""
reformatted_extra_outputs = []
for extra_output in extra_outputs:
for output in extra_output:
reformatted_extra_outputs.append({str(output[1]): output[0]})
for value, key in extra_outputs:
reformatted_extra_outputs.append({str(key): value})
return reformatted_extra_outputs


@@ -211,18 +210,12 @@ def _run_pipeline_per_test_case(self):
print(Style.BRIGHT + "# Test Cases Summary:" + Style.RESET_ALL)
for test_case_id, test_case in self._test_cases.items():
_ = [processor.setup() for processor in self._pipeline._pipeline]
parsed_event, result = self._pipeline.process_pipeline()
extra_outputs = convert_extra_data_format(
result.results[processor_result].data
for processor_result in range(len(result.results))
)
result: PipelineResult = self._pipeline.process_pipeline()
parsed_event = result.event
extra_outputs = convert_extra_data_format(result.data)
test_case.generated_output = parsed_event
test_case.generated_extra_output = extra_outputs
test_case.warnings = [
result.results[processor_result].errors
for processor_result in range(len(result.results))
]
test_case.warnings = list(itertools.chain(*test_case.warnings))
test_case.warnings = result.warnings
self._compare_logprep_outputs(test_case_id, parsed_event)
self._compare_extra_data_output(test_case_id, extra_outputs)
self._print_pass_fail_statements(test_case_id)
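With this change convert_extra_data_format receives the flat iterable that PipelineResult.data yields, i.e. (value, target) pairs, instead of a nested list per processor result. A small illustration with hypothetical payloads (the dicts and target names below are made up for illustration, not taken from the logprep codebase):

    extra_outputs = [({"pseudonyms": {"origin": "foo"}}, "pseudonym_topic"), ({"sre": "alert"}, "sre_topic")]
    convert_extra_data_format(extra_outputs)
    # -> [{"pseudonym_topic": {"pseudonyms": {"origin": "foo"}}}, {"sre_topic": {"sre": "alert"}}]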
10 changes: 4 additions & 6 deletions logprep/util/rule_dry_runner.py
@@ -45,7 +45,7 @@
from colorama import Back, Fore
from ruamel.yaml import YAML

from logprep.framework.pipeline import Pipeline
from logprep.framework.pipeline import Pipeline, PipelineResult
from logprep.util.auto_rule_tester.auto_rule_corpus_tester import (
convert_extra_data_format,
)
@@ -103,11 +103,9 @@ def run(self):
transformed_cnt = 0
output_count = 0
for input_document in self._input_documents:
test_output, result = self._pipeline.process_pipeline()
test_output_custom = convert_extra_data_format(
result.results[processor_result].data
for processor_result in range(len(result.results))
)
result: PipelineResult = self._pipeline.process_pipeline()
test_output = result.event
test_output_custom = convert_extra_data_format(result.data)
if test_output:
output_count += 1
diff = self._print_output_results(input_document, test_output, test_output_custom)
8 changes: 5 additions & 3 deletions tests/unit/processor/amides/test_amides.py
@@ -168,9 +168,11 @@ def test_process_event_raise_duplication_error(self):
self.object.process(document)
assert document.get("amides")
result = self.object.process(document)
assert len(result.errors) > 0
assert re.match(r".*missing source_fields: \['process.command_line'].*", str(result.errors))
assert re.match(".*FieldExistsWarning.*", str(result.errors))
assert len(result.warnings) > 0
assert re.match(
r".*missing source_fields: \['process.command_line'].*", str(result.warnings)
)
assert re.match(".*FieldExistsWarning.*", str(result.warnings))

def test_setup_get_model_via_file_getter(self, tmp_path, monkeypatch):
model_uri = "file://tests/testdata/unit/amides/model.zip"
3 changes: 1 addition & 2 deletions tests/unit/processor/base.py
@@ -17,10 +17,9 @@

from logprep.abc.processor import Processor, ProcessorResult
from logprep.factory import Factory
from logprep.filter.lucene_filter import LuceneFilter
from logprep.framework.rule_tree.rule_tree import RuleTree
from logprep.metrics.metrics import CounterMetric, HistogramMetric
from logprep.processor.base.exceptions import ProcessingCriticalError, ProcessingError
from logprep.processor.base.exceptions import ProcessingCriticalError
from logprep.processor.base.rule import Rule
from logprep.util.json_handling import list_json_files_in_directory
from tests.unit.component.base import BaseComponentTestCase
4 changes: 2 additions & 2 deletions tests/unit/processor/calculator/test_calculator.py
@@ -355,8 +355,8 @@ def test_testcases_failure_handling(self, testcase, rule, event, expected, error
self._load_specific_rule(rule)

result = self.object.process(event)
assert len(result.errors) == 1
assert re.match(rf".*{error_message}", str(result.errors[0]))
assert len(result.warnings) == 1
assert re.match(rf".*{error_message}", str(result.warnings[0]))
assert event == expected, testcase

@pytest.mark.parametrize(
6 changes: 3 additions & 3 deletions tests/unit/processor/concatenator/test_concatenator.py
@@ -170,7 +170,7 @@ def test_for_expected_output(self, test_case, rule, document, expected_output):
self.object.process(document)
assert document == expected_output, test_case

def test_process_raises_duplication_error_if_target_field_exists_and_should_not_be_overwritten(
def test_process_raises_field_exists_warning_if_target_field_exists_and_should_not_be_overwritten(
self,
):
rule = {
@@ -186,8 +186,8 @@ def test_process_raises_duplication_error_if_target_field_exists_and_should_not_
self._load_specific_rule(rule)
document = {"field": {"a": "first", "b": "second"}, "target_field": "has already content"}
result = self.object.process(document)
assert len(result.errors) == 1
assert isinstance(result.errors[0], FieldExistsWarning)
assert len(result.warnings) == 1
assert isinstance(result.warnings[0], FieldExistsWarning)
assert "target_field" in document
assert document.get("target_field") == "has already content"
assert document.get("tags") == ["_concatenator_failure"]
@@ -181,8 +181,8 @@ def test_existing_target_raises_if_not_overwrite_target(self):
}
self._load_specific_rule(rule)
result = self.object.process(document)
assert len(result.errors) == 1
assert isinstance(result.errors[0], FieldExistsWarning)
assert len(result.warnings) == 1
assert isinstance(result.warnings[0], FieldExistsWarning)

@staticmethod
def _parse_local_tz(tz_local_name):
4 changes: 2 additions & 2 deletions tests/unit/processor/dissector/test_dissector.py
@@ -731,6 +731,6 @@ def test_testcases(self, testcase, rule, event, expected): # pylint: disable=un
def test_testcases_failure_handling(self, testcase, rule, event, expected):
self._load_specific_rule(rule)
result = self.object.process(event)
assert len(result.errors) == 1
assert isinstance(result.errors[0], ProcessingWarning)
assert len(result.warnings) == 1
assert isinstance(result.warnings[0], ProcessingWarning)
assert event == expected, testcase
@@ -245,8 +245,8 @@ def test_domain_extraction_with_ipv6_target(self):
def test_domain_extraction_with_existing_output_field(self):
document = {"url": {"domain": "test.domain.de", "subdomain": "exists already"}}
result = self.object.process(document)
assert len(result.errors) == 1
assert isinstance(result.errors[0], FieldExistsWarning)
assert len(result.warnings) == 1
assert isinstance(result.warnings[0], FieldExistsWarning)

def test_domain_extraction_overwrites_target_field(self):
document = {"url": {"domain": "test.domain.de", "subdomain": "exists already"}}
@@ -314,7 +314,7 @@ def test_does_nothing_if_source_field_not_exits(self):
self.object.process(document)
assert document == expected

def test_raises_duplication_error_if_target_field_exits(self):
def test_raises_field_exists_warning_if_target_field_exits(self):
document = {"url": {"domain": "test.domain.de", "subdomain": "exists already"}}
expected = {
"tags": ["_domain_label_extractor_failure"],
@@ -336,8 +336,8 @@ def test_raises_duplication_error_if_target_field_exits(self):
}
self._load_specific_rule(rule_dict)
result = self.object.process(document)
assert len(result.errors) == 1
assert isinstance(result.errors[0], FieldExistsWarning)
assert len(result.warnings) == 1
assert isinstance(result.warnings[0], FieldExistsWarning)
assert document == expected

@responses.activate
6 changes: 3 additions & 3 deletions tests/unit/processor/domain_resolver/test_domain_resolver.py
@@ -228,12 +228,12 @@ def test_configured_dotted_subfield(self, _):
assert document == expected

@mock.patch("socket.gethostbyname", return_value="1.2.3.4")
def test_duplication_error(self, _):
def test_field_exits_warning(self, _):
document = {"client": "google.de"}

result = self.object.process(document)
assert len(result.errors) == 1
assert isinstance(result.errors[0], FieldExistsWarning)
assert len(result.warnings) == 1
assert isinstance(result.warnings[0], FieldExistsWarning)

@mock.patch("socket.gethostbyname", return_value="1.2.3.4")
def test_no_duplication_error(self, _):
16 changes: 8 additions & 8 deletions tests/unit/processor/field_manager/test_field_manager.py
@@ -591,11 +591,11 @@ def test_testcases(self, testcase, rule, event, expected): # pylint: disable=un
def test_testcases_failure_handling(self, testcase, rule, event, expected, error):
self._load_specific_rule(rule)
result = self.object.process(event)
assert len(result.errors) == 1
assert re.match(error, str(result.errors[0]))
assert len(result.warnings) == 1
assert re.match(error, str(result.warnings[0]))
assert event == expected, testcase

def test_process_raises_duplication_error_if_target_field_exists_and_should_not_be_overwritten(
def test_process_raises_field_exists_warning_if_target_field_exists_and_should_not_be_overwritten(
self,
):
rule = {
@@ -610,7 +610,7 @@ def test_process_raises_duplication_error_if_target_field_exists_and_should_not_
self._load_specific_rule(rule)
document = {"field": {"a": "first", "b": "second"}, "target_field": "has already content"}
result = self.object.process(document)
assert isinstance(result.errors[0], FieldExistsWarning)
assert isinstance(result.warnings[0], FieldExistsWarning)
assert "target_field" in document
assert document.get("target_field") == "has already content"
assert document.get("tags") == ["_field_manager_failure"]
@@ -626,10 +626,10 @@ def test_process_raises_processing_warning_with_missing_fields(self):
self._load_specific_rule(rule)
document = {"field": {"a": "first", "b": "second"}}
result = self.object.process(document)
assert len(result.errors) == 1
assert len(result.warnings) == 1
assert re.match(
r".*ProcessingWarning.*missing source_fields: \['does.not.exists'\]",
str(result.errors[0]),
str(result.warnings[0]),
)

def test_process_raises_processing_warning_with_missing_fields_but_event_is_processed(self):
@@ -650,10 +650,10 @@ def test_process_raises_processing_warning_with_missing_fields_but_event_is_proc
"tags": ["_field_manager_missing_field_warning"],
}
result = self.object.process(document)
assert len(result.errors) == 1
assert len(result.warnings) == 1
assert re.match(
r".*ProcessingWarning.*missing source_fields: \['does.not.exists'\]",
str(result.errors[0]),
str(result.warnings[0]),
)
assert document == expected

8 changes: 4 additions & 4 deletions tests/unit/processor/generic_adder/test_generic_adder.py
@@ -409,8 +409,8 @@ def test_generic_adder_testcases_failure_handling(
):
self._load_specific_rule(rule)
result = self.object.process(event)
assert len(result.errors) == 1
assert re.match(rf".*FieldExistsWarning.*{error_message}", str(result.errors[0]))
assert len(result.warnings) == 1
assert re.match(rf".*FieldExistsWarning.*{error_message}", str(result.warnings[0]))
assert event == expected, testcase

def test_add_generic_fields_from_file_missing_and_existing_with_all_required(self):
@@ -611,8 +611,8 @@ def test_sql_database_raises_exception_on_duplicate(self, caplog):

self.object.process(document)
result = self.object.process(document)
assert len(result.errors) == 1
assert isinstance(result.errors[0], FieldExistsWarning)
assert len(result.warnings) == 1
assert isinstance(result.warnings[0], FieldExistsWarning)

assert document == expected

@@ -420,8 +420,8 @@ def test_resolve_dotted_src_and_dest_field_and_conflict_match(self, caplog):
"re": {"solved": "I already exist!"},
}
result = self.object.process(document)
assert len(result.errors) == 1
assert isinstance(result.errors[0], FieldExistsWarning)
assert len(result.warnings) == 1
assert isinstance(result.warnings[0], FieldExistsWarning)
assert document == expected

def test_resolve_generic_and_multiple_match_first_only(self):
8 changes: 4 additions & 4 deletions tests/unit/processor/geoip_enricher/test_geoip_enricher.py
@@ -124,9 +124,9 @@ def test_source_field_is_none_emits_missing_fields_warning(self):
expected = {"client": {"ip": None}, "tags": ["_geoip_enricher_missing_field_warning"]}
self._load_specific_rule(self.object.rules[0])
self.object.process(document)
assert len(self.object.result.errors) == 1
assert len(self.object.result.warnings) == 1
assert re.match(
r".*missing source_fields: \['client\.ip'].*", str(self.object.result.errors[0])
r".*missing source_fields: \['client\.ip'].*", str(self.object.result.warnings[0])
)
assert document == expected

@@ -164,8 +164,8 @@ def test_enrich_an_event_geoip(self):
def test_enrich_an_event_geoip_with_existing_differing_geoip(self):
document = {"client": {"ip": "8.8.8.8"}, "geoip": {"type": "Feature"}}
result = self.object.process(document)
assert len(result.errors) == 1
assert re.match(".*FieldExistsWarning.*geoip.type", str(result.errors[0]))
assert len(result.warnings) == 1
assert re.match(".*FieldExistsWarning.*geoip.type", str(result.warnings[0]))

def test_configured_dotted_output_field(self):
document = {"source": {"ip": "8.8.8.8"}}
4 changes: 2 additions & 2 deletions tests/unit/processor/grokker/test_grokker.py
@@ -433,8 +433,8 @@ def test_testcases_failure_handling(self, testcase, rule, event, expected, error
self.object.setup()
if isinstance(error, str):
result = self.object.process(event)
assert len(result.errors) == 1
assert re.match(rf".*{error}", str(result.errors[0]))
assert len(result.warnings) == 1
assert re.match(rf".*{error}", str(result.warnings[0]))
assert event == expected, testcase
else:
result = self.object.process(event)
@@ -378,8 +378,8 @@ def test_resolve_dotted_and_dest_field_with_conflict_match(self):
"tags": ["_hyperscan_resolver_failure"],
}
result = self.object.process(document)
assert len(result.errors) == 1
assert isinstance(result.errors[0], FieldExistsWarning)
assert len(result.warnings) == 1
assert isinstance(result.warnings[0], FieldExistsWarning)
assert document == expected

def test_resolve_with_multiple_match_first_only(self):
4 changes: 2 additions & 2 deletions tests/unit/processor/ip_informer/test_ip_informer.py
@@ -425,6 +425,6 @@ def test_testcases(self, testcase, rule, event, expected):
def test_testcases_failure_handling(self, testcase, rule, event, expected):
self._load_specific_rule(rule)
result = self.object.process(event)
assert len(result.errors) == 1
assert isinstance(result.errors[0], ProcessingWarning)
assert len(result.warnings) == 1
assert isinstance(result.warnings[0], ProcessingWarning)
assert event == expected, testcase
6 changes: 3 additions & 3 deletions tests/unit/processor/key_checker/test_key_checker.py
@@ -255,7 +255,7 @@ def test_testcases_positiv(
self.object.process(event)
assert event == expected

def test_raises_duplication_error(self):
def test_field_exists_warning(self):
rule_dict = {
"filter": "*",
"key_checker": {
Expand All @@ -273,5 +273,5 @@ def test_raises_duplication_error(self):
"missing_fields": ["i.exists.already"],
}
result = self.object.process(document)
assert len(result.errors) == 1
assert isinstance(result.errors[0], FieldExistsWarning)
assert len(result.warnings) == 1
assert isinstance(result.warnings[0], FieldExistsWarning)
12 changes: 6 additions & 6 deletions tests/unit/processor/list_comparison/test_list_comparison.py
@@ -159,8 +159,8 @@ def test_target_field_exists_and_cant_be_extended(self):
self._load_specific_rule(rule_dict)
self.object.setup()
result = self.object.process(document)
assert len(result.errors) == 1
assert isinstance(result.errors[0], FieldExistsWarning)
assert len(result.warnings) == 1
assert isinstance(result.warnings[0], FieldExistsWarning)
assert document == expected

def test_intermediate_output_field_is_wrong_type(self):
@@ -188,8 +188,8 @@ def test_intermediate_output_field_is_wrong_type(self):
self._load_specific_rule(rule_dict)
self.object.setup()
result = self.object.process(document)
assert len(result.errors) == 1
assert isinstance(result.errors[0], FieldExistsWarning)
assert len(result.warnings) == 1
assert isinstance(result.warnings[0], FieldExistsWarning)
assert document == expected

def test_check_in_dotted_subfield(self):
@@ -244,8 +244,8 @@ def test_overwrite_target_field(self):
self._load_specific_rule(rule_dict)
self.object.setup()
result = self.object.process(document)
assert len(result.errors) == 1
assert isinstance(result.errors[0], FieldExistsWarning)
assert len(result.warnings) == 1
assert isinstance(result.warnings[0], FieldExistsWarning)
assert document == expected

@responses.activate
