diff --git a/app/commons/esclient.py b/app/commons/esclient.py
index 20931a2..f7bbaf1 100644
--- a/app/commons/esclient.py
+++ b/app/commons/esclient.py
@@ -214,7 +214,7 @@ def _to_index_bodies(
             if log.logLevel < utils.ERROR_LOGGING_LEVEL or not log.message.strip():
                 continue
 
-            bodies.append(log_requests.prepare_log(launch, test_item, log, project_with_prefix))
+            bodies.append(request_factory.prepare_log(launch, test_item, log, project_with_prefix))
             logs_added = True
         if logs_added:
             test_item_ids.append(str(test_item.testItemId))
diff --git a/app/service/auto_analyzer_service.py b/app/service/auto_analyzer_service.py
index d9b534a..a812b92 100644
--- a/app/service/auto_analyzer_service.py
+++ b/app/service/auto_analyzer_service.py
@@ -335,7 +335,7 @@ def _query_elasticsearch(self, launches: list[Launch], max_batch_size=30):
                     logger.info("Early finish from analyzer before timeout")
                     break
                 unique_logs = text_processing.leave_only_unique_logs(test_item.logs)
-                prepared_logs = [log_requests.prepare_log(launch, test_item, log, index_name)
+                prepared_logs = [request_factory.prepare_log(launch, test_item, log, index_name)
                                  for log in unique_logs if log.logLevel >= utils.ERROR_LOGGING_LEVEL]
                 results, _ = log_merger.decompose_logs_merged_and_without_duplicates(prepared_logs)
 
diff --git a/app/service/cluster_service.py b/app/service/cluster_service.py
index a0a5ca9..3b964c2 100644
--- a/app/service/cluster_service.py
+++ b/app/service/cluster_service.py
@@ -350,7 +350,7 @@ def find_clusters(self, launch_info: LaunchInfoForClustering):
         log_ids = {}
         try:
             unique_errors_min_should_match = launch_info.launch.analyzerConfig.uniqueErrorsMinShouldMatch / 100.0  # noqa
-            prepared_logs = log_requests.prepare_logs_for_clustering(launch_info.launch, index_name)
+            prepared_logs = request_factory.prepare_logs_for_clustering(launch_info.launch, index_name)
             log_messages, log_dict, log_ids_for_merged_logs = log_merger.merge_logs(
                 prepared_logs, launch_info.numberOfLogLines, launch_info.cleanNumbers)
             log_ids = set([str(log["_id"]) for log in log_dict.values()])
diff --git a/app/service/namespace_finder_service.py b/app/service/namespace_finder_service.py
index d2d84f1..414ce98 100644
--- a/app/service/namespace_finder_service.py
+++ b/app/service/namespace_finder_service.py
@@ -31,7 +31,7 @@ def __init__(self, app_config: ApplicationConfig):
     def update_chosen_namespaces(self, launches: list[Launch]):
         logger.info("Started updating chosen namespaces")
         t_start = time()
-        log_words, project_id = log_requests.prepare_log_words(launches)
+        log_words, project_id = request_factory.prepare_log_words(launches)
         logger.debug(f'Project id {project_id}')
         if project_id is not None:
             self.namespace_finder.update_namespaces(project_id, log_words)
diff --git a/app/service/search_service.py b/app/service/search_service.py
index 4fbb69a..4d76b44 100644
--- a/app/service/search_service.py
+++ b/app/service/search_service.py
@@ -155,8 +155,8 @@ def prepare_messages_for_queries(self, search_req):
             if not message.strip():
                 continue
 
-            queried_log = log_requests.create_log_template()
-            queried_log = log_requests._fill_log_fields(
+            queried_log = request_factory.create_log_template()
+            queried_log = request_factory._fill_log_fields(
                 queried_log, Log(logId=global_id, message=message), search_req.logLines)
             msg_words = " ".join(text_processing.split_words(queried_log["_source"]["message"]))
 
diff --git a/app/service/suggest_service.py b/app/service/suggest_service.py
index 559e389..7105009 100644
--- a/app/service/suggest_service.py
+++ b/app/service/suggest_service.py
@@ -329,7 +329,7 @@ def prepare_logs_for_suggestions(self, test_item_info: TestItemInfo, index_name:
             prepared_logs, test_item_id_for_suggest = self.query_logs_for_cluster(test_item_info, index_name)
         else:
             unique_logs = text_processing.leave_only_unique_logs(test_item_info.logs)
-            prepared_logs = [log_requests.prepare_log_for_suggests(test_item_info, log, index_name)
+            prepared_logs = [request_factory.prepare_log_for_suggests(test_item_info, log, index_name)
                              for log in unique_logs if log.logLevel >= utils.ERROR_LOGGING_LEVEL]
         logs, _ = log_merger.decompose_logs_merged_and_without_duplicates(prepared_logs)
         return logs, test_item_id_for_suggest