From 5ee98f0f33d876c1807048552bf10a0fedb75daa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?M=C3=A1rton=20Csord=C3=A1s?= <csordasmarton92@gmail.com> Date: Thu, 2 Aug 2018 14:30:24 +0200 Subject: [PATCH 1/2] Handle invalid json files properly --- libcodechecker/analyze/log_parser.py | 14 ++-- libcodechecker/generic_package_context.py | 92 ++++++++++------------ libcodechecker/libhandlers/analyze.py | 10 +-- libcodechecker/libhandlers/parse.py | 5 +- libcodechecker/libhandlers/server.py | 5 +- libcodechecker/libhandlers/store.py | 3 +- libcodechecker/logger.py | 2 +- libcodechecker/server/api/report_server.py | 4 +- libcodechecker/server/api/store_handler.py | 4 +- libcodechecker/server/instance_manager.py | 15 ++-- libcodechecker/util.py | 17 +++- 11 files changed, 80 insertions(+), 91 deletions(-) diff --git a/libcodechecker/analyze/log_parser.py b/libcodechecker/analyze/log_parser.py index 80d8826387..338267bfe3 100644 --- a/libcodechecker/analyze/log_parser.py +++ b/libcodechecker/analyze/log_parser.py @@ -25,6 +25,7 @@ from libcodechecker.log import build_action from libcodechecker.log import option_parser from libcodechecker.logger import get_logger +from libcodechecker.util import load_json_or_empty LOG = get_logger('buildlogger') @@ -142,16 +143,15 @@ def dump_compiler_info(output_path, filename, data): filename = os.path.join(output_path, filename) all_data = dict() if os.path.exists(filename): - with open(filename, 'r') as f: - all_data = json.load(f) + all_data = load_json_or_empty(filename) + all_data.update(data) with open(filename, 'w') as f: f.write(json.dumps(all_data)) def load_compiler_info(filename, compiler): - with open(filename, 'r') as f: - data = json.load(f) + data = load_json_or_empty(filename, {}) value = data.get(compiler) if value is None: LOG.error("Could not find compiler %s in file %s" % @@ -239,7 +239,7 @@ def parse_compile_commands_json(logfile, parseLogOptions): actions = [] filtered_build_actions = {} - data = json.load(logfile) + data = load_json_or_empty(logfile, {}) compiler_includes = {} compiler_target = {} @@ -359,9 +359,9 @@ def parse_compile_commands_json(logfile, parseLogOptions): def parse_log(logfilepath, parseLogOptions): - ''' + """ logfilepath: the compile command json file which should be parsed. - ''' + """ LOG.debug('Parsing log file: ' + logfilepath) with open(logfilepath) as logfile: diff --git a/libcodechecker/generic_package_context.py b/libcodechecker/generic_package_context.py index bfda319d63..9c3aa84fb5 100644 --- a/libcodechecker/generic_package_context.py +++ b/libcodechecker/generic_package_context.py @@ -12,7 +12,6 @@ from __future__ import absolute_import from collections import Mapping -import json import os import sys @@ -20,6 +19,7 @@ from libcodechecker import logger # TODO: Refers subpackage library from libcodechecker.analyze.analyzers import analyzer_types +from libcodechecker.util import load_json_or_empty LOG = logger.get_logger('system') @@ -89,13 +89,8 @@ def set_env(self, env_vars): self._codechecker_workspace = os.environ.get('codechecker_workspace') - try: - with open(self.checkers_severity_map_file) as severity_file: - self._severity_map = SeverityMap(json.load(severity_file)) - except (IOError, ValueError): - LOG.warning("{0} doesn't exist or not JSON format. Severity " - "levels will not be available!" - .format(self.checkers_severity_map_file)) + self._severity_map = SeverityMap( + load_json_or_empty(self.checkers_severity_map_file, {})) # Get generic package specific environment variables. 
self.logger_bin = os.environ.get(env_vars['cc_logger_bin']) @@ -108,49 +103,38 @@ def __set_version(self): """ Get the package version from the version config file. """ - try: - with open(self.version_file, 'r') as vfile: - vfile_data = json.loads(vfile.read()) - - package_version = vfile_data['version'] - package_build_date = vfile_data['package_build_date'] - package_git_hash = vfile_data['git_hash'] - package_git_tag = vfile_data['git_describe']['tag'] - package_git_dirtytag = vfile_data['git_describe']['dirty'] - product_database_version = vfile_data['product_db_version'] - run_database_version = vfile_data['run_db_version'] - - self.__package_version = package_version['major'] + '.' + \ - package_version['minor'] + '.' + \ - package_version['revision'] - self.__product_db_version_info = db_version.DBVersionInfo( - product_database_version['major'], - product_database_version['minor']) - self.__run_db_version_info = db_version.DBVersionInfo( - run_database_version['major'], - run_database_version['minor']) - - self.__package_build_date = package_build_date - self.__package_git_hash = package_git_hash - - self.__package_git_tag = package_git_tag - if (LOG.getEffectiveLevel() == logger.DEBUG or - LOG.getEffectiveLevel() == - logger.DEBUG_ANALYZER): - self.__package_git_tag = package_git_dirtytag - - except ValueError as verr: - # db_version is required to know if the db schema is compatible. - LOG.error('Failed to get version info from the version file.') - LOG.error(verr) - sys.exit(1) + vfile_data = load_json_or_empty(self.version_file) - except IOError as ioerr: - LOG.error('Failed to read version config file: ' + - self.version_file) - LOG.error(ioerr) + if not vfile_data: sys.exit(1) + package_version = vfile_data['version'] + package_build_date = vfile_data['package_build_date'] + package_git_hash = vfile_data['git_hash'] + package_git_tag = vfile_data['git_describe']['tag'] + package_git_dirtytag = vfile_data['git_describe']['dirty'] + product_database_version = vfile_data['product_db_version'] + run_database_version = vfile_data['run_db_version'] + + self.__package_version = package_version['major'] + '.' + \ + package_version['minor'] + '.' 
+ \ + package_version['revision'] + self.__product_db_version_info = db_version.DBVersionInfo( + product_database_version['major'], + product_database_version['minor']) + self.__run_db_version_info = db_version.DBVersionInfo( + run_database_version['major'], + run_database_version['minor']) + + self.__package_build_date = package_build_date + self.__package_git_hash = package_git_hash + + self.__package_git_tag = package_git_tag + if (LOG.getEffectiveLevel() == logger.DEBUG or + LOG.getEffectiveLevel() == + logger.DEBUG_ANALYZER): + self.__package_git_tag = package_git_dirtytag + def __populate_analyzers(self): compiler_binaries = self.pckg_layout.get('analyzers') if not compiler_binaries: @@ -371,8 +355,10 @@ def get_context(): pckg_config_file = os.path.join(package_root, "config", "config.json") LOG.debug('Reading config: ' + pckg_config_file) - with open(pckg_config_file, 'r') as cfg: - cfg_dict = json.loads(cfg.read()) + cfg_dict = load_json_or_empty(pckg_config_file) + + if not cfg_dict: + sys.exit(1) LOG.debug(cfg_dict) @@ -381,8 +367,10 @@ def get_context(): layout_cfg_file = os.path.join(package_root, "config", "package_layout.json") LOG.debug(layout_cfg_file) - with open(layout_cfg_file, 'r') as lcfg: - lcfg_dict = json.loads(lcfg.read()) + lcfg_dict = load_json_or_empty(layout_cfg_file) + + if not lcfg_dict: + sys.exit(1) # Merge static and runtime layout. layout_config = lcfg_dict['static'].copy() diff --git a/libcodechecker/libhandlers/analyze.py b/libcodechecker/libhandlers/analyze.py index b6b40b1688..e7bedd1a15 100644 --- a/libcodechecker/libhandlers/analyze.py +++ b/libcodechecker/libhandlers/analyze.py @@ -22,7 +22,8 @@ from libcodechecker.analyze import analyzer from libcodechecker.analyze import log_parser from libcodechecker.analyze.analyzers import analyzer_types -from libcodechecker.util import RawDescriptionDefaultHelpFormatter +from libcodechecker.util import RawDescriptionDefaultHelpFormatter, \ + load_json_or_empty LOG = logger.get_logger('system') @@ -488,10 +489,9 @@ def main(args): # Update metadata dictionary with old values. 
metadata_file = os.path.join(args.output_path, 'metadata.json') if os.path.exists(metadata_file): - with open(metadata_file, 'r') as data: - metadata_prev = json.load(data) - metadata['result_source_files'] =\ - metadata_prev['result_source_files'] + metadata_prev = load_json_or_empty(metadata_file) + metadata['result_source_files'] = \ + metadata_prev['result_source_files'] analyzer.perform_analysis(args, context, actions, metadata) diff --git a/libcodechecker/libhandlers/parse.py b/libcodechecker/libhandlers/parse.py index 27ec624589..631fb254a6 100644 --- a/libcodechecker/libhandlers/parse.py +++ b/libcodechecker/libhandlers/parse.py @@ -323,9 +323,8 @@ def skip_html_report_data_handler(report_hash, source_file, report_line, elif os.path.isdir(input_path): metadata_file = os.path.join(input_path, "metadata.json") if os.path.exists(metadata_file): - with open(metadata_file, 'r') as metadata: - metadata_dict = json.load(metadata) - LOG.debug(metadata_dict) + metadata_dict = util.load_json_or_empty(metadata_file) + LOG.debug(metadata_dict) if 'working_directory' in metadata_dict: working_dir = metadata_dict['working_directory'] diff --git a/libcodechecker/libhandlers/server.py b/libcodechecker/libhandlers/server.py index 94ce54555b..cc85672322 100644 --- a/libcodechecker/libhandlers/server.py +++ b/libcodechecker/libhandlers/server.py @@ -605,7 +605,7 @@ def __instance_management(args): head = ['Workspace', 'Computer host', 'View port'] rows = [] - for instance in instance_manager.get_instances(): + for instance in instances: if not instances_on_multiple_hosts: rows.append((instance['workspace'], str(instance['port']))) else: @@ -825,8 +825,7 @@ def server_init_start(args): checker_md_docs = os.path.join(context.doc_root, 'checker_md_docs') checker_md_docs_map = os.path.join(checker_md_docs, 'checker_doc_map.json') - with open(checker_md_docs_map, 'r') as dFile: - checker_md_docs_map = json.load(dFile) + checker_md_docs_map = util.load_json_or_empty(checker_md_docs_map, {}) package_data = {'www_root': context.www_root, 'doc_root': context.doc_root, diff --git a/libcodechecker/libhandlers/store.py b/libcodechecker/libhandlers/store.py index 4e86cc0f9d..2d66ee271f 100644 --- a/libcodechecker/libhandlers/store.py +++ b/libcodechecker/libhandlers/store.py @@ -159,8 +159,7 @@ def __get_run_name(input_list): for input_path in input_list: metafile = os.path.join(input_path, "metadata.json") if os.path.isdir(input_path) and os.path.exists(metafile): - with open(metafile, 'r') as metadata: - metajson = json.load(metadata) + metajson = util.load_json_or_empty(metafile) if 'name' in metajson: names.append(metajson['name']) diff --git a/libcodechecker/logger.py b/libcodechecker/logger.py index 7725bc36fc..d9d66ff984 100644 --- a/libcodechecker/logger.py +++ b/libcodechecker/logger.py @@ -48,7 +48,7 @@ def debug_analyzer(self, msg, *args, **kwargs): DEFAULT_LOG_CFG_FILE = os.path.join(package_root, 'config', 'logger.conf') -# Default config wich can be used if reading log config from a +# Default config which can be used if reading log config from a # file fails. 
DEFAULT_LOG_CONFIG = '''{ "version": 1, diff --git a/libcodechecker/server/api/report_server.py b/libcodechecker/server/api/report_server.py index c0ebeea810..106194e42e 100644 --- a/libcodechecker/server/api/report_server.py +++ b/libcodechecker/server/api/report_server.py @@ -2251,8 +2251,8 @@ def massStoreRun(self, name, tag, version, b64zip, force, LOG.error("Failed to open skip file") LOG.error(err) - with open(content_hash_file) as chash_file: - filename_to_hash = json.load(chash_file) + filename_to_hash = util.load_json_or_empty(content_hash_file, + {}) file_path_to_id = self.__store_source_files(source_root, filename_to_hash, diff --git a/libcodechecker/server/api/store_handler.py b/libcodechecker/server/api/store_handler.py index 172224b0fb..3005467cb8 100644 --- a/libcodechecker/server/api/store_handler.py +++ b/libcodechecker/server/api/store_handler.py @@ -26,6 +26,7 @@ # TODO: This is a cross-subpackage import. from libcodechecker.server.database.run_db_model import BugPathEvent, \ BugReportPoint, File, Run, RunHistory, Report, FileContent +from libcodechecker.util import load_json_or_empty LOG = get_logger('system') @@ -38,8 +39,7 @@ def metadata_info(metadata_file): if not os.path.isfile(metadata_file): return check_commands, check_durations - with open(metadata_file, 'r') as metadata: - metadata_dict = json.load(metadata) + metadata_dict = load_json_or_empty(metadata_file, {}) if 'command' in metadata_dict: check_commands.append(metadata_dict['command']) diff --git a/libcodechecker/server/instance_manager.py b/libcodechecker/server/instance_manager.py index 904d834877..cfaa774701 100644 --- a/libcodechecker/server/instance_manager.py +++ b/libcodechecker/server/instance_manager.py @@ -19,6 +19,8 @@ import socket import stat +from libcodechecker.util import load_json_or_empty + def __getInstanceDescriptorPath(folder=None): if not folder: @@ -112,13 +114,6 @@ def get_instances(folder=None): # This method does NOT write the descriptor file. descriptor = __getInstanceDescriptorPath(folder) - instances = [] - if os.path.exists(descriptor): - with open(descriptor, 'r') as f: - portalocker.lock(f, portalocker.LOCK_SH) - instances = [i for i in json.load(f) if __checkInstance( - i['hostname'], - i['pid'])] - portalocker.unlock(f) - - return instances + instances = load_json_or_empty(descriptor, {}, lock=True) + + return [i for i in instances if __checkInstance(i['hostname'], i['pid'])] diff --git a/libcodechecker/util.py b/libcodechecker/util.py index 617404d21a..a5211b8bc4 100644 --- a/libcodechecker/util.py +++ b/libcodechecker/util.py @@ -15,12 +15,14 @@ import hashlib import json import os +import portalocker import re import shutil import signal import socket import stat import subprocess +import sys import tempfile import uuid @@ -546,7 +548,6 @@ def check_file_owner_rw(file_to_check): True if only the owner can read or write the file. False if other users or groups can read or write the file. """ - mode = os.stat(file_to_check)[stat.ST_MODE] if mode & stat.S_IRGRP \ or mode & stat.S_IWGRP \ @@ -561,7 +562,7 @@ def check_file_owner_rw(file_to_check): return True -def load_json_or_empty(path, default=None, kind=None): +def load_json_or_empty(path, default=None, kind=None, lock=False): """ Load the contents of the given file as a JSON and return it's value, or default if the file can't be loaded. 
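A minimal usage sketch for the reworked helper (not part of the diff; the
paths and default values below are invented for illustration):

    from libcodechecker.util import load_json_or_empty

    # A missing or malformed file logs a warning and returns the given
    # default instead of raising, so call sites need no try/except.
    settings = load_json_or_empty('/tmp/example-config.json', default={})

    # lock=True holds a shared portalocker lock for the duration of the
    # read, as the instance manager does for its descriptor file.
    instances = load_json_or_empty('/tmp/example-instances.json', [],
                                   lock=True)

Returning the default rather than raising keeps every call site a
one-liner and confines the warning logging to this single helper, which
is what lets the hunks above and below delete the per-caller error
handling.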
@@ -570,13 +571,21 @@ def load_json_or_empty(path, default=None, kind=None): ret = default try: with open(path, 'r') as handle: + if lock: + portalocker.lock(handle, portalocker.LOCK_SH) + ret = json.loads(handle.read()) - except IOError: + + if lock: + portalocker.unlock(handle) + except IOError as ex: LOG.warning("Failed to open {0} file: {1}" .format(kind if kind else 'json', path)) - except ValueError: + LOG.warning(ex) + except ValueError as ex: LOG.warning("'{1}' is not a valid {0} file." .format(kind if kind else 'json', path)) + LOG.warning(ex) return ret From 42330a10cf245e4749370a74dabf98c98b2c19a6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?M=C3=A1rton=20Csord=C3=A1s?= <csordasmarton92@gmail.com> Date: Thu, 2 Aug 2018 14:43:30 +0200 Subject: [PATCH 2/2] Refactoring instance manager --- libcodechecker/analyze/log_parser.py | 30 +++++---- libcodechecker/server/instance_manager.py | 75 ++++++++++++----------- libcodechecker/util.py | 21 ++++--- tests/unit/test_buildcmd_escaping.py | 8 +-- tests/unit/test_log_parser.py | 46 +++++++------- 5 files changed, 92 insertions(+), 88 deletions(-) diff --git a/libcodechecker/analyze/log_parser.py b/libcodechecker/analyze/log_parser.py index 338267bfe3..68719a5130 100644 --- a/libcodechecker/analyze/log_parser.py +++ b/libcodechecker/analyze/log_parser.py @@ -224,9 +224,9 @@ def remove_file_if_exists(filename): os.remove(filename) -def parse_compile_commands_json(logfile, parseLogOptions): +def parse_compile_commands_json(log_data, parseLogOptions): """ - logfile: is a compile command json + log_data: content of a compile command json. """ output_path = parseLogOptions.output_path @@ -239,13 +239,11 @@ def parse_compile_commands_json(logfile, parseLogOptions): actions = [] filtered_build_actions = {} - data = load_json_or_empty(logfile, {}) - compiler_includes = {} compiler_target = {} counter = 0 - for entry in data: + for entry in log_data: sourcefile = entry['file'] if not os.path.isabs(sourcefile): @@ -364,17 +362,17 @@ def parse_log(logfilepath, parseLogOptions): """ LOG.debug('Parsing log file: ' + logfilepath) - with open(logfilepath) as logfile: - try: - actions = parse_compile_commands_json(logfile, parseLogOptions) - except (ValueError, KeyError, TypeError) as ex: - if os.stat(logfilepath).st_size == 0: - LOG.error('The compile database is empty.') - else: - LOG.error('The compile database is not valid.') - LOG.debug(traceback.format_exc()) - LOG.debug(ex) - sys.exit(1) + try: + data = load_json_or_empty(logfilepath, {}) + actions = parse_compile_commands_json(data, parseLogOptions) + except (ValueError, KeyError, TypeError) as ex: + if os.stat(logfilepath).st_size == 0: + LOG.error('The compile database is empty.') + else: + LOG.error('The compile database is not valid.') + LOG.debug(traceback.format_exc()) + LOG.debug(ex) + sys.exit(1) LOG.debug('Parsing log file done.') diff --git a/libcodechecker/server/instance_manager.py b/libcodechecker/server/instance_manager.py index cfaa774701..353fad06ff 100644 --- a/libcodechecker/server/instance_manager.py +++ b/libcodechecker/server/instance_manager.py @@ -14,30 +14,31 @@ import getpass import json import os -import portalocker import psutil import socket import stat +import portalocker + from libcodechecker.util import load_json_or_empty -def __getInstanceDescriptorPath(folder=None): +def __get_instance_descriptor_path(folder=None): if not folder: folder = os.path.expanduser("~") return os.path.join(folder, ".codechecker.instances.json") -def __makeInstanceDescriptorFile(folder=None): - 
descriptor = __getInstanceDescriptorPath(folder) +def __make_instance_descriptor_file(folder=None): + descriptor = __get_instance_descriptor_path(folder) if not os.path.exists(descriptor): with open(descriptor, 'w') as f: json.dump([], f) os.chmod(descriptor, stat.S_IRUSR | stat.S_IWUSR) -def __checkInstance(hostname, pid): +def __check_instance(hostname, pid): """Check if the given process on the system is a valid, running CodeChecker for the current user.""" @@ -57,32 +58,34 @@ def __checkInstance(hostname, pid): return False -def __rewriteInstanceFile(append, remove, folder=None): - """This helper method reads the user's instance descriptor and manages it - eliminating dead records, appending new ones and reserialising the file.""" +def __rewrite_instance_file(append, remove, folder=None): + """ + This helper method reads the user's instance descriptor and manages it + eliminating dead records, appending new ones and re-serialising the file. + """ + __make_instance_descriptor_file(folder) + + append_pids = [i['pid'] for i in append] - __makeInstanceDescriptorFile(folder) - with open(__getInstanceDescriptorPath(folder), 'r+') as f: - portalocker.lock(f, portalocker.LOCK_EX) + # After reading, check every instance if they are still valid and + # make sure PID does not collide accidentally with the + # to-be-registered instances, if any exists in the append list as it + # would cause duplication. + # + # Also, we remove the records to the given PIDs, if any exists. + instances = [i for i in get_instances(folder) + if i['pid'] not in append_pids and + (i['hostname'] + ":" + str(i['pid'])) not in remove] - # After reading, check every instance if they are still valid and - # make sure PID does not collide accidentally with the - # to-be-registered instances, if any exists in the append list as it - # would cause duplication. - # - # Also, we remove the records to the given PIDs, if any exists. - append_pids = [i['pid'] for i in append] - instances = [i for i in json.load(f) - if i['pid'] not in append_pids and - (i['hostname'] + ":" + str(i['pid'])) not in remove and - __checkInstance(i['hostname'], i['pid'])] + with open(__get_instance_descriptor_path(folder), 'w') as instance_file: + portalocker.lock(instance_file, portalocker.LOCK_EX) instances = instances + append - f.seek(0) - f.truncate() - json.dump(instances, f, indent=2) - portalocker.unlock(f) + instance_file.seek(0) + instance_file.truncate() + json.dump(instances, instance_file, indent=2) + portalocker.unlock(instance_file) def register(pid, workspace, port, folder=None): @@ -91,12 +94,12 @@ def register(pid, workspace, port, folder=None): descriptor. """ - __rewriteInstanceFile([{"pid": pid, - "hostname": socket.gethostname(), - "workspace": workspace, - "port": port}], - [], - folder) + __rewrite_instance_file([{"pid": pid, + "hostname": socket.gethostname(), + "workspace": workspace, + "port": port}], + [], + folder) def unregister(pid, folder=None): @@ -105,7 +108,9 @@ def unregister(pid, folder=None): descriptor. """ - __rewriteInstanceFile([], [socket.gethostname() + ":" + str(pid)], folder) + __rewrite_instance_file([], + [socket.gethostname() + ":" + str(pid)], + folder) def get_instances(folder=None): @@ -113,7 +118,7 @@ def get_instances(folder=None): # This method does NOT write the descriptor file. 
- descriptor = __getInstanceDescriptorPath(folder) + descriptor = __get_instance_descriptor_path(folder) instances = load_json_or_empty(descriptor, {}, lock=True) - return [i for i in instances if __checkInstance(i['hostname'], i['pid'])] + return [i for i in instances if __check_instance(i['hostname'], i['pid'])] diff --git a/libcodechecker/util.py b/libcodechecker/util.py index a5211b8bc4..7de0dce9ce 100644 --- a/libcodechecker/util.py +++ b/libcodechecker/util.py @@ -13,21 +13,21 @@ import argparse import datetime import hashlib +import io import json import os -import portalocker import re import shutil import signal import socket import stat import subprocess -import sys import tempfile import uuid from threading import Timer +import portalocker import psutil from libcodechecker.logger import get_logger @@ -570,7 +570,7 @@ def load_json_or_empty(path, default=None, kind=None, lock=False): ret = default try: - with open(path, 'r') as handle: + with io.open(path, 'r') as handle: if lock: portalocker.lock(handle, portalocker.LOCK_SH) @@ -578,13 +578,18 @@ def load_json_or_empty(path, default=None, kind=None, lock=False): if lock: portalocker.unlock(handle) - except IOError as ex: - LOG.warning("Failed to open {0} file: {1}" - .format(kind if kind else 'json', path)) + except OSError as ex: + LOG.warning("Failed to open %s file: %s", + kind if kind else 'json', + path) LOG.warning(ex) except ValueError as ex: - LOG.warning("'{1}' is not a valid {0} file." - .format(kind if kind else 'json', path)) + LOG.warning("'%s' is not a valid %s file.", + kind if kind else 'json', + path) + LOG.warning(ex) + except TypeError as ex: + LOG.warning('Failed to process json file: %s', path) LOG.warning(ex) return ret diff --git a/tests/unit/test_buildcmd_escaping.py b/tests/unit/test_buildcmd_escaping.py index 2e0a957e9f..1fc549f778 100644 --- a/tests/unit/test_buildcmd_escaping.py +++ b/tests/unit/test_buildcmd_escaping.py @@ -74,8 +74,7 @@ def __get_cmp_json(self, buildcmd): "command": buildcmd + " -c " + self.src_file_path, "file": self.src_file_path} - compile_cmds = [compile_cmd] - return json.dumps(compile_cmds) + return [compile_cmd] def __get_comp_actions(self, compile_cmd): """ @@ -83,9 +82,8 @@ def __get_comp_actions(self, compile_cmd): to return the compilation actions. """ comp_cmd_json = self.__get_cmp_json(compile_cmd) - with closing(StringIO(comp_cmd_json)) as text: - return log_parser.parse_compile_commands_json(text, - ParseLogOptions()) + return log_parser.parse_compile_commands_json(comp_cmd_json, + ParseLogOptions()) def test_buildmgr(self): """ diff --git a/tests/unit/test_log_parser.py b/tests/unit/test_log_parser.py index e7b6394118..2a19ea9dba 100644 --- a/tests/unit/test_log_parser.py +++ b/tests/unit/test_log_parser.py @@ -150,26 +150,26 @@ def test_omit_preproc(self): """ Compiler preprocessor actions should be omitted. 
""" - preprocessor_actions = StringIO('''[ + preprocessor_actions = [ {"directory": "/tmp", - "command": "g++ /tmp/a.cpp -c /tmp/a.cpp", - "file": "/tmp/a.cpp" }, + "command": "g++ /tmp/a.cpp -c /tmp/a.cpp", + "file": "/tmp/a.cpp"}, {"directory": "/tmp", - "command": "g++ /tmp/a.cpp -E /tmp/a.cpp", - "file": "/tmp/a.cpp" }, + "command": "g++ /tmp/a.cpp -E /tmp/a.cpp", + "file": "/tmp/a.cpp"}, {"directory": "/tmp", - "command": "g++ /tmp/a.cpp -MT /tmp/a.cpp", - "file": "/tmp/a.cpp" }, + "command": "g++ /tmp/a.cpp -MT /tmp/a.cpp", + "file": "/tmp/a.cpp"}, {"directory": "/tmp", - "command": "g++ /tmp/a.cpp -MM /tmp/a.cpp", - "file": "/tmp/a.cpp" }, + "command": "g++ /tmp/a.cpp -MM /tmp/a.cpp", + "file": "/tmp/a.cpp"}, {"directory": "/tmp", - "command": "g++ /tmp/a.cpp -MF /tmp/a.cpp", - "file": "/tmp/a.cpp" }, + "command": "g++ /tmp/a.cpp -MF /tmp/a.cpp", + "file": "/tmp/a.cpp"}, {"directory": "/tmp", - "command": "g++ /tmp/a.cpp -M /tmp/a.cpp", - "file": "/tmp/a.cpp" }] - ''') + "command": "g++ /tmp/a.cpp -M /tmp/a.cpp", + "file": "/tmp/a.cpp"}] + build_actions = \ log_parser.parse_compile_commands_json(preprocessor_actions, ParseLogOptions()) @@ -182,11 +182,10 @@ def test_keep_compile_and_dep(self): """ Keep the compile command if -MD is set. Dependency generation is done as a side effect of the compilation. """ - preprocessor_actions = StringIO('''[ + preprocessor_actions = [ {"directory": "/tmp", - "command": "g++ /tmp/a.cpp -MD /tmp/a.cpp", - "file": "/tmp/a.cpp" }] - ''') + "command": "g++ /tmp/a.cpp -MD /tmp/a.cpp", + "file": "/tmp/a.cpp"}] build_actions = \ log_parser.parse_compile_commands_json(preprocessor_actions, @@ -197,14 +196,13 @@ def test_keep_compile_and_dep(self): def test_omit_dep_with_e(self): """ Skip the compile command if -MD is set together with -E. """ - preprocessor_actions = StringIO('''[ + preprocessor_actions = [ {"directory": "/tmp", - "command": "g++ /tmp/a.cpp -MD -E /tmp/a.cpp", - "file": "/tmp/a.cpp" }, + "command": "g++ /tmp/a.cpp -MD -E /tmp/a.cpp", + "file": "/tmp/a.cpp"}, {"directory": "/tmp", - "command": "g++ /tmp/a.cpp -E -MD /tmp/a.cpp", - "file": "/tmp/a.cpp" } ] - ''') + "command": "g++ /tmp/a.cpp -E -MD /tmp/a.cpp", + "file": "/tmp/a.cpp"}] build_actions = \ log_parser.parse_compile_commands_json(preprocessor_actions,