Handle invalid json files #1695

Merged (2 commits, Sep 5, 2018)
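Every hunk below swaps a bare `json.load`/`json.loads` call for a single `load_json_or_empty` helper in `libcodechecker.util`. The helper's implementation is not part of this diff, so the following is only a sketch of the contract the call sites rely on; the exact signature, log messages, and error handling are assumptions:

```python
import json
import logging

LOG = logging.getLogger('system')


def load_json_or_empty(path, default=None):
    """Return the parsed content of a JSON file, or `default` if the
    file is missing or not valid JSON.

    Sketch only: the real helper lives in libcodechecker.util and is
    outside this diff.
    """
    ret = default
    try:
        with open(path, 'r') as handle:
            ret = json.loads(handle.read())
    except IOError as ex:
        # File is missing or unreadable.
        LOG.warning('Failed to open json file: %s', path)
        LOG.warning(ex)
    except ValueError as ex:
        # File exists but is not valid JSON.
        LOG.warning('%s is not a valid json file.', path)
        LOG.warning(ex)
    return ret
```

With a contract like this, each caller chooses its failure behaviour through the `default` argument, which is the pattern the diff applies at every call site.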
42 changes: 20 additions & 22 deletions libcodechecker/analyze/log_parser.py

```diff
@@ -25,6 +25,7 @@
 from libcodechecker.log import build_action
 from libcodechecker.log import option_parser
 from libcodechecker.logger import get_logger
+from libcodechecker.util import load_json_or_empty

 LOG = get_logger('buildlogger')

@@ -142,16 +143,15 @@ def dump_compiler_info(output_path, filename, data):
     filename = os.path.join(output_path, filename)
     all_data = dict()
     if os.path.exists(filename):
-        with open(filename, 'r') as f:
-            all_data = json.load(f)
+        all_data = load_json_or_empty(filename)

     all_data.update(data)
     with open(filename, 'w') as f:
         f.write(json.dumps(all_data))


 def load_compiler_info(filename, compiler):
-    with open(filename, 'r') as f:
-        data = json.load(f)
+    data = load_json_or_empty(filename, {})
     value = data.get(compiler)
     if value is None:
         LOG.error("Could not find compiler %s in file %s" %

@@ -224,9 +224,9 @@ def remove_file_if_exists(filename):
     os.remove(filename)


-def parse_compile_commands_json(logfile, parseLogOptions):
+def parse_compile_commands_json(log_data, parseLogOptions):
     """
-    logfile: is a compile command json
+    log_data: content of a compile command json.
     """

     output_path = parseLogOptions.output_path

@@ -239,13 +239,11 @@ def parse_compile_commands_json(logfile, parseLogOptions):
     actions = []
     filtered_build_actions = {}

-    data = json.load(logfile)
-
     compiler_includes = {}
     compiler_target = {}

     counter = 0
-    for entry in data:
+    for entry in log_data:
         sourcefile = entry['file']

         if not os.path.isabs(sourcefile):

@@ -359,22 +357,22 @@


 def parse_log(logfilepath, parseLogOptions):
-    '''
+    """
     logfilepath: the compile command json file which should be parsed.
-    '''
+    """
     LOG.debug('Parsing log file: ' + logfilepath)

-    with open(logfilepath) as logfile:
-        try:
-            actions = parse_compile_commands_json(logfile, parseLogOptions)
-        except (ValueError, KeyError, TypeError) as ex:
-            if os.stat(logfilepath).st_size == 0:
-                LOG.error('The compile database is empty.')
-            else:
-                LOG.error('The compile database is not valid.')
-            LOG.debug(traceback.format_exc())
-            LOG.debug(ex)
-            sys.exit(1)
+    try:
+        data = load_json_or_empty(logfilepath, {})
+        actions = parse_compile_commands_json(data, parseLogOptions)
+    except (ValueError, KeyError, TypeError) as ex:
+        if os.stat(logfilepath).st_size == 0:
+            LOG.error('The compile database is empty.')
+        else:
+            LOG.error('The compile database is not valid.')
+        LOG.debug(traceback.format_exc())
+        LOG.debug(ex)
+        sys.exit(1)

     LOG.debug('Parsing log file done.')
```
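Two calling conventions recur throughout the PR. Call sites that can tolerate a missing or broken file pass `{}` so that dictionary lookups stay safe; call sites that cannot continue pass no default and treat a falsy result as fatal. A condensed sketch of both patterns, reusing the helper sketch above (file names here are illustrative):

```python
import sys

# Tolerant call site (load_compiler_info above): a {} default keeps
# .get() safe even when the file was missing or malformed.
data = load_json_or_empty('compiler_info.json', {})
value = data.get('g++')  # None if absent, but never a crash

# Strict call site (get_context in generic_package_context.py below):
# no default, so a missing or invalid file yields a falsy value that
# the caller turns into an exit.
cfg_dict = load_json_or_empty('config/config.json')
if not cfg_dict:
    sys.exit(1)
```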
92 changes: 40 additions & 52 deletions libcodechecker/generic_package_context.py

```diff
@@ -12,14 +12,14 @@
 from __future__ import absolute_import

 from collections import Mapping
-import json
 import os
 import sys

 from libcodechecker import db_version
 from libcodechecker import logger
 # TODO: Refers subpackage library
 from libcodechecker.analyze.analyzers import analyzer_types
+from libcodechecker.util import load_json_or_empty

 LOG = logger.get_logger('system')

@@ -89,13 +89,8 @@ def set_env(self, env_vars):

         self._codechecker_workspace = os.environ.get('codechecker_workspace')

-        try:
-            with open(self.checkers_severity_map_file) as severity_file:
-                self._severity_map = SeverityMap(json.load(severity_file))
-        except (IOError, ValueError):
-            LOG.warning("{0} doesn't exist or not JSON format. Severity "
-                        "levels will not be available!"
-                        .format(self.checkers_severity_map_file))
+        self._severity_map = SeverityMap(
+            load_json_or_empty(self.checkers_severity_map_file, {}))

         # Get generic package specific environment variables.
         self.logger_bin = os.environ.get(env_vars['cc_logger_bin'])

@@ -108,49 +103,38 @@ def __set_version(self):
         """
         Get the package version from the version config file.
         """
-        try:
-            with open(self.version_file, 'r') as vfile:
-                vfile_data = json.loads(vfile.read())
-
-            package_version = vfile_data['version']
-            package_build_date = vfile_data['package_build_date']
-            package_git_hash = vfile_data['git_hash']
-            package_git_tag = vfile_data['git_describe']['tag']
-            package_git_dirtytag = vfile_data['git_describe']['dirty']
-            product_database_version = vfile_data['product_db_version']
-            run_database_version = vfile_data['run_db_version']
-
-            self.__package_version = package_version['major'] + '.' + \
-                package_version['minor'] + '.' + \
-                package_version['revision']
-            self.__product_db_version_info = db_version.DBVersionInfo(
-                product_database_version['major'],
-                product_database_version['minor'])
-            self.__run_db_version_info = db_version.DBVersionInfo(
-                run_database_version['major'],
-                run_database_version['minor'])
-
-            self.__package_build_date = package_build_date
-            self.__package_git_hash = package_git_hash
-
-            self.__package_git_tag = package_git_tag
-            if (LOG.getEffectiveLevel() == logger.DEBUG or
-                    LOG.getEffectiveLevel() ==
-                    logger.DEBUG_ANALYZER):
-                self.__package_git_tag = package_git_dirtytag
-
-        except ValueError as verr:
-            # db_version is required to know if the db schema is compatible.
-            LOG.error('Failed to get version info from the version file.')
-            LOG.error(verr)
-            sys.exit(1)
-
-        except IOError as ioerr:
-            LOG.error('Failed to read version config file: ' +
-                      self.version_file)
-            LOG.error(ioerr)
-            sys.exit(1)
+        vfile_data = load_json_or_empty(self.version_file)
+
+        if not vfile_data:
+            sys.exit(1)
+
+        package_version = vfile_data['version']
+        package_build_date = vfile_data['package_build_date']
+        package_git_hash = vfile_data['git_hash']
+        package_git_tag = vfile_data['git_describe']['tag']
+        package_git_dirtytag = vfile_data['git_describe']['dirty']
+        product_database_version = vfile_data['product_db_version']
+        run_database_version = vfile_data['run_db_version']
+
+        self.__package_version = package_version['major'] + '.' + \
+            package_version['minor'] + '.' + \
+            package_version['revision']
+        self.__product_db_version_info = db_version.DBVersionInfo(
+            product_database_version['major'],
+            product_database_version['minor'])
+        self.__run_db_version_info = db_version.DBVersionInfo(
+            run_database_version['major'],
+            run_database_version['minor'])
+
+        self.__package_build_date = package_build_date
+        self.__package_git_hash = package_git_hash
+
+        self.__package_git_tag = package_git_tag
+        if (LOG.getEffectiveLevel() == logger.DEBUG or
+                LOG.getEffectiveLevel() ==
+                logger.DEBUG_ANALYZER):
+            self.__package_git_tag = package_git_dirtytag

     def __populate_analyzers(self):
         compiler_binaries = self.pckg_layout.get('analyzers')
         if not compiler_binaries:

@@ -371,8 +355,10 @@ def get_context():

     pckg_config_file = os.path.join(package_root, "config", "config.json")
     LOG.debug('Reading config: ' + pckg_config_file)
-    with open(pckg_config_file, 'r') as cfg:
-        cfg_dict = json.loads(cfg.read())
+    cfg_dict = load_json_or_empty(pckg_config_file)
+
+    if not cfg_dict:
+        sys.exit(1)

     LOG.debug(cfg_dict)

@@ -381,8 +367,10 @@ def get_context():
     layout_cfg_file = os.path.join(package_root, "config",
                                    "package_layout.json")
     LOG.debug(layout_cfg_file)
-    with open(layout_cfg_file, 'r') as lcfg:
-        lcfg_dict = json.loads(lcfg.read())
+    lcfg_dict = load_json_or_empty(layout_cfg_file)
+
+    if not lcfg_dict:
+        sys.exit(1)

     # Merge static and runtime layout.
     layout_config = lcfg_dict['static'].copy()
```
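For reference, the keys the rewritten `__set_version` reads imply a version file of roughly the following shape. The values here are made up; the real content is whatever `self.version_file` points to:

```python
# Illustrative only: invented values matching the keys __set_version
# reads from the parsed version file.
vfile_data = {
    'version': {'major': '6', 'minor': '8', 'revision': '0'},
    'package_build_date': '2018-09-05 12:00',
    'git_hash': '1234abcd',
    'git_describe': {'tag': 'v6.8.0', 'dirty': 'v6.8.0-dirty'},
    'product_db_version': {'major': 1, 'minor': 0},
    'run_db_version': {'major': 1, 'minor': 0},
}
```

Note the behavioural trade in this hunk: the old code reported unreadable and unparsable files with distinct error messages, while the rewrite collapses both into a single falsy check before `sys.exit(1)`; a file that parses but lacks one of these keys still raises `KeyError`, as before.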
10 changes: 5 additions & 5 deletions libcodechecker/libhandlers/analyze.py

```diff
@@ -22,7 +22,8 @@
 from libcodechecker.analyze import analyzer
 from libcodechecker.analyze import log_parser
 from libcodechecker.analyze.analyzers import analyzer_types
-from libcodechecker.util import RawDescriptionDefaultHelpFormatter
+from libcodechecker.util import RawDescriptionDefaultHelpFormatter, \
+    load_json_or_empty

 LOG = logger.get_logger('system')

@@ -488,10 +489,9 @@ def main(args):
     # Update metadata dictionary with old values.
     metadata_file = os.path.join(args.output_path, 'metadata.json')
     if os.path.exists(metadata_file):
-        with open(metadata_file, 'r') as data:
-            metadata_prev = json.load(data)
-        metadata['result_source_files'] =\
-            metadata_prev['result_source_files']
+        metadata_prev = load_json_or_empty(metadata_file)
+        metadata['result_source_files'] = \
+            metadata_prev['result_source_files']

     analyzer.perform_analysis(args, context, actions, metadata)
```
5 changes: 2 additions & 3 deletions libcodechecker/libhandlers/parse.py

```diff
@@ -323,9 +323,8 @@ def skip_html_report_data_handler(report_hash, source_file, report_line,
     elif os.path.isdir(input_path):
         metadata_file = os.path.join(input_path, "metadata.json")
         if os.path.exists(metadata_file):
-            with open(metadata_file, 'r') as metadata:
-                metadata_dict = json.load(metadata)
-                LOG.debug(metadata_dict)
+            metadata_dict = util.load_json_or_empty(metadata_file)
+            LOG.debug(metadata_dict)

             if 'working_directory' in metadata_dict:
                 working_dir = metadata_dict['working_directory']
```
5 changes: 2 additions & 3 deletions libcodechecker/libhandlers/server.py

```diff
@@ -605,7 +605,7 @@ def __instance_management(args):
     head = ['Workspace', 'Computer host', 'View port']

     rows = []
-    for instance in instance_manager.get_instances():
+    for instance in instances:
         if not instances_on_multiple_hosts:
             rows.append((instance['workspace'], str(instance['port'])))
         else:

@@ -825,8 +825,7 @@ def server_init_start(args):
     checker_md_docs = os.path.join(context.doc_root, 'checker_md_docs')
     checker_md_docs_map = os.path.join(checker_md_docs,
                                        'checker_doc_map.json')
-    with open(checker_md_docs_map, 'r') as dFile:
-        checker_md_docs_map = json.load(dFile)
+    checker_md_docs_map = util.load_json_or_empty(checker_md_docs_map, {})

     package_data = {'www_root': context.www_root,
                     'doc_root': context.doc_root,
```
3 changes: 1 addition & 2 deletions libcodechecker/libhandlers/store.py

```diff
@@ -159,8 +159,7 @@ def __get_run_name(input_list):
     for input_path in input_list:
         metafile = os.path.join(input_path, "metadata.json")
         if os.path.isdir(input_path) and os.path.exists(metafile):
-            with open(metafile, 'r') as metadata:
-                metajson = json.load(metadata)
+            metajson = util.load_json_or_empty(metafile)

             if 'name' in metajson:
                 names.append(metajson['name'])
```
2 changes: 1 addition & 1 deletion libcodechecker/logger.py

```diff
@@ -48,7 +48,7 @@ def debug_analyzer(self, msg, *args, **kwargs):
 DEFAULT_LOG_CFG_FILE = os.path.join(package_root, 'config', 'logger.conf')


-# Default config wich can be used if reading log config from a
+# Default config which can be used if reading log config from a
 # file fails.
 DEFAULT_LOG_CONFIG = '''{
     "version": 1,
```
4 changes: 2 additions & 2 deletions libcodechecker/server/api/report_server.py

```diff
@@ -2251,8 +2251,8 @@ def massStoreRun(self, name, tag, version, b64zip, force,
                 LOG.error("Failed to open skip file")
                 LOG.error(err)

-        with open(content_hash_file) as chash_file:
-            filename_to_hash = json.load(chash_file)
+        filename_to_hash = util.load_json_or_empty(content_hash_file,
+                                                   {})

         file_path_to_id = self.__store_source_files(source_root,
                                                     filename_to_hash,
```
4 changes: 2 additions & 2 deletions libcodechecker/server/api/store_handler.py

```diff
@@ -26,6 +26,7 @@
 # TODO: This is a cross-subpackage import.
 from libcodechecker.server.database.run_db_model import BugPathEvent, \
     BugReportPoint, File, Run, RunHistory, Report, FileContent
+from libcodechecker.util import load_json_or_empty

 LOG = get_logger('system')

@@ -38,8 +39,7 @@ def metadata_info(metadata_file):
     if not os.path.isfile(metadata_file):
         return check_commands, check_durations

-    with open(metadata_file, 'r') as metadata:
-        metadata_dict = json.load(metadata)
+    metadata_dict = load_json_or_empty(metadata_file, {})

     if 'command' in metadata_dict:
         check_commands.append(metadata_dict['command'])
```
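A quick way to exercise the PR's stated goal (handling invalid JSON files) end to end, reusing the `load_json_or_empty` sketch from the top of this page; the temp-file setup is purely illustrative:

```python
import json
import os
import tempfile

# A valid file round-trips.
fd, path = tempfile.mkstemp(suffix='.json')
with os.fdopen(fd, 'w') as f:
    f.write(json.dumps({'name': 'myrun'}))
assert load_json_or_empty(path) == {'name': 'myrun'}

# An invalid file falls back to the caller-supplied default
# instead of raising ValueError.
with open(path, 'w') as f:
    f.write('{not json')
assert load_json_or_empty(path, {}) == {}

# A missing file behaves the same way.
os.remove(path)
assert load_json_or_empty(path, {}) == {}
```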