diff --git a/libcodechecker/analyze/analyzers/analyzer_clang_tidy.py b/libcodechecker/analyze/analyzers/analyzer_clang_tidy.py
index b5b26443e5..f575342d45 100644
--- a/libcodechecker/analyze/analyzers/analyzer_clang_tidy.py
+++ b/libcodechecker/analyze/analyzers/analyzer_clang_tidy.py
@@ -139,6 +139,11 @@ def construct_analyzer_cmd(self, result_handler):

             analyzer_cmd.extend(self.buildaction.compiler_includes)

+            if not next((x for x in analyzer_cmd if x.startswith('-std=') or
+                         x.startswith('--std')),
+                        False):
+                analyzer_cmd.append(self.buildaction.compiler_standard)
+
             analyzer_cmd.extend(compiler_warnings)

             return analyzer_cmd
diff --git a/libcodechecker/analyze/analyzers/analyzer_clangsa.py b/libcodechecker/analyze/analyzers/analyzer_clangsa.py
index 0d25513301..028aa48e2a 100644
--- a/libcodechecker/analyze/analyzers/analyzer_clangsa.py
+++ b/libcodechecker/analyze/analyzers/analyzer_clangsa.py
@@ -212,6 +212,11 @@ def construct_analyzer_cmd(self, result_handler):

             analyzer_cmd.extend(self.buildaction.compiler_includes)

+            if not next((x for x in analyzer_cmd if x.startswith('-std=') or
+                         x.startswith('--std')),
+                        False):
+                analyzer_cmd.append(self.buildaction.compiler_standard)
+
             analyzer_cmd.append(self.source_file)

             return analyzer_cmd
diff --git a/libcodechecker/analyze/log_parser.py b/libcodechecker/analyze/log_parser.py
index 68719a5130..70eb419ce0 100644
--- a/libcodechecker/analyze/log_parser.py
+++ b/libcodechecker/analyze/log_parser.py
@@ -18,6 +18,7 @@
 import shlex
 import subprocess
 import sys
+import tempfile
 import traceback

 # TODO: This is a cross-subpackage import!
@@ -37,8 +38,12 @@

                                       ['^-m(32|64)', '^-std=.*'])

-compiler_includes_dump_file = "compiler_includes.json"
-compiler_target_dump_file = "compiler_target.json"
+compiler_info_dump_file = "compiler_info.json"
+
+
+def remove_file_if_exists(filename):
+    if os.path.isfile(filename):
+        os.remove(filename)


 def get_compiler_err(cmd):
@@ -139,24 +144,24 @@ def parse_compiler_target(lines):
     return target


-def dump_compiler_info(output_path, filename, data):
-    filename = os.path.join(output_path, filename)
+def dump_compiler_info(filename, compiler, attr, data):
     all_data = dict()
     if os.path.exists(filename):
         all_data = load_json_or_empty(filename)
-
-    all_data.update(data)
+    if compiler not in all_data:
+        all_data[compiler] = dict()
+    all_data[compiler].update({attr: data})
     with open(filename, 'w') as f:
-        f.write(json.dumps(all_data))
+        json.dump(all_data, f)


-def load_compiler_info(filename, compiler):
+def load_compiler_info(filename, compiler, attr):
     data = load_json_or_empty(filename, {})
     value = data.get(compiler)
     if value is None:
         LOG.error("Could not find compiler %s in file %s"
                   % (compiler, filename))
-    return value
+    return value.get(attr) if isinstance(value, dict) else value


 def get_compiler_includes(parseLogOptions, compiler, lang, compile_opts,
@@ -188,13 +193,15 @@ def get_compiler_includes(parseLogOptions, compiler, lang, compile_opts,

         err = get_compiler_err(cmd)
     else:
         err = load_compiler_info(parseLogOptions.compiler_includes_file,
-                                 compiler)
+                                 compiler,
+                                 'includes')

     if parseLogOptions.output_path is not None:
         LOG.debug("Dumping default includes " + compiler)
-        dump_compiler_info(parseLogOptions.output_path,
-                           compiler_includes_dump_file,
-                           {compiler: err})
+        dump_compiler_info(compiler_info_dump_file,
+                           compiler,
+                           'includes',
+                           err)
     return prepend_isystem_and_normalize(
         filter_compiler_includes(parse_compiler_includes(err)))
@@ -210,18 +217,103 @@ def get_compiler_target(parseLogOptions, compiler):
         err = get_compiler_err(cmd)
     else:
         err = load_compiler_info(parseLogOptions.compiler_target_file,
-                                 compiler)
+                                 compiler,
+                                 'target')

     if parseLogOptions.output_path is not None:
-        dump_compiler_info(parseLogOptions.output_path,
-                           compiler_target_dump_file,
-                           {compiler: err})
+        dump_compiler_info(compiler_info_dump_file,
+                           compiler,
+                           'target',
+                           err)
     return parse_compiler_target(err)


-def remove_file_if_exists(filename):
-    if os.path.isfile(filename):
-        os.remove(filename)
+def get_compiler_standard(parseLogOptions, compiler, lang):
+    """
+    Returns the default compiler standard of the given compiler. The standard
+    is determined by the values of __STDC_VERSION__ and __cplusplus predefined
+    macros. These values are integers indicating the date of the standard.
+    However, GCC supports a GNU extension for each standard. For sake of
+    generality we return the GNU extended standard, since it should be a
+    superset of the non-extended one, thus applicable in a more general manner.
+    """
+    VERSION_C = u"""
+#ifdef __STDC_VERSION__
+#  if __STDC_VERSION__ >= 201710L
+#    error CC_FOUND_STANDARD_VER#17
+#  elif __STDC_VERSION__ >= 201112L
+#    error CC_FOUND_STANDARD_VER#11
+#  elif __STDC_VERSION__ >= 199901L
+#    error CC_FOUND_STANDARD_VER#99
+#  elif __STDC_VERSION__ >= 199409L
+#    error CC_FOUND_STANDARD_VER#94
+#  else
+#    error CC_FOUND_STANDARD_VER#90
+#  endif
+#else
+#  error CC_FOUND_STANDARD_VER#90
+#endif
+    """
+
+    VERSION_CPP = u"""
+#ifdef __cplusplus
+#  if __cplusplus >= 201703L
+#    error CC_FOUND_STANDARD_VER#17
+#  elif __cplusplus >= 201402L
+#    error CC_FOUND_STANDARD_VER#14
+#  elif __cplusplus >= 201103L
+#    error CC_FOUND_STANDARD_VER#11
+#  elif __cplusplus >= 199711L
+#    error CC_FOUND_STANDARD_VER#98
+#  else
+#    error CC_FOUND_STANDARD_VER#98
+#  endif
+#else
+#  error CC_FOUND_STANDARD_VER#98
+#endif
+    """
+
+    standard = ""
+    if parseLogOptions.compiler_info_file is None:
+        with tempfile.NamedTemporaryFile(
+                suffix=('.c' if lang == 'c' else '.cpp')) as source:
+
+            with source.file as f:
+                f.write(VERSION_C if lang == 'c' else VERSION_CPP)
+
+            try:
+                proc = subprocess.Popen([compiler, source.name],
+                                        stdout=subprocess.PIPE,
+                                        stderr=subprocess.PIPE)
+                _, err = proc.communicate()  # Wait for execution.
+
+                finding = re.search('CC_FOUND_STANDARD_VER#(.+)', err)
+
+                if finding:
+                    standard = finding.group(1)
+            except OSError:
+                LOG.error("Error during the compilation of compiler standard "
+                          "detector.")
+
+        if standard:
+            if standard == '94':
+                # Special case for C94 standard.
+                standard = '-std=iso9899:199409'
+            else:
+                standard = \
+                    '-std=gnu' + ('' if lang == 'c' else '++') + standard
+    else:
+        standard = load_compiler_info(parseLogOptions.compiler_info_file,
+                                      compiler,
+                                      'default_standard')
+
+    if parseLogOptions.output_path is not None and standard:
+        dump_compiler_info(compiler_info_dump_file,
+                           compiler,
+                           'default_standard',
+                           standard)
+
+    return standard


 def parse_compile_commands_json(log_data, parseLogOptions):
@@ -231,16 +323,17 @@

     output_path = parseLogOptions.output_path
     if output_path is not None:
-        remove_file_if_exists(os.path.join(output_path,
-                                           compiler_includes_dump_file))
-        remove_file_if_exists(os.path.join(output_path,
-                                           compiler_target_dump_file))
+        global compiler_info_dump_file
+        compiler_info_dump_file = os.path.join(output_path,
+                                               compiler_info_dump_file)
+        remove_file_if_exists(compiler_info_dump_file)

     actions = []
     filtered_build_actions = {}

     compiler_includes = {}
     compiler_target = {}
+    compiler_standard = {}

     counter = 0
     for entry in log_data:
@@ -334,7 +427,13 @@
                 compiler_target[results.compiler] = \
                     get_compiler_target(parseLogOptions, results.compiler)

+            if not (results.compiler in compiler_standard):
+                compiler_standard[results.compiler] = \
+                    get_compiler_standard(parseLogOptions, results.compiler,
+                                          results.lang)
+
             action.compiler_includes = compiler_includes[results.compiler]
+            action.compiler_standard = compiler_standard[results.compiler]
             action.target = compiler_target[results.compiler]

             if results.action != option_parser.ActionType.COMPILE:
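
Note (illustration, not part of the patch): the new get_compiler_standard() above never asks the compiler for its default standard directly. It compiles a throwaway file whose only purpose is to trigger an #error carrying the value of __STDC_VERSION__ or __cplusplus, then scans the compiler's stderr for the CC_FOUND_STANDARD_VER# marker. A minimal standalone sketch of the same trick, assuming a gcc binary on PATH and keeping only the C90/C99/C11/C17 cut-offs:

import re
import subprocess
import tempfile

# Throwaway source: the #error text smuggles the detected version out of the
# compiler. The cut-off values mirror the VERSION_C snippet in the patch.
SNIPPET = u"""
#ifdef __STDC_VERSION__
#  if __STDC_VERSION__ >= 201710L
#    error CC_FOUND_STANDARD_VER#17
#  elif __STDC_VERSION__ >= 201112L
#    error CC_FOUND_STANDARD_VER#11
#  elif __STDC_VERSION__ >= 199901L
#    error CC_FOUND_STANDARD_VER#99
#  else
#    error CC_FOUND_STANDARD_VER#90
#  endif
#else
#  error CC_FOUND_STANDARD_VER#90
#endif
"""

with tempfile.NamedTemporaryFile(suffix='.c') as source:
    source.write(SNIPPET.encode('utf-8'))
    source.flush()

    # The compilation is expected to fail; the marker travels in the
    # diagnostics printed on stderr.
    proc = subprocess.Popen(['gcc', source.name],
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    _, err = proc.communicate()

    found = re.search(r'CC_FOUND_STANDARD_VER#(\d+)',
                      err.decode('utf-8', 'ignore'))
    if found:
        print('-std=gnu' + found.group(1))  # e.g. -std=gnu17 on recent GCC

The -std=gnu<ver> spelling produced here is what the analyzer command builders above append whenever the original build command carried no -std=/--std flag of its own.
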
diff --git a/libcodechecker/generic_package_context.py b/libcodechecker/generic_package_context.py
index 9c3aa84fb5..fb5d44188b 100644
--- a/libcodechecker/generic_package_context.py
+++ b/libcodechecker/generic_package_context.py
@@ -233,6 +233,11 @@ def path_plist_to_html_dist(self):
         return os.path.join(self.package_root,
                             self.pckg_layout['plist_to_html_dist_path'])

+    @property
+    def path_standard_detector(self):
+        return os.path.join(self.package_root,
+                            self.pckg_layout['standard_detector_path'])
+
     @property
     def compiler_resource_dir(self):
         resource_dir = self.pckg_layout.get('compiler_resource_dir')
diff --git a/libcodechecker/libhandlers/analyze.py b/libcodechecker/libhandlers/analyze.py
index 1bec20e053..2e1718bdab 100644
--- a/libcodechecker/libhandlers/analyze.py
+++ b/libcodechecker/libhandlers/analyze.py
@@ -128,16 +128,24 @@ def add_arguments_to_parser(parser):
                         dest="compiler_includes_file",
                         required=False,
                         default=None,
-                        help="Read the compiler includes from the specified "
-                             "file rather than invoke the compiler "
+                        help="DEPRECATED. Read the compiler includes from the "
+                             "specified file rather than invoke the compiler "
                              "executable.")

     parser.add_argument('--compiler-target-file',
                         dest="compiler_target_file",
                         required=False,
                         default=None,
-                        help="Read the compiler target from the specified "
-                             "file rather than invoke the compiler "
+                        help="DEPRECATED. Read the compiler target from the "
+                             "specified file rather than invoke the compiler "
+                             "executable.")
+
+    parser.add_argument('--compiler-info-file',
+                        dest="compiler_info_file",
+                        required=False,
+                        default=None,
+                        help="Read the compiler includes and target from the "
+                             "specified file rather than invoke the compiler "
                              "executable.")

     parser.add_argument('-t', '--type', '--output-format',
@@ -445,12 +453,19 @@ def __init__(self, args=None):
             self.output_path = None
             self.compiler_includes_file = None
             self.compiler_target_file = None
+            self.compiler_info_file = None
         else:
             self.output_path = getattr(args, 'output_path', None)
             self.compiler_includes_file =\
                 getattr(args, 'compiler_includes_file', None)
             self.compiler_target_file =\
                 getattr(args, 'compiler_target_file', None)
+            self.compiler_info_file =\
+                getattr(args, 'compiler_info_file', None)
+
+            if self.compiler_info_file:
+                self.compiler_includes_file = args.compiler_info_file
+                self.compiler_target_file = args.compiler_info_file


 def main(args):
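
For reference (illustrative, not part of the patch), the merged compiler_info.json written into the output directory is keyed by compiler binary, with one entry per recorded attribute. The values below are shortened placeholders; the real includes and target entries hold the verbatim compiler output that the parsers above consume:

# Hypothetical shape of <output_path>/compiler_info.json as written by
# dump_compiler_info(); attribute values are truncated placeholders.
compiler_info = {
    "g++": {
        "includes": "#include <...> search starts here:\n /usr/include\n ...",
        "target": "Target: x86_64-linux-gnu\nConfigured with: ...",
        "default_standard": "-std=gnu++14"
    },
    "clang++": {
        "includes": "#include <...> search starts here:\n /usr/include\n ...",
        "target": "Target: x86_64-unknown-linux-gnu\n...",
        "default_standard": "-std=gnu++14"
    }
}

The deprecated --compiler-includes-file and --compiler-target-file options keep working because ParseLogOptions simply points both of them at the value of --compiler-info-file when it is given, and load_compiler_info() falls back to the old flat {compiler: output} layout whenever the stored value is not a dict.
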
diff --git a/tests/functional/analyze/test_analyze.py b/tests/functional/analyze/test_analyze.py
index 602f2a9491..49387bbb90 100644
--- a/tests/functional/analyze/test_analyze.py
+++ b/tests/functional/analyze/test_analyze.py
@@ -105,11 +105,11 @@ def test_compiler_info_files(self):

         # Create a compilation database.
         build_log = [{"directory": self.test_workspace,
-                      "command": "gcc -c " + source_file,
+                      "command": "g++ -c " + source_file,
                       "file": source_file
                       },
                      {"directory": self.test_workspace,
-                      "command": "clang -c " + source_file,
+                      "command": "clang++ -c " + source_file,
                       "file": source_file
                       }
                      ]
@@ -139,35 +139,21 @@ def test_compiler_info_files(self):
         errcode = process.returncode
         self.assertEquals(errcode, 0)

-        from libcodechecker.analyze.log_parser import\
-            compiler_includes_dump_file
-        from libcodechecker.analyze.log_parser import compiler_target_dump_file
-        includes_File = os.path.join(reports_dir, compiler_includes_dump_file)
-        target_File = os.path.join(reports_dir, compiler_target_dump_file)
-        self.assertEquals(os.path.exists(includes_File), True)
-        self.assertEquals(os.path.exists(target_File), True)
-        self.assertNotEqual(os.stat(includes_File).st_size, 0)
-        self.assertNotEqual(os.stat(target_File).st_size, 0)
+        from libcodechecker.analyze.log_parser import compiler_info_dump_file
+        info_File = os.path.join(reports_dir, compiler_info_dump_file)
+        self.assertEquals(os.path.exists(info_File), True)
+        self.assertNotEqual(os.stat(info_File).st_size, 0)

         # Test the validity of the json files.
-        with open(includes_File, 'r') as f:
+        with open(info_File, 'r') as f:
             try:
                 data = json.load(f)
                 self.assertEquals(len(data), 2)
-                self.assertTrue("clang" in data)
-                self.assertTrue("gcc" in data)
+                self.assertTrue("clang++" in data)
+                self.assertTrue("g++" in data)
             except ValueError:
                 self.fail("json.load should successfully parse the file %s"
-                          % includes_File)
-        with open(target_File, 'r') as f:
-            try:
-                data = json.load(f)
-                self.assertEquals(len(data), 2)
-                self.assertTrue("clang" in data)
-                self.assertTrue("gcc" in data)
-            except ValueError:
-                self.fail("json.load should successfully parse the file %s"
-                          % target_File)
+                          % info_File)

     def test_compiler_includes_file_is_loaded(self):
         '''
@@ -183,7 +169,7 @@ def test_compiler_includes_file_is_loaded(self):
         # Contents of build log.
         build_log = [
             {"directory": self.test_workspace,
-             "command": "clang -c " + source_file,
+             "command": "clang++ -c " + source_file,
              "file": source_file
              }
         ]
@@ -199,7 +185,7 @@
         with open(compiler_includes_file, 'w') as source:
             source.write(
                 # Raw string literal, cannot break the line:
-                r"""{"clang": "\"\n#include \"...\" search starts here:\n"""\
+                r"""{"clang++": "\"\n#include \"...\" search starts here:\n"""\
                 r"""#include <...> search starts here:\n"""\
                 r""" /TEST_FAKE_INCLUDE_DIR"}"""
             )
@@ -231,7 +217,7 @@ def test_compiler_target_file_is_loaded(self):
         # Contents of build log.
         build_log = [
             {"directory": self.test_workspace,
-             "command": "clang -c " + source_file,
+             "command": "clang++ -c " + source_file,
              "file": source_file
              }
         ]
@@ -247,7 +233,7 @@
         with open(compiler_target_file, 'w') as source:
             source.write(
                 # Raw string literal, cannot break the line:
-                r"""{"clang": "Target: TEST_FAKE_TARGET\nConfigured with"}"""
+                r"""{"clang++": "Target: TEST_FAKE_TARGET\nConfigured with"}"""
             )

         # Create analyze command.
@@ -411,7 +397,7 @@ def test_incremental_analyze(self):

         # Create a compilation database.
         build_log = [{"directory": self.test_workspace,
-                      "command": "gcc -c " + source_file,
+                      "command": "g++ -c " + source_file,
                       "file": source_file
                       }]

diff --git a/tests/functional/storage_of_analysis_statistics/test_storage_of_analysis_statistics.py b/tests/functional/storage_of_analysis_statistics/test_storage_of_analysis_statistics.py
index 71bdb19c42..78111adcf2 100644
--- a/tests/functional/storage_of_analysis_statistics/test_storage_of_analysis_statistics.py
+++ b/tests/functional/storage_of_analysis_statistics/test_storage_of_analysis_statistics.py
@@ -158,8 +158,7 @@ def _check_analyzer_statistics_zip(self):

         # Check that analyzer files exist in the uploaded zip.
         analyzer_files = ['compile_cmd.json',
-                          'compiler_includes.json',
-                          'compiler_target.json',
+                          'compiler_info.json',
                           'metadata.json']
         for analyzer_file in analyzer_files:
             orig_file = os.path.join(self._reports_dir, analyzer_file)
diff --git a/www/changelog.html b/www/changelog.html
index 5d988412eb..3d1c5a8a65 100644
--- a/www/changelog.html
+++ b/www/changelog.html
@@ -138,6 +138,18 @@
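
A sketch of how the new option is meant to be exercised (paths and the compilation database name are illustrative): a first CodeChecker analyze run with an output directory records the detected includes, target and default standard into compiler_info.json next to the reports, and later runs, for example on a machine where the original compilers are not installed, can replay that file instead of invoking them.

    CodeChecker analyze compile_cmd.json -o ./reports

    CodeChecker analyze compile_cmd.json -o ./reports \
        --compiler-info-file ./reports/compiler_info.json
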