-
Notifications
You must be signed in to change notification settings - Fork 0
/
generate_report.py
96 lines (73 loc) · 3.29 KB
/
generate_report.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
#!/usr/bin/python3
import json
import os
import shutil
import subprocess
import sys
def remove_prefix(text, prefix):
    """Return *text* with *prefix* (and any whitespace right after it) removed.

    If *text* does not start with *prefix*, it is returned unchanged.
    """
    if not text.startswith(prefix):
        return text
    return text[len(prefix):].lstrip()
def remove_old_results():
    """Delete the ./bin directory (previous benchmark output), if it exists."""
    results_dir = "./bin"
    # isdir() is False for a missing path, so no separate existence check is needed.
    if os.path.isdir(results_dir):
        shutil.rmtree(results_dir)
def build_benchmark():
    """Configure and build the benchmark with CMake (Release config).

    Uses ``cmake -S/-B`` so the current working directory is never changed
    (the original chdir-into-build dance is fragile if a step fails midway).
    Raises subprocess.CalledProcessError if either CMake step fails, instead
    of silently continuing as ``os.system`` did.
    """
    # -S/-B replaces makedirs + chdir: CMake creates the build dir itself.
    subprocess.run(
        ["cmake", "-S", ".", "-B", "build", "-DCMAKE_BUILD_TYPE:String=Release"],
        check=True,
    )
    subprocess.run(["cmake", "--build", "build"], check=True)
def run_benchmark(executable_path, output_path, output_format):
    """Run a Google Benchmark executable and write its results to a file.

    executable_path -- path to the benchmark binary
    output_path     -- file the benchmark writes its results to
    output_format   -- one of Google Benchmark's output formats (e.g. "json")

    Uses an argv list with subprocess.run instead of interpolating into a
    shell string (no quoting/injection issues), and check=True so a failing
    benchmark run aborts the report instead of being silently ignored.
    """
    subprocess.run(
        [
            executable_path,
            "--benchmark_out_format={}".format(output_format),
            "--benchmark_out={}".format(output_path),
        ],
        check=True,
    )
def convert_gbench_json_result_to_markdown(json_path):
aggregate_only_flag = "[Aggregate only]"
md_description = ""
with open(json_path) as results_data_file:
results_data = json.load(results_data_file)
context = results_data["context"]
md_description += context["date"] + " \n"
md_description += "Run on ({num_cpus} X {mhz_per_cpu} MHz CPU s) \n".format(**context)
md_description += "CPU Caches: \n"
for cache in context["caches"]:
cache["size_kb"] = cache["size"] / 1024
cache["count"] = context["num_cpus"] / cache["num_sharing"]
md_description += " L{level} {type} {size_kb} KiB (x{count:.0f}) \n".format(**cache)
if context["library_build_type"] != "release":
md_description += "Build type: {library_build_type} \n".format(**context)
md_description += "Load Average: {}\n\n".format(", ".join(map(str, context["load_avg"])))
if context["cpu_scaling_enabled"]:
md_description += "***WARNING*** CPU scaling is enabled, the benchmark real time measurements may be noisy and will incur extra overhead.\n\n"
md_description += "### Benchmarks\nBenchmark | Time | CPU | Iterations\n--- | --- | --- | ---\n"
aggregate_benchmarks = []
for benchmark in results_data["benchmarks"]:
if benchmark["run_type"] == "iteration":
if benchmark["name"].startswith(aggregate_only_flag):
continue
if benchmark["name"].find("{N}") != -1:
slash_index = benchmark["name"].rfind("/")
benchmark["name"] = benchmark["name"][:slash_index].replace("{N}", benchmark["name"][slash_index+1:])
md_description += "{name} | {real_time:.0f} ns | {cpu_time:.0f} ns | {iterations}\n".format(**benchmark)
elif benchmark["run_type"] == "aggregate":
aggregate_benchmarks.append(benchmark)
else:
print("unknown benchmark type '{run_type}'".format(**benchmark))
sys.exit(1)
if len(aggregate_benchmarks) > 0:
md_description += "\n### BigO Complexities\nBenchmark | Complexity | Coefficient\n--- | --- | ---\n"
for benchmark in aggregate_benchmarks:
if benchmark["aggregate_name"] == "BigO":
benchmark["name"] = remove_prefix(benchmark["name"][:-5].replace("{N}", "**N**"), aggregate_only_flag)
md_description += "{name} | {big_o} | {cpu_coefficient:.2f}\n".format(**benchmark)
return md_description
def write_str_to_file(file_path, content):
    """Overwrite *file_path* with *content* (text mode).

    The parameter was renamed from ``str``, which shadowed the builtin;
    all in-file callers pass it positionally, so the rename is safe.
    """
    with open(file_path, "w") as out_file:
        out_file.write(content)
def main():
    """Clean old results, build and run the benchmark, then write README.md."""
    remove_old_results()
    build_benchmark()
    run_benchmark("./bin/RaccoonEcsBenchmark", "bin/output.json", "json")
    markdown = convert_gbench_json_result_to_markdown("bin/output.json")
    write_str_to_file("README.md", markdown)


# Guard the entry point so importing this module has no side effects.
if __name__ == "__main__":
    main()