#!/usr/bin/env python

import argparse
import json


def print_table(data, spec):
    """Print a tab-separated table: one column per entry in spec, one row per item in data."""
    # Header row: column names are the keys of the spec.
    print("\t".join(column for column in spec.keys()))

    # Body rows: each spec value is an accessor that pulls the column's value out of a data item.
    for data_item in data:
        values = []
        for retrieve in spec.values():
            raw_value = retrieve(data_item)
            values.append(raw_value if isinstance(raw_value, str) else str(raw_value))
        print("\t".join(values))


if __name__ == "__main__":
    p = argparse.ArgumentParser(description="Turn files generated by timing.py into pgf datafiles")
    p.add_argument("timing_file")
    args = p.parse_args()

    with open(args.timing_file, "r", encoding="utf8") as f:
        jobs = json.load(f)

    # Columns of the scaling table; speedups are taken relative to the first job in the file.
    scaling_spec = {
        "label": lambda job: job["accounting"][0]["nodes"]["count"],
        "nodes": lambda job: job["accounting"][0]["nodes"]["count"],
        "mean_time": lambda job: job["means"]["TimeStep"],
        "std_time": lambda job: job["stds"]["TimeStep"],
        "speedup": lambda job: jobs[0]["means"]["TimeStep"] / job["means"]["TimeStep"],
        "speedup_std": lambda job: jobs[0]["stds"]["TimeStep"] / job["means"]["TimeStep"],
    }

    print_table(jobs, scaling_spec)
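
# Example usage (a sketch; the exact JSON layout is whatever timing.py emits,
# and the field names below are only inferred from the accessors in scaling_spec):
#
#   python make_pgf_table.py timing.json > scaling.dat   # script name here is hypothetical
#
# with timing.json holding a list of job records along the lines of
#
#   [
#     {"accounting": [{"nodes": {"count": 1}}],
#      "means": {"TimeStep": 12.4}, "stds": {"TimeStep": 0.3}},
#     {"accounting": [{"nodes": {"count": 2}}],
#      "means": {"TimeStep": 6.5},  "stds": {"TimeStep": 0.2}}
#   ]
#
# The output is a tab-separated table with a header row of column names, which
# pgfplots can typically read directly via \addplot table.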