Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
30 changes: 30 additions & 0 deletions flow/test/test_genElapsedTime.py
Original file line number Diff line number Diff line change
Expand Up @@ -79,6 +79,36 @@ def test_no_elapsed_time(self, fake_err_output):
genElapsedTime.scan_logs(["--logDir", str(self.tmp_dir.name), "--noHeader"])
self.assertIn("No elapsed time found in", fake_err_output.getvalue())

@patch("sys.stdout", new_callable=StringIO)
def test_emits_one_row_per_result_extension(self, mock_stdout):
    """A stage log flanked by several result files yields one table
    row per result extension, with elapsed/peak shown only on the
    first row for the stage."""
    # Arrange: logs/.../1_2_yosys.log plus matching .v and .sdc
    # result files under the mirrored results/... directory.
    base = os.path.join(self.tmp_dir.name, "logs", "p", "d", "base")
    results = os.path.join(self.tmp_dir.name, "results", "p", "d", "base")
    os.makedirs(base)
    os.makedirs(results)
    with open(os.path.join(base, "1_2_yosys.log"), "w") as fh:
        fh.write("Elapsed time: 00:00:10[h:]min:sec. Peak memory: 51200KB.\n")
    with open(os.path.join(results, "1_2_yosys.v"), "w") as fh:
        fh.write("module foo\nendmodule\n")
    with open(os.path.join(results, "1_2_yosys.sdc"), "w") as fh:
        fh.write("create_clock -period 10\n")

    genElapsedTime.scan_logs(["--logDir", base, "--noHeader"])

    captured = mock_stdout.getvalue()
    rows = [line for line in captured.splitlines() if "1_2_yosys" in line]
    self.assertEqual(len(rows), 2, captured)
    # .v sorts before .sdc per the scanner's extension order; the
    # elapsed (10) and peak (50) tokens belong to the first row only.
    self.assertIn(".v", rows[0])
    self.assertIn(".sdc", rows[1])
    self.assertIn("10", rows[0])
    self.assertIn("50", rows[0])
    # Pad with spaces so the check cannot accidentally match digits
    # inside the row's sha1 hash column.
    self.assertNotIn(" 10 ", " " + rows[1] + " ")
    self.assertNotIn(" 50 ", " " + rows[1] + " ")

def tearDown(self):
    # Release the temporary directory created for this test case
    # (self.tmp_dir is a TemporaryDirectory — presumably built in
    # setUp, which is outside this view; cleanup() deletes the tree).
    self.tmp_dir.cleanup()

Expand Down
9 changes: 8 additions & 1 deletion flow/util/checkMetadata.py
Original file line number Diff line number Diff line change
Expand Up @@ -106,8 +106,15 @@ def try_number(string):
PRE = "[INFO]"
CHECK = "pass"
elif rule.get("level") == "warning":
# Warning-level rules never fail the build, but the prior
# message ("[WARN] field pass test: a == b") was misleading
# when a != b -- the build_value clearly differed from the
# rule_value yet "pass" implied a match. Say "differs"
# instead so the diagnostic reads naturally for fields like
# the netlist hash where the user wants visibility without
# an error.
PRE = "[WARN]"
CHECK = "pass"
CHECK = "differs"
WARNS += 1
else:
PRE = "[ERROR]"
Expand Down
74 changes: 52 additions & 22 deletions flow/util/genElapsedTime.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,24 +14,42 @@
# ==============================================================================


def get_hash(f):
# a content hash of the result file alongside the .log file is useful
# to debug divergent results under what should be identical
# builds (such as local and CI builds)
for ext in [".odb", ".rtlil", ".v"]:
# Primary data artifacts first, then derived/exported artifacts and
# the SDC constraint file: yosys emits .v / .rtlil; OpenROAD stages
# emit .odb (and often .def / .sdc); routing emits .spef; finish
# emits .gds.
RESULT_EXTS = [".v", ".rtlil", ".odb", ".def", ".spef", ".gds", ".sdc"]


def get_hashes(f):
"""Return [(ext, sha1), ...] for every result file alongside log
`f` whose extension is in RESULT_EXTS. A yosys stage typically
produces both `.v` and `.sdc`; a floorplan/route stage produces
`.odb` (and often `.sdc`); the canonicalize stage produces
`.rtlil`. Hashing each separately makes "the netlist changed"
distinguishable from "the SDC changed" in the elapsed-time table
used to triage divergent local vs CI builds.

Falls back to a single ("", "N/A") entry when no result file
exists so the caller always emits at least one row per stage.
"""
results = []
for ext in RESULT_EXTS:
result_file = pathlib.Path(
str(f).replace("logs/", "results/").replace(".log", ext)
)
if result_file.exists():
hasher = hashlib.sha1()
with open(result_file, "rb") as odb_f:
with open(result_file, "rb") as rf:
while True:
chunk = odb_f.read(16 * 1024 * 1024)
chunk = rf.read(16 * 1024 * 1024)
if not chunk:
break
hasher.update(chunk)
return hasher.hexdigest()
return "N/A"
results.append((ext, hasher.hexdigest()))
if not results:
results.append(("", "N/A"))
return results


def print_log_dir_times(logdir, args):
Expand Down Expand Up @@ -87,37 +105,49 @@ def print_log_dir_times(logdir, args):
)
break

odb_hash = get_hash(f)
hashes = get_hashes(f)

if not found:
print("No elapsed time found in", str(f), file=sys.stderr)
continue

# Print the name of the step and the corresponding elapsed time
format_str = "%-25s %10s %14s %20s"
# Print the name of the step and the corresponding elapsed time.
# One row per (stage, result-file-ext); only the first row of a
# stage shows elapsed/peak.
format_str = "%-25s %-6s %10s %14s %20s"
if elapsedTime is not None and peak_memory is not None:
if first and not args.noHeader:
print(
format_str
% ("Log", "Elapsed/s", "Peak Memory/MB", "sha1sum result [0:20)")
% (
"Log",
"Ext",
"Elapsed/s",
"Peak Memory/MB",
"sha1sum result [0:20)",
)
)
first = False
print(
format_str
% (
stem,
elapsedTime,
peak_memory,
odb_hash[0:20],
stage_first = True
for ext, h in hashes:
print(
format_str
% (
stem,
ext,
elapsedTime if stage_first else "",
peak_memory if stage_first else "",
h[0:20],
)
)
)
stage_first = False
if elapsedTime is not None:
totalElapsed += elapsedTime
if peak_memory is not None:
total_max_memory = max(total_max_memory, int(peak_memory))

if totalElapsed != 0 and not args.match:
print(format_str % ("Total", totalElapsed, total_max_memory, ""))
print(format_str % ("Total", "", totalElapsed, total_max_memory, ""))


def scan_logs(args):
Expand Down
22 changes: 22 additions & 0 deletions flow/util/genMetrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
# information in specific files using regular expressions
# -----------------------------------------------------------------------------

import hashlib
import os
import shutil
from datetime import datetime, timedelta
Expand Down Expand Up @@ -190,6 +191,18 @@ def git_head_commit(git_exe, folder):
)


def file_sha1(path):
    """Return the SHA-1 hex digest of the file at `path`, or "N/A"
    when the path is not a regular file.

    Streams the file in 16 MiB chunks so large netlists never have
    to fit in memory at once.
    """
    if not os.path.isfile(path):
        return "N/A"
    digest = hashlib.sha1()
    with open(path, "rb") as fh:
        while True:
            block = fh.read(16 * 1024 * 1024)
            if not block:
                break
            digest.update(block)
    return digest.hexdigest()


def merge_jsons(root_path, output, files):
paths = sorted(glob(os.path.join(root_path, files)))
for path in paths:
Expand Down Expand Up @@ -249,6 +262,15 @@ def extract_metrics(
rptPath + "/synth_stat.txt",
)

# Netlist hashes: fingerprints of the canonical RTLIL (pre-ABC) and
# the final post-synthesis Verilog so the rules-base.json check
# (level=warning) flags when bazel-built vs make-built yosys
# disagree for the same RTL.
metrics_dict["synth__canonical_netlist__hash"] = file_sha1(
resultPath + "/1_1_yosys_canonicalize.rtlil"
)
metrics_dict["synth__netlist__hash"] = file_sha1(resultPath + "/1_2_yosys.v")

# Clocks
# =========================================================================
clk_list = read_sdc(resultPath + "/2_floorplan.sdc")
Expand Down
36 changes: 29 additions & 7 deletions flow/util/genRuleFile.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,8 +46,11 @@ def gen_rule_file(

# dict format
# 'metric_name': {
# 'mode': <str>, one of ['direct', 'sum_fixed', 'period', 'padding',
# 'period_padding', 'abs_padding', 'metric']
# 'mode': <str>, one of ['direct', 'literal', 'sum_fixed', 'period',
# 'padding', 'period_padding', 'abs_padding',
# 'metric']. 'literal' propagates the metric
# value verbatim (e.g. a hash string) and
# skips all numeric padding/rounding.
# 'padding': <float>, percentage of padding to use
# 'fixed': <float>, sum this number instead of using % padding
# 'round_value': <bool>, use the rounded value for the rule
Expand All @@ -71,6 +74,21 @@ def gen_rule_file(
"level": "warning",
},
# synth
# Yosys netlist hash fingerprints. `mode: literal` propagates
# the string value verbatim; `level: warning` means a mismatch
# surfaces as a [WARN] diagnostic in checkMetadata.py without
# failing the build, matching how rules-base.json already
# treats warning counts.
"synth__canonical_netlist__hash": {
"mode": "literal",
"compare": "==",
"level": "warning",
},
"synth__netlist__hash": {
"mode": "literal",
"compare": "==",
"level": "warning",
},
"synth__design__instance__area__stdcell": {
"mode": "padding",
"padding": 15,
Expand Down Expand Up @@ -279,7 +297,7 @@ def gen_rule_file(
if ":" in field:
field = field.replace(":", "__")
processed_fields.add(field)
if isinstance(metrics[field], str):
if isinstance(metrics[field], str) and option["mode"] != "literal":
print(f"[WARNING] Skipping string field {field} = {metrics[field]}")
continue

Expand All @@ -291,6 +309,9 @@ def gen_rule_file(
if option["mode"] == "direct":
rule_value = metrics[field]

elif option["mode"] == "literal":
rule_value = metrics[field]

elif option["mode"] == "sum_fixed":
rule_value = metrics[field] + option["padding"]

Expand Down Expand Up @@ -342,10 +363,11 @@ def gen_rule_file(
print(f"[ERROR] Metric {field} has invalid mode {option['mode']}.")
sys.exit(1)

if option["round_value"] and not isinf(rule_value):
rule_value = int(round(rule_value))
else:
rule_value = float(f"{rule_value:.3g}")
if option["mode"] != "literal":
if option["round_value"] and not isinf(rule_value):
rule_value = int(round(rule_value))
else:
rule_value = float(f"{rule_value:.3g}")

preserve_old_rule = (
True
Expand Down