#!/usr/bin/env python3
# pylint: disable=C0103,C0114,C0115,C0116,C0209,R0912,R0914,R0915,W0125,W0621,exec-used
######################################################################

import argparse
import glob
import multiprocessing
import os
import re
import subprocess
import sys

RealPath = os.path.dirname(os.path.realpath(__file__))

# Populated by the .dat file callbacks near the end of this script,
# which nodist/code_coverage.dat invokes when exec'd by test()
Exclude_Branch_Regexps = []
Exclude_Line_Regexps = []
Remove_Gcda_Regexps = []
Remove_Sources = []
Source_Globs = []

if 'VERILATOR_ROOT' not in os.environ:
    os.environ['VERILATOR_ROOT'] = os.getcwd()

######################################################################


def test():
    if not os.path.exists("nodist/code_coverage.dat"):
        sys.exit("%Error: Run code_coverage from the top of the verilator kit")
    exec(open("./nodist/code_coverage.dat", "r", encoding="utf8").read())  # pylint: disable=consider-using-with

    if Args.stage_enabled[0]:
        ci_fold_start("distclean")
        print("Stage 0: distclean")
        run("make distclean || true")
        ci_fold_end()

    if Args.stage_enabled[1]:
        ci_fold_start("configure")
        print("Stage 1: configure (coverage on)")
        run("autoconf")
        run("./configure --enable-longtests --enable-coverage CXX=g++")
        ci_fold_end()

    if Args.stage_enabled[2]:
        ci_fold_start("build")
        print("Stage 2: build")
        nproc = multiprocessing.cpu_count()
        run("make -k -j " + str(nproc) + " VERILATOR_NO_OPT_BUILD=1")
        # The optimized versions will not collect good coverage; overwrite them
        run("cp bin/verilator_bin_dbg bin/verilator_bin")
        run("cp bin/verilator_coverage_bin_dbg bin/verilator_coverage_bin")
        ci_fold_end()

    if Args.stage_enabled[3]:
        ci_fold_start("test")
        print("Stage 3: make tests (with coverage on)")
        if not Args.tests:
            if not Args.scenarios or re.match('dist', Args.scenarios):
                run("make examples VERILATOR_NO_OPT_BUILD=1")
            run("make test_regress VERILATOR_NO_OPT_BUILD=1" +
                (" SCENARIOS='" + Args.scenarios + "'" if Args.scenarios else "") +
                (" DRIVER_HASHSET='--hashset=" + Args.hashset + "'" if Args.hashset else "") +
                ('' if Args.stop else ' || true'))
        else:
            for test in Args.tests:
                if not os.path.exists(test) and os.path.exists("test_regress/t/" + test):
                    test = "test_regress/t/" + test
                run(test)
        ci_fold_end()

    cc_dir = "nodist/obj_dir/coverage"
    if Args.stage_enabled[4]:
        ci_fold_start("gcno")
        print("Stage 4: Create gcno files under " + cc_dir)
        os.makedirs(cc_dir, exist_ok=True)
        os.makedirs(cc_dir + "/info", exist_ok=True)
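
        # Background: gcc's coverage instrumentation writes a .gcno file
        # (static control-flow data, created at compile time) beside each
        # object, and the instrumented binary writes a matching .gcda file
        # (execution counts) at run time; the tools below need both to agree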
        with subprocess.Popen("find . -print | grep .gcda", shell=True,
                              stdout=subprocess.PIPE) as sp:
            datout = sp.stdout.read()

        dats = {}
        for dat in datout.splitlines():
            dat = dat.decode('utf-8')
            dats[dat] = 1
        for dat in sorted(dats.keys()):
            gcno = re.sub(r'\.gcda$', '.gcno', dat)
            for regexp in Remove_Gcda_Regexps:
                if re.search(regexp, dat):
                    # Remove .gcda/.gcno for files we don't care about
                    # before we slowly read them
                    unlink_ok(dat)
                    unlink_ok(gcno)
                    del dats[dat]
                    break

|
|
with subprocess.Popen("find . -print | grep .gcno", shell=True,
|
|
stdout=subprocess.PIPE) as sp:
|
|
datout = sp.stdout.read()
|
|
|
|
gcnos = {}
|
|
for gcno in datout.splitlines():
|
|
gcno = gcno.decode('utf-8')
|
|
gbase = re.sub(r'.*/', '', gcno, count=1)
|
|
gcnos[gbase] = os.path.abspath(gcno)
|
|
# We need a matching .gcno for every .gcda, try to find a matching file elsewhere
|
|
for dat in sorted(dats):
|
|
gcno = re.sub(r'\.gcda$', '.gcno', dat)
|
|
gbase = re.sub(r'.*/', '', gcno, count=1)
|
|
if not os.path.exists(gcno):
|
|
if gbase in gcnos:
|
|
os.symlink(gcnos[gbase], gcno)
|
|
else:
|
|
print("MISSING .gcno for a .gcda: " + gcno, file=sys.stderr)
|
|
ci_fold_end()
|
|
|
|
    if Args.stage_enabled[5]:
        ci_fold_start("fastcov")
        # Must run in the root directory to find all files
        os.makedirs(cc_dir, exist_ok=True)
        run(RealPath + "/fastcov.py -b -c src/obj_dbg -X --lcov" +
            # " --exclude /usr --exclude test_regress" + " -o " + cc_dir +
            " -o " + cc_dir + "/app_total.info")
        # For debug, to convert a single .gcda/.gcno in a directory to cov.info:
        #   lcov -c -d . -o cov.info
        ci_fold_end()

    if Args.stage_enabled[6]:
        ci_fold_start("clone")
        # No control file to override single lines, so replicate the sources.
        # Also lets us see the insertion markers in the HTML source result.
        print("Stage 6: Clone sources under " + cc_dir)
        clone_sources(cc_dir)
        ci_fold_end()

    if Args.stage_enabled[11]:
        ci_fold_start("dirs")
        print("Stage 11: Cleanup paths")
        cleanup_abs_paths_info(cc_dir, cc_dir + "/app_total.info", cc_dir + "/app_total.info")
        ci_fold_end()

    if Args.stage_enabled[12]:
        ci_fold_start("filter")
        print("Stage 12: Filter processed source files")
        inc = ''
        for globf in Source_Globs:
            for infile in glob.glob(globf):
                inc += " '" + infile + "'"
        exc = ''
        for globf in Remove_Sources:
            # Fastcov does exact matching, not globbing, at present;
            # lcov requires a whole-path match, so it needs the glob
            globf = re.sub(r'^\*', '', globf)
            globf = re.sub(r'\*$', '', globf)
            exc += " '" + globf + "'"
        if inc != '':
            inc = "--include " + inc
        if exc != '':
            exc = "--exclude " + exc
        run("cd " + cc_dir + " ; " + RealPath + "/fastcov.py -C app_total.info " + inc + " " +
            exc + " -x --lcov -o app_total_f.info")
        ci_fold_end()

    if Args.stage_enabled[17]:
        ci_fold_start("report")
        print("Stage 17: Create HTML")
        run("cd " + cc_dir + " ; genhtml app_total_f.info --demangle-cpp" +
            " --rc lcov_branch_coverage=1 --rc genhtml_hi_limit=100 --output-directory html")
        ci_fold_end()

    if Args.stage_enabled[18]:
        ci_fold_start("upload")
        print("Stage 18: Upload")
        # curl -Os https://cli.codecov.io/latest/linux/codecov ; sudo chmod +x codecov
        # --disable-search does not seem to work
        # -d with a false directory does not seem to work
        # So, remove gcno files before calling codecov
        upload_dir = "nodist/obj_dir/upload"
        os.makedirs(upload_dir, exist_ok=True)
        cmd = "ci/codecov -v upload-process -Z" + " -f " + cc_dir + "/app_total.info"
        print("Not running:")
        print(" export CODECOV_TOKEN=<hidden>")
        print(" find . -name '*.gcno' -exec rm {} \\;")
        print(" " + cmd)
        ci_fold_end()

    if Args.stage_enabled[19]:
        print("*-* All Finished *-*")
        print("")
        print("* See report in " + cc_dir + "/html/index.html")
        print("* Remember to make distclean && ./configure before working on non-coverage")


def clone_sources(cc_dir):
    excluded_lines = 0
    excluded_br_lines = 0
    for globf in Source_Globs:
        for infile in glob.glob(globf):
            if re.match(r'^/', infile):
                sys.exit("%Error: source globs should be relative, not absolute, filenames: " +
                         infile)
            outfile = cc_dir + "/" + infile
            outpath = re.sub(r'/[^/]*$', '', outfile, count=1)
            os.makedirs(outpath, exist_ok=True)
            with open(infile, "r", encoding="utf8") as fh:
                with open(outfile, "w", encoding="utf8") as ofh:
                    lineno = 0
                    for line in fh:
                        lineno += 1
                        line = line.rstrip()
                        done = False
                        if re.search(r'LCOV_EXCL_LINE', line):
                            line += " LCOV_EXCL_BR_LINE"
                            done = True
                        elif re.search(r'LCOV_EXCL_START', line):
                            line += " LCOV_EXCL_BR_START"
                            done = True
                        elif re.search(r'LCOV_EXCL_STOP', line):
                            line += " LCOV_EXCL_BR_STOP"
                            done = True

                        for regexp in Exclude_Line_Regexps:
                            if done:
                                break
                            if re.search(regexp, line):
                                # print("%s:%d: %s" % (infile, lineno, line))
                                line += " //code_coverage: // LCOV_EXCL_LINE LCOV_EXCL_BR_LINE"
                                excluded_lines += 1
                                excluded_br_lines += 1
                                done = True

                        for regexp in Exclude_Branch_Regexps:
                            if done:
                                break
                            if re.search(regexp, line):
                                # print("%s:%d: %s" % (infile, lineno, line))
                                line += " //code_coverage: // LCOV_EXCL_BR_LINE"
                                excluded_br_lines += 1
                                done = True

                        ofh.write(line + "\n")
    print("Number of source lines automatically LCOV_EXCL_LINE'ed: %d" % excluded_lines)
    print("Number of source lines automatically LCOV_EXCL_BR_LINE'ed: %d" % excluded_br_lines)

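
# For illustration (hypothetical regexp and source line): if a .dat file added
# exclude_line_regexp(r'\bv3fatalSrc\b'), clone_sources would copy a matching
# line into the clone as
#   v3fatalSrc("unreachable");  //code_coverage: // LCOV_EXCL_LINE LCOV_EXCL_BR_LINE
# so lcov/genhtml drop it from both line and branch coverage

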
def cleanup_abs_paths_info(cc_dir, infile, outfile):
    lines = []
    with open(infile, "r", encoding="utf8") as fh:
        for line in fh:
            if re.search(r'^SF:', line) and not re.search(r'^SF:/usr/', line):
                line = re.sub(os.environ['VERILATOR_ROOT'] + '/', '', line, count=1)
                line = re.sub(cc_dir + '/', '', line, count=1)
                line = re.sub(r'^SF:.*?/include/', 'SF:include/', line, count=1)
                line = re.sub(r'^SF:.*?/src/', 'SF:src/', line, count=1)
                line = re.sub(r'^SF:.*?/test_regress/', 'SF:test_regress/', line, count=1)
                line = re.sub(r'obj_dbg/verilog.y$', 'verilog.y', line)
                # print("Remaining SF: " + line)
            lines.append(line)

    with open(outfile, "w", encoding="utf8") as ofh:
        for line in lines:
            ofh.write(line)

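
# For illustration (hypothetical path): lcov .info files record each source
# file in an "SF:<path>" line; the rewriting above turns an absolute path like
#   SF:/home/user/verilator/nodist/obj_dir/coverage/src/V3Ast.cpp
# into the checkout-relative
#   SF:src/V3Ast.cpp
# so reports from different trees and the cloned sources line up

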
def cleanup_abs_paths_json(cc_dir, infile, outfile):
    # Handcrafted cleanup; the alternative would be to deserialize/serialize
    # the JSON, but this is much faster
    lines = []
    with open(infile, "r", encoding="utf8") as fh:
        for line in fh:
            line = re.sub('"' + os.environ['VERILATOR_ROOT'] + '/', '"', line)
            line = re.sub('"' + cc_dir + '/', '"', line)
            line = re.sub(r'obj_dbg/verilog.y$', 'verilog.y', line)
            lines.append(line)

    with open(outfile, "w", encoding="utf8") as ofh:
        for line in lines:
            ofh.write(line)


######################################################################
# .dat file callbacks


def exclude_branch_regexp(*regexps):
    Exclude_Branch_Regexps.extend(regexps)


def exclude_line_regexp(*regexps):
    Exclude_Line_Regexps.extend(regexps)


def remove_gcda_regexp(*regexps):
    Remove_Gcda_Regexps.extend(regexps)


def remove_source(*sources):
    Remove_Sources.extend(sources)


def source_globs(*dirs):
    Source_Globs.extend(dirs)
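
# A minimal sketch of what nodist/code_coverage.dat (exec'd by test() above)
# might contain to drive these callbacks; the entries shown are hypothetical:
#   source_globs("src/*.cpp", "src/*.h")
#   remove_source("*/V3ParseBison.c")
#   exclude_line_regexp(r'\bv3fatalSrc\b')
#   remove_gcda_regexp(r'test_regress/.*\.gcda$')
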

#######################################################################


def run(command):
    # Run a system command, checking for errors
    print("\t%s" % command)
    status = subprocess.call(command, shell=True)
    # subprocess.call returns a negative value only when the child was killed
    # by a signal; ordinary nonzero exit statuses are tolerated here, and
    # callers append " || true" where failures are expected
    if status < 0:
        raise RuntimeError("%Error: Command failed " + command + ", stopped")


def unlink_ok(filename):
    # Remove a file, ignoring the error if it does not exist
    try:
        os.unlink(filename)
    except OSError:
        pass


def ci_fold_start(action):
    # Open a collapsible group in the GitHub Actions log
    print("::group::" + action, flush=True)


def ci_fold_end():
    # Close the current GitHub Actions log group
    print("::endgroup::\n", flush=True)


#######################################################################
#######################################################################

parser = argparse.ArgumentParser(
    allow_abbrev=False,
    formatter_class=argparse.RawDescriptionHelpFormatter,
    description="""code_coverage builds Verilator with C++ coverage support and runs
tests with coverage enabled. This will rebuild the current object
files. Run as:

    cd $VERILATOR_ROOT
    nodist/code_coverage""",
    epilog="""Copyright 2019-2024 by Wilson Snyder. This program is free software; you
can redistribute it and/or modify it under the terms of either the GNU
Lesser General Public License Version 3 or the Perl Artistic License
Version 2.0.

SPDX-License-Identifier: LGPL-3.0-only OR Artistic-2.0""")

parser.add_argument('--debug', action='store_true', help='enable debug')
parser.add_argument('--hashset',
                    action='store',
                    help='pass test hashset onto driver.py test harness')
parser.add_argument('--scenarios',
                    action='store',
                    help='pass test scenarios onto driver.py test harness')
parser.add_argument('--stages',
                    '--stage',
                    action='store',
                    help='run a specific stage or range of stages (see the script)')
parser.add_argument(
    '--tests',
    '--test',
    action='append',
    default=[],
    help='instead of normal regressions, run the specified test(s); may be used multiple times')
parser.add_argument('--no-stop',
                    dest='stop',
                    action='store_false',
                    help='do not stop collecting data if tests fail')

parser.set_defaults(stop=True)
Args = parser.parse_args()
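
# --stages accepts a single stage number or a range, matching the four
# patterns parsed below; for example:
#   --stages 4     run only stage 4
#   --stages 2-5   run stages 2 through 5
#   --stages -3    run stages 0 through 3
#   --stages 6-    run stage 6 onwards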
if True:
    start = 0
    end = 99
    Args.stage_enabled = {}
    if Args.stages:
        match_one = re.match(r'^(\d+)$', Args.stages)
        match_range = re.match(r'^(\d+)-(\d+)$', Args.stages)
        match_to = re.match(r'^-(\d+)$', Args.stages)
        match_from = re.match(r'^(\d+)-$', Args.stages)
        if match_one:
            start = end = int(match_one.group(1))
        elif match_range:
            start = int(match_range.group(1))
            end = int(match_range.group(2))
        elif match_to:
            end = int(match_to.group(1))
        elif match_from:
            start = int(match_from.group(1))
        else:
            sys.exit("%Error: --stages not understood: " + Args.stages)
    for n in range(0, 100):
        Args.stage_enabled[n] = False
    for n in range(start, end + 1):
        Args.stage_enabled[n] = True

test()

######################################################################
# Local Variables:
# compile-command: "cd .. ; nodist/code_coverage "
# End: