Internals: Convert internal code_coverage to python3.

Wilson Snyder 2020-12-19 20:29:05 -05:00
parent beb03be731
commit db276946b4
3 changed files with 390 additions and 502 deletions
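For reference, the rewritten script keeps the staged workflow and command-line options of the Perl original. A typical invocation (illustrative only; the flag values below are not part of this commit) looks like:

    cd $VERILATOR_ROOT
    nodist/code_coverage                  # run every stage
    nodist/code_coverage --stages 4-12    # reuse already-collected .gcda data, skip rebuild/tests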

Makefile.in

@@ -477,6 +477,8 @@ YAPF_FLAGS = -i
 YAPF_FILES = \
 	examples/xml_py/vl_file_copy \
 	examples/xml_py/vl_hier_graph \
+	nodist/code_coverage \
+	nodist/code_coverage.dat \
 	nodist/dot_importer \
 	nodist/fuzzer/actual_fail \
 	nodist/fuzzer/generate_dictionary \

nodist/code_coverage

@@ -1,511 +1,398 @@ (file rewritten from Perl to Python; new contents below)
#!/usr/bin/env python3
######################################################################

import argparse
import glob
import multiprocessing
import os
import re
import subprocess
import sys

RealPath = os.path.dirname(os.path.realpath(__file__))
Exclude_Branch_Regexps = []
Exclude_Line_Regexps = []
Remove_Gcda_Regexps = []
Remove_Sources = []
Source_Globs = []

if 'VERILATOR_ROOT' not in os.environ:
    os.environ['VERILATOR_ROOT'] = os.getcwd()

######################################################################


def test():
    if not os.path.exists("nodist/code_coverage.dat"):
        sys.exit("%Error: Run code_coverage from the top of the verilator kit")
    exec(open("./nodist/code_coverage.dat").read())

    if Args.stage_enabled[1]:
        ci_fold_start("configure")
        print("Stage 1: configure (coverage on)")
        run("make distclean || true")
        run("autoconf")
        # Exceptions can pollute the branch coverage data
        run("./configure --enable-longtests CXX='g++ --coverage -fno-exceptions -DVL_GCOV'")
        ci_fold_end()

    if Args.stage_enabled[2]:
        ci_fold_start("build")
        print("Stage 2: build")
        nproc = multiprocessing.cpu_count()
        run("make -k -j " + str(nproc) + " VERILATOR_NO_OPT_BUILD=1")
        # The optimized versions will not collect good coverage, overwrite them
        run("cp bin/verilator_bin_dbg bin/verilator_bin")
        run("cp bin/verilator_coverage_bin_dbg bin/verilator_coverage_bin")
        ci_fold_end()

    if Args.stage_enabled[3]:
        ci_fold_start("test")
        print("Stage 3: make tests (with coverage on)")
        if not Args.tests:
            if not Args.scenarios or re.match('dist', Args.scenarios):
                run("make examples VERILATOR_NO_OPT_BUILD=1")
            run("make test_regress VERILATOR_NO_OPT_BUILD=1" +
                (" SCENARIOS='" + Args.scenarios + "'" if Args.scenarios else "") +
                (" DRIVER_HASHSET='--hashset=" + Args.hashset + "'" if Args.hashset else "") +
                ('' if Args.stop else ' || true'))
        else:
            for test in Args.tests:
                if not os.path.exists(test) and os.path.exists("test_regress/t/" + test):
                    test = "test_regress/t/" + test
                run(test)
        ci_fold_end()

    cc_dir = "nodist/obj_dir/coverage"
    if Args.stage_enabled[4]:
        ci_fold_start("gcno")
        print("Stage 4: Create gcno files under " + cc_dir)
        os.makedirs(cc_dir, exist_ok=True)
        os.makedirs(cc_dir + "/info", exist_ok=True)

        sp = subprocess.Popen("find . -print | grep .gcda",
                              shell=True,
                              stdout=subprocess.PIPE)
        datout = sp.stdout.read()
        dats = {}
        for dat in datout.splitlines():
            dat = dat.decode('utf-8')
            dats[dat] = 1
        for dat in sorted(dats.keys()):
            gcno = re.sub(r'\.gcda$', '.gcno', dat)
            for regexp in Remove_Gcda_Regexps:
                if re.search(regexp, dat):
                    # Remove .gcda/.gcno for files we don't care about before we slowly
                    # read them
                    os.unlink(dat)
                    os.unlink(gcno)
                    del dats[dat]
                    break

        sp = subprocess.Popen("find . -print | grep .gcno",
                              shell=True,
                              stdout=subprocess.PIPE)
        datout = sp.stdout.read()
        gcnos = {}
        for gcno in datout.splitlines():
            gcno = gcno.decode('utf-8')
            gbase = re.sub(r'.*/', '', gcno, count=1)
            gcnos[gbase] = os.path.abspath(gcno)
        # We need a matching .gcno for every .gcda, try to find a matching file elsewhere
        for dat in sorted(dats):
            gcno = re.sub(r'\.gcda$', '.gcno', dat)
            gbase = re.sub(r'.*/', '', gcno, count=1)
            if not os.path.exists(gcno):
                if gbase in gcnos:
                    os.symlink(gcnos[gbase], gcno)
                else:
                    print("MISSING .gcno for a .gcda: " + gcno, file=sys.stderr)
        ci_fold_end()

    if Args.stage_enabled[5]:
        ci_fold_start("fastcov")
        # Must run in root directory to find all files
        os.makedirs(cc_dir, exist_ok=True)
        run(RealPath + "/fastcov.py -b -c src/obj_dbg -X --lcov" +
            " --exclude /usr --exclude test_regress" +
            " -o " + cc_dir + "/app_total.info")
        ci_fold_end()

    if Args.stage_enabled[6]:
        ci_fold_start("clone")
        # No control file to override single lines, so replicate the sources
        # Also lets us see the insertion markers in the HTML source res
        print("Stage 6: Clone sources under " + cc_dir)
        clone_sources(cc_dir)
        ci_fold_end()

    if Args.stage_enabled[11]:
        ci_fold_start("dirs")
        print("Stage 11: Cleanup paths")
        cleanup_abs_paths_info(cc_dir, cc_dir + "/app_total.info",
                               cc_dir + "/app_total.info")
        ci_fold_end()

    if Args.stage_enabled[12]:
        ci_fold_start("filter")
        print("Stage 12: Filter processed source files")
        inc = ''
        for globf in Source_Globs:
            for infile in glob.glob(globf):
                inc += " '" + infile + "'"
        exc = ''
        for globf in Remove_Sources:
            # Fastcov does exact match not globbing at present
            # Lcov requires whole path match so needs the glob
            globf = re.sub(r'^\*', '', globf)
            globf = re.sub(r'\*$', '', globf)
            exc += " '" + globf + "'"
        if inc != '':
            inc = "--include " + inc
        if exc != '':
            exc = "--exclude " + exc
        run("cd " + cc_dir + " ; " + RealPath +
            "/fastcov.py -C app_total.info " + inc + " " + exc +
            " -x --lcov -o app_total_f.info")
        ci_fold_end()

    if Args.stage_enabled[17]:
        ci_fold_start("report")
        print("Stage 17: Create HTML")
        run("cd " + cc_dir + " ; genhtml app_total_f.info --demangle-cpp" +
            " --rc lcov_branch_coverage=1 --rc genhtml_hi_limit=100 --output-directory html")
        ci_fold_end()

    if Args.stage_enabled[18]:
        ci_fold_start("upload")
        print("Stage 18: Upload")
        cmd = "bash <(curl -s https://codecov.io/bash) -f " + cc_dir + "/app_total.info"
        print("print: Not running: export CODECOV_TOKEN=<hidden>")
        print("print: Not running: " + cmd)
        ci_fold_end()

    if Args.stage_enabled[19]:
        print("*-* All Finished *-*")
        print("")
        print("* See report in " + cc_dir + "/html/index.html")
        print("* Remember to make distclean && ./configure before working on non-coverage")


def clone_sources(cc_dir):
    excluded_lines = 0
    excluded_br_lines = 0
    for globf in Source_Globs:
        for infile in glob.glob(globf):
            if re.match(r'^/', infile):
                sys.exit("%Error: source globs should be relative not absolute filenames, "
                         + infile)
            outfile = cc_dir + "/" + infile
            outpath = re.sub(r'/[^/]*$', '', outfile, count=1)
            os.makedirs(outpath, exist_ok=True)
            with open(infile) as fh:
                with open(outfile, "w") as ofh:
                    lineno = 0
                    for line in fh:
                        lineno += 1
                        line = line.rstrip()
                        done = False
                        if re.search(r'LCOV_EXCL_LINE', line):
                            line += " LCOV_EXCL_BR_LINE"
                            done = True
                        elif re.search(r'LCOV_EXCL_START', line):
                            line += " LCOV_EXCL_BR_START"
                            done = True
                        elif re.search(r'LCOV_EXCL_STOP', line):
                            line += " LCOV_EXCL_BR_STOP"
                            done = True

                        for regexp in Exclude_Line_Regexps:
                            if done:
                                break
                            if re.search(regexp, line):
                                # print("%s:%d: %s" % (infile, lineno, line))
                                line += " //code_coverage: // LCOV_EXCL_LINE LCOV_EXCL_BR_LINE"
                                excluded_lines += 1
                                excluded_br_lines += 1
                                done = True

                        for regexp in Exclude_Branch_Regexps:
                            if done:
                                break
                            if re.search(regexp, line):
                                # print("%s:%d: %s" % (infile, lineno, line))
                                line += " //code_coverage: // LCOV_EXCL_BR_LINE"
                                excluded_br_lines += 1
                                done = True

                        ofh.write(line + "\n")

    print("Number of source lines automatically LCOV_EXCL_LINE'ed: %d" %
          excluded_lines)
    print("Number of source lines automatically LCOV_EXCL_BR_LINE'ed: %d" %
          excluded_br_lines)


def cleanup_abs_paths_info(cc_dir, infile, outfile):
    lines = []
    with open(infile) as fh:
        for line in fh:
            if re.search(r'^SF:', line):
                line = re.sub(os.environ['VERILATOR_ROOT'] + '/', '', line, count=1)
                line = re.sub(cc_dir + '/', '', line, count=1)
                line = re.sub(r'obj_dbg/verilog.y$', 'verilog.y', line)
                # print("Remaining SF: " + line)
            lines.append(line)
    with open(outfile, "w") as ofh:
        for line in lines:
            ofh.write(line)


def cleanup_abs_paths_json(cc_dir, infile, outfile):
    # Handcrafted cleanup, alternative would be to deserialize/serialize JSON,
    # but this is much faster
    lines = []
    with open(infile) as fh:
        for line in fh:
            line = re.sub('"' + os.environ['VERILATOR_ROOT'] + '/', '"', line)
            line = re.sub('"' + cc_dir + '/', '"', line)
            line = re.sub(r'obj_dbg/verilog.y$', 'verilog.y', line)
            lines.append(line)
    with open(outfile, "w") as ofh:
        for line in lines:
            ofh.write(line)


######################################################################
# .dat file callbacks


def exclude_branch_regexp(*regexps):
    Exclude_Branch_Regexps.extend(regexps)


def exclude_line_regexp(*regexps):
    Exclude_Line_Regexps.extend(regexps)


def remove_gcda_regexp(*regexps):
    Remove_Gcda_Regexps.extend(regexps)


def remove_source(*sources):
    Remove_Sources.extend(sources)


def source_globs(*dirs):
    Source_Globs.extend(dirs)


#######################################################################


def run(command):
    # Run a system command, check errors
    print("\t%s" % command)
    status = subprocess.call(command, shell=True)
    if status < 0:
        raise Exception("%Error: Command failed " + command + ", stopped")


def ci_fold_start(action):
    print("::group::" + action)


def ci_fold_end():
    print("::endgroup::\n")


#######################################################################
#######################################################################

parser = argparse.ArgumentParser(
    allow_abbrev=False,
    formatter_class=argparse.RawDescriptionHelpFormatter,
    description="""code_coverage builds Verilator with C++ coverage support and runs
tests with coverage enabled.  This will rebuild the current object
files.  Run as:

   cd $VERILATOR_ROOT
   nodist/code_coverage""",
    epilog="""Copyright 2019-2020 by Wilson Snyder. This program is free software; you
can redistribute it and/or modify it under the terms of either the GNU
Lesser General Public License Version 3 or the Perl Artistic License
Version 2.0.

SPDX-License-Identifier: LGPL-3.0-only OR Artistic-2.0""")

parser.add_argument('--debug', action='store_true', help='enable debug')
parser.add_argument('--hashset',
                    action='store',
                    help='pass test hashset onto driver.pl test harness')
parser.add_argument('--scenarios',
                    action='store',
                    help='pass test scenarios onto driver.pl test harness')
parser.add_argument(
    '--stages',
    action='store',
    help='runs a specific stage or range of stages (see the script)')
parser.add_argument(
    '--test',
    action='append',
    dest='tests',
    default=[],
    help='Instead of normal regressions, run the specified test(s), may be used multiple times')
parser.add_argument('--no-stop',
                    dest='stop',
                    action='store_false',
                    help='do not stop collecting data if tests fail')
parser.set_defaults(stop=True)

Args = parser.parse_args()

if True:
    start = 0
    end = 99
    Args.stage_enabled = {}
    if Args.stages:
        match_one = re.match(r'^(\d+)$', Args.stages)
        match_range = re.match(r'^(\d+)-(\d+)$', Args.stages)
        match_to = re.match(r'^-(\d+)$', Args.stages)
        match_from = re.match(r'^(\d+)-$', Args.stages)
        if match_one:
            start = end = int(match_one.group(1))
        elif match_range:
            start = int(match_range.group(1))
            end = int(match_range.group(2))
        elif match_to:
            end = int(match_to.group(1))
        elif match_from:
            start = int(match_from.group(1))
        else:
            sys.exit("%Error: --stages not understood: " + Args.stages)
    for n in range(1, 100):
        Args.stage_enabled[n] = False
    for n in range(start, end + 1):
        Args.stage_enabled[n] = True

test()

######################################################################
### Local Variables:

nodist/code_coverage.dat

@@ -1,4 +1,4 @@
-# -*- Perl -*-
+# -*- Python -*-
 # DESCRIPTION: Verilator: Internal C++ code lcov control file
 #
 # Copyright 2019-2020 by Wilson Snyder. This program is free software; you
@@ -7,57 +7,56 @@
 # Version 2.0.
 # SPDX-License-Identifier: LGPL-3.0-only OR Artistic-2.0
 
-source_globs("src/*.cpp",
-             "src/*.h",
-             "src/*.l",
-             "src/*.y",
-             "src/obj_dbg/*.h",
-             "src/obj_dbg/*.cpp",
-             "include/*.c",
-             "include/*.cpp",
-             "include/*.h",
-             "include/*/*.h",
-             "include/*/*.cpp",
-             "include/*/*.c",
-             );
+source_globs("src/*.cpp")
+source_globs("src/*.h")
+source_globs("src/*.l")
+source_globs("src/*.y")
+source_globs("src/obj_dbg/*.h")
+source_globs("src/obj_dbg/*.cpp")
+source_globs("include/*.c")
+source_globs("include/*.cpp")
+source_globs("include/*.h")
+source_globs("include/*/*.h")
+source_globs("include/*/*.cpp")
+source_globs("include/*/*.c")
 
 # Note *'s are removed when using fastcov
-remove_source("/usr/*");
-remove_source("*/include/sysc/*");
-remove_source("*/V3ClkGater.cpp");
-remove_source("*/V3ClkGater.h");
-remove_source("*/V3GraphDfa.cpp");
-remove_source("*/V3GraphDfa.h");
-remove_source("*/V3Lexer_pregen.yy.cpp");
-remove_source("*/V3PreLex_pregen.yy.cpp");
-remove_source("*/verilog.c");
-remove_source("*include/gtkwave/*");
+remove_source("/usr/*")
+remove_source("*/include/sysc/*")
+remove_source("*/V3ClkGater.cpp")
+remove_source("*/V3ClkGater.h")
+remove_source("*/V3GraphDfa.cpp")
+remove_source("*/V3GraphDfa.h")
+remove_source("*/V3Lexer_pregen.yy.cpp")
+remove_source("*/V3PreLex_pregen.yy.cpp")
+remove_source("*/verilog.c")
+remove_source("*include/gtkwave/*")
 # Something wrong in generation, unfortunately as would like this
 #genhtml: ERROR: cannot read /svaha/wsnyder/SandBox/homecvs/v4/verilator/src/obj_dbg/verilog.y
-#remove_source("*/src/obj_dbg/verilog.y");
-remove_source("*test_regress/*");
-remove_source("*examples/*");
+#remove_source("*/src/obj_dbg/verilog.y")
+remove_source("*test_regress/*")
+remove_source("*examples/*")
 
 # Remove collected coverage on each little test main file
 # Would just be removed with remove_source in later step
-remove_gcda_regexp(qr!test_regress/.*/(Vt_|Vtop_).*\.gcda!);
+remove_gcda_regexp(r'test_regress/.*/(Vt_|Vtop_).*\.gcda')
 
 # Exclude line entirely, also excludes from function and branch coverage
-exclude_line_regexp(qr/(\bv3fatalSrc\b
-                        |\bfatalSrc\b
-                        |\bVL_UNCOVERABLE\b
-                        |\bVL_UNREACHABLE\b
-                        |\bVL_FATAL
-                        |\bUASSERT
-                        |\bNUM_ASSERT
-                        |\bERROR_RSVD_WORD
-                        |\bV3ERROR_NA
-                        |\bUINFO\b)/x);
+exclude_line_regexp(r'\bv3fatalSrc\b')
+exclude_line_regexp(r'\bfatalSrc\b')
+exclude_line_regexp(r'\bVL_UNCOVERABLE\b')
+exclude_line_regexp(r'\bVL_UNREACHABLE\b')
+exclude_line_regexp(r'\bVL_FATAL')
+exclude_line_regexp(r'\bUASSERT')
+exclude_line_regexp(r'\bNUM_ASSERT')
+exclude_line_regexp(r'\bERROR_RSVD_WORD')
+exclude_line_regexp(r'\bV3ERROR_NA')
+exclude_line_regexp(r'\bUINFO\b')
 
 # Exclude for branch coverage only
-exclude_branch_regexp(qr/(\bdebug\(\)
-                          |\bassert\(
-                          |\bBROKEN_RTK\(
-                          |\bSELF_CHECK)/x);
+exclude_branch_regexp(r'\bdebug\(\)')
+exclude_branch_regexp(r'\bassert\(')
+exclude_branch_regexp(r'\bBROKEN_RTK\(')
+exclude_branch_regexp(r'\bSELF_CHECK')
 
-1;
+True