2020-01-10 01:01:12 +00:00
|
|
|
#!/usr/bin/env perl
# See copyright, etc in below POD section.
######################################################################

use strict;
use warnings;
use Cwd;
use File::Copy qw(cp);
use File::Path qw(mkpath);
use FindBin qw($RealBin);
use Getopt::Long;
use IO::File;
use Parallel::Forker;
use Pod::Usage;
use Unix::Processors;

# Set by the --debug option ("use vars" is obsolete; "our" is the modern form).
our $Debug;

# Command-line state and .dat-file configuration:
our $Opt_Stop = 1;         # --stop: abort stage 3 on test failure (else append "|| true")
our $Opt_Fastcov = 0;      # --fastcov: capture with fastcov.py instead of lcov
our $Exclude_Line_Regexp;  # Set by exclude_line_regexp() callback from the .dat file
our $Remove_Gcda_Regexp;   # Set by remove_gcda_regexp() callback from the .dat file
our @Remove_Sources;       # Globs handed to "lcov --remove" in stage 12
our @Source_Globs;         # Source globs cloned/annotated by clone_sources()

# Parallel job runner for the per-directory lcov captures; oversubscribe the
# CPUs slightly (x1.5) since the jobs are partly I/O bound.
our $Fork = Parallel::Forker->new(use_sig_child => 1, poll_interval => 10*1000);
$Fork->max_proc(Unix::Processors->new->max_online * 1.5);
$SIG{CHLD} = sub { Parallel::Forker::sig_child($Fork); };
$SIG{TERM} = sub { $Fork->kill_tree_all('TERM') if $Fork && $Fork->in_parent; die "Quitting...\n"; };
|
|
|
|
|
2019-07-01 02:37:03 +00:00
|
|
|
#======================================================================
# main

our $Opt_Hashset;       # --hashset: forwarded to driver.pl
our $opt_stages = '';   # --stages: raw stage-range string, decoded below
our $Opt_Scenarios;     # --scenarios: forwarded to driver.pl
our %Opt_Stages;        # Set of stage numbers enabled for this run

# Unbuffer output so stage banners interleave correctly with child commands.
STDOUT->autoflush(1);
STDERR->autoflush(1);

Getopt::Long::config("no_auto_abbrev");
if (! GetOptions(
          "debug" => sub { $Debug = 1; },
          "hashset=s" => \$Opt_Hashset,  # driver.pl hashset
          # The bad-usage message below advertises --help, so handle it.
          "help" => sub { pod2usage(-exitstatus => 2, -verbose => 2); },
          "<>" => sub { die "%Error: Unknown parameter: $_[0]\n"; },
          "fastcov!" => \$Opt_Fastcov,  # use fastcov, not documented, for debug
          "scenarios=s" => \$Opt_Scenarios,  # driver.pl scenarios
          "stages=s" => \$opt_stages,  # starting stage number
          "stop!" => \$Opt_Stop,  # stop/do not stop on error in tests
    )) {
    die "%Error: Bad usage, try 'code_coverage --help'\n";
}
|
|
|
|
|
2020-05-16 18:53:22 +00:00
|
|
|
# Decode --stages ("N", "N-M", "-M", or "N-") into the %Opt_Stages set.
# (The original regexes had a stray "$" anchor inside the capture groups,
# e.g. "(\d+$)"; harmless but confusing, so removed.)
{
    my $start = 0;   # Default: enable every stage 0..99
    my $end = 99;
    if ($opt_stages && $opt_stages =~ /^(\d+)$/) {
        $start = $end = $1;       # Exactly one stage
    } elsif ($opt_stages && $opt_stages =~ /^(\d+)-(\d+)$/) {
        $start = $1; $end = $2;   # Inclusive range
    } elsif ($opt_stages && $opt_stages =~ /^-(\d+)$/) {
        $end = $1;                # Everything up to M
    } elsif ($opt_stages && $opt_stages =~ /^(\d+)-$/) {
        $start = $1;              # Everything from N onward
    } elsif ($opt_stages) {
        die "%Error: --stages not understood: $opt_stages,";
    }
    for (my $n = $start; $n <= $end; ++$n) { $Opt_Stages{$n} = 1; }
}

test();
exit(0);
|
|
|
|
|
|
|
|
#######################################################################
|
|
|
|
|
|
|
|
sub test {
    # Top-level driver: run each build/coverage stage that is enabled in
    # %Opt_Stages.  Must be invoked from the root of the Verilator kit.
    -r "nodist/code_coverage.dat" or die "%Error: Run from the top of the verilator kit,";
    require "./nodist/code_coverage.dat";  # Populates globs/regexps via the .dat callbacks

    if ($Opt_Stages{1}) {
        travis_fold_start("configure");
        print "Stage 1: configure (coverage on)\n";
        run("make distclean || true");
        run("autoconf");
        run("./configure --enable-longtests CXX='g++ --coverage'");
        travis_fold_end();
    }

    if ($Opt_Stages{2}) {
        travis_fold_start("build");
        print "Stage 2: build\n";
        my $nproc = Unix::Processors->new->max_online;
        run("make -k -j $nproc");
        # The optimized versions will not collect good coverage, overwrite them
        run("cp bin/verilator_bin_dbg bin/verilator_bin");
        run("cp bin/verilator_coverage_bin_dbg bin/verilator_coverage_bin");
        travis_fold_end();
    }

    if ($Opt_Stages{3}) {
        travis_fold_start("test");
        print "Stage 3: make tests (with coverage on)\n";
        run("make examples")
            if !$Opt_Scenarios || $Opt_Scenarios =~ /dist/i;
        run("make test_regress"
            . ($Opt_Scenarios ? " SCENARIOS='".$Opt_Scenarios."'" : "")
            . ($Opt_Hashset ? " DRIVER_HASHSET='--hashset=".$Opt_Hashset."'" : "")
            . ($Opt_Stop ? '' : ' || true'));
        travis_fold_end();
    }

    my $cc_dir = "nodist/obj_dir/coverage";

    if ($Opt_Stages{4}) {
        travis_fold_start("gcno");
        print "Stage 4: Create gcno files under $cc_dir\n";
        mkpath($cc_dir);
        mkpath("$cc_dir/info");
        my $dats = `find . -print | grep .gcda`;
        my %dats = map { $_ => 1 } split '\n', $dats;
        # First pass: discard unwanted data files, and remember one .gcno
        # path per basename for the fixup pass below.
        my %gcnos;
        foreach my $dat (sort keys %dats) {
            (my $gcno = $dat) =~ s!\.gcda$!.gcno!;
            if ($dat =~ /$Remove_Gcda_Regexp/) {
                # Remove .gcda/.gcno for files we don't care about before we slowly
                # read them
                unlink $dat;
                unlink $gcno;
                delete $dats{$dat};
                next;
            }
            (my $gbase = $gcno) =~ s!.*/!!;
            if (!$gcnos{$gbase} && -r $gcno) {
                $gcnos{$gbase} = $gcno;
            }
        }
        # We need a matching .gcno for every .gcda, try to find a matching file elsewhere
        foreach my $dat (sort keys %dats) {
            (my $gcno = $dat) =~ s!\.gcda$!.gcno!;
            (my $gbase = $gcno) =~ s!.*/!!;
            if (!-r $gcno) {
                if ($gcnos{$gbase}) {
                    cp($gcnos{$gbase}, $gcno);
                } else {
                    warn "MISSING .gcno for a .gcda: $gcno\n";
                }
            }
        }
        travis_fold_end();
    }

    if ($Opt_Stages{5} && $Opt_Fastcov) {
        travis_fold_start("fastcov");
        # Must run in root directory to find all files
        mkpath($cc_dir);
        run("${RealBin}/fastcov.py -X --lcov --exclude /usr -o ${cc_dir}/app_fastcov.info");
        travis_fold_end();
    }

    if ($Opt_Stages{5} && !$Opt_Fastcov) {
        travis_fold_start("infos");
        print "Stage 5: make infos\n";
        my $dats = `find . -print | grep .gcda`;
        my %dirs;
        foreach my $dat (split '\n', $dats) {
            (my $dir = $dat) =~ s!/[^/]+$!!;
            $dirs{$dir} = 1;
        }
        # Capture each directory in parallel; the lcov runs are independent.
        foreach my $dir (sort keys %dirs) {
            (my $outname = $dir) =~ s![^a-zA-Z0-9]+!_!g;
            $Fork->schedule(run_on_start => sub {
                # .info may be empty, so ignore errors (unfortunately)
                run("cd $cc_dir/info ; lcov -c -d ../../../../$dir --exclude /usr -o app_test_${outname}.info || true");
            })->run;
        }
        $Fork->wait_all;
        travis_fold_end();
    }

    if ($Opt_Stages{6}) {
        travis_fold_start("clone");
        # lcov doesn't have a control file to override single lines, so replicate the sources
        print "Stage 6: Clone sources under $cc_dir\n";
        clone_sources($cc_dir);
        travis_fold_end();
    }

    if ($Opt_Stages{8}) {
        travis_fold_start("copy");
        print "Stage 8: Copy .gcno files\n";
        my $dats = `find . -print | grep .gcno`;
        foreach my $dat (sort (split '\n', $dats)) {
            next if $dat =~ /$cc_dir/;  # Skip files already under the clone tree
            my $outdat = $cc_dir."/".$dat;
            cp($dat, $outdat);
        }
        travis_fold_end();
    }

    if ($Opt_Stages{10}) {
        travis_fold_start("combine");
        print "Stage 10: Combine data files\n";
        run("cd $cc_dir ; lcov -c -i -d src/obj_dbg -o app_base.info");
        run("cd $cc_dir ; lcov -a app_base.info -o app_total.info");
        if ($Opt_Fastcov) {
            run("cd $cc_dir ; lcov -a app_base.info -a app_fastcov.info -o app_total.info");
        } else {
            my $infos = `cd $cc_dir ; find info -print | grep .info`;
            my @infos = (sort (split /\n/, $infos));
            my $comb = "";
            foreach my $info (@infos) {
                $comb .= " -a $info";
                # Need to batch them to avoid overrunning shell command length limit
                if (length($comb) > 10000 || $info eq $infos[$#infos]) {
                    # .info may be empty, so ignore errors (unfortunately)
                    run("cd $cc_dir ; lcov -a app_total.info $comb -o app_total.info || true");
                    $comb = "";
                }
            }
        }
        travis_fold_end();
    }

    if ($Opt_Stages{11}) {
        travis_fold_start("dirs");
        print "Stage 11: Cleanup paths\n";
        cleanup_abs_paths($cc_dir, "$cc_dir/app_total.info", "$cc_dir/app_total.info");
        travis_fold_end();
    }

    if ($Opt_Stages{12}) {
        travis_fold_start("filter");
        print "Stage 12: Filter processed source files\n";
        my $cmd = '';
        foreach my $glob (@Remove_Sources) {
            $cmd .= " '$glob'";
        }
        run("cd $cc_dir ; lcov --remove app_total.info $cmd -o app_total.info");
        travis_fold_end();
    }

    if ($Opt_Stages{17}) {
        travis_fold_start("report");
        print "Stage 17: Create HTML\n";
        run("cd $cc_dir ; genhtml app_total.info --demangle-cpp"
            ." --rc lcov_branch_coverage=1 --rc genhtml_hi_limit=100 --output-directory html");
        travis_fold_end();
    }

    if ($Opt_Stages{18}) {
        travis_fold_start("upload");
        print "Stage 18: Upload\n";
        my $cmd = "bash <(curl -s https://codecov.io/bash) -f $cc_dir/app_total.info";
        # Deliberately only print the upload commands; run them manually.
        print "print: Not running: export CODECOV_TOKEN=<hidden>\n";
        print "print: Not running: $cmd\n";
        travis_fold_end();
    }

    if ($Opt_Stages{19}) {
        print "*-* All Finished *-*\n";
    }
}
|
|
|
|
|
|
|
|
sub clone_sources {
    # Copy every source file matching @Source_Globs underneath $cc_dir,
    # appending an "LCOV_EXCL_LINE" comment to each line that matches
    # $Exclude_Line_Regexp, so lcov excludes it in the cloned tree.
    my ($cc_dir) = @_;
    my $excluded_lines = 0;  # How many lines were auto-annotated (reported at end)
    foreach my $glob (@Source_Globs) {
        foreach my $infile (glob $glob) {
            $infile !~ m!^/!
                or die "%Error: source globs should be relative not absolute filenames,";
            my $outfile = $cc_dir."/".$infile;
            (my $outpath = $outfile) =~ s!/[^/]*$!!;  # Directory that will hold the clone
            mkpath($outpath);
            my $ifh = IO::File->new("<$infile") or die "%Error: $! $infile,";
            my $ofh = IO::File->new(">$outfile") or die "%Error: $! $outfile,";
            my $lineno = 0;  # Kept for debugging annotated lines
            while (defined(my $line = $ifh->getline)) {
                $lineno++;
                chomp $line;
                # Annotate only lines that don't already carry an exclusion marker
                if ($line !~ m!// LCOV_EXCL_LINE!
                    && $line =~ /$Exclude_Line_Regexp/) {
                    $line .= " //code_coverage: // LCOV_EXCL_LINE";
                    $excluded_lines++;
                }
                $ofh->print("$line\n");
            }
        }
    }
    print "Source code lines automatically LCOV_EXCL_LINE'ed: $excluded_lines\n";
}
|
|
|
|
|
|
|
|
sub cleanup_abs_paths {
    # Rewrite the "SF:" (source file) records of an lcov .info file so paths
    # become relative: strip the $VERILATOR_ROOT prefix and the clone-dir
    # prefix, and map the generated obj_dbg/verilog.y back to verilog.y.
    # NOTE(review): assumes VERILATOR_ROOT is set in the environment — if it
    # is unset the substitution interpolates an empty string; confirm callers.
    my ($cc_dir, $infile, $outfile) = @_;
    my $ifh = IO::File->new("<$infile") or die "%Error: $! $infile,";
    my @lines;
    while (defined(my $line = $ifh->getline)) {
        if ($line =~ m!^SF:!) {
            $line =~ s!$ENV{VERILATOR_ROOT}/!!;
            $line =~ s!$cc_dir/!!;
            $line =~ s!obj_dbg/verilog.y$!verilog.y!;
        }
        push @lines, $line;
    }
    # Output is opened only after reading completes, so $infile and $outfile
    # may safely name the same file (the callers rewrite in place).
    my $ofh = IO::File->new(">$outfile") or die "%Error: $! $outfile,";
    $ofh->print(@lines);
}
|
|
|
|
|
|
|
|
#######################################################################
|
|
|
|
# .dat file callbacks
|
|
|
|
|
|
|
|
# .dat-file callback: set the regexp of source lines that clone_sources()
# should tag with an LCOV_EXCL_LINE comment.
sub exclude_line_regexp {
    ($Exclude_Line_Regexp) = @_;
}
|
2020-05-16 10:15:25 +00:00
|
|
|
# .dat-file callback: set the regexp of .gcda files to delete (stage 4)
# before any data processing.
sub remove_gcda_regexp {
    ($Remove_Gcda_Regexp) = @_;
}
|
2019-07-01 02:37:03 +00:00
|
|
|
# .dat-file callback: accumulate source globs to strip from the final
# report via "lcov --remove" (stage 12).
sub remove_source {
    push @Remove_Sources, @_;
}
|
|
|
|
# .dat-file callback: accumulate globs of source files that
# clone_sources() should replicate and annotate.
sub source_globs {
    push @Source_Globs, @_;
}
|
|
|
|
|
|
|
|
#######################################################################
|
|
|
|
|
|
|
|
sub run {
    # Echo a shell command, execute it, and die unless it exits with status 0.
    # The command is passed as a single string on purpose: callers rely on
    # shell features (cd, ;, |, ||).
    my ($command) = @_;
    print "\t$command\n";
    system $command;
    my $status = $?;
    ($status == 0) or die "%Error: Command Failed $command, $status, stopped";
}
|
|
|
|
|
2020-05-16 10:15:25 +00:00
|
|
|
# Emit travis_fold markers so Travis CI collapses each stage's log output.
our $_Travis_Action;  # Name of the fold currently open

sub travis_fold_start {
    ($_Travis_Action) = @_;
    print "travis_fold:start:$_Travis_Action\n";
}

sub travis_fold_end {
    # Closes the most recently started fold (folds do not nest here).
    print "travis_fold:end:$_Travis_Action\n";
}
|
|
|
|
|
2019-07-01 02:37:03 +00:00
|
|
|
#######################################################################
|
|
|
|
__END__
|
|
|
|
|
|
|
|
=pod
|
|
|
|
|
|
|
|
=head1 NAME
|
|
|
|
|
|
|
|
code_coverage - Build and collect Verilator coverage
|
|
|
|
|
|
|
|
=head1 SYNOPSIS
|
|
|
|
|
|
|
|
cd $VERILATOR_ROOT
|
|
|
|
nodist/code_coverage
|
|
|
|
|
|
|
|
=head1 DESCRIPTION
|
|
|
|
|
|
|
|
code_coverage builds Verilator with C++ coverage support and runs tests
|
|
|
|
with coverage enabled.
|
|
|
|
|
|
|
|
This will rebuild the current object files.
|
|
|
|
|
|
|
|
=head1 ARGUMENTS
|
|
|
|
|
|
|
|
=over 4
|
|
|
|
|
2020-05-20 21:39:32 +00:00
|
|
|
=item --hashset I<hashset>
|
|
|
|
|
|
|
|
Pass test hashset onto driver.pl test harness.
|
|
|
|
|
2019-07-01 02:37:03 +00:00
|
|
|
=item --help
|
|
|
|
|
|
|
|
Displays this message and program version and exits.
|
|
|
|
|
2020-05-18 22:34:26 +00:00
|
|
|
=item --scenarios I<scenarios>
|
2019-07-01 02:37:03 +00:00
|
|
|
|
2020-05-18 22:34:26 +00:00
|
|
|
Pass test scenarios onto driver.pl test harness.
|
|
|
|
|
|
|
|
=item --stages I<stage>
|
|
|
|
|
|
|
|
Runs a specific stage or range of stages (see the script).
|
|
|
|
|
2020-05-23 13:16:58 +00:00
|
|
|
=item --no-stop
|
2020-05-18 22:34:26 +00:00
|
|
|
|
2020-05-23 13:16:58 +00:00
|
|
|
Do not stop collecting data if tests fail.
|
2019-07-01 02:37:03 +00:00
|
|
|
|
|
|
|
=back
|
|
|
|
|
|
|
|
=head1 DISTRIBUTION
|
|
|
|
|
2020-03-21 15:24:24 +00:00
|
|
|
Copyright 2019-2020 by Wilson Snyder. This program is free software; you
|
2019-07-01 02:37:03 +00:00
|
|
|
can redistribute it and/or modify it under the terms of either the GNU
|
|
|
|
Lesser General Public License Version 3 or the Perl Artistic License
|
|
|
|
Version 2.0.
|
|
|
|
|
2020-03-21 15:24:24 +00:00
|
|
|
SPDX-License-Identifier: LGPL-3.0-only OR Artistic-2.0
|
|
|
|
|
2019-07-01 02:37:03 +00:00
|
|
|
=head1 AUTHORS
|
|
|
|
|
|
|
|
Wilson Snyder <wsnyder@wsnyder.org>
|
|
|
|
|
|
|
|
=head1 SEE ALSO
|
|
|
|
|
|
|
|
C<lcov>
|
|
|
|
|
|
|
|
=cut
|
|
|
|
|
|
|
|
######################################################################
|
|
|
|
### Local Variables:
|
|
|
|
### compile-command: "cd .. ; nodist/code_coverage "
|
|
|
|
### End:
|