#!/usr/bin/env perl
# See copyright, etc in below POD section.
######################################################################
use warnings;
use Cwd;
use File::Copy qw(cp);
use File::Path qw(mkpath);
use FindBin qw($RealBin);
use Getopt::Long;
use Parallel::Forker;
use Unix::Processors;
use IO::File;
use Pod::Usage;
use strict;
use vars qw($Debug);
our $Opt_Stop = 1;
our $Opt_Fastcov = 0;
our $Exclude_Line_Regexp;
our $Remove_Gcda_Regexp;
our @Remove_Sources;
our @Source_Globs;
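
# Job scheduler used to run the stage-5 lcov extractions in parallel,
# capped at 1.5x the number of online CPUs.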
our $Fork = new Parallel::Forker(use_sig_child => 1, poll_interval => 10*1000);
$Fork->max_proc(Unix::Processors->new->max_online * 1.5);
$SIG{CHLD} = sub { Parallel::Forker::sig_child($Fork); };
$SIG{TERM} = sub { $Fork->kill_tree_all('TERM') if $Fork && $Fork->in_parent; die "Quitting...\n"; };
#======================================================================
# main
our $Opt_Hashset;
our $opt_stages = '';
our $Opt_Scenarios;
our %Opt_Stages;
autoflush STDOUT 1;
autoflush STDERR 1;
Getopt::Long::config("no_auto_abbrev");
if (! GetOptions(
        "debug" => sub { $Debug = 1; },
        "hashset=s" => \$Opt_Hashset,  # driver.pl hashset
        "<>" => sub { die "%Error: Unknown parameter: $_[0]\n"; },
        "fastcov!" => \$Opt_Fastcov,  # use fastcov, not documented, for debug
        "scenarios=s" => \$Opt_Scenarios,  # driver.pl scenarios
        "stages=s" => \$opt_stages,  # stage number or range to run
        "stop!" => \$Opt_Stop,  # stop/do not stop on error in tests
    )) {
    die "%Error: Bad usage, try 'code_coverage --help'\n";
}
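
# Decode --stages: a single stage ("4"), a closed range ("4-6"), or an
# open-ended range ("-6" or "4-"); the default is to run all stages.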
{
    my $start = 0;
    my $end = 99;
    if ($opt_stages && $opt_stages =~ /^(\d+)$/) {
        $start = $end = $1;
    } elsif ($opt_stages && $opt_stages =~ /^(\d+)-(\d+)$/) {
        $start = $1; $end = $2;
    } elsif ($opt_stages && $opt_stages =~ /^-(\d+)$/) {
        $end = $1;
    } elsif ($opt_stages && $opt_stages =~ /^(\d+)-$/) {
        $start = $1;
    } elsif ($opt_stages) {
        die "%Error: --stages not understood: $opt_stages,";
    }
    for (my $n = $start; $n <= $end; ++$n) { $Opt_Stages{$n} = 1; }
}
test();
exit(0);
#######################################################################
sub test {
    -r "nodist/code_coverage.dat" or die "%Error: Run from the top of the verilator kit,";
    require "./nodist/code_coverage.dat";
    if ($Opt_Stages{1}) {
        travis_fold_start("configure");
        print "Stage 1: configure (coverage on)\n";
        run("make distclean || true");
        run("autoconf");
        run("./configure --enable-longtests CXX='g++ --coverage'");
        travis_fold_end();
    }
    if ($Opt_Stages{2}) {
        travis_fold_start("build");
        print "Stage 2: build\n";
        my $nproc = Unix::Processors->new->max_online;
        run("make -k -j $nproc");
        # The optimized binaries will not collect good coverage; overwrite
        # them with the debug builds
        run("cp bin/verilator_bin_dbg bin/verilator_bin");
        run("cp bin/verilator_coverage_bin_dbg bin/verilator_coverage_bin");
        travis_fold_end();
    }
    if ($Opt_Stages{3}) {
        travis_fold_start("test");
        print "Stage 3: make tests (with coverage on)\n";
        run("make examples")
            if !$Opt_Scenarios || $Opt_Scenarios =~ /dist/i;
        run("make test_regress"
            . ($Opt_Scenarios ? " SCENARIOS='" . $Opt_Scenarios . "'" : "")
            . ($Opt_Hashset ? " DRIVER_HASHSET='--hashset=" . $Opt_Hashset . "'" : "")
            . ($Opt_Stop ? '' : ' || true'));
        travis_fold_end();
    }
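
    # gcov needs a .gcno alongside every .gcda; stage 4 pairs them up,
    # copying a same-named .gcno from elsewhere in the tree if needed.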
    my $cc_dir = "nodist/obj_dir/coverage";
    if ($Opt_Stages{4}) {
        travis_fold_start("gcno");
        print "Stage 4: Create gcno files under $cc_dir\n";
        mkpath($cc_dir);
        mkpath("$cc_dir/info");
        my $dats = `find . -print | grep .gcda`;
        my %dats;
        foreach my $dat (split '\n', $dats) {
            $dats{$dat} = 1;
        }
        my %gcnos;
        foreach my $dat (sort keys %dats) {
            (my $gcno = $dat) =~ s!\.gcda$!.gcno!;
            if ($dat =~ /$Remove_Gcda_Regexp/) {
                # Remove .gcda/.gcno for files we don't care about before
                # we slowly read them
                unlink $dat;
                unlink $gcno;
                delete $dats{$dat};
                next;
            }
            (my $gbase = $gcno) =~ s!.*/!!;
            if (!$gcnos{$gbase} && -r $gcno) {
                $gcnos{$gbase} = $gcno;
            }
        }
        # We need a matching .gcno for every .gcda; try to find a
        # matching file elsewhere
        foreach my $dat (sort keys %dats) {
            (my $gcno = $dat) =~ s!\.gcda$!.gcno!;
            (my $gbase = $gcno) =~ s!.*/!!;
            if (!-r $gcno) {
                if ($gcnos{$gbase}) {
                    cp($gcnos{$gbase}, $gcno);
                } else {
                    warn "MISSING .gcno for a .gcda: $gcno\n";
                }
            }
        }
        travis_fold_end();
    }
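
    # Stage 5 turns the raw gcov data into lcov .info files, either via
    # fastcov in one pass or via parallel per-directory lcov runs.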
    if ($Opt_Stages{5} && $Opt_Fastcov) {
        travis_fold_start("fastcov");
        print "Stage 5: make infos (fastcov)\n";
        # Must run in the root directory to find all files
        mkpath($cc_dir);
        run("${RealBin}/fastcov.py -X --lcov --exclude /usr -o ${cc_dir}/app_fastcov.info");
        travis_fold_end();
    }
    if ($Opt_Stages{5} && !$Opt_Fastcov) {
        travis_fold_start("infos");
        print "Stage 5: make infos\n";
        my $dats = `find . -print | grep .gcda`;
        my %dirs;
        foreach my $dat (split '\n', $dats) {
            (my $dir = $dat) =~ s!/[^/]+$!!;
            $dirs{$dir} = 1;
        }
        foreach my $dir (sort keys %dirs) {
            (my $outname = $dir) =~ s![^a-zA-Z0-9]+!_!g;
            $Fork->schedule(run_on_start => sub {
                # .info may be empty, so ignore errors (unfortunately)
                run("cd $cc_dir/info ; lcov -c -d ../../../../$dir --exclude /usr -o app_test_${outname}.info || true");
            })->run;
        }
        $Fork->wait_all;
        travis_fold_end();
    }
    if ($Opt_Stages{6}) {
        travis_fold_start("clone");
        # lcov doesn't have a control file to override single lines, so
        # replicate the sources
        print "Stage 6: Clone sources under $cc_dir\n";
        clone_sources($cc_dir);
        travis_fold_end();
    }
    if ($Opt_Stages{8}) {
        travis_fold_start("copy");
        print "Stage 8: Copy .gcno files\n";
        my $dats = `find . -print | grep .gcno`;
        foreach my $dat (sort (split '\n', $dats)) {
            next if $dat =~ /$cc_dir/;
            my $outdat = $cc_dir . "/" . $dat;
            #print "cp($dat, $outdat);\n";
            cp($dat, $outdat);
        }
        travis_fold_end();
    }
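
    # Stage 10 seeds app_total.info with an lcov baseline (-i records
    # zero counts for every instrumented line), then merges the per-test
    # .info files into it in batches.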
    if ($Opt_Stages{10}) {
        travis_fold_start("combine");
        print "Stage 10: Combine data files\n";
        run("cd $cc_dir ; lcov -c -i -d src/obj_dbg -o app_base.info");
        run("cd $cc_dir ; lcov -a app_base.info -o app_total.info");
        if ($Opt_Fastcov) {
            run("cd $cc_dir ; lcov -a app_base.info -a app_fastcov.info -o app_total.info");
        } else {
            my $infos = `cd $cc_dir ; find info -print | grep .info`;
            my $comb = "";
            my @infos = (sort (split /\n/, $infos));
            foreach my $info (@infos) {
                $comb .= " -a $info";
                # Need to batch them to avoid overrunning the shell
                # command length limit
                if (length($comb) > 10000 || $info eq $infos[$#infos]) {
                    # .info may be empty, so ignore errors (unfortunately)
                    run("cd $cc_dir ; lcov -a app_total.info $comb -o app_total.info || true");
                    $comb = "";
                }
            }
        }
        travis_fold_end();
    }
    if ($Opt_Stages{11}) {
        travis_fold_start("dirs");
        print "Stage 11: Cleanup paths\n";
        cleanup_abs_paths($cc_dir, "$cc_dir/app_total.info", "$cc_dir/app_total.info");
        travis_fold_end();
    }
    if ($Opt_Stages{12}) {
        travis_fold_start("filter");
        print "Stage 12: Filter processed source files\n";
        my $cmd = '';
        foreach my $glob (@Remove_Sources) {
            $cmd .= " '$glob'";
        }
        run("cd $cc_dir ; lcov --remove app_total.info $cmd -o app_total.info");
        travis_fold_end();
    }
    if ($Opt_Stages{17}) {
        travis_fold_start("report");
        print "Stage 17: Create HTML\n";
        run("cd $cc_dir ; genhtml app_total.info --demangle-cpp"
            . " --rc lcov_branch_coverage=1 --rc genhtml_hi_limit=100 --output-directory html");
        travis_fold_end();
    }
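
    # Stage 18 only prints the upload command; run it by hand with a
    # valid CODECOV_TOKEN exported.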
    if ($Opt_Stages{18}) {
        travis_fold_start("upload");
        print "Stage 18: Upload\n";
        my $cmd = "bash <(curl -s https://codecov.io/bash) -f $cc_dir/app_total.info";
        print "Not running: export CODECOV_TOKEN=<hidden>\n";
        print "Not running: $cmd\n";
        travis_fold_end();
    }
    if ($Opt_Stages{19}) {
        print "*-* All Finished *-*\n";
    }
}
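
# Copy each source file under $cc_dir, tagging lines that match
# $Exclude_Line_Regexp with LCOV_EXCL_LINE so lcov skips them.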
sub clone_sources {
    my $cc_dir = shift;
    my $excluded_lines = 0;
    foreach my $glob (@Source_Globs) {
        foreach my $infile (glob $glob) {
            $infile !~ m!^/!
                or die "%Error: source globs should be relative not absolute filenames,";
            my $outfile = $cc_dir . "/" . $infile;
            (my $outpath = $outfile) =~ s!/[^/]*$!!;
            mkpath($outpath);
            my $fh = IO::File->new("<$infile") or die "%Error: $! $infile,";
            my $ofh = IO::File->new(">$outfile") or die "%Error: $! $outfile,";
            my $lineno = 0;
            while (defined(my $line = $fh->getline)) {
                $lineno++;
                chomp $line;
                if ($line !~ m!// LCOV_EXCL_LINE!
                    && $line =~ /$Exclude_Line_Regexp/) {
                    $line .= " //code_coverage: // LCOV_EXCL_LINE";
                    $excluded_lines++;
                    #print "$infile:$lineno: $line";
                }
                $ofh->print("$line\n");
            }
        }
    }
    print "Source code lines automatically LCOV_EXCL_LINE'ed: $excluded_lines\n";
}
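
# Rewrite SF: entries in an lcov .info file so source paths are relative
# to the top of the kit, for stable genhtml/codecov reports.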
sub cleanup_abs_paths {
    my $cc_dir = shift;
    my $infile = shift;
    my $outfile = shift;
    my $fh = IO::File->new("<$infile") or die "%Error: $! $infile,";
    my @lines;
    while (defined(my $line = $fh->getline)) {
        if ($line =~ m!^SF:!) {
            $line =~ s!$ENV{VERILATOR_ROOT}/!!;
            $line =~ s!$cc_dir/!!;
            $line =~ s!obj_dbg/verilog.y$!verilog.y!;
            #print "Remaining SF: ", $line;
        }
        push @lines, $line;
    }
    my $ofh = IO::File->new(">$outfile") or die "%Error: $! $outfile,";
    $ofh->print(@lines);
}
#######################################################################
# .dat file callbacks
sub exclude_line_regexp {
    $Exclude_Line_Regexp = shift;
}
sub remove_gcda_regexp {
    $Remove_Gcda_Regexp = shift;
}
sub remove_source {
    my @srcs = @_;
    push @Remove_Sources, @srcs;
}
sub source_globs {
    my @dirs = @_;
    push @Source_Globs, @dirs;
}
#######################################################################
sub run {
    # Run a system command, checking for errors
    my $command = shift;
    print "\t$command\n";
    system "$command";
    my $status = $?;
    ($status == 0) or die "%Error: Command failed: $command, $status, stopped";
}

our $_Travis_Action;

sub travis_fold_start {
    $_Travis_Action = shift;
    print "travis_fold:start:$_Travis_Action\n";
}

sub travis_fold_end {
    print "travis_fold:end:$_Travis_Action\n";
}
#######################################################################
__END__

=pod

=head1 NAME

code_coverage - Build and collect Verilator coverage

=head1 SYNOPSIS

  cd $VERILATOR_ROOT
  nodist/code_coverage

=head1 DESCRIPTION

code_coverage builds Verilator with C++ coverage support and runs tests
with coverage enabled.

This will rebuild the current object files.
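
For example, after a full run the reporting stages can be rerun on their
own (stage numbers are listed in the script):

  nodist/code_coverage --stages 10-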
=head1 ARGUMENTS

=over 4

=item --hashset I<hashset>

Pass test hashset onto driver.pl test harness.

=item --help

Displays this message and program version and exits.

=item --scenarios I<scenarios>

Pass test scenarios onto driver.pl test harness.

=item --stages I<stages>

Run a specific stage or range of stages: a single number ("4"), a closed
range ("4-6"), or an open-ended range ("-6" or "4-").

=item --no-stop

Do not stop collecting data if tests fail.

=back

=head1 DISTRIBUTION

Copyright 2019-2020 by Wilson Snyder. This program is free software; you
can redistribute it and/or modify it under the terms of either the GNU
Lesser General Public License Version 3 or the Perl Artistic License
Version 2.0.

SPDX-License-Identifier: LGPL-3.0-only OR Artistic-2.0

=head1 AUTHORS

Wilson Snyder <wsnyder@wsnyder.org>

=head1 SEE ALSO

C<lcov>

=cut
######################################################################
### Local Variables:
### compile-command: "cd .. ; nodist/code_coverage "
### End: