Mirror of https://github.com/verilator/verilator.git
Separate tracing of const values from non-const values
Some values emitted to the trace files are constant (e.g. actual parameter values) and never change. Previously these were traced in the 'full' dumps, which also included all other truly variable signals. This patch introduces a new generated trace function flavour, 'const', to complement the 'full' and 'chg' flavours: 'const' now contains only the constant signals, while 'full' and 'chg' contain only the truly variable signals. The generated 'full' and 'chg' trace functions now have exactly the same shape. Note that 'const' signals are still traced using the 'full*' dump methods of the trace buffers, so there is no need for a third flavour of those.
parent 774c10396c
commit 165a2ef1b8
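At runtime the new dispatch is simple: constant signals are written exactly once, on the first dump after a file is opened; that first dump is otherwise a full dump; every later dump is incremental. The following toy program is only an illustration of that ordering, not Verilator code; the ToyTrace type and its callback lists are made up for this sketch, but the constDump/fullDump flags mirror the m_constDump/m_fullDump members added in this patch.

// Toy model of the const/full/chg dispatch (illustration only, not Verilator source).
#include <cstdint>
#include <functional>
#include <iostream>
#include <vector>

struct ToyTrace {
    std::vector<std::function<void()>> constCbs;  // plays the role of m_constCbs
    std::vector<std::function<void()>> fullCbs;   // plays the role of m_fullCbs
    std::vector<std::function<void()>> chgCbs;    // plays the role of m_chgCbs
    bool constDump = true;  // like m_constDump: set whenever a dump file is (re)opened
    bool fullDump = true;   // like m_fullDump

    void dump(uint64_t timeui) {
        std::cout << "#" << timeui << "\n";
        if (constDump) {  // constants are emitted exactly once per file
            constDump = false;
            for (const auto& cb : constCbs) cb();
        }
        if (fullDump) {  // the first dump of a file is a full dump...
            fullDump = false;
            for (const auto& cb : fullCbs) cb();
        } else {  // ...all later dumps are incremental
            for (const auto& cb : chgCbs) cb();
        }
    }
};

int main() {
    ToyTrace t;
    t.constCbs.push_back([] { std::cout << "  const: parameter value, never changes\n"; });
    t.fullCbs.push_back([] { std::cout << "  full:  every variable signal\n"; });
    t.chgCbs.push_back([] { std::cout << "  chg:   only signals that changed\n"; });
    t.dump(0);   // emits the const and full sections
    t.dump(10);  // emits only the chg section
    return 0;
}

In the real change the same three-way split appears in three places visible in the diff below: the runtime (the addConstCb registration functions and the m_constDump check in dump()), the new VTraceType enumeration in src/V3Ast.h, and the trace_const_top_/trace_const_sub_ functions that V3Trace.cpp now creates alongside the full and chg ones.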
@@ -104,6 +104,7 @@ void VerilatedFst::open(const char* filename) VL_MT_SAFE_EXCLUDES(m_mutex) {
fstWriterSetPackType(m_fst, FST_WR_PT_LZ4);
fstWriterSetTimescaleFromString(m_fst, timeResStr().c_str()); // lintok-begin-on-ref
if (m_useFstWriterThread) fstWriterSetParallelMode(m_fst, 1);
constDump(true); // First dump must contain the const signals
fullDump(true); // First dump must be full for fst

m_curScope.clear();
@@ -212,11 +212,14 @@ protected:
private:
std::vector<bool> m_sigs_enabledVec; // Staging for m_sigs_enabledp
std::vector<CallbackRecord> m_initCbs; // Routines to initialize tracing
std::vector<CallbackRecord> m_constCbs; // Routines to perform const dump
std::vector<CallbackRecord> m_constOffloadCbs; // Routines to perform offloaded const dump
std::vector<CallbackRecord> m_fullCbs; // Routines to perform full dump
std::vector<CallbackRecord> m_fullOffloadCbs; // Routines to perform offloaded full dump
std::vector<CallbackRecord> m_chgCbs; // Routines to perform incremental dump
std::vector<CallbackRecord> m_chgOffloadCbs; // Routines to perform offloaded incremental dump
std::vector<CallbackRecord> m_cleanupCbs; // Routines to call at the end of dump
bool m_constDump = true; // Whether a const dump is required on the next call to 'dump'
bool m_fullDump = true; // Whether a full dump is required on the next call to 'dump'
uint32_t m_nextCode = 0; // Next code number to assign
uint32_t m_numSignals = 0; // Number of distinct signals
@@ -289,6 +292,7 @@ protected:
uint32_t nextCode() const { return m_nextCode; }
uint32_t numSignals() const { return m_numSignals; }
uint32_t maxBits() const { return m_maxBits; }
void constDump(bool value) { m_constDump = value; }
void fullDump(bool value) { m_fullDump = value; }

double timeRes() const { return m_timeRes; }
@@ -359,6 +363,8 @@ public:

void addModel(VerilatedModel*) VL_MT_SAFE_EXCLUDES(m_mutex);
void addInitCb(initCb_t cb, void* userp) VL_MT_SAFE;
void addConstCb(dumpCb_t cb, void* userp) VL_MT_SAFE;
void addConstCb(dumpOffloadCb_t cb, void* userp) VL_MT_SAFE;
void addFullCb(dumpCb_t cb, void* userp) VL_MT_SAFE;
void addFullCb(dumpOffloadCb_t cb, void* userp) VL_MT_SAFE;
void addChgCb(dumpCb_t cb, void* userp) VL_MT_SAFE;
@@ -610,6 +610,15 @@ void VerilatedTrace<VL_SUB_T, VL_BUF_T>::dump(uint64_t timeui) VL_MT_SAFE_EXCLUD
}
}

if (VL_UNLIKELY(m_constDump)) {
m_constDump = false;
if (offload()) {
runOffloadedCallbacks(m_constOffloadCbs);
} else {
runCallbacks(m_constCbs);
}
}

for (uint32_t i = 0; i < m_cleanupCbs.size(); ++i) {
const CallbackRecord& cbr = m_cleanupCbs[i];
cbr.m_cleanupCb(cbr.m_userp, self());
@@ -695,6 +704,14 @@ void VerilatedTrace<VL_SUB_T, VL_BUF_T>::addInitCb(initCb_t cb, void* userp) VL_
addCallbackRecord(m_initCbs, CallbackRecord{cb, userp});
}
template <>
void VerilatedTrace<VL_SUB_T, VL_BUF_T>::addConstCb(dumpCb_t cb, void* userp) VL_MT_SAFE {
addCallbackRecord(m_constCbs, CallbackRecord{cb, userp});
}
template <>
void VerilatedTrace<VL_SUB_T, VL_BUF_T>::addConstCb(dumpOffloadCb_t cb, void* userp) VL_MT_SAFE {
addCallbackRecord(m_constOffloadCbs, CallbackRecord{cb, userp});
}
template <>
void VerilatedTrace<VL_SUB_T, VL_BUF_T>::addFullCb(dumpCb_t cb, void* userp) VL_MT_SAFE {
addCallbackRecord(m_fullCbs, CallbackRecord{cb, userp});
}
@@ -164,6 +164,7 @@ void VerilatedVcd::openNextImp(bool incFilename) {
}
}
m_isOpen = true;
constDump(true); // First dump must contain the const signals
fullDump(true); // First dump must be full
m_wroteBytes = 0;
}
src/V3Ast.h (42 changed lines)
@@ -1247,6 +1247,38 @@ inline std::ostream& operator<<(std::ostream& os, const VUseType& rhs) {
return os << rhs.ascii();
}

//######################################################################

class VTraceType final {
public:
enum en : uint8_t {
CONSTANT, // Constant value dump (once at the beginning)
FULL, // Full value dump (always emitted)
CHANGE // Incremental value dump (emitted only if the value changed)
};
enum en m_e;
VTraceType()
: m_e{CONSTANT} {}
// cppcheck-suppress noExplicitConstructor
constexpr VTraceType(en _e)
: m_e{_e} {}
explicit VTraceType(int _e)
: m_e(static_cast<en>(_e)) {} // Need () or GCC 4.8 false warning
constexpr operator en() const { return m_e; }
const char* ascii() const {
static const char* const names[] = {"CONSTANT", "FULL", "CHANGE"};
return names[m_e];
}
};
constexpr bool operator==(const VTraceType& lhs, const VTraceType& rhs) {
return lhs.m_e == rhs.m_e;
}
constexpr bool operator==(const VTraceType& lhs, VTraceType::en rhs) { return lhs.m_e == rhs; }
constexpr bool operator==(VTraceType::en lhs, const VTraceType& rhs) { return lhs == rhs.m_e; }
inline std::ostream& operator<<(std::ostream& os, const VTraceType& rhs) {
return os << rhs.ascii();
}

// ######################################################################

class VCastable final {
@@ -2656,10 +2688,16 @@ public:
template <typename U>
// cppcheck-suppress noExplicitConstructor
VNRef(U&& x)
: std::reference_wrapper<T_Node>{x} {}
: std::reference_wrapper<T_Node> {
x
}
{}
// cppcheck-suppress noExplicitConstructor
VNRef(const std::reference_wrapper<T_Node>& other)
: std::reference_wrapper<T_Node>{other} {}
: std::reference_wrapper<T_Node> {
other
}
{}
};

static_assert(sizeof(VNRef<AstNode>) == sizeof(std::reference_wrapper<AstNode>),
@@ -3243,14 +3243,14 @@ class AstTraceInc final : public AstNodeStmt {
private:
AstTraceDecl* m_declp; // Pointer to declaration
const uint32_t m_baseCode; // Trace code base value in function containing this AstTraceInc
const bool m_full; // Is this a full vs incremental dump
const VTraceType m_traceType; // Is this a const/full/incremental dump

public:
AstTraceInc(FileLine* fl, AstTraceDecl* declp, bool full, uint32_t baseCode = 0)
AstTraceInc(FileLine* fl, AstTraceDecl* declp, VTraceType traceType, uint32_t baseCode = 0)
: ASTGEN_SUPER_TraceInc(fl)
, m_declp{declp}
, m_baseCode{baseCode}
, m_full{full} {
, m_traceType{traceType} {
dtypeFrom(declp);
this->valuep(
declp->valuep()->cloneTree(true)); // TODO: maybe use reference to TraceDecl instead?
@@ -3274,7 +3274,7 @@ public:
bool isOutputter() override { return true; }
// but isPure() true
AstTraceDecl* declp() const { return m_declp; }
bool full() const { return m_full; }
VTraceType traceType() const { return m_traceType; }
uint32_t baseCode() const { return m_baseCode; }
};
class AstTracePopNamePrefix final : public AstNodeStmt {
@@ -769,7 +769,8 @@ class EmitCTrace final : EmitCFunc {

void emitTraceChangeOne(AstTraceInc* nodep, int arrayindex) {
iterateAndNextConstNull(nodep->precondsp());
const string func = nodep->full() ? "full" : "chg";
// Note: Both VTraceType::CONSTANT and VTraceType::FULL use the 'full' methods
const std::string func = nodep->traceType() == VTraceType::CHANGE ? "chg" : "full";
bool emitWidth = true;
if (nodep->dtypep()->basicp()->isDouble()) {
puts("bufp->" + func + "Double");
@@ -794,7 +795,10 @@ class EmitCTrace final : EmitCFunc {

const uint32_t offset = (arrayindex < 0) ? 0 : (arrayindex * nodep->declp()->widthWords());
const uint32_t code = nodep->declp()->code() + offset;
puts(v3Global.opt.useTraceOffload() && !nodep->full() ? "(base+" : "(oldp+");
// Note: Both VTraceType::CONSTANT and VTraceType::FULL use the 'full' methods
puts(v3Global.opt.useTraceOffload() && nodep->traceType() == VTraceType::CHANGE
? "(base+"
: "(oldp+");
puts(cvtToStr(code - nodep->baseCode()));
puts(",");
emitTraceValue(nodep, arrayindex);
src/V3Trace.cpp (231 changed lines)
@@ -30,7 +30,7 @@
// Finally:
// Process graph to determine when traced variables can change, allocate
// activity flags, insert nodes to set activity flags, allocate signal
// numbers (codes), and construct the full and incremental trace
// numbers (codes), and construct the const, full and incremental trace
// functions, together with all other trace support functions.
//
//*************************************************************************
@@ -319,11 +319,10 @@ private:
return activityNumber;
}

void sortTraces(TraceVec& traces, uint32_t& nFullCodes, uint32_t& nChgCodes) {
void sortTraces(TraceVec& traces, uint32_t& nNonConstCodes) {
// Populate sort structure
traces.clear();
nFullCodes = 0;
nChgCodes = 0;
nNonConstCodes = 0;
for (V3GraphVertex* itp = m_graph.verticesBeginp(); itp; itp = itp->verticesNextp()) {
if (TraceTraceVertex* const vtxp = itp->cast<TraceTraceVertex>()) {
ActCodeSet actSet;
@@ -347,11 +346,8 @@ private:
|| actSet.size() == 1,
vtxp->nodep(), "Always active trace has further triggers");
// Count nodes
if (!vtxp->duplicatep()) {
const uint32_t inc = vtxp->nodep()->codeInc();
nFullCodes += inc;
if (!actSet.empty()) nChgCodes += inc;
}
if (!vtxp->duplicatep() && !actSet.empty())
nNonConstCodes += vtxp->nodep()->codeInc();
if (actSet.empty()) {
// If a trace doesn't have activity, it's constant, and we
// don't need to track changes on it.
@@ -374,8 +370,7 @@ private:
// Sort the traces by activity sets
TraceVec traces;
uint32_t unused1;
uint32_t unused2;
sortTraces(traces, unused1, unused2);
sortTraces(traces, unused1);

// For each activity set with only a small number of signals, make those
// signals always traced, as it's cheaper to check a few value changes
@@ -482,54 +477,62 @@ private:
}
}

AstCFunc* newCFunc(bool full, AstCFunc* topFuncp, int& funcNump, uint32_t baseCode = 0) {
AstCFunc* newCFunc(VTraceType traceType, AstCFunc* topFuncp, unsigned funcNum,
uint32_t baseCode = 0) {
// Create new function
const bool isTopFunc = topFuncp == nullptr;
const string baseName = full && isTopFunc ? "trace_full_top_"
: full ? "trace_full_sub_"
: isTopFunc ? "trace_chg_top_"
: "trace_chg_sub_";
std::string baseName = "trace_";
if (traceType == VTraceType::CONSTANT) {
baseName += "const_";
} else if (traceType == VTraceType::FULL) {
baseName += "full_";
} else {
baseName += "chg_";
}
baseName += isTopFunc ? "top_" : "sub_";

FileLine* const flp = m_topScopep->fileline();
AstCFunc* const funcp = new AstCFunc{flp, baseName + cvtToStr(funcNump++), m_topScopep};
AstCFunc* const funcp = new AstCFunc{flp, baseName + cvtToStr(funcNum), m_topScopep};
funcp->isTrace(true);
funcp->dontCombine(true);
funcp->isLoose(true);
funcp->slow(full);
funcp->slow(traceType != VTraceType::CHANGE);
funcp->isStatic(isTopFunc);
// Add it to top scope
m_topScopep->addBlocksp(funcp);
const auto addInitStr = [funcp, flp](const string& str) -> void {
funcp->addInitsp(new AstCStmt{flp, str});
};
const std::string bufArg
= v3Global.opt.traceClassBase()
+ "::" + (v3Global.opt.useTraceOffload() ? "OffloadBuffer" : "Buffer") + "* bufp";
if (isTopFunc) {
// Top functions
funcp->argTypes("void* voidSelf, " + v3Global.opt.traceClassBase()
+ "::" + (v3Global.opt.useTraceOffload() ? "OffloadBuffer" : "Buffer")
+ "* bufp");
funcp->argTypes("void* voidSelf, " + bufArg);
addInitStr(EmitCBase::voidSelfAssign(m_topModp));
addInitStr(EmitCBase::symClassAssign());
// Add global activity check to change dump functions
if (!full) { //
if (traceType == VTraceType::CHANGE) { //
addInitStr("if (VL_UNLIKELY(!vlSymsp->__Vm_activity)) return;\n");
}
// Register function
if (full) {
m_regFuncp->addStmtsp(new AstText{flp, "tracep->addFullCb(", true});
std::string str;
if (traceType == VTraceType::CONSTANT) {
str = "tracep->addConstCb(";
} else if (traceType == VTraceType::FULL) {
str = "tracep->addFullCb(";
} else {
m_regFuncp->addStmtsp(new AstText{flp, "tracep->addChgCb(", true});
str = "tracep->addChgCb(";
}
m_regFuncp->addStmtsp(new AstText{flp, str, true});
m_regFuncp->addStmtsp(new AstAddrOfCFunc{flp, funcp});
m_regFuncp->addStmtsp(new AstText{flp, ", vlSelf", true});
m_regFuncp->addStmtsp(new AstText{flp, ");\n", true});
m_regFuncp->addStmtsp(new AstText{flp, ", vlSelf);\n", true});
} else {
// Sub functions
funcp->argTypes(v3Global.opt.traceClassBase()
+ "::" + +(v3Global.opt.useTraceOffload() ? "OffloadBuffer" : "Buffer")
+ "* bufp");
funcp->argTypes(bufArg);
// Setup base references. Note in rare occasions we can end up with an empty trace
// sub function, hence the VL_ATTR_UNUSED attributes.
if (full) {
if (traceType != VTraceType::CHANGE) {
// Full dump sub function
addInitStr("uint32_t* const oldp VL_ATTR_UNUSED = "
"bufp->oldp(vlSymsp->__Vm_baseCode);\n");
@@ -557,104 +560,96 @@ private:
return funcp;
}

void createFullTraceFunction(const TraceVec& traces, uint32_t nAllCodes,
uint32_t parallelism) {
void createConstTraceFunctions(const TraceVec& traces) {
const int splitLimit = v3Global.opt.outputSplitCTrace() ? v3Global.opt.outputSplitCTrace()
: std::numeric_limits<int>::max();

int topFuncNum = 0;
int subFuncNum = 0;
auto it = traces.cbegin();
while (it != traces.cend()) {
AstCFunc* topFuncp = nullptr;
AstCFunc* subFuncp = nullptr;
int subStmts = 0;
const uint32_t maxCodes = (nAllCodes + parallelism - 1) / parallelism;
uint32_t nCodes = 0;
for (; nCodes < maxCodes && it != traces.end(); ++it) {
const TraceTraceVertex* const vtxp = it->second;
AstTraceDecl* const declp = vtxp->nodep();
if (const TraceTraceVertex* const canonVtxp = vtxp->duplicatep()) {
// This is a duplicate trace node. We will assign the signal
// number to the canonical node, and emit this as an alias, so
// no need to create a TraceInc node.
const AstTraceDecl* const canonDeclp = canonVtxp->nodep();
UASSERT_OBJ(!canonVtxp->duplicatep(), canonDeclp,
"Canonical node is a duplicate");
UASSERT_OBJ(canonDeclp->code() != 0, canonDeclp,
"Canonical node should have code assigned already");
declp->code(canonDeclp->code());
} else {
// This is a canonical trace node. Assign signal number and
// add a TraceInc node to the full dump function.
UASSERT_OBJ(declp->code() == 0, declp,
"Canonical node should not have code assigned yet");
declp->code(m_code);
m_code += declp->codeInc();
m_statUniqCodes += declp->codeInc();
++m_statUniqSigs;

// Create top function if not yet created
if (!topFuncp) { topFuncp = newCFunc(/* full: */ true, nullptr, topFuncNum); }

// Create new sub function if required
if (!subFuncp || subStmts > splitLimit) {
subStmts = 0;
subFuncp = newCFunc(/* full: */ true, topFuncp, subFuncNum);
}

// Add TraceInc node
AstTraceInc* const incp
= new AstTraceInc{declp->fileline(), declp, /* full: */ true};
subFuncp->addStmtsp(incp);
subStmts += incp->nodeCount();

// Track partitioning
nCodes += declp->codeInc();
}
AstCFunc* const topFuncp = newCFunc(VTraceType::CONSTANT, nullptr, 0);
unsigned subFuncNum = 0;
AstCFunc* subFuncp = nullptr;
int subStmts = 0;
for (auto it = traces.cbegin(); it != traces.end(); ++it) {
const TraceTraceVertex* const vtxp = it->second;
AstTraceDecl* const declp = vtxp->nodep();
if (const TraceTraceVertex* const canonVtxp = vtxp->duplicatep()) {
// This is a duplicate trace node. We will assign the signal
// number to the canonical node, and emit this as an alias, so
// no need to create a TraceInc node.
const AstTraceDecl* const canonDeclp = canonVtxp->nodep();
UASSERT_OBJ(!canonVtxp->duplicatep(), canonDeclp, "Canonical node is a duplicate");
UASSERT_OBJ(canonDeclp->code() != 0, canonDeclp,
"Canonical node should have code assigned already");
declp->code(canonDeclp->code());
continue;
}
if (topFuncp) { // might be nullptr if all trailing entries were duplicates
UINFO(5, "trace_full_top" << topFuncNum - 1 << " codes: " << nCodes << "/"
<< maxCodes << endl);

// This is a canonical trace node. Assign trace code (signal number).
UASSERT_OBJ(declp->code() == 0, declp,
"Canonical node should not have code assigned yet");
declp->code(m_code);
m_code += declp->codeInc();
m_statUniqCodes += declp->codeInc();
++m_statUniqSigs;

// If this is a const signal, add the AstTraceInc
const ActCodeSet& actSet = it->first;
if (actSet.count(TraceActivityVertex::ACTIVITY_NEVER)) {
// Create new sub function if required
if (!subFuncp || subStmts > splitLimit) {
subStmts = 0;
subFuncp = newCFunc(VTraceType::CONSTANT, topFuncp, subFuncNum);
++subFuncNum;
}
FileLine* const flp = declp->fileline();
AstTraceInc* const incp = new AstTraceInc{flp, declp, VTraceType::CONSTANT};
subFuncp->addStmtsp(incp);
subStmts += incp->nodeCount();
}
}
}

void createChgTraceFunctions(const TraceVec& traces, uint32_t nAllCodes,
uint32_t parallelism) {
void createNonConstTraceFunctions(const TraceVec& traces, uint32_t nAllCodes,
uint32_t parallelism) {
const int splitLimit = v3Global.opt.outputSplitCTrace() ? v3Global.opt.outputSplitCTrace()
: std::numeric_limits<int>::max();
int topFuncNum = 0;
int subFuncNum = 0;
unsigned topFuncNum = 0;
unsigned subFuncNum = 0;
TraceVec::const_iterator it = traces.begin();
while (it != traces.end()) {
AstCFunc* topFuncp = nullptr;
AstCFunc* subFuncp = nullptr;
AstCFunc* topFulFuncp = nullptr;
AstCFunc* topChgFuncp = nullptr;
AstCFunc* subFulFuncp = nullptr;
AstCFunc* subChgFuncp = nullptr;
int subStmts = 0;
uint32_t maxCodes = (nAllCodes + parallelism - 1) / parallelism;
if (maxCodes < 1) maxCodes = 1;
const uint32_t maxCodes = std::max((nAllCodes + parallelism - 1) / parallelism, 1U);
uint32_t nCodes = 0;
const ActCodeSet* prevActSet = nullptr;
AstIf* ifp = nullptr;
uint32_t baseCode = 0;
for (; nCodes < maxCodes && it != traces.end(); ++it) {
const TraceTraceVertex* const vtxp = it->second;
// This is a duplicate decl, no need to add it to incremental dump
// This is a duplicate decl, no need to add it
if (vtxp->duplicatep()) continue;
const ActCodeSet& actSet = it->first;
// Traced value never changes, no need to add it to incremental dump
// Traced value never changes, no need to add it
if (actSet.count(TraceActivityVertex::ACTIVITY_NEVER)) continue;

AstTraceDecl* const declp = vtxp->nodep();

// Create top function if not yet created
if (!topFuncp) { topFuncp = newCFunc(/* full: */ false, nullptr, topFuncNum); }
if (!topFulFuncp) {
topFulFuncp = newCFunc(VTraceType::FULL, nullptr, topFuncNum);
topChgFuncp = newCFunc(VTraceType::CHANGE, nullptr, topFuncNum);
++topFuncNum;
}

// Create new sub function if required
if (!subFuncp || subStmts > splitLimit) {
if (!subFulFuncp || subStmts > splitLimit) {
baseCode = declp->code();
subStmts = 0;
subFuncp = newCFunc(/* full: */ false, topFuncp, subFuncNum, baseCode);
subFulFuncp = newCFunc(VTraceType::FULL, topFulFuncp, subFuncNum, baseCode);
subChgFuncp = newCFunc(VTraceType::CHANGE, topChgFuncp, subFuncNum, baseCode);
++subFuncNum;
prevActSet = nullptr;
ifp = nullptr;
}
@@ -674,24 +669,27 @@ private:
}
ifp = new AstIf{flp, condp};
if (!always) ifp->branchPred(VBranchPred::BP_UNLIKELY);
subFuncp->addStmtsp(ifp);
subChgFuncp->addStmtsp(ifp);
subStmts += ifp->nodeCount();
prevActSet = &actSet;
}

// Add TraceInc node
AstTraceInc* const incp
= new AstTraceInc{declp->fileline(), declp, /* full: */ false, baseCode};
ifp->addThensp(incp);
subStmts += incp->nodeCount();
// Add TraceInc nodes
FileLine* const flp = declp->fileline();
AstTraceInc* const incFulp = new AstTraceInc{flp, declp, VTraceType::FULL};
subFulFuncp->addStmtsp(incFulp);
AstTraceInc* const incChgp
= new AstTraceInc{flp, declp, VTraceType::CHANGE, baseCode};
ifp->addThensp(incChgp);

// Track splitting due to size
UASSERT_OBJ(incFulp->nodeCount() == incChgp->nodeCount(), declp,
"Should have equal cost");
subStmts += incChgp->nodeCount();

// Track partitioning
nCodes += declp->codeInc();
}
if (topFuncp) { // might be nullptr if all trailing entries were duplicates/constants
UINFO(5, "trace_chg_top" << topFuncNum - 1 << " codes: " << nCodes << "/"
<< maxCodes << endl);
}
}
}
@@ -743,11 +741,8 @@ private:
TraceVec traces; // The sorted traces
// We will split functions such that each has to dump roughly the same amount of data;
// for this we need to keep track of the number of codes used by the trace functions.
uint32_t nFullCodes = 0; // Number of non-duplicate codes (need to go into full* dump)
uint32_t nChgCodes = 0; // Number of non-constant codes (need to go into chg* dump)
sortTraces(traces, nFullCodes, nChgCodes);

UINFO(5, "nFullCodes: " << nFullCodes << " nChgCodes: " << nChgCodes << endl);
uint32_t nNonConstCodes = 0;
sortTraces(traces, nNonConstCodes);

// Our keys are now sorted to have same activity number adjacent, then
// by trace order. (Better would be execution order for cache
@@ -763,11 +758,11 @@ private:
m_regFuncp->isLoose(true);
m_topScopep->addBlocksp(m_regFuncp);

// Create the full dump functions, also allocates signal numbers
createFullTraceFunction(traces, nFullCodes, m_parallelism);
// Create the const dump functions. Also allocates trace codes.
createConstTraceFunctions(traces);

// Create the incremental dump functions
createChgTraceFunctions(traces, nChgCodes, m_parallelism);
// Create the full and incremental dump functions
createNonConstTraceFunctions(traces, nNonConstCodes, m_parallelism);

// Remove refs to traced values from TraceDecl nodes, these have now moved under
// TraceInc