Improve V3Ast function usage ergonomics (#3650)

Signed-off-by: HungMingWu <u9089000@gmail.com>
This commit is contained in:
HungMingWu 2022-10-20 20:48:44 +08:00 committed by Geza Lore
parent 627a144b83
commit 196f3292d5
30 changed files with 343 additions and 260 deletions

View File

@ -56,7 +56,6 @@ DisableFormat: false
ExperimentalAutoDetectBinPacking: false
FixNamespaceComments: true
ForEachMacros:
- foreach
- Q_FOREACH
- BOOST_FOREACH

View File

@ -385,7 +385,7 @@ private:
// Blocking assignments are always OK in combinational (and initial/final) processes
if (m_check != CT_SEQ) return;
const bool ignore = nodep->lhsp()->forall<AstVarRef>([&](const AstVarRef* refp) {
const bool ignore = nodep->lhsp()->forall([&](const AstVarRef* refp) {
// Ignore reads (e.g.: index expressions)
if (refp->access().isReadOnly()) return true;
const AstVar* const varp = refp->varp();
@ -500,7 +500,7 @@ private:
void visitSenItems(AstNode* nodep) {
if (v3Global.opt.timing().isSetTrue()) {
nodep->foreach<AstSenItem>([this](AstSenItem* senItemp) { visit(senItemp); });
nodep->foreach([this](AstSenItem* senItemp) { visit(senItemp); });
}
}
@ -561,7 +561,7 @@ private:
}
}
nodep->sensp()->foreach<AstVarRef>([](const AstVarRef* refp) {
nodep->sensp()->foreach([](const AstVarRef* refp) {
refp->varp()->usedClock(true);
refp->varScopep()->user1(true);
});

View File

@ -47,7 +47,7 @@ class ActiveTopVisitor final : public VNVisitor {
static bool isInitial(AstNode* nodep) {
const VNUser1InUse user1InUse;
// Return true if no variables are read.
return nodep->forall<AstVarRef>([&](const AstVarRef* refp) -> bool {
return nodep->forall([&](const AstVarRef* refp) -> bool {
AstVarScope* const vscp = refp->varScopep();
// Note: Use same heuristic as ordering does to ignore written variables
// TODO: Use live variable analysis.

View File

@ -23,8 +23,10 @@
#include "V3Broken.h"
#include "V3Error.h"
#include "V3FileLine.h"
#include "V3FunctionTraits.h"
#include "V3Global.h"
#include "V3Number.h"
#include "V3StdFuture.h"
#include "V3Ast__gen_forward_class_decls.h" // From ./astgen
@ -2084,95 +2086,127 @@ private:
using ConstCorrectAstNode =
typename std::conditional<std::is_const<T_Arg>::value, const AstNode, AstNode>::type;
template <typename T_Arg>
inline static void foreachImpl(ConstCorrectAstNode<T_Arg>* nodep,
const std::function<void(T_Arg*)>& f, bool visitNext);
template <typename T_Arg, typename Callable>
inline static void foreachImpl(ConstCorrectAstNode<T_Arg>* nodep, const Callable& f,
bool visitNext);
template <typename T_Arg, bool Default>
inline static bool predicateImpl(ConstCorrectAstNode<T_Arg>* nodep,
const std::function<bool(T_Arg*)>& p);
template <typename T_Arg, bool Default, typename Callable>
inline static bool predicateImpl(ConstCorrectAstNode<T_Arg>* nodep, const Callable& p);
template <typename T_Node>
constexpr static bool checkTypeParameter() {
static_assert(!std::is_const<T_Node>::value,
"Type parameter 'T_Node' should not be const qualified");
static_assert(std::is_base_of<AstNode, T_Node>::value,
"Type parameter 'T_Node' must be a subtype of AstNode");
return true;
}
template <typename T_Callable>
struct Arg0NoPointerNoCV final {
using Traits = FunctionTraits<T_Callable>;
using T_Arg0 = typename Traits::template arg<0>::type;
using T_Arg0NoPtr = typename std::remove_pointer<T_Arg0>::type;
using type = typename std::remove_cv<T_Arg0NoPtr>::type;
};
public:
// Traverse subtree and call given function 'f' in pre-order on each node that has type
// 'T_Node'. The node passd to the function 'f' can be removed or replaced, but other editing
// Given a callable 'f' that takes a single argument of some AstNode subtype 'T_Node', traverse
// the tree rooted at this node, and call 'f' in pre-order on each node that is of type
// 'T_Node'. The node passed to the callable 'f' can be removed or replaced, but other editing
// of the iterated tree is not safe. Prefer 'foreach' over simple VNVisitor that only needs to
// handle a single (or a few) node types, as it's easier to write, but more importantly, the
// dispatch to the operation function in 'foreach' should be completely predictable by branch
// target caches in modern CPUs, while it is basically unpredictable for VNVisitor.
template <typename T_Node>
void foreach (std::function<void(T_Node*)> f) {
static_assert(checkTypeParameter<T_Node>(), "Invalid type parameter 'T_Node'");
// dispatch to the callable in 'foreach' should be completely predictable by branch target
// caches in modern CPUs, while it is basically unpredictable for VNVisitor.
template <typename Callable>
void foreach(Callable&& f) {
using T_Node = typename Arg0NoPointerNoCV<Callable>::type;
static_assert(vlstd::is_invocable<Callable, T_Node*>::value
&& std::is_base_of<AstNode, T_Node>::value,
"Callable 'f' must have a signature compatible with 'void(T_Node*)', "
"with 'T_Node' being a subtype of 'AstNode'");
foreachImpl<T_Node>(this, f, /* visitNext: */ false);
}
// Same as above, but for 'const' nodes
template <typename T_Node>
void foreach (std::function<void(const T_Node*)> f) const {
static_assert(checkTypeParameter<T_Node>(), "Invalid type parameter 'T_Node'");
template <typename Callable>
void foreach(Callable&& f) const {
using T_Node = typename Arg0NoPointerNoCV<Callable>::type;
static_assert(vlstd::is_invocable<Callable, const T_Node*>::value
&& std::is_base_of<AstNode, T_Node>::value,
"Callable 'f' must have a signature compatible with 'void(const T_Node*)', "
"with 'T_Node' being a subtype of 'AstNode'");
foreachImpl<const T_Node>(this, f, /* visitNext: */ false);
}
// Same as 'foreach' but also follows 'this->nextp()'
template <typename T_Node>
void foreachAndNext(std::function<void(T_Node*)> f) {
static_assert(checkTypeParameter<T_Node>(), "Invalid type parameter 'T_Node'");
// Same as 'foreach' but also traverses 'this->nextp()' transitively
template <typename Callable>
void foreachAndNext(Callable&& f) {
using T_Node = typename Arg0NoPointerNoCV<Callable>::type;
static_assert(vlstd::is_invocable<Callable, T_Node*>::value
&& std::is_base_of<AstNode, T_Node>::value,
"Callable 'f' must have a signature compatible with 'void(T_Node*)', "
"with 'T_Node' being a subtype of 'AstNode'");
foreachImpl<T_Node>(this, f, /* visitNext: */ true);
}
// Same as 'foreach' but also follows 'this->nextp()'
template <typename T_Node>
void foreachAndNext(std::function<void(const T_Node*)> f) const {
static_assert(checkTypeParameter<T_Node>(), "Invalid type parameter 'T_Node'");
// Same as above, but for 'const' nodes
template <typename Callable>
void foreachAndNext(Callable&& f) const {
using T_Node = typename Arg0NoPointerNoCV<Callable>::type;
static_assert(vlstd::is_invocable<Callable, const T_Node*>::value
&& std::is_base_of<AstNode, T_Node>::value,
"Callable 'f' must have a signature compatible with 'void(const T_Node*)', "
"with 'T_Node' being a subtype of 'AstNode'");
foreachImpl<const T_Node>(this, f, /* visitNext: */ true);
}
// Given a predicate function 'p' return true if and only if there exists a node of type
// 'T_Node' that satisfies the predicate 'p'. Returns false if no node of type 'T_Node' is
// present. Traversal is performed in some arbitrary order and is terminated as soon as the
// result can be determined.
template <typename T_Node>
bool exists(std::function<bool(T_Node*)> p) {
static_assert(checkTypeParameter<T_Node>(), "Invalid type parameter 'T_Node'");
// Given a predicate 'p' that takes a single argument of some AstNode subtype 'T_Node', return
// true if and only if there exists a node of type 'T_Node' in the tree rooted at this node,
// that satisfies the predicate 'p'. Returns false if no node of type 'T_Node' is present.
// Traversal is performed in some arbitrary order and is terminated as soon as the result can
// be determined.
template <typename Callable>
bool exists(Callable&& p) {
using T_Node = typename Arg0NoPointerNoCV<Callable>::type;
static_assert(vlstd::is_invocable_r<bool, Callable, T_Node*>::value
&& std::is_base_of<AstNode, T_Node>::value,
"Predicate 'p' must have a signature compatible with 'bool(T_Node*)', "
"with 'T_Node' being a subtype of 'AstNode'");
return predicateImpl<T_Node, /* Default: */ false>(this, p);
}
// Same as above, but for 'const' nodes
template <typename T_Node>
bool exists(std::function<bool(const T_Node*)> p) const {
static_assert(checkTypeParameter<T_Node>(), "Invalid type parameter 'T_Node'");
template <typename Callable>
bool exists(Callable&& p) const {
using T_Node = typename Arg0NoPointerNoCV<Callable>::type;
static_assert(vlstd::is_invocable_r<bool, Callable, const T_Node*>::value
&& std::is_base_of<AstNode, T_Node>::value,
"Predicate 'p' must have a signature compatible with 'bool(const T_Node*)', "
"with 'T_Node' being a subtype of 'AstNode'");
return predicateImpl<const T_Node, /* Default: */ false>(this, p);
}
// Given a predicate function 'p' return true if and only if all nodes of type
// 'T_Node' satisfy the predicate 'p'. Returns true if no node of type 'T_Node' is
// present. Traversal is performed in some arbitrary order and is terminated as soon as the
// result can be determined.
template <typename T_Node>
bool forall(std::function<bool(T_Node*)> p) {
static_assert(checkTypeParameter<T_Node>(), "Invalid type parameter 'T_Node'");
// Given a predicate 'p' that takes a single argument of some AstNode subtype 'T_Node', return
// true if and only if all nodes of type 'T_Node' in the tree rooted at this node satisfy the
// predicate 'p'. Returns true if no node of type 'T_Node' is present. Traversal is performed
// in some arbitrary order and is terminated as soon as the result can be determined.
template <typename Callable>
bool forall(Callable&& p) {
using T_Node = typename Arg0NoPointerNoCV<Callable>::type;
static_assert(vlstd::is_invocable_r<bool, Callable, T_Node*>::value
&& std::is_base_of<AstNode, T_Node>::value,
"Predicate 'p' must have a signature compatible with 'bool(T_Node*)', "
"with 'T_Node' being a subtype of 'AstNode'");
return predicateImpl<T_Node, /* Default: */ true>(this, p);
}
// Same as above, but for 'const' nodes
template <typename T_Node>
bool forall(std::function<bool(const T_Node*)> p) const {
static_assert(checkTypeParameter<T_Node>(), "Invalid type parameter 'T_Node'");
template <typename Callable>
bool forall(Callable&& p) const {
using T_Node = typename Arg0NoPointerNoCV<Callable>::type;
static_assert(vlstd::is_invocable_r<bool, Callable, const T_Node*>::value
&& std::is_base_of<AstNode, T_Node>::value,
"Predicate 'p' must have a signature compatible with 'bool(const T_Node*)', "
"with 'T_Node' being a subtype of 'AstNode'");
return predicateImpl<const T_Node, /* Default: */ true>(this, p);
}
int nodeCount() const {
// TODO: this should really return size_t, but need to fix use sites
int count = 0;
this->foreach<AstNode>([&count](const AstNode*) { ++count; });
this->foreach([&count](const AstNode*) { ++count; });
return count;
}
};
@ -2243,172 +2277,161 @@ constexpr bool AstNode::isLeaf<AstVarXRef>() {
}
// foreach implementation
template <typename T_Arg>
void AstNode::foreachImpl(ConstCorrectAstNode<T_Arg>* nodep, const std::function<void(T_Arg*)>& f,
bool visitNext) {
// Checking the function is bound up front eliminates this check from the loop at invocation
if (!f) {
nodep->v3fatal("AstNode::foreach called with unbound function"); // LCOV_EXCL_LINE
} else {
// Pre-order traversal implemented directly (without recursion) for speed reasons. The very
// first iteration (the one that operates on the input nodep) is special, as we might or
// might not need to enqueue nodep->nextp() depending on VisitNext, while in all other
// iterations, we do want to enqueue nodep->nextp(). Duplicating code (via
// 'foreachImplVisit') for the initial iteration here to avoid an extra branch in the loop
template <typename T_Arg, typename Callable>
void AstNode::foreachImpl(ConstCorrectAstNode<T_Arg>* nodep, const Callable& f, bool visitNext) {
// Pre-order traversal implemented directly (without recursion) for speed reasons. The very
// first iteration (the one that operates on the input nodep) is special, as we might or
// might not need to enqueue nodep->nextp() depending on VisitNext, while in all other
// iterations, we do want to enqueue nodep->nextp(). Duplicating code (via
// 'foreachImplVisit') for the initial iteration here to avoid an extra branch in the loop
using T_Arg_NonConst = typename std::remove_const<T_Arg>::type;
using Node = ConstCorrectAstNode<T_Arg>;
using T_Arg_NonConst = typename std::remove_const<T_Arg>::type;
using Node = ConstCorrectAstNode<T_Arg>;
// Traversal stack
std::vector<Node*> stack; // Kept as a vector for easy resizing
Node** basep = nullptr; // Pointer to base of stack
Node** topp = nullptr; // Pointer to top of stack
Node** limp = nullptr; // Pointer to stack limit (when need growing)
// Traversal stack
std::vector<Node*> stack; // Kept as a vector for easy resizing
Node** basep = nullptr; // Pointer to base of stack
Node** topp = nullptr; // Pointer to top of stack
Node** limp = nullptr; // Pointer to stack limit (when need growing)
// We prefetch this far into the stack
constexpr int prefetchDistance = 2;
// We prefetch this far into the stack
constexpr int prefetchDistance = 2;
// Grow stack to given size
const auto grow = [&](size_t size) {
const ptrdiff_t occupancy = topp - basep;
stack.resize(size);
basep = stack.data() + prefetchDistance;
topp = basep + occupancy;
limp = basep + size - 5; // We push max 5 items per iteration
};
// Grow stack to given size
const auto grow = [&](size_t size) {
const ptrdiff_t occupancy = topp - basep;
stack.resize(size);
basep = stack.data() + prefetchDistance;
topp = basep + occupancy;
limp = basep + size - 5; // We push max 5 items per iteration
};
// Initial stack size
grow(32);
// Initial stack size
grow(32);
// We want some non-null pointers at the beginning. These will be prefetched, but not
// visited, so the root node will suffice. This eliminates needing branches in the loop.
for (int i = -prefetchDistance; i; ++i) basep[i] = nodep;
// We want some non-null pointers at the beginning. These will be prefetched, but not
// visited, so the root node will suffice. This eliminates needing branches in the loop.
for (int i = -prefetchDistance; i; ++i) basep[i] = nodep;
// Visit given node, enqueue children for traversal
const auto visit = [&](Node* currp) {
// Type test this node
if (AstNode::privateTypeTest<T_Arg_NonConst>(currp)) {
// Call the client function
f(static_cast<T_Arg*>(currp));
// Short circuit if iterating leaf nodes
if VL_CONSTEXPR_CXX17 (isLeaf<T_Arg_NonConst>()) return;
}
// Enqueue children for traversal, unless futile
if (mayBeUnder<T_Arg_NonConst>(currp)) {
if (AstNode* const op4p = currp->op4p()) *topp++ = op4p;
if (AstNode* const op3p = currp->op3p()) *topp++ = op3p;
if (AstNode* const op2p = currp->op2p()) *topp++ = op2p;
if (AstNode* const op1p = currp->op1p()) *topp++ = op1p;
}
};
// Enqueue the next of the root node, if required
if (visitNext && nodep->nextp()) *topp++ = nodep->nextp();
// Visit the root node
visit(nodep);
// Visit the rest of the tree
while (VL_LIKELY(topp > basep)) {
// Pop next node in the traversal
Node* const headp = *--topp;
// Prefetch in case we are ascending the tree
ASTNODE_PREFETCH_NON_NULL(topp[-prefetchDistance]);
// Ensure we have stack space for nextp and the 4 children
if (VL_UNLIKELY(topp >= limp)) grow(stack.size() * 2);
// Enqueue the next node
if (headp->nextp()) *topp++ = headp->nextp();
// Visit the head node
visit(headp);
// Visit given node, enqueue children for traversal
const auto visit = [&](Node* currp) {
// Type test this node
if (AstNode::privateTypeTest<T_Arg_NonConst>(currp)) {
// Call the client function
f(static_cast<T_Arg*>(currp));
// Short circuit if iterating leaf nodes
if VL_CONSTEXPR_CXX17 (isLeaf<T_Arg_NonConst>()) return;
}
// Enqueue children for traversal, unless futile
if (mayBeUnder<T_Arg_NonConst>(currp)) {
if (AstNode* const op4p = currp->op4p()) *topp++ = op4p;
if (AstNode* const op3p = currp->op3p()) *topp++ = op3p;
if (AstNode* const op2p = currp->op2p()) *topp++ = op2p;
if (AstNode* const op1p = currp->op1p()) *topp++ = op1p;
}
};
// Enqueue the next of the root node, if required
if (visitNext && nodep->nextp()) *topp++ = nodep->nextp();
// Visit the root node
visit(nodep);
// Visit the rest of the tree
while (VL_LIKELY(topp > basep)) {
// Pop next node in the traversal
Node* const headp = *--topp;
// Prefetch in case we are ascending the tree
ASTNODE_PREFETCH_NON_NULL(topp[-prefetchDistance]);
// Ensure we have stack space for nextp and the 4 children
if (VL_UNLIKELY(topp >= limp)) grow(stack.size() * 2);
// Enqueue the next node
if (headp->nextp()) *topp++ = headp->nextp();
// Visit the head node
visit(headp);
}
}
// predicate implementation
template <typename T_Arg, bool Default>
bool AstNode::predicateImpl(ConstCorrectAstNode<T_Arg>* nodep,
const std::function<bool(T_Arg*)>& p) {
// Implementation similar to foreach, but abort traversal as soon as result is determined.
if (VL_UNCOVERABLE(!p)) {
nodep->v3fatal("AstNode::foreach called with unbound function"); // LCOV_EXCL_LINE
} else {
using T_Arg_NonConst = typename std::remove_const<T_Arg>::type;
using Node = ConstCorrectAstNode<T_Arg>;
template <typename T_Arg, bool Default, typename Callable>
bool AstNode::predicateImpl(ConstCorrectAstNode<T_Arg>* nodep, const Callable& p) {
// Implementation similar to foreach, but abort traversal as soon as result is determined
using T_Arg_NonConst = typename std::remove_const<T_Arg>::type;
using Node = ConstCorrectAstNode<T_Arg>;
// Traversal stack
std::vector<Node*> stack; // Kept as a vector for easy resizing
Node** basep = nullptr; // Pointer to base of stack
Node** topp = nullptr; // Pointer to top of stack
Node** limp = nullptr; // Pointer to stack limit (when need growing)
// Traversal stack
std::vector<Node*> stack; // Kept as a vector for easy resizing
Node** basep = nullptr; // Pointer to base of stack
Node** topp = nullptr; // Pointer to top of stack
Node** limp = nullptr; // Pointer to stack limit (when need growing)
// We prefetch this far into the stack
constexpr int prefetchDistance = 2;
// We prefetch this far into the stack
constexpr int prefetchDistance = 2;
// Grow stack to given size
const auto grow = [&](size_t size) {
const ptrdiff_t occupancy = topp - basep;
stack.resize(size);
basep = stack.data() + prefetchDistance;
topp = basep + occupancy;
limp = basep + size - 5; // We push max 5 items per iteration
};
// Grow stack to given size
const auto grow = [&](size_t size) {
const ptrdiff_t occupancy = topp - basep;
stack.resize(size);
basep = stack.data() + prefetchDistance;
topp = basep + occupancy;
limp = basep + size - 5; // We push max 5 items per iteration
};
// Initial stack size
grow(32);
// Initial stack size
grow(32);
// We want some non-null pointers at the beginning. These will be prefetched, but not
// visited, so the root node will suffice. This eliminates needing branches in the loop.
for (int i = -prefetchDistance; i; ++i) basep[i] = nodep;
// We want some non-null pointers at the beginning. These will be prefetched, but not
// visited, so the root node will suffice. This eliminates needing branches in the loop.
for (int i = -prefetchDistance; i; ++i) basep[i] = nodep;
// Visit given node, enqueue children for traversal, return true if result determined.
const auto visit = [&](Node* currp) {
// Type test this node
if (AstNode::privateTypeTest<T_Arg_NonConst>(currp)) {
// Call the client function
if (p(static_cast<T_Arg*>(currp)) != Default) return true;
// Short circuit if iterating leaf nodes
if VL_CONSTEXPR_CXX17 (isLeaf<T_Arg_NonConst>()) return false;
}
// Enqueue children for traversal, unless futile
if (mayBeUnder<T_Arg_NonConst>(currp)) {
if (AstNode* const op4p = currp->op4p()) *topp++ = op4p;
if (AstNode* const op3p = currp->op3p()) *topp++ = op3p;
if (AstNode* const op2p = currp->op2p()) *topp++ = op2p;
if (AstNode* const op1p = currp->op1p()) *topp++ = op1p;
}
return false;
};
// Visit the root node
if (visit(nodep)) return !Default;
// Visit the rest of the tree
while (VL_LIKELY(topp > basep)) {
// Pop next node in the traversal
Node* const headp = *--topp;
// Prefetch in case we are ascending the tree
ASTNODE_PREFETCH_NON_NULL(topp[-prefetchDistance]);
// Ensure we have stack space for nextp and the 4 children
if (VL_UNLIKELY(topp >= limp)) grow(stack.size() * 2);
// Enqueue the next node
if (headp->nextp()) *topp++ = headp->nextp();
// Visit the head node
if (visit(headp)) return !Default;
// Visit given node, enqueue children for traversal, return true if result determined.
const auto visit = [&](Node* currp) {
// Type test this node
if (AstNode::privateTypeTest<T_Arg_NonConst>(currp)) {
// Call the client function
if (p(static_cast<T_Arg*>(currp)) != Default) return true;
// Short circuit if iterating leaf nodes
if VL_CONSTEXPR_CXX17 (isLeaf<T_Arg_NonConst>()) return false;
}
return Default;
// Enqueue children for traversal, unless futile
if (mayBeUnder<T_Arg_NonConst>(currp)) {
if (AstNode* const op4p = currp->op4p()) *topp++ = op4p;
if (AstNode* const op3p = currp->op3p()) *topp++ = op3p;
if (AstNode* const op2p = currp->op2p()) *topp++ = op2p;
if (AstNode* const op1p = currp->op1p()) *topp++ = op1p;
}
return false;
};
// Visit the root node
if (visit(nodep)) return !Default;
// Visit the rest of the tree
while (VL_LIKELY(topp > basep)) {
// Pop next node in the traversal
Node* const headp = *--topp;
// Prefetch in case we are ascending the tree
ASTNODE_PREFETCH_NON_NULL(topp[-prefetchDistance]);
// Ensure we have stack space for nextp and the 4 children
if (VL_UNLIKELY(topp >= limp)) grow(stack.size() * 2);
// Enqueue the next node
if (headp->nextp()) *topp++ = headp->nextp();
// Visit the head node
if (visit(headp)) return !Default;
}
return Default;
}
inline std::ostream& operator<<(std::ostream& os, const AstNode* rhs) {

View File

@ -3833,10 +3833,9 @@ public:
return new AstAssignW{fileline(), lhsp, rhsp, controlp};
}
bool isTimingControl() const override {
return timingControlp()
|| lhsp()->exists<AstNodeVarRef>([](const AstNodeVarRef* const refp) {
return refp->access().isWriteOrRW() && refp->varp()->delayp();
});
return timingControlp() || lhsp()->exists([](const AstNodeVarRef* refp) {
return refp->access().isWriteOrRW() && refp->varp()->delayp();
});
}
bool brokeLhsMustBeLvalue() const override { return true; }
AstAlways* convertToAlways();

View File

@ -329,7 +329,7 @@ void V3Broken::brokenAll(AstNetlist* nodep) {
// Mark every node in the tree
const uint8_t brokenCntCurrent = s_brokenCntGlobal.get();
nodep->foreach<AstNode>([brokenCntCurrent](AstNode* nodep) {
nodep->foreach([brokenCntCurrent](AstNode* nodep) {
#ifdef VL_LEAK_CHECKS
UASSERT_OBJ(s_allocTable.isAllocated(nodep), nodep,
"AstNode is in tree, but not allocated");

View File

@ -2038,10 +2038,10 @@ private:
// Note only do this (need user4) when m_warn, which is
// done as unique visitor
const VNUser4InUse m_inuser4;
nodep->lhsp()->foreach<AstVarRef>([](const AstVarRef* nodep) {
nodep->lhsp()->foreach([](const AstVarRef* nodep) {
if (nodep->varp()) nodep->varp()->user4(1);
});
nodep->rhsp()->foreach<AstVarRef>([&need_temp](const AstVarRef* nodep) {
nodep->rhsp()->foreach([&need_temp](const AstVarRef* nodep) {
if (nodep->varp() && nodep->varp()->user4()) need_temp = true;
});
}

View File

@ -322,7 +322,7 @@ private:
// And its children may now be killable too; correct counts
// Recurse, as cells may not be directly under the module but in a generate
if (!modp->dead()) { // If was dead didn't increment user1's
modp->foreach<AstCell>([](const AstCell* cellp) { //
modp->foreach([](const AstCell* cellp) { //
cellp->modp()->user1Inc(-1);
});
}

View File

@ -91,7 +91,7 @@ class AstToDfgVisitor final : public VNVisitor {
// METHODS
void markReferenced(AstNode* nodep) {
nodep->foreach<AstVarRef>([this](const AstVarRef* refp) {
nodep->foreach([this](const AstVarRef* refp) {
// No need to (and in fact cannot) mark variables with unsupported dtypes
if (!DfgVertex::isSupportedDType(refp->varp()->dtypep())) return;
getNet(refp->varp())->setHasModRefs();

View File

@ -425,7 +425,7 @@ class DfgToAstVisitor final : DfgVisitor {
// Remap all references to point to the canonical variables, if one exists
VNDeleter deleter;
m_modp->foreach<AstVarRef>([&](AstVarRef* refp) {
m_modp->foreach([&](AstVarRef* refp) {
// Any variable that is written outside the DFG will have itself as the canonical
// var, so need not be replaced, furthermore, if a variable is traced, we don't
// want to update the write ref we just created above, so we only replace read only

View File

@ -249,7 +249,7 @@ void V3DfgOptimizer::optimize(AstNetlist* netlistp, const string& label) {
const VNUser2InUse user2InUse;
// Mark cross-referenced variables
netlistp->foreach<AstVarXRef>([](const AstVarXRef* xrefp) { xrefp->varp()->user2(true); });
netlistp->foreach([](const AstVarXRef* xrefp) { xrefp->varp()->user2(true); });
V3DfgOptimizationContext ctx{label};

View File

@ -786,15 +786,14 @@ void EmitCSyms::emitSymImp() {
if (v3Global.opt.profPgo()) {
puts("// Configure profiling for PGO\n");
if (v3Global.opt.mtasks()) {
v3Global.rootp()->topModulep()->foreach<AstExecGraph>(
[&](const AstExecGraph* execGraphp) {
for (const V3GraphVertex* vxp = execGraphp->depGraphp()->verticesBeginp(); vxp;
vxp = vxp->verticesNextp()) {
const ExecMTask* const mtp = static_cast<const ExecMTask*>(vxp);
puts("_vm_pgoProfiler.addCounter(" + cvtToStr(mtp->profilerId()) + ", \""
+ mtp->hashName() + "\");\n");
}
});
v3Global.rootp()->topModulep()->foreach([&](const AstExecGraph* execGraphp) {
for (const V3GraphVertex* vxp = execGraphp->depGraphp()->verticesBeginp(); vxp;
vxp = vxp->verticesNextp()) {
const ExecMTask* const mtp = static_cast<const ExecMTask*>(vxp);
puts("_vm_pgoProfiler.addCounter(" + cvtToStr(mtp->profilerId()) + ", \""
+ mtp->hashName() + "\");\n");
}
});
}
}

View File

@ -153,7 +153,7 @@ class ForceConvertVisitor final : public VNVisitor {
// referenced AstVarScope with the given function.
void transformWritenVarScopes(AstNode* nodep, std::function<AstVarScope*(AstVarScope*)> f) {
UASSERT_OBJ(nodep->backp(), nodep, "Must have backp, otherwise will be lost if replaced");
nodep->foreach<AstNodeVarRef>([&f](AstNodeVarRef* refp) {
nodep->foreach([&f](AstNodeVarRef* refp) {
if (refp->access() != VAccess::WRITE) return;
// TODO: this is not strictly speaking safe for some complicated lvalues, eg.:
// 'force foo[a(cnt)] = 1;', where 'cnt' is an out parameter, but it will
@ -230,7 +230,7 @@ class ForceConvertVisitor final : public VNVisitor {
AstAssign* const resetRdp
= new AstAssign{fl_nowarn, lhsp->cloneTree(false), lhsp->unlinkFrBack()};
// Replace write refs on the LHS
resetRdp->lhsp()->foreach<AstNodeVarRef>([this](AstNodeVarRef* refp) {
resetRdp->lhsp()->foreach([this](AstNodeVarRef* refp) {
if (refp->access() != VAccess::WRITE) return;
AstVarScope* const vscp = refp->varScopep();
AstVarScope* const newVscp
@ -243,7 +243,7 @@ class ForceConvertVisitor final : public VNVisitor {
VL_DO_DANGLING(refp->deleteTree(), refp);
});
// Replace write refs on RHS
resetRdp->rhsp()->foreach<AstNodeVarRef>([this](AstNodeVarRef* refp) {
resetRdp->rhsp()->foreach([this](AstNodeVarRef* refp) {
if (refp->access() != VAccess::WRITE) return;
AstVarScope* const vscp = refp->varScopep();
AstVarScope* const newVscp
@ -273,7 +273,7 @@ class ForceConvertVisitor final : public VNVisitor {
iterateAndNextNull(nodep->modulesp());
// Replace references to forced signals
nodep->modulesp()->foreachAndNext<AstVarRef>([this](AstVarRef* nodep) {
nodep->modulesp()->foreachAndNext([this](AstVarRef* nodep) {
if (ForceComponentsVarScope* const fcp
= m_forceComponentsVarScope.tryGet(nodep->varScopep())) {
switch (nodep->access()) {

50
src/V3FunctionTraits.h Normal file
View File

@ -0,0 +1,50 @@
// -*- mode: C++; c-file-style: "cc-mode" -*-
//*************************************************************************
// DESCRIPTION: Verilator: Function traits for metaprogramming
//
// Code available from: https://verilator.org
//
//*************************************************************************
//
// Copyright 2003-2022 by Wilson Snyder. This program is free software; you
// can redistribute it and/or modify it under the terms of either the GNU
// Lesser General Public License Version 3 or the Perl Artistic License
// Version 2.0.
// SPDX-License-Identifier: LGPL-3.0-only OR Artistic-2.0
//
//*************************************************************************
#ifndef VERILATOR_V3FUNCTIONTRAITS_H_
#define VERILATOR_V3FUNCTIONTRAITS_H_
#include "verilatedos.h"
#include <stddef.h>
#include <tuple>
#include <type_traits>
template <typename T>
struct FunctionTraits;
// For generic types, directly use the result of the signature of its 'operator()'
template <typename T>
struct FunctionTraits final
: public FunctionTraits<decltype(&std::remove_reference<T>::type::operator())> {};
// Specialization for pointers to member function
template <typename ClassType, typename ReturnType, typename... Args>
struct FunctionTraits<ReturnType (ClassType::*)(Args...) const> VL_NOT_FINAL {
// Number of arguments
static constexpr size_t arity = sizeof...(Args);
// Type of result
using result_type = ReturnType;
// Type of arguments
template <std::size_t I>
struct arg {
using type = typename std::tuple_element<I, std::tuple<Args...>>::type;
};
};
#endif

View File

@ -1022,13 +1022,13 @@ static void eliminate(AstNode* logicp,
nodep->replaceWith(newp);
VL_DO_DANGLING(nodep->deleteTree(), nodep);
// Recursively substitute the new tree
newp->foreach<AstNodeVarRef>(visit);
newp->foreach(visit);
// Remove from recursion filter
replaced.erase(vscp);
};
logicp->foreach<AstNodeVarRef>(visit);
logicp->foreach(visit);
}
// ######################################################################

View File

@ -513,7 +513,7 @@ private:
newmodp = nodep->modp();
}
// Find cell cross-references
nodep->modp()->foreach<AstCell>([](AstCell* cellp) {
nodep->modp()->foreach([](AstCell* cellp) {
// clonep is nullptr when inlining the last instance, if so the use original node
cellp->user4p(cellp->clonep() ? cellp->clonep() : cellp);
});

View File

@ -744,7 +744,7 @@ private:
if (!m_mgFirstp) {
UASSERT_OBJ(condp, nodep, "Cannot start new list without condition");
// Mark variable references in the condition
condp->foreach<AstVarRef>([](const AstVarRef* nodep) { nodep->varp()->user1(1); });
condp->foreach([](const AstVarRef* nodep) { nodep->varp()->user1(1); });
// Now check again if mergeable. We need this to pick up assignments to conditions,
// e.g.: 'c = c ? a : b' at the beginning of the list, which is in fact not mergeable
// because it updates the condition. We simply bail on these.

View File

@ -230,7 +230,7 @@ class OrderBuildVisitor final : public VNVisitor {
m_hybridp = nodep->sensesp();
// Mark AstVarScopes that are explicit sensitivities
AstNode::user3ClearTree();
senTreep->foreach<AstVarRef>([](const AstVarRef* refp) { //
senTreep->foreach([](const AstVarRef* refp) { //
refp->varScopep()->user3(true);
});
m_readTriggersCombLogic = [](const AstVarScope* vscp) { return !vscp->user3(); };

View File

@ -3215,7 +3215,7 @@ static void implementExecGraph(AstExecGraph* const execGraphp) {
void V3Partition::finalize(AstNetlist* netlistp) {
// Called by Verilator top stage
netlistp->topModulep()->foreach<AstExecGraph>([&](AstExecGraph* execGraphp) {
netlistp->topModulep()->foreach([&](AstExecGraph* execGraphp) {
// Back in V3Order, we partitioned mtasks using provisional cost
// estimates. However, V3Order precedes some optimizations (notably
// V3LifePost) that can change the cost of logic within each mtask.

View File

@ -186,10 +186,10 @@ private:
bool noopt = false;
{
const VNUser3InUse user3InUse;
nodep->lhsp()->foreach<AstVarRef>([](const AstVarRef* refp) {
nodep->lhsp()->foreach([](const AstVarRef* refp) {
if (refp->access().isWriteOrRW()) refp->varp()->user3(true);
});
nodep->rhsp()->foreach<AstVarRef>([&noopt](const AstVarRef* refp) {
nodep->rhsp()->foreach([&noopt](const AstVarRef* refp) {
if (refp->access().isReadOnly() && refp->varp()->user3()) noopt = true;
});
}

View File

@ -161,10 +161,10 @@ void splitCheck(AstCFunc* ofuncp) {
LogicClasses gatherLogicClasses(AstNetlist* netlistp) {
LogicClasses result;
netlistp->foreach<AstScope>([&](AstScope* scopep) {
netlistp->foreach([&](AstScope* scopep) {
std::vector<AstActive*> empty;
scopep->foreach<AstActive>([&](AstActive* activep) {
scopep->foreach([&](AstActive* activep) {
AstSenTree* const senTreep = activep->sensesp();
if (!activep->stmtsp()) {
// Some AstActives might be empty due to previous optimizations
@ -671,7 +671,7 @@ AstNode* createInputCombLoop(AstNetlist* netlistp, SenExprBuilder& senExprBuilde
// so we can make them sc_sensitive
if (v3Global.opt.systemC()) {
logic.foreachLogic([](AstNode* logicp) {
logicp->foreach<AstVarRef>([](AstVarRef* refp) {
logicp->foreach([](AstVarRef* refp) {
if (refp->access().isWriteOnly()) return;
AstVarScope* const vscp = refp->varScopep();
if (vscp->scopep()->isTop() && vscp->varp()->isNonOutput()) {
@ -769,13 +769,13 @@ void createEval(AstNetlist* netlistp, //
AstCFunc* const nbaDumpp = actTrig.m_dumpp->cloneTree(false);
actTrig.m_dumpp->addNextHere(nbaDumpp);
nbaDumpp->name("_dump_triggers__nba");
nbaDumpp->foreach<AstVarRef>([&](AstVarRef* refp) {
nbaDumpp->foreach([&](AstVarRef* refp) {
UASSERT_OBJ(refp->access().isReadOnly(), refp, "Should only read state");
if (refp->varScopep() == actTrig.m_vscp) {
refp->replaceWith(new AstVarRef{refp->fileline(), nbaTrigsp, VAccess::READ});
}
});
nbaDumpp->foreach<AstText>([&](AstText* textp) { //
nbaDumpp->foreach([&](AstText* textp) { //
textp->text(VString::replaceWord(textp->text(), "act", "nba"));
});
@ -970,7 +970,7 @@ void schedule(AstNetlist* netlistp) {
// Replace references in each mapped value with a reference to the given vscp
for (auto& pair : newMap) {
pair.second = pair.second->cloneTree(false);
pair.second->foreach<AstVarRef>([&](AstVarRef* refp) {
pair.second->foreach([&](AstVarRef* refp) {
UASSERT_OBJ(refp->varScopep() == actTrigVscp, refp, "Unexpected reference");
UASSERT_OBJ(refp->access() == VAccess::READ, refp, "Should be read ref");
refp->replaceWith(new AstVarRef{refp->fileline(), vscp, VAccess::READ});

View File

@ -140,7 +140,7 @@ std::unique_ptr<Graph> buildGraph(const LogicByScope& lbs) {
const VNUser2InUse user2InUse;
const VNUser3InUse user3InUse;
nodep->foreach<AstVarRef>([&](AstVarRef* refp) {
nodep->foreach([&](AstVarRef* refp) {
AstVarScope* const vscp = refp->varScopep();
VarVertex* const vvtxp = getVarVertex(vscp);
// We want to cut the narrowest signals

View File

@ -164,7 +164,7 @@ class SchedGraphBuilder final : public VNVisitor {
SchedSenVertex* const vtxp = new SchedSenVertex{m_graphp, senItemp};
// Connect up the variable references
senItemp->sensp()->foreach<AstVarRef>([&](AstVarRef* refp) {
senItemp->sensp()->foreach([&](AstVarRef* refp) {
new V3GraphEdge{m_graphp, getVarVertex(refp->varScopep()), vtxp, 1};
});
@ -186,7 +186,7 @@ class SchedGraphBuilder final : public VNVisitor {
// Clocked or hybrid logic has explicit sensitivity, so add edge from sensitivity vertex
if (!m_senTreep->hasCombo()) {
m_senTreep->foreach<AstSenItem>([=](AstSenItem* senItemp) {
m_senTreep->foreach([=](AstSenItem* senItemp) {
if (senItemp->isIllegal()) return;
UASSERT_OBJ(senItemp->isClocked() || senItemp->isHybrid(), nodep,
"Non-clocked SenItem under clocked SenTree");
@ -196,7 +196,7 @@ class SchedGraphBuilder final : public VNVisitor {
}
// Add edges based on references
nodep->foreach<AstVarRef>([=](const AstVarRef* vrefp) {
nodep->foreach([=](const AstVarRef* vrefp) {
AstVarScope* const vscp = vrefp->varScopep();
if (vrefp->access().isReadOrRW() && m_readTriggersThisLogic(vscp)) {
new V3GraphEdge{m_graphp, getVarVertex(vscp), logicVtxp, 10};
@ -208,7 +208,7 @@ class SchedGraphBuilder final : public VNVisitor {
// If the logic calls a 'context' DPI import, it might fire the DPI Export trigger
if (m_dpiExportTriggerp) {
nodep->foreach<AstCCall>([=](const AstCCall* callp) {
nodep->foreach([=](const AstCCall* callp) {
if (!callp->funcp()->dpiImportWrapper()) return;
if (!callp->funcp()->dpiContext()) return;
new V3GraphEdge{m_graphp, logicVtxp, getVarVertex(m_dpiExportTriggerp), 10};
@ -226,7 +226,7 @@ class SchedGraphBuilder final : public VNVisitor {
// Mark explicit sensitivities as not triggering these blocks
if (senTreep->hasHybrid()) {
AstNode::user2ClearTree();
senTreep->foreach<AstVarRef>([](const AstVarRef* refp) { //
senTreep->foreach([](const AstVarRef* refp) { //
refp->varScopep()->user2(true);
});
}
@ -364,7 +364,7 @@ LogicRegions partition(LogicByScope& clockedLogic, LogicByScope& combinationalLo
const VNUser2InUse user2InUse; // AstVarScope::user2() -> bool: writen in Active region
const auto markVars = [](AstNode* nodep) {
nodep->foreach<AstNodeVarRef>([](const AstNodeVarRef* vrefp) {
nodep->foreach([](const AstNodeVarRef* vrefp) {
AstVarScope* const vscp = vrefp->varScopep();
if (vrefp->access().isReadOrRW()) vscp->user1(true);
if (vrefp->access().isWriteOrRW()) vscp->user2(true);
@ -386,7 +386,7 @@ LogicRegions partition(LogicByScope& clockedLogic, LogicByScope& combinationalLo
nextp = nodep->nextp();
if (AstAssignPre* const logicp = VN_CAST(nodep, AssignPre)) {
bool toActiveRegion = false;
logicp->foreach<AstNodeVarRef>([&](const AstNodeVarRef* vrefp) {
logicp->foreach([&](const AstNodeVarRef* vrefp) {
AstVarScope* const vscp = vrefp->varScopep();
if (vrefp->access().isReadOnly()) {
// Variable only read in Pre, and is written in active region

View File

@ -176,7 +176,7 @@ std::unique_ptr<Graph> buildGraph(const LogicRegions& logicRegions) {
// Hybrid logic is triggered by all reads, except for reads of the explicit
// sensitivities
readTriggersThisLogic = [](AstVarScope* vscp) { return !vscp->user4(); };
senTreep->foreach<AstVarRef>([](const AstVarRef* refp) { //
senTreep->foreach([](const AstVarRef* refp) { //
refp->varScopep()->user4(true);
});
}
@ -187,7 +187,7 @@ std::unique_ptr<Graph> buildGraph(const LogicRegions& logicRegions) {
const VNUser2InUse user2InUse;
const VNUser3InUse user3InUse;
nodep->foreach<AstVarRef>([&](AstVarRef* refp) {
nodep->foreach([&](AstVarRef* refp) {
AstVarScope* const vscp = refp->varScopep();
VarVertex* const vvtxp = getVarVertex(vscp);

View File

@ -256,7 +256,7 @@ void transformForks(AstNetlist* const netlistp) {
// flow analysis framework which we don't have at the moment
void remapLocals(AstCFunc* const funcp, AstCCall* const callp) {
const VNUser2InUse user2InUse; // AstVarScope -> AstVarScope: var to remap to
funcp->foreach<AstNodeVarRef>([&](AstNodeVarRef* refp) {
funcp->foreach([&](AstNodeVarRef* refp) {
AstVar* const varp = refp->varp();
AstBasicDType* const dtypep = varp->dtypep()->basicp();
// If it a fork sync or an intra-assignment variable, pass it by value

View File

@ -56,7 +56,7 @@ class SenExprBuilder final {
}
static bool isSimpleExpr(const AstNode* const exprp) {
return exprp->forall<AstNode>([](const AstNode* const nodep) {
return exprp->forall([](const AstNode* const nodep) {
return VN_IS(nodep, Const) || VN_IS(nodep, NodeVarRef) || VN_IS(nodep, Sel)
|| VN_IS(nodep, NodeSel) || VN_IS(nodep, MemberSel)
|| VN_IS(nodep, CMethodHard);

View File

@ -17,6 +17,8 @@
#ifndef VERILATOR_V3STDFUTURE_H_
#define VERILATOR_V3STDFUTURE_H_
#include <functional>
namespace vlstd {
// constexpr std::max with arguments passed by value (required by constexpr before C++14)
@ -25,6 +27,17 @@ constexpr T max(T a, T b) {
return a > b ? a : b;
}
// C++17 std::is_invocable, emulated for older standards.
// Technique: F is invocable with Args... exactly when a std::function that
// discards the result (void return) can be constructed from a
// std::reference_wrapper around F. The reference_wrapper avoids requiring F
// itself to be copyable.
template <typename F, typename... Args>
struct is_invocable
    : std::integral_constant<
          bool,
          std::is_constructible<
              std::function<void(Args...)>,
              std::reference_wrapper<typename std::remove_reference<F>::type>>::value> {};
// C++17 std::is_invocable_r, emulated for older standards.
// As with is_invocable above, but additionally checks that the result of
// invoking F with Args... is implicitly convertible to R, by testing
// constructibility of a std::function with return type R.
template <typename R, typename F, typename... Args>
struct is_invocable_r
    : std::integral_constant<
          bool,
          std::is_constructible<
              std::function<R(Args...)>,
              std::reference_wrapper<typename std::remove_reference<F>::type>>::value> {};
}; // namespace vlstd
#endif // Guard

View File

@ -408,7 +408,7 @@ private:
// Replace varrefs with new var pointer
void relink(AstNode* nodep) {
nodep->foreachAndNext<AstVarRef>([](AstVarRef* refp) {
nodep->foreachAndNext([](AstVarRef* refp) {
if (refp->varp()->user2p()) { // It's being converted to an alias.
AstVarScope* const newvscp = VN_AS(refp->varp()->user2p(), VarScope);
refp->varScopep(newvscp);
@ -1276,7 +1276,7 @@ private:
// Mark non-local variables written by the exported function
bool writesNonLocals = false;
cfuncp->foreach<AstVarRef>([&writesNonLocals](AstVarRef* refp) {
cfuncp->foreach([&writesNonLocals](AstVarRef* refp) {
if (refp->access().isReadOnly()) return; // Ignore read reference
AstVar* const varp = refp->varScopep()->varp();
// We are ignoring function locals as they should not be referenced anywhere

View File

@ -138,7 +138,7 @@ private:
AstDelay* getLhsNetDelay(AstNodeAssign* nodep) const {
bool foundWrite = false;
AstDelay* delayp = nullptr;
nodep->lhsp()->foreach<AstNodeVarRef>([&](const AstNodeVarRef* const refp) {
nodep->lhsp()->foreach([&](const AstNodeVarRef* const refp) {
if (!refp->access().isWriteOrRW()) return;
UASSERT_OBJ(!foundWrite, nodep, "Should only be one variable written to on the LHS");
foundWrite = true;
@ -190,7 +190,7 @@ private:
AstSenItem* varRefpsToSenItemsp(AstNode* const nodep) const {
AstNode* senItemsp = nullptr;
const VNUser4InUse user4InUse;
nodep->foreach<AstNodeVarRef>([&](AstNodeVarRef* refp) {
nodep->foreach([&](AstNodeVarRef* refp) {
if (refp->access().isWriteOnly()) return;
AstVarScope* const vscp = refp->varScopep();
if (vscp->user4SetOnce()) return;
@ -530,12 +530,12 @@ private:
// we want as only the top level selects are LValues. As an example,
// this transforms 'x[a[i]][b[j]] = y'
// into 't1 = a[i]; t0 = b[j]; x[t1][t0] = y'.
nodep->lhsp()->foreach<AstSel>([&](AstSel* selp) {
nodep->lhsp()->foreach([&](AstSel* selp) {
if (VN_IS(selp->lsbp(), Const)) return;
replaceWithIntermediate(selp->lsbp(), m_intraLsbNames.get(nodep));
// widthp should be const
});
nodep->lhsp()->foreach<AstNodeSel>([&](AstNodeSel* selp) {
nodep->lhsp()->foreach([&](AstNodeSel* selp) {
if (VN_IS(selp->bitp(), Const)) return;
replaceWithIntermediate(selp->bitp(), m_intraIndexNames.get(nodep));
});

View File

@ -434,7 +434,7 @@ private:
} else if (AstCFunc* const funcp = VN_CAST(insertp, CFunc)) {
// If there are awaits, insert the setter after each await
if (funcp->isCoroutine() && funcp->stmtsp()) {
funcp->stmtsp()->foreachAndNext<AstCAwait>([&](AstCAwait* awaitp) {
funcp->stmtsp()->foreachAndNext([&](AstCAwait* awaitp) {
if (awaitp->nextp()) awaitp->addNextHere(setterp->cloneTree(false));
});
}