Cleanup python code style issues. No functional change intended.
This commit is contained in:
parent 018d994781
commit 81ef8fb201
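
The changes below apply the same few mechanical fixes across every listed script. As a rough summary sketch (illustrative only; the pycodestyle/flake8 code names in the comments are my reading of the patterns, not something stated in the commit):

    # Recurring before/after patterns in this commit, shown on made-up data.
    table = {}
    name = "example"

    # Block comments gain a space after '#'  ("#comment" -> "# comment", E265).
    # Membership tests use 'not in'          ("not x in y" -> "x not in y", E713).
    if name not in table:
        table[name] = 0

    # Ambiguous single-letter counters are renamed ("l" -> "lineno", E741).
    lineno = 0
    for _ in ["a", "b"]:
        lineno += 1

    # Trailing Emacs blocks switch from '###' to '#' comment prefixes:
    # Local Variables:
    # End: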
Makefile.in (22 lines changed)
@@ -474,9 +474,7 @@ clang-format:
|| echo "*** You are not using clang-format 10.0, indents may differ from master's ***"
$(CLANGFORMAT) $(CLANGFORMAT_FLAGS) $(CLANGFORMAT_FILES)

YAPF = yapf3
YAPF_FLAGS = -i
YAPF_FILES = \
PY_PROGRAMS = \
examples/xml_py/vl_file_copy \
examples/xml_py/vl_hier_graph \
src/astgen \
@@ -486,14 +484,28 @@ YAPF_FILES = \
src/flexfix \
src/vlcovgen \
nodist/code_coverage \
nodist/code_coverage.dat \
nodist/dot_importer \
nodist/fuzzer/actual_fail \
nodist/fuzzer/generate_dictionary \
nodist/install_test \

PY_FILES = \
$(PY_PROGRAMS) \
nodist/code_coverage.dat \

YAPF = yapf3
YAPF_FLAGS = -i

yapf:
$(YAPF) $(YAPF_FLAGS) $(YAPF_FILES)
$(YAPF) $(YAPF_FLAGS) $(PY_FILES)

FLAKE8 = flake8
FLAKE8_FLAGS = \
--extend-exclude=fastcov.py \
--ignore=E123,E129,E251,E501,W503,W504,E701

lint-py:
$(FLAKE8) $(FLAKE8_FLAGS) $(PY_PROGRAMS)

format-pl-exec:
-chmod a+x test_regress/t/*.pl
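
For reference, a stand-alone sketch of what the new yapf and lint-py targets run. Only the tool names and flags are taken from the Makefile above; the wrapper script itself is hypothetical and assumes yapf3 and flake8 are on PATH:

    #!/usr/bin/env python3
    # Hypothetical wrapper mirroring 'make yapf' and 'make lint-py'.
    import subprocess
    import sys

    FLAKE8_FLAGS = ["--extend-exclude=fastcov.py",
                    "--ignore=E123,E129,E251,E501,W503,W504,E701"]

    def main(files):
        subprocess.run(["yapf3", "-i"] + files, check=True)            # reformat in place
        subprocess.run(["flake8"] + FLAKE8_FLAGS + files, check=True)  # then lint

    if __name__ == "__main__":
        main(sys.argv[1:])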
examples/xml_py/vl_file_copy
@@ -6,11 +6,10 @@ import argparse
import os
import re
import subprocess
import sys
import tempfile
import xml.etree.ElementTree as ET
from shutil import copy2
from pprint import pprint, pformat
# from pprint import pprint, pformat

#######################################################################

@@ -22,6 +21,7 @@ class VlFileCopy:
# ideally this script would check against options mentioned in help
debug=0,
output_dir='copied'): # directory name we output file uses

self.debug = debug

xml_temp = tempfile.NamedTemporaryFile()
@@ -119,6 +119,6 @@ SPDX-License-Identifier: CC0-1.0
fc = VlFileCopy(output_dir=args.odir, debug=args.debug, verilator_args=rem)

######################################################################
### Local Variables:
### compile-command: "./vl_file_copy -h ; VERILATOR_ROOT=$V4 ./vl_file_copy +define+thru top.v"
### End:
# Local Variables:
# compile-command: "./vl_file_copy -h ; VERILATOR_ROOT=$V4 ./vl_file_copy +define+thru top.v"
# End:
examples/xml_py/vl_hier_graph
@@ -4,13 +4,10 @@

import argparse
import os
import re
import subprocess
import sys
import tempfile
import xml.etree.ElementTree as ET
from shutil import copy2
from pprint import pprint, pformat
# from pprint import pprint, pformat

#######################################################################

@@ -70,7 +67,7 @@ class VlHierGraph:
fh.write("}\n")

def name_to_vertex_number(self, name):
if not name in self.name_to_number:
if name not in self.name_to_number:
self.next_vertex_number += 1
self.name_to_number[name] = self.next_vertex_number
return self.name_to_number[name]
@@ -146,6 +143,6 @@ SPDX-License-Identifier: CC0-1.0
verilator_args=rem)

######################################################################
### Local Variables:
### compile-command: "./vl_hier_graph -h ; VERILATOR_ROOT=$V4 ./vl_hier_graph +define+thru top.v"
### End:
# Local Variables:
# compile-command: "./vl_hier_graph -h ; VERILATOR_ROOT=$V4 ./vl_hier_graph +define+thru top.v"
# End:
nodist/code_coverage
@@ -7,6 +7,7 @@ import multiprocessing
import os
import re
import subprocess
import sys

RealPath = os.path.dirname(os.path.realpath(__file__))
Exclude_Branch_Regexps = []
@@ -223,7 +224,7 @@ def clone_sources(cc_dir):
if done:
break
if re.search(regexp, line):
#print("%s:%d: %s" % (infile, lineno, line)
# print("%s:%d: %s" % (infile, lineno, line)
line += " //code_coverage: // LCOV_EXCL_LINE LCOV_EXCL_BR_LINE"
excluded_lines += 1
excluded_br_lines += 1
@@ -233,7 +234,7 @@ def clone_sources(cc_dir):
if done:
break
if re.search(regexp, line):
#print("%s:%d: %s" % (infile, lineno, line)
# print("%s:%d: %s" % (infile, lineno, line)
line += " //code_coverage: // LCOV_EXCL_BR_LINE"
excluded_br_lines += 1
done = True
@@ -256,7 +257,7 @@ def cleanup_abs_paths_info(cc_dir, infile, outfile):
count=1)
line = re.sub(cc_dir + '/', '', line, count=1)
line = re.sub(r'obj_dbg/verilog.y$', 'verilog.y', line)
#print("Remaining SF: "+line)
# print("Remaining SF: "+line)
lines.append(line)

with open(outfile, "w") as ofh:
@@ -406,6 +407,6 @@ if True:
test()

######################################################################
### Local Variables:
### compile-command: "cd .. ; nodist/code_coverage "
### End:
# Local Variables:
# compile-command: "cd .. ; nodist/code_coverage "
# End:
nodist/dot_importer
@@ -104,6 +104,6 @@ dotread(Args.filename)
cwrite("graph_export.cpp")

######################################################################
### Local Variables:
### compile-command: "./dot_importer ../test_regress/obj_vlt/t_EXAMPLE/*orderg_o*.dot && cat graph_export.cpp"
### End:
# Local Variables:
# compile-command: "./dot_importer ../test_regress/obj_vlt/t_EXAMPLE/*orderg_o*.dot && cat graph_export.cpp"
# End:
nodist/install_test
@@ -6,6 +6,7 @@ import multiprocessing
import os
import shutil
import subprocess
import sys

######################################################################

@@ -135,6 +136,6 @@ Args = parser.parse_args()
test()

######################################################################
### Local Variables:
### compile-command: "cd .. ; nodist/install_test"
### End:
# Local Variables:
# compile-command: "cd .. ; nodist/install_test"
# End:
src/astgen (20 lines changed)
@@ -5,7 +5,7 @@ import argparse
import glob
import re
import sys
#from pprint import pprint, pformat
# from pprint import pprint, pformat

Types = []
Classes = {}
@@ -98,7 +98,7 @@ class Cpt:
doflag = match.group(2)
fromn = match.group(3)
to = match.group(4)
#self.print("// $fromn $to\n")
# self.print("// $fromn $to\n")
if not self.did_out_tree:
self.did_out_tree = True
self.output_func(lambda self: self.tree_match_base())
@@ -206,8 +206,6 @@ class Cpt:
outl = re.sub(r'\$([a-zA-Z0-9]+)', r'nodep->\1()', func)
out += outl + ";"
elif re.match(r'^\s*Ast([a-zA-Z0-9]+)\s*\{\s*(.*)\s*\}$', func):
nargs = 0
argnums = [] # Number for each argument name
aref = None
# Recursive array with structure to form
astack = []
@@ -300,7 +298,7 @@ class Cpt:
bases = subclasses_of(typen)
bases.append(typen)
for base in bases:
if not base in self.treeop:
if base not in self.treeop:
continue
for typefunc in self.treeop[base]:
lines = [
@@ -365,7 +363,7 @@ def read_types(filename):
match = re.search(r':\s*public\s+(\S+)', line)
if match:
inh = match.group(1)
#print("class "+classn+" : "+inh)
# print("class "+classn+" : "+inh)
if classn == "AstNode":
inh = ""
if re.search(r'Ast', inh) or classn == "AstNode":
@@ -425,7 +423,7 @@ def subclasses_of(typen):
cllist = []
subclass = Classes[typen]
while True:
if not subclass in Classes:
if subclass not in Classes:
break
cllist.append(subclass)
subclass = Classes[subclass]
@@ -448,7 +446,7 @@ def children_of(typen):
return cllist


#---------------------------------------------------------------------
# ---------------------------------------------------------------------


def write_report(filename):
@@ -715,6 +713,6 @@ for cpt in Args.infiles:
out_filename=cpt + "__gen.cpp")

######################################################################
### Local Variables:
### compile-command: "cd obj_dbg && ../astgen -I.. V3Const.cpp"
### End:
# Local Variables:
# compile-command: "cd obj_dbg && ../astgen -I.. V3Const.cpp"
# End:
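
The two membership rewrites above ('not base in self.treeop' and 'not subclass in Classes') do not change behavior; 'x not in y' is just the idiomatic spelling of the same test (commonly reported by pycodestyle as E713, which is my annotation, not the commit's). A minimal equivalence check on stand-in data:

    Classes = {"AstNodeMath": "AstNode"}   # stand-in dict, not astgen's real table
    subclass = "AstNodeBiop"
    assert (not subclass in Classes) == (subclass not in Classes)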
src/bisonpre (80 lines changed)
@@ -6,7 +6,7 @@ import os
import re
import subprocess
import sys
from pprint import pprint, pformat
# from pprint import pprint, pformat

######################################################################

@@ -111,13 +111,13 @@ def clean_output(filename, outname, is_output, is_c):

if is_output:
state_line = {}
l = 0
lineno = 0
for line in lines:
l += 1
lineno += 1
# We add a colon so it's easy to search for the definition
match = re.match(r'^state (\d+)\s*', line)
if match:
state_line[match.group(1)] = l
state_line[match.group(1)] = lineno
out = []
for line in lines:
match = re.match(r'^State (\d+) (conflicts)', line)
@@ -200,12 +200,12 @@ def clean_input(filename, outname):
if True:
linesin = lines
lines = []
l = 0
lineno = 0
for line in linesin:
l += 1
lineno += 1
# ^/ to prevent comments from matching
if re.match(r'^[a-zA-Z0-9_<>]+:[^/]*[a-zA-Z]', line):
sys.exit("%Error: " + filename + ":" + str(l) +
sys.exit("%Error: " + filename + ":" + str(lineno) +
": Move text on rule line to next line: " + line +
"\n")

@@ -223,7 +223,7 @@ def clean_input(filename, outname):
dtype = matcha.group(2)
line = name + matcha.group(3)
if name in Rules:
sys.exit("%Error: " + filename + ":" + str(l) +
sys.exit("%Error: " + filename + ":" + str(lineno) +
": Redeclaring '" + name + "': " + line)
if dtype not in types:
types[dtype] = {}
@@ -235,14 +235,14 @@ def clean_input(filename, outname):
'subrules': {}
}
if last_rule:
sys.exit("%Error: " + filename + ":" + str(l) +
sys.exit("%Error: " + filename + ":" + str(lineno) +
": Unterminated previous rule\n")
last_rule = name
elif matchb:
name = matchb.group(1)
if name != 'public' and name != 'private':
if name in Rules:
sys.exit("%Error: " + filename + ":" + str(l) +
sys.exit("%Error: " + filename + ":" + str(lineno) +
": Redeclaring '" + name + "': " + line)
Rules[name] = {
'name': name,
@@ -251,17 +251,16 @@ def clean_input(filename, outname):
'subrules': {}
}
if last_rule:
sys.exit("%Error: " + filename + ":" + str(l) +
sys.exit("%Error: " + filename + ":" + str(lineno) +
": Unterminated previous rule\n")
last_rule = name

lines.append(line)
# Now clean the line and extract some more info
cline = re.sub(r'//.*$', '\n', line)
rline = re.sub(r'//.*$', '\n', line)
if re.match(r'^\s*;', cline):
if not last_rule:
sys.exit("%Error: " + filename + ":" + str(l) +
sys.exit("%Error: " + filename + ":" + str(lineno) +
": Stray semicolon\n")
last_rule = None
elif last_rule:
@@ -272,7 +271,7 @@ def clean_input(filename, outname):
dtype = match.group(1)
tok = match.group(2)
if tok in tokens:
sys.exit("%Error: " + filename + ":" + str(l) +
sys.exit("%Error: " + filename + ":" + str(lineno) +
": Redeclaring '" + tok + "': " + line)
tokens[tok] = dtype

@@ -281,22 +280,22 @@ def clean_input(filename, outname):
# print("TT "+last_rule+" "+tok+"\n")
Rules[last_rule]['subrules'][tok] = 1

#pprint(Rules)
# pprint(Rules)

# Replace BISONPRE_VERSION(ver,,...) with expanded list
if True:
linesin = lines
lines = []
l = 0
lineno = 0
for line in linesin:
l += 1
lineno += 1
if _enaline(line) and re.search(r'BISONPRE_VERSION', line):
# 1 2 3 4
match = re.search(
r'BISONPRE_VERSION\((\S+)\s*,\s*((\S+)\s*,)?\s*([^\),]+)\)\s*$',
line)
if not match:
sys.exit("%Error: " + filename + ":" + str(l) +
sys.exit("%Error: " + filename + ":" + str(lineno) +
": Bad form of BISONPRE_VERSION: " + line)
ver = match.group(1)
ver_max = match.group(3)
@@ -312,24 +311,24 @@ def clean_input(filename, outname):
if True:
linesin = lines
lines = []
l = 0
lineno = 0
for line in linesin:
l += 1
lineno += 1
if _enaline(line) and re.search(r'BISONPRE_NOT', line):
match = re.search(
r'(.*)BISONPRE_NOT\((\S+)\)\s*(\{[^}]+})\s*(.*)$',
line,
flags=re.DOTALL)
if not match:
sys.exit("%Error: " + filename + ":" + str(l) +
sys.exit("%Error: " + filename + ":" + str(lineno) +
": Bad form of BISONPRE_NOT: " + line)
line = match.group(1) + match.group(4)
endtok = match.group(2)
action = match.group(3)
endtoks = endtok.split(',')
for etok in endtoks:
if not etok in tokens:
sys.exit("%Error: " + filename + ":" + str(l) +
if etok not in tokens:
sys.exit("%Error: " + filename + ":" + str(lineno) +
": Can't find definition for token: " + etok +
"\n")
# Push it all onto one line to avoid error messages changing
@@ -350,20 +349,20 @@ def clean_input(filename, outname):
if True:
linesin = lines
lines = []
l = 0
lineno = 0
for line in linesin:
l += 1
lineno += 1
if _enaline(line) and re.search(r'BISONPRE_COPY', line):
line = _bisonpre_copy(line, l, 0)
line = _bisonpre_copy(line, lineno, 0)
lines.append(line)

# Replace ~[x]~ - must be after BISONPRE_COPY expansion
if True:
linesin = lines
lines = []
l = 0
lineno = 0
for line in linesin:
l += 1
lineno += 1
line = re.sub(r'~[a-zA-Z0-9_]+~', '', line)
lines.append(line)

@@ -371,10 +370,10 @@ def clean_input(filename, outname):
if True:
linesin = lines
lines = []
l = 0
lineno = 0
needmore = 0
for line in linesin:
l += 1
lineno += 1
if _enaline(line) and re.search(r'//BISONPRE_TYPES', line):
lines.append(line)
for type in sorted(types.keys()):
@@ -391,7 +390,7 @@ def clean_input(filename, outname):
line = re.sub(r'^\s*//.*$', '', line)
if not re.match(r'^\s*$', line):
sys.exit(
"%Error: " + filename + ":" + str(l) + ": Need " +
"%Error: " + filename + ":" + str(lineno) + ": Need " +
needmore +
" more blank lines to keep line numbers are constant\n"
)
@@ -404,7 +403,7 @@ def clean_input(filename, outname):
fh.write(line)


def _bisonpre_copy(text, l, depth):
def _bisonpre_copy(text, lineno, depth):
while re.search(r'BISONPRE_COPY', text):
match = re.match(
# 1 2 3 4 5
@@ -412,14 +411,14 @@ def _bisonpre_copy(text, l, depth):
text,
flags=re.DOTALL)
if not match:
sys.exit("%Error: " + Filename + ":" + str(l) +
sys.exit("%Error: " + Filename + ":" + str(lineno) +
": Bad form of BISONPRE_NOT: " + text)
text = match.group(1) + '{HERE}' + match.group(5)
once = match.group(2)
rule = match.group(3)
code = match.group(4)
if not rule in Rules:
sys.exit("%Error: " + Filename + ":" + str(l) +
if rule not in Rules:
sys.exit("%Error: " + Filename + ":" + str(lineno) +
": Can't find definition for rule: " + rule)
if depth > 0 and once:
# _ONCE means don't inherit
@@ -430,20 +429,17 @@ def _bisonpre_copy(text, l, depth):
insert = Rules[rule]['rules_and_productions']
insert = re.sub(r'^\S+:', '', insert) # Strip rule name
# Recurse so BISONPRE under B
#print "COPY $l code $code\n"
#print "COPY $l in $insert\n"
for op in code.split(';'):
if re.match(r'^\s*$', op):
continue
match = re.match(r'^\s*s/(.*?)/(.*?)/g\s*$', op)
if not match:
sys.exit("%Error: " + Filename + ":" + str(l) +
sys.exit("%Error: " + Filename + ":" + str(lineno) +
": Didn't understand replacement: " + op)
left = match.group(1)
right = match.group(2)
insert = re.sub(left, right, insert)

#print "COPY $l out $insert\n"
insert = re.sub(r'[ \t\n]+\n', "\n", insert)
insert = re.sub(r'\n', " ",
insert) # Optional - preserve line numbering
@@ -556,6 +552,6 @@ Args = parser.parse_args()
process()

######################################################################
### Local Variables:
### compile-command: "./bisonpre "
### End:
# Local Variables:
# compile-command: "./bisonpre "
# End:
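
Most of the bisonpre changes above rename the loop counter 'l' to 'lineno' and thread the new name through _bisonpre_copy and its call site. A minimal sketch of the pattern on made-up input (the real script's checks are conditional on parser state; this only shows the counter rename):

    def find_stray_semicolons(filename, lines):
        msgs = []
        lineno = 0                      # was: l = 0
        for line in lines:
            lineno += 1                 # was: l += 1
            if line.strip() == ";":
                msgs.append("%Error: " + filename + ":" + str(lineno) +
                            ": Stray semicolon")
        return msgs

    print(find_stray_semicolons("example.y", ["rule:", "  expr", ";"]))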
src/cppcheck_filtered
@@ -139,10 +139,10 @@ def _suppress(filename, linenum, id):
return False

with open(filename) as fh:
l = 0
lineno = 0
for line in fh:
l += 1
if (l + 1 == linenum):
lineno += 1
if (lineno + 1 == linenum):
match = re.search(r'cppcheck-suppress((\s+\S+)+)', line)
if match:
for supid in match.group(1).split():
@@ -178,6 +178,6 @@ Args, cppcheck_args = parser.parse_known_args()
process(cppcheck_args)

######################################################################
### Local Variables:
### compile-command: "cd .. ; src/cppcheck_filtered cppcheck --xml --enable=all src/V3Width.cpp"
### End:
# Local Variables:
# compile-command: "cd .. ; src/cppcheck_filtered cppcheck --xml --enable=all src/V3Width.cpp"
# End:
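
With the counter renamed, the intent of the 'lineno + 1 == linenum' test above is easier to see: a cppcheck-suppress comment on the line immediately before the reported line suppresses that warning. A condensed sketch of that lookup (the function name and framing are my simplification of _suppress; the regex and split come from the lines above):

    import re

    def is_suppressed(filename, linenum, warn_id):
        # True if line (linenum - 1) carries 'cppcheck-suppress <warn_id>'.
        with open(filename) as fh:
            lineno = 0
            for line in fh:
                lineno += 1
                if lineno + 1 == linenum:
                    match = re.search(r'cppcheck-suppress((\s+\S+)+)', line)
                    if match and warn_id in match.group(1).split():
                        return True
        return False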
src/flexfix
@@ -10,7 +10,6 @@
######################################################################
# DESCRIPTION: Edits flex output to get around various broken flex issues.

import argparse
import re
import sys

@@ -29,7 +28,7 @@ for line in sys.stdin:
r' (int)number_to_move == (int)YY_MORE_ADJ ', line)
# Fix flex 2.5.4 namespace omission
line = re.sub(r'^class istream;',
'\#include <iostream>\nusing namespace std;\n', line)
'#include <iostream>\nusing namespace std;\n', line)
# Fix flex 2.5.31 redefinition
line = re.sub(r'(\#define\s+yyFlexLexer\s+yyFlexLexer)', r'//flexfix: \1',
line)
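
The flexfix change above drops the backslash in the replacement string: '\#' is not a recognized escape in a Python string literal, so the backslash was kept literally (and newer interpreters warn about such escapes); '#include ...' is the text that was actually intended. A runnable sketch of the fixed substitution:

    import re

    def fix_istream_decl(line):
        # Replace flex 2.5.4's bare 'class istream;' forward declaration with a
        # real include; no backslash is needed before '#' in a Python string.
        return re.sub(r'^class istream;',
                      '#include <iostream>\nusing namespace std;\n', line)

    print(fix_istream_decl("class istream;"), end="")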
src/vlcovgen
@@ -3,9 +3,8 @@

import argparse
import re
import os
import sys
from pprint import pprint
# from pprint import pprint

Items = []

@@ -105,6 +104,6 @@ lint()
write_keys(Args.srcdir + "/../include/verilated_cov_key.h")

######################################################################
### Local Variables:
### compile-command: "./vlcovgen --srcdir ."
### End:
# Local Variables:
# compile-command: "./vlcovgen --srcdir ."
# End: