Mirror of https://github.com/verilator/verilator.git
Internals: Fix lint-py warnings

commit 972a11537c
parent 609bfa46e8
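
Most of the diff below repeats a handful of mechanical fixes across the project's Python helper scripts: open() calls gain an explicit encoding, bare subprocess.Popen() and tempfile.NamedTemporaryFile() handles become with-statements, dictionary loops switch to .items(), and the newer C0209 (consider-using-f-string) check is disabled per script instead of rewriting every "%" format. As a rough illustration only (not part of the commit; the helper function is hypothetical), the open() pattern being applied everywhere looks like this:

    def count_lines(filename):
        # Naming the codec satisfies pylint's unspecified-encoding check and
        # keeps the scripts independent of the locale's default encoding.
        with open(filename, "r", encoding="utf8") as fh:
            return sum(1 for _ in fh)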

@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-# pylint: disable=C0103,C0114,C0115,C0116,C0123,C0301,R0902,R0913,R0914,R0912,R0915,W0621
+# pylint: disable=C0103,C0114,C0115,C0116,C0123,C0209,C0301,R0902,R0913,R0914,R0912,R0915,W0621
 ######################################################################

 import argparse

@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-# pylint: disable=C0103,C0114,C0116
+# pylint: disable=C0103,C0114,C0116,C0209
 ######################################################################

 import argparse
@@ -73,7 +73,7 @@ def diff_file(a, b):

 def version_from(filename):
     # Return dump format
-    with open(filename) as fh:
+    with open(filename, "r", encoding="utf8") as fh:
         lineno = 0
         for line in fh:
             if lineno > 10:
@@ -86,8 +86,8 @@ def version_from(filename):

 def filterf(fn1, fn2):
     # Remove hex numbers before diffing
-    with open(fn1) as fh1:
-        with open(fn2, "w") as fh2:
+    with open(fn1, "r", encoding="utf8") as fh1:
+        with open(fn2, "w", encoding="utf8") as fh2:
             for line in fh1:
                 if re.search(r' This=', line):
                     continue

@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-# pylint: disable=C0103,C0114,C0116,C0301,R0914,R0912,R0915,W0511,eval-used
+# pylint: disable=C0103,C0114,C0116,C0209,C0301,R0914,R0912,R0915,W0511,eval-used
 ######################################################################

 import argparse
@@ -29,7 +29,7 @@ def process(filename):


 def read_data(filename):
-    with open(filename) as fh:
+    with open(filename, "r", encoding="utf8") as fh:
         re_thread = re.compile(r'^VLPROFTHREAD (\d+)$')
         re_record = re.compile(r'^VLPROFEXEC (\S+) (\d+)(.*)$')
         re_payload_mtaskBegin = re.compile(
@@ -318,7 +318,7 @@ def report_cpus():

 def write_vcd(filename):
     print("Writing %s" % filename)
-    with open(filename, "w") as fh:
+    with open(filename, "w", encoding="utf8") as fh:
         vcd = {
             'values':
             collections.defaultdict(lambda: {}),  # {<time>}{<code>} = value

@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-# pylint: disable=C0103,C0114,C0116,R0914,R0912,R0915,eval-used
+# pylint: disable=C0103,C0114,C0116,C0209,R0914,R0912,R0915,eval-used
 ######################################################################

 import argparse
@@ -13,7 +13,7 @@ import re
 def profcfunc(filename):
     funcs = {}

-    with open(filename) as fh:
+    with open(filename, "r", encoding="utf8") as fh:

         for line in fh:
             # %time cumesec selfsec calls {stuff} name
@@ -67,16 +67,15 @@ def profcfunc(filename):
     groups['design'] = collections.defaultdict(lambda: 0)
     groups['module'] = collections.defaultdict(lambda: 0)

-    for func in funcs:
-        pct = funcs[func]['pct']
+    for func, func_item in funcs.items():
+        pct = func_item['pct']
         vfunc = func

         funcarg = re.sub(r'^.*\(', '', func)

         design = None
-        for vde in verilated_mods:
-            if verilated_mods[vde].match(func) or verilated_mods[vde].match(
-                    funcarg):
+        for vde, vde_item in verilated_mods.items():
+            if vde_item.match(func) or vde_item.match(funcarg):
                 design = vde
                 break

@@ -114,12 +114,12 @@ def profcfunc(filename):
             groups['module']['C++'] += pct

         if vfunc not in vfuncs:
-            vfuncs[vfunc] = funcs[func]
+            vfuncs[vfunc] = func_item
             vfuncs[vfunc]['design'] = vdesign
         else:
-            vfuncs[vfunc]['pct'] += funcs[func]['pct']
-            vfuncs[vfunc]['calls'] += funcs[func]['calls']
-            vfuncs[vfunc]['sec'] += funcs[func]['sec']
+            vfuncs[vfunc]['pct'] += func_item['pct']
+            vfuncs[vfunc]['calls'] += func_item['calls']
+            vfuncs[vfunc]['sec'] += func_item['sec']

     for ftype in ['type', 'design', 'module']:
         missing = 100
@@ -136,9 +135,9 @@ def profcfunc(filename):
     print()

     design_width = 1
-    for func in vfuncs:
-        if design_width < len(vfuncs[func]['design']):
-            design_width = len(vfuncs[func]['design'])
+    for func, func_item in vfuncs.items():
+        if design_width < len(func_item['design']):
+            design_width = len(func_item['design'])

     print("Verilog code profile:")
     print(" These are split into three categories:")
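
The hunks above resolve pylint's dictionary-iteration hint (consider-using-dict-items) by unpacking key and value together instead of re-indexing the dict inside the loop. A minimal sketch of the same rewrite, using made-up data:

    funcs = {'funcA': {'pct': 60.0}, 'funcB': {'pct': 40.0}}

    # Before: for func in funcs: pct = funcs[func]['pct']
    # After: fetch key and value in one pass
    for func, func_item in funcs.items():
        print(func, func_item['pct'])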

@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-# pylint: disable=C0112,C0114,C0115,C0116,C0301,R0201,R0903
+# pylint: disable=C0112,C0114,C0115,C0116,C0209,C0301,R0201,R0903
 # -*- Python -*- See copyright, etc below
 ######################################################################

@@ -14,7 +14,7 @@ class VlSphinxExtract:
     SkipBasenames = {}

     def process(self, filename):
-        with open(filename) as fhr:
+        with open(filename, "r", encoding="utf8") as fhr:
             fhw = None
             for line in fhr:
                 # =for VL_SPHINX_EXTRACT "file_to_write_to"
@@ -22,7 +22,7 @@ class VlSphinxExtract:
                 if match:
                     outname = match.group(1)
                     print("Writing %s" % outname)
-                    fhw = open(outname, "w")
+                    fhw = open(outname, "w", encoding="utf8")  # pylint: disable=consider-using-with
                     fhw.write(
                         ".. comment: generated by vl_sphinx_extract from " +
                         filename + "\n")

@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-# pylint: disable=C0112,C0114,C0115,C0116,C0301,R0903
+# pylint: disable=C0112,C0114,C0115,C0116,C0209,C0301,R0903
 # -*- Python -*- See copyright, etc below
 ######################################################################

@@ -27,7 +27,7 @@ class VlSphinxFix:

     def _edit(self, filename):
         is_html = re.search(r'\.(html)$', filename)
-        with open(filename) as fhr:
+        with open(filename, "r", encoding="utf8") as fhr:
             origfile = fhr.read()
         wholefile = origfile
         # Option doesn't like spaces, so we use
@@ -43,7 +43,7 @@ class VlSphinxFix:
             if self.debug:
                 print("Edit %s" % filename)
             tempname = filename + ".tmp"
-            with open(tempname, "w") as fhw:
+            with open(tempname, "w", encoding="utf8") as fhw:
                 fhw.write(wholefile)
             os.rename(tempname, filename)


@@ -21,7 +21,7 @@ import sphinx_rtd_theme  # pylint: disable=wrong-import-position,

 def get_vlt_version():
     filename = "../../Makefile"
-    with open(filename) as fh:
+    with open(filename, "r", encoding="utf8") as fh:
         for line in fh:
             match = re.search(r"PACKAGE_VERSION_NUMBER *= *([a-z0-9.]+)", line)
             if match:

@@ -1,6 +1,6 @@
 #!/usr/bin/env python3
 # -*- Python -*- See copyright, etc below
-# pylint: disable=C0114,C0115,R0903
+# pylint: disable=C0114,C0115,C0209,R0903
 ######################################################################

 import argparse
@@ -20,24 +20,22 @@ class VlFileCopy:
     def __init__(
             self,
             verilator_args,  # presently all verilator options are passed-thru
-            # ideally this script would check against options mentioned in help
             debug=0,
             output_dir='copied'):  # directory name we output file uses

         self.debug = debug

-        xml_temp = tempfile.NamedTemporaryFile()
-
-        vargs = [
-            '--xml-output',
-            xml_temp.name,
-            '--bbox-sys',  # Parse some stuff can't translate
-            '--bbox-unsup',
-            '--prefix vlxml'
-        ]  # So we know name of .xml output
-        vargs += verilator_args
-        self.run_verilator(vargs)
-        self.tree = ET.parse(xml_temp.name)
+        with tempfile.NamedTemporaryFile() as xml_temp:
+            vargs = [
+                '--xml-output',
+                xml_temp.name,
+                '--bbox-sys',  # Parse some stuff can't translate
+                '--bbox-unsup',
+                '--prefix vlxml'
+            ]  # So we know name of .xml output
+            vargs += verilator_args
+            self.run_verilator(vargs)
+            self.tree = ET.parse(xml_temp.name)

         os.makedirs(output_dir, 0o777, True)

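
The hunk above converts the temporary XML file to a with-statement so it is closed and deleted even if the Verilator run fails partway (pylint's consider-using-with). A standalone sketch of the pattern, with a trivial stand-in for the Verilator invocation:

    import tempfile
    import xml.etree.ElementTree as ET

    with tempfile.NamedTemporaryFile() as xml_temp:
        # Stand-in for running Verilator with --xml-output xml_temp.name
        with open(xml_temp.name, "w", encoding="utf8") as fh:
            fh.write("<verilator_xml/>")
        tree = ET.parse(xml_temp.name)
    # The temporary file is removed as soon as the with-block exits
    print(tree.getroot().tag)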

@@ -1,6 +1,6 @@
 #!/usr/bin/env python3
 # -*- Python -*- See copyright, etc below
-# pylint: disable=C0103,C0114,C0115,C0115,C0116,R0914
+# pylint: disable=C0103,C0114,C0115,C0115,C0116,C0209,R0914
 ######################################################################

 import argparse
@@ -18,27 +18,25 @@ class VlHierGraph:
     def __init__(
             self,
             verilator_args,  # presently all verilator options are passed-thru
-            # ideally this script would check against options mentioned in help
             debug=0,
             output_filename='graph.dot'):  # output filename
         self.debug = debug
         self.next_vertex_number = 0
         self.name_to_number = {}

-        xml_temp = tempfile.NamedTemporaryFile()
-
-        vargs = [
-            '--xml-output',
-            xml_temp.name,
-            '--bbox-sys',  # Parse some stuff can't translate
-            '--bbox-unsup',
-            '--prefix vlxml'
-        ]  # So we know name of .xml output
-        vargs += verilator_args
-        self.run_verilator(vargs)
-        self.tree = ET.parse(xml_temp.name)
+        with tempfile.NamedTemporaryFile() as xml_temp:
+            vargs = [
+                '--xml-output',
+                xml_temp.name,
+                '--bbox-sys',  # Parse some stuff can't translate
+                '--bbox-unsup',
+                '--prefix vlxml'
+            ]  # So we know name of .xml output
+            vargs += verilator_args
+            self.run_verilator(vargs)
+            self.tree = ET.parse(xml_temp.name)

-        with open(output_filename, "w") as fh:
+        with open(output_filename, "w", encoding="utf8") as fh:
             # For more serious purposes, use the python graphviz package instead
             fh.write("digraph {\n")
             fh.write(" dpi=300;\n")

@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-# pylint: disable=C0103,C0114,C0115,C0116,R0912,R0914,R0915,W0125,W0621,exec-used
+# pylint: disable=C0103,C0114,C0115,C0116,C0209,R0912,R0914,R0915,W0125,W0621,exec-used
 ######################################################################

 import argparse
@@ -26,7 +26,7 @@ if 'VERILATOR_ROOT' not in os.environ:
 def test():
     if not os.path.exists("nodist/code_coverage.dat"):
         sys.exit("%Error: Run code_coverage from the top of the verilator kit")
-    exec(open("./nodist/code_coverage.dat").read())
+    exec(open("./nodist/code_coverage.dat", "r", encoding="utf8").read())  # pylint: disable=consider-using-with

     if Args.stage_enabled[0]:
         ci_fold_start("distclean")
@@ -78,10 +78,11 @@ def test():
     os.makedirs(cc_dir, exist_ok=True)
     os.makedirs(cc_dir + "/info", exist_ok=True)

-    sp = subprocess.Popen("find . -print | grep .gcda",
-                          shell=True,
-                          stdout=subprocess.PIPE)
-    datout = sp.stdout.read()
+    with subprocess.Popen("find . -print | grep .gcda",
+                          shell=True,
+                          stdout=subprocess.PIPE) as sp:
+        datout = sp.stdout.read()
+
     dats = {}
     for dat in datout.splitlines():
         dat = dat.decode('utf-8')
@@ -97,10 +98,11 @@ def test():
             del dats[dat]
             break

-    sp = subprocess.Popen("find . -print | grep .gcno",
-                          shell=True,
-                          stdout=subprocess.PIPE)
-    datout = sp.stdout.read()
+    with subprocess.Popen("find . -print | grep .gcno",
+                          shell=True,
+                          stdout=subprocess.PIPE) as sp:
+        datout = sp.stdout.read()
+
     gcnos = {}
     for gcno in datout.splitlines():
         gcno = gcno.decode('utf-8')
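
Both hunks above wrap subprocess.Popen in a with-statement, which closes the pipe and waits for the child process on exit (again pylint's consider-using-with). A minimal runnable sketch with an illustrative command:

    import subprocess

    with subprocess.Popen("echo hello", shell=True, stdout=subprocess.PIPE) as sp:
        datout = sp.stdout.read()
    print(datout.decode('utf-8'))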
@@ -204,8 +206,8 @@ def clone_sources(cc_dir):
             outfile = cc_dir + "/" + infile
             outpath = re.sub(r'/[^/]*$', '', outfile, count=1)
             os.makedirs(outpath, exist_ok=True)
-            with open(infile) as fh:
-                with open(outfile, "w") as ofh:
+            with open(infile, "r", encoding="utf8") as fh:
+                with open(outfile, "w", encoding="utf8") as ofh:
                     lineno = 0
                     for line in fh:
                         lineno += 1
@@ -249,7 +251,7 @@ def clone_sources(cc_dir):

 def cleanup_abs_paths_info(cc_dir, infile, outfile):
     lines = []
-    with open(infile) as fh:
+    with open(infile, "r", encoding="utf8") as fh:
         for line in fh:
             if re.search(r'^SF:', line):
                 line = re.sub(os.environ['VERILATOR_ROOT'] + '/',
@@ -261,7 +263,7 @@ def cleanup_abs_paths_info(cc_dir, infile, outfile):
                 # print("Remaining SF: "+line)
             lines.append(line)

-    with open(outfile, "w") as ofh:
+    with open(outfile, "w", encoding="utf8") as ofh:
         for line in lines:
             ofh.write(line)

@@ -270,14 +272,14 @@ def cleanup_abs_paths_json(cc_dir, infile, outfile):
     # Handcrafted cleanup, alternative would be to deserialize/serialize JSON,
     # but this is much faster
     lines = []
-    with open(infile) as fh:
+    with open(infile, "r", encoding="utf8") as fh:
         for line in fh:
             line = re.sub('"' + os.environ['VERILATOR_ROOT'] + '/', '"', line)
             line = re.sub('"' + cc_dir + '/', '"', line)
             line = re.sub(r'obj_dbg/verilog.y$', 'verilog.y', line)
             lines.append(line)

-    with open(outfile, "w") as ofh:
+    with open(outfile, "w", encoding="utf8") as ofh:
         for line in lines:
             ofh.write(line)


@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-# pylint: disable=C0103,C0114,C0115,C0116,C0301
+# pylint: disable=C0103,C0114,C0115,C0116,C0209,C0301
 ######################################################################

 import argparse
@@ -15,7 +15,7 @@ Edges = []


 def dotread(filename):
-    with open(filename) as fh:
+    with open(filename, "r", encoding="utf8") as fh:
         header = True
         vnum = 0

@@ -61,7 +61,7 @@ def dotread(filename):


 def cwrite(filename):
-    with open(filename, "w") as fh:
+    with open(filename, "w", encoding="utf8") as fh:
         fh.write("void V3GraphTestImport::dotImport() {\n")
         fh.write(" auto* gp = &m_graph;\n")
         for ver in sorted(Vertexes, key=lambda ver: ver['num']):

@@ -45,8 +45,8 @@ def print_lines(a):

 def write_file(filename, contents):
     # str->str->void
-    f = open(filename, 'w')
-    f.write(contents)
+    with open(filename, "w", encoding="utf8") as fh:
+        fh.write(contents)


 def parse_line(s):

@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-# pylint: disable=C0103,C0114,C0115,C0116,R0801,R0915
+# pylint: disable=C0103,C0114,C0115,C0116,C0209,R0801,R0915
 ######################################################################

 import argparse

src/astgen (14 changed lines)
@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-# pylint: disable=C0103,C0114,C0115,C0116,C0123,C0301,R0902,R0913,R0914,R0912,R0915,W0621
+# pylint: disable=C0103,C0114,C0115,C0116,C0123,C0209,C0301,C0302,R0902,R0913,R0914,R0912,R0915,W0621
 ######################################################################

 import argparse
@@ -80,7 +80,7 @@ class Node:
         # Sort sub-classes and convert to tuple, which marks completion
         self._subClasses = tuple(
             sorted(self._subClasses,
-                   key=lambda _: (bool(_._subClasses), _.name)))
+                   key=lambda _: (bool(_._subClasses), _.name)))  # pylint: disable=protected-access

         self._ordIdx = ordIdx
         ordIdx = ordIdx + 1
@@ -220,7 +220,7 @@ class Cpt:
         didln = False

         # Read the file and parse into list of functions that generate output
-        with open(self.in_filename) as fhi:
+        with open(self.in_filename, "r", encoding="utf8") as fhi:
             for line in fhi:
                 ln += 1
                 if not didln:
@@ -570,7 +570,7 @@ def read_types(filename, Nodes, prefix):
                     p=prefix, n=node.name))
             hasAstgenMembers = False

-    with open(filename) as fh:
+    with open(filename, "r", encoding="utf8") as fh:
         for (lineno, line) in enumerate(fh, start=1):
             line = line.strip()
             if not line:
@@ -715,7 +715,7 @@ def check_types(sortedTypes, prefix, abstractPrefix):


 def read_stages(filename):
-    with open(filename) as fh:
+    with open(filename, "r", encoding="utf8") as fh:
         n = 100
         for line in fh:
             line = re.sub(r'//.*$', '', line)
@@ -731,7 +731,7 @@ def read_stages(filename):

 def read_refs(filename):
     basename = re.sub(r'.*/', '', filename)
-    with open(filename) as fh:
+    with open(filename, "r", encoding="utf8") as fh:
         for line in fh:
             line = re.sub(r'//.*$', '', line)
             for match in re.finditer(r'\bnew\s*(Ast[A-Za-z0-9_]+)', line):
@@ -753,7 +753,7 @@ def read_refs(filename):


 def open_file(filename):
-    fh = open(filename, "w")
+    fh = open(filename, "w", encoding="utf8")  # pylint: disable=consider-using-with
     if re.search(r'\.txt$', filename):
         fh.write("// Generated by astgen\n")
     else:

src/bisonpre (12 changed lines)
@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-# pylint: disable=C0103,C0114,C0115,C0116,R0912,R0914,R0915,R1702,W0125
+# pylint: disable=C0103,C0114,C0115,C0116,C0209,R0912,R0914,R0915,R1702,W0125
 ######################################################################

 import argparse
@@ -17,7 +17,7 @@ def process():

     bison_version_check()
     supports_report = Bison_Version >= 2.3
-    supports_counter_examples = Bison_Version >= 3.8
+    # supports_counter_examples = Bison_Version >= 3.8

     clean_input(Args.input, tmp_prefix() + ".y")

@@ -88,10 +88,10 @@ def unlink_outputs():


 def bison_version_check():
-    sp = subprocess.Popen(Args.yacc + " --version",
-                          shell=True,
-                          stdout=subprocess.PIPE)
-    out = str(sp.stdout.read())
+    with subprocess.Popen(Args.yacc + " --version",
+                          shell=True,
+                          stdout=subprocess.PIPE) as sp:
+        out = str(sp.stdout.read())
     match = re.search(r'([0-9]+\.[0-9]+)', out)
     if match:
         v = float(match.group(1))
@@ -405,7 +405,7 @@ def clean_input(filename, outname):
         else:
             lines.append(line)

-    with open(outname, "w") as fh:
+    with open(outname, "w", encoding="utf8") as fh:
         for line in lines:
             fh.write(line)


@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-# pylint: disable=C0103,C0114,C0115,C0116,R0911,R0912,R0915,W0621
+# pylint: disable=C0103,C0114,C0115,C0116,C0209,R0911,R0912,R0915,W0621
 ######################################################################

 import argparse
@@ -155,11 +155,11 @@ def _suppress(filename, linenum, eid):
               file=sys.stderr)
         return False

-    with open(filename) as fh:
+    with open(filename, "r", encoding="utf8") as fh:
         lineno = 0
         for line in fh:
             lineno += 1
-            if ((lineno + 1) == linenum):
+            if (lineno + 1) == linenum:
                 match = re.search(
                     r'(cppcheck|cppcheck-has-bug|cppverilator)-suppress((\s+\S+)+)',
                     line)

@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-# pylint: disable=C0103,C0114,C0116,eval-used
+# pylint: disable=C0103,C0114,C0116,C0209,eval-used
 ######################################################################

 import argparse
@@ -13,7 +13,7 @@ Items = []


 def read_keys(filename):
-    with open(filename) as fh:
+    with open(filename, "r", encoding="utf8") as fh:
         for line in fh:
             line = re.sub(r'\/\/.*$', '', line)
             if re.match(r'^\s*$', line):
@@ -45,7 +45,7 @@ def write_keys(filename):
     orig = []
     out = []

-    with open(filename) as fh:
+    with open(filename, "r", encoding="utf8") as fh:
         deleting = False
         for line in fh:
             orig.append(line)
@@ -70,7 +70,7 @@ def write_keys(filename):

     ok = "".join(out) == "".join(orig)
     if not ok:
-        with open(filename, "w") as fhw:
+        with open(filename, "w", encoding="utf8") as fhw:
             fhw.write("".join(out))


@@ -27,7 +27,7 @@ for cmd in sys.stdin:

     wholefile = ""
     # It's faster to slurp the whole file then scan (if needed)
-    with open(filename) as fh:
+    with open(filename, "r", encoding="utf8") as fh:
         wholefile = fh.read()

     if 'example_lint' in wholefile:  # else short circuit