atpg-ls/run_exp.py

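"""Benchmark driver for ATPG solvers.

Runs the selected solver (Atalanta, ATPG-LS, TG-Pro, or OpenTPG; the main
block uses ATPG-LS) on every *.bench circuit under ./benchmark through a
thread pool, redirects each run's output to exp_result/, then parses fault
coverage and test-pattern counts from the logs and prints a summary table.
"""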
import os
from concurrent.futures import ThreadPoolExecutor, as_completed
import time
import re
pool = ThreadPoolExecutor(max_workers=96)
TIMEOUT = 2000
work_dir = "."
data_dir = "./benchmark"
data_suffix = ".bench"
res_dir = "exp_result"
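# Exit-status codes used to classify how a solver run finished.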
class ExitStatus:
    normal = 0
    timeout = 1
    runtime_error = 2
def run_shell_command(command, output_file, timeout):
    # Run the command through the shell, redirecting stdout/stderr to output_file.
    res = 0
    T1 = time.time()
    if timeout:
        res = os.system("timeout %ds stdbuf -oL %s > %s 2>&1 " % (timeout, command, output_file))
    else:
        res = os.system("%s > %s 2>&1 " % (command, output_file))
    T2 = time.time()
    exec_time = T2 - T1
    exit_status = ExitStatus.normal
    # Status classification is currently disabled, so every run is reported as normal.
    # if res == 31744:  # 124 << 8: the exit code produced by `timeout`
    #     exit_status = ExitStatus.timeout
    # if res != 0:
    #     exit_status = ExitStatus.runtime_error
    os.system("echo \"time: %.4f\" >> %s " % (exec_time, output_file))
    return (exit_status, exec_time)
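# Each solver wrapper exposes two functions that are called on the class itself:
#   run(filename, output_file, timeout) -> (exit_status, exec_time)
#   analyse(output_file)                -> (coverage, cube, pattern)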
class AtalantaSolver:
    name = "Atalanta"

    def run(filename, output_file, timeout):
        return run_shell_command("./Atalanta/atalanta -b 10000 %s" % filename, output_file, timeout)

    def analyse(output_file):
        content = open(output_file, "r").read()
        p1 = re.compile(r'Fault coverage\s*:\s*(\d+\.\d+)\s*%', re.S)
        coverage = p1.findall(content)[0]
        p2 = re.compile(r'Number of test patterns before compaction\s*:\s*(\d+)', re.S)
        cube = p2.findall(content)[0]
        p3 = re.compile(r'Number of test patterns after compaction\s*:\s*(\d+)', re.S)
        pattern = p3.findall(content)[0]
        return (coverage, cube, pattern)
class ATPGLS:
    name = "ATPG-LS"

    def run(filename, output_file, timeout):
        return run_shell_command("./crun %s" % filename, output_file, timeout)

    def analyse(output_file):
        print("analysing: " + output_file)
        content = open(output_file, "r").read()
        p1 = re.compile(r'coverage\s*:\s*(\d+\.\d+)\s*%', re.S)
        if len(p1.findall(content)) == 0:
            return ("ERROR", "ERROR", "ERROR")
        coverage = p1.findall(content)[-1]
        # ATPG-LS reports a single pattern count; it fills both the cube and pattern columns.
        p2 = re.compile(r'pattern\s*:\s*(\d+)', re.S)
        cube = p2.findall(content)[-1]
        p3 = re.compile(r'pattern\s*:\s*(\d+)', re.S)
        pattern = p3.findall(content)[-1]
        return (coverage, cube, pattern)
class TGPro:
    name = "TGPro"

    def run(filename, output_file, timeout):
        return run_shell_command("./tg-pro/bin/atpgSat %s" % filename, output_file, timeout)

    def analyse(output_file):
        content = open(output_file, "r").read()
        total = re.compile(r'Total: (\d+)', re.S).findall(content)[1]
        detectable = re.compile(r'Detectable: (\d+)', re.S).findall(content)[0]
        undetectable = re.compile(r'Undetectable: (\d+)', re.S).findall(content)[0]
        print(detectable, undetectable, total)
        # Coverage here counts both detected and proven-undetectable faults (fault efficiency).
        coverage = str((int(undetectable) + int(detectable)) / int(total) * 100) + "%"
        cube = "-*-"
        pattern = "-*-"
        return (coverage, cube, pattern)
class OpenTPG:
    name = "OpenTPG"

    def run(filename, output_file, timeout):
        (path, filename) = os.path.split(filename)
        fault_file = os.path.join(res_dir, "%s_%s.txt" % (OpenTPG.name, filename))
        cube_file = os.path.join(res_dir, "%s_%s.txt" % (OpenTPG.name, filename))
        # Assumed flag-to-file mapping: the original format string had three
        # placeholders but was given only (filename, output_file).
        return run_shell_command("./OpenTPG/opentpg/opentpg -n %s -f %s -c /dev/null -u %s"
                                 % (filename, fault_file, cube_file), output_file, timeout)

    def analyse(output_file):
        content = open(output_file, "r").read()
        p1 = re.compile(r'coverage:\s*:\s*(\d+\.\d+)\s*%', re.S)
        coverage = p1.findall(content)[-1]
        p2 = re.compile(r'pattern:\s*:\s*(\d+)', re.S)
        cube = p2.findall(content)[-1]
        p3 = re.compile(r'pattern:\s*:\s*(\d+)', re.S)
        pattern = p3.findall(content)[-1]
        return (coverage, cube, pattern)
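# Fixed-width ASCII table used for the final result summary.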
class Table:
    def __init__(self, header):
        self.header = header
        self.col_size = len(header)
        self.lines = []

    def add_line(self, cols):
        if len(cols) != self.col_size:
            raise Exception("column count error: expected %d but got %d." % (self.col_size, len(cols)))
        cols = [str(var) for var in cols]
        self.lines.append(cols)

    def print_table(self):
        col_widths = []
        for var in self.header:
            col_widths.append(len(var))
        for i in range(self.col_size):
            for j in range(len(self.lines)):
                self.lines[j][i] = str(self.lines[j][i])
        for i in range(self.col_size):
            for line in self.lines:
                col_widths[i] = max(col_widths[i], len(line[i]))
        table_width = 1
        for width in col_widths:
            table_width = table_width + width + 3
        for i in range(table_width): print("-", end="")
        print()

        def print_with_filler(text, width, filler):
            print(text, end="")
            for i in range(len(text), width):
                print(filler, end="")

        print("| ", end="")
        for i in range(self.col_size):
            print_with_filler(self.header[i], col_widths[i], " ")
            print(" | ", end="")
        print()
        print("| ", end="")
        for i in range(self.col_size):
            print_with_filler("", col_widths[i], "-")
            print(" | ", end="")
        print()
        for line in self.lines:
            print("| ", end="")
            for i in range(self.col_size):
                print_with_filler(line[i], col_widths[i], " ")
                print(" | ", end="")
            print()
        for i in range(table_width): print("-", end="")
        print()
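# Logger mirrors every message to stdout and to a log file.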
class Logger:
    def __init__(self, filename):
        self.f = open(filename, "w")

    def log(self, message):
        print(message)
        self.f.write(message + "\n")
        self.f.flush()
table = None
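# Thread-pool worker: run one solver on one circuit, then append a result row.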
def multiprocess_run_solver(solver, input_file):
    (path, filename) = os.path.split(input_file)
    out_file = os.path.join(res_dir, "%s_%s.txt" % (solver.name, filename))
    (status, exec_time) = solver.run(input_file, out_file, TIMEOUT)
    # exec_time = "-*-"
    # status = ExitStatus.normal
    fault = "-*-"
    cube = "-*-"
    pattern = "-*-"
    if status == ExitStatus.runtime_error:
        fault = "Runtime Error"
    elif status == ExitStatus.timeout:
        fault = "Time Out"
    else:
        (f, c, p) = solver.analyse(out_file)
        fault = f
        cube = c
        pattern = p
    table.add_line([input_file, fault, exec_time, cube, pattern])
    return input_file
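# Main driver: submit one task per benchmark circuit, log progress as tasks
# finish, then print the collected result table.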
if __name__ == '__main__':
    logger = Logger("check.log")
    os.chdir(work_dir)
    os.makedirs(res_dir, exist_ok=True)
    all_task = []
    solver = ATPGLS
    table = Table(["data", "fault coverage(%s)" % solver.name, "time(%s)" % solver.name, "cube(%s)" % solver.name, "pattern(%s)" % solver.name])
    for filename in os.listdir(data_dir):
        if not filename.endswith(data_suffix): continue
        all_task.append(pool.submit(multiprocess_run_solver, solver, os.path.join(data_dir, filename)))
    s = 0
    for task in as_completed(all_task):
        data = task.result()
        s = s + 1
        logger.log("[{}/{}] task {} done".format(s, len(all_task), data))
    table.print_table()