CMSIS-DSP: Improvement to testing scripts
parent
70f186b81d
commit
c2ca0dd2f8
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,283 @@
|
||||
import argparse
|
||||
import sqlite3
|
||||
import re
|
||||
import pandas as pd
|
||||
import numpy as np
|
||||
|
||||
# SQL returning the most recently recorded run id in the RUN table.
lastID="""SELECT runid FROM RUN ORDER BY runid DESC LIMIT 1
"""

def getLastRunID():
    """Return the latest runid present in the RUN table as an int."""
    row = c.execute(lastID).fetchone()
    return int(row[0])
|
||||
|
||||
|
||||
# Default runid used when none is supplied on the command line.
runid = 1

parser = argparse.ArgumentParser(description='Generate summary benchmarks')

# Input database path and output markdown report path.
parser.add_argument('-b', nargs='?',type = str, default="bench.db", help="Benchmark database")
parser.add_argument('-o', nargs='?',type = str, default="full.md", help="Full summary")
# When set, the database is a regression database (different value columns).
parser.add_argument('-r', action='store_true', help="Regression database")

# For runid or runid range
parser.add_argument('others', nargs=argparse.REMAINDER)

args = parser.parse_args()

# Module-level sqlite3 connection used by all query helpers below.
c = sqlite3.connect(args.b)

# Use the runid given as first positional argument, otherwise the latest run.
if args.others:
    runid=int(args.others[0])
else:
    runid=getLastRunID()
|
||||
|
||||
# We extract data only from data tables
# Those tables below are used for descriptions
# (they are joined to the data tables, not reported on their own).
REMOVETABLES=['RUN','CORE', 'PLATFORM', 'COMPILERKIND', 'COMPILER', 'TYPE', 'CATEGORY', 'CONFIG']

# This is assuming the database is generated by the regression script
# So platform is the same for all benchmarks.
# Category and type is coming from the test name in the yaml
# So no need to add this information here
# Name is removed here because it is added at the beginning
# of each report table instead of being repeated per row.
REMOVECOLUMNS=['runid','NAME','type','platform','category','coredef','OPTIMIZED','HARDFP','FASTMATH','NEON','HELIUM','UNROLL','ROUNDING','DATE','compilerkindid','date','categoryid', 'ID', 'platformid', 'coreid', 'compilerid', 'typeid']
|
||||
|
||||
# List the benchmark data tables of the database (every table that is
# not one of the description tables in REMOVETABLES).
def getBenchTables():
    """Return the names of the benchmark data tables."""
    rows = c.execute("SELECT name FROM sqlite_master WHERE type='table'")
    return [name for (name,) in rows if name not in REMOVETABLES]
|
||||
|
||||
# get existing types in a table
def getExistingTypes(benchTable):
    """Return the distinct typeids present in the given benchmark table."""
    rows = c.execute("select distinct typeid from %s" % benchTable).fetchall()
    return [row[0] for row in rows]
|
||||
|
||||
# Query listing the distinct (compiler, version) pairs used for a given
# type in a benchmark table.
versioncompiler="""select distinct compiler,version from %s
INNER JOIN COMPILER USING(compilerid)
INNER JOIN COMPILERKIND USING(compilerkindid) WHERE typeid=?"""

# Get existing compiler in a table for a specific type
# (In case report is structured by types)
def getExistingCompiler(benchTable,typeid):
    """Return the (compiler, version) tuples found for this table/type."""
    return c.execute(versioncompiler % benchTable,(typeid,)).fetchall()
|
||||
|
||||
# Get type name from type id
def getTypeName(typeid):
    """Map a typeid to its human-readable type name."""
    row = c.execute("select type from TYPE where typeid=?",(typeid,)).fetchone()
    return row[0]
|
||||
|
||||
# Diff of 2 lists: elements of `first` not contained in `second`,
# order of `first` preserved.
def diff(first, second):
    """Return the items of first that are absent from second."""
    excluded = set(second)
    return [item for item in first if item not in excluded]
|
||||
|
||||
|
||||
# Command to get data for specific compiler
# and type
# (%s placeholders are the selected column list and the table name;
#  ? parameters are compiler, version, typeid, runid).
benchCmd="""select %s from %s
INNER JOIN CATEGORY USING(categoryid)
INNER JOIN PLATFORM USING(platformid)
INNER JOIN CORE USING(coreid)
INNER JOIN COMPILER USING(compilerid)
INNER JOIN COMPILERKIND USING(compilerkindid)
INNER JOIN TYPE USING(typeid)
WHERE compiler=? AND VERSION=? AND typeid = ? AND runid = ?
"""

# Command to get test names for specific compiler
# and type
benchNames="""select distinct NAME from %s
INNER JOIN COMPILER USING(compilerid)
INNER JOIN COMPILERKIND USING(compilerkindid)
INNER JOIN TYPE USING(typeid)
WHERE compiler=? AND VERSION=? AND typeid = ? AND runid = ?
"""

# Command to get columns for specific table
# (executed only to read cursor.description; the rows are discarded).
benchCmdColumns="""select * from %s
INNER JOIN CATEGORY USING(categoryid)
INNER JOIN PLATFORM USING(platformid)
INNER JOIN CORE USING(coreid)
INNER JOIN COMPILER USING(compilerid)
INNER JOIN COMPILERKIND USING(compilerkindid)
INNER JOIN TYPE USING(typeid)
"""
|
||||
|
||||
# Interleave `delimiter` between the elements of `iterable`
# (like str.join, but lazy and for arbitrary element types).
def joinit(iterable, delimiter):
    """Yield the elements of iterable separated by delimiter.

    Yields nothing for an empty iterable. The previous version let the
    StopIteration from next() escape inside the generator, which under
    PEP 479 (Python 3.7+) turns into a RuntimeError for empty input.
    """
    it = iter(iterable)
    try:
        first = next(it)
    except StopIteration:
        return  # empty input -> empty output, not an error
    yield first
    for x in it:
        yield delimiter
        yield x
|
||||
|
||||
# Columns whose name ends in "id" are (usually primary-key) id columns
# and must be excluded from the report output.
def isNotIDColumn(col):
    """Return True when col is not an id column (name not ending in 'id')."""
    return re.match(r'^.*id$',col) is None
|
||||
|
||||
# Get test names
# for specific typeid and compiler (for the data)
def getTestNames(benchTable,comp,typeid):
    """Return distinct test NAMEs for (compiler, version, typeid, runid)."""
    params = (comp[0],comp[1],typeid,runid)
    rows = c.execute(benchNames % benchTable,params).fetchall()
    return [row[0] for row in rows]
|
||||
|
||||
# Get names of columns and data for a table
# for specific typeid and compiler (for the data)
def getColNamesAndData(benchTable,comp,typeid):
    """Return (keepCols, vals): the report column names and a 2-D numpy
    array of the matching rows for this compiler/type/runid."""
    cursor=c.cursor()
    # First execute only to learn the joined column names via
    # cursor.description; the rows of this query are not used.
    result=cursor.execute(benchCmdColumns % (benchTable))
    cols= [member[0] for member in cursor.description]
    # Keep NAME first, then every column that is neither a description
    # column (REMOVECOLUMNS) nor an id/key column.
    # NOTE(review): the comprehension variable `c` shadows the module-level
    # sqlite3 connection `c`, but only inside the comprehension scope.
    keepCols = ['NAME'] + [c for c in diff(cols , REMOVECOLUMNS) if isNotIDColumn(c)]
    keepColsStr = "".join(joinit(keepCols,","))
    # Query parameters: (compiler, version, typeid, runid).
    vals=(comp[0],comp[1],typeid,runid)
    result=cursor.execute(benchCmd % (keepColsStr,benchTable),vals)
    # All cells come back as strings; callers convert to numeric as needed.
    vals =np.array([list(x) for x in list(result)])
    return(keepCols,vals)
|
||||
|
||||
# Write columns in markdown format
def writeColumns(f,cols):
    """Write a markdown table header row and its ':-:' separator row.

    Uses str.join directly; the previous joinit-based concatenation
    raised RuntimeError (PEP 479) when cols was empty.
    """
    f.write("|")
    f.write("|".join(cols))
    f.write("|\n")
    # Center-align every column.
    f.write("|")
    f.write("|".join(":-:" for _ in cols))
    f.write("|\n")
|
||||
|
||||
# Write row in markdown format
def writeRow(f,row):
    """Write one markdown table row; each cell is str(x).

    Uses str.join directly; the previous joinit-based concatenation
    raised RuntimeError (PEP 479) when row was empty.
    """
    f.write("|")
    f.write("|".join(str(x) for x in row))
    f.write("|\n")
|
||||
|
||||
# Benchmark parameter columns: when present in a table they are converted
# to numeric values and used as secondary sort keys.
PARAMS=["NB","NumTaps", "NBA", "NBB", "Factor", "NumStages","VECDIM","NBR","NBC","NBI","IFFT", "BITREV"]

def regressionTableFor(name,output,ref,toSort,indexCols,field):
    """Write one markdown table for test `name`, pivoting `field` so each
    core becomes a column."""
    # 'first' keeps a single value per (index, core) pair.
    data=ref.pivot_table(index=indexCols, columns='core',
        values=[field], aggfunc='first')

    data=data.sort_values(toSort)

    # Pivoted column labels are (field, core) tuples; keep the core names.
    cores = [c[1] for c in list(data.columns)]
    columns = diff(indexCols,['NAME']) + cores

    writeColumns(output,columns)
    dataForFunc=data.loc[name]
    # Several rows for this test name -> DataFrame; a single row -> Series.
    if type(dataForFunc) is pd.DataFrame:
        for row in dataForFunc.itertuples():
            row=list(row)
            # The itertuples index (row[0]) is a scalar for one remaining
            # index level, or a tuple for several; flatten before writing.
            if type(row[0]) is int:
                row=[row[0]] + row[1:]
            else:
                row=list(row[0]) + row[1:]
            writeRow(output,row)
    else:
        writeRow(output,dataForFunc)
|
||||
|
||||
def formatTableByCore(output,testNames,cols,vals):
    """Write markdown tables (one per test name) with one column per core.

    vals is the string-valued numpy array from getColNamesAndData; numeric
    columns are converted before pivoting. Reads the module-level `args.r`
    flag to decide between regression and cycle reporting.
    """
    # Nothing to report when the query returned no rows.
    if vals.size != 0:
        ref=pd.DataFrame(vals,columns=cols)
        toSort=["NAME"]

        # Convert known benchmark parameters to numbers and sort by them.
        for param in PARAMS:
            if param in ref.columns:
                ref[param]=pd.to_numeric(ref[param])
                toSort.append(param)
        if args.r:
            # Regression table
            ref['MAX']=pd.to_numeric(ref['MAX'])
            ref['MAXREGCOEF']=pd.to_numeric(ref['MAXREGCOEF'])

            # Index = everything that is not a value/meta column.
            indexCols=diff(cols,['core','Regression','MAXREGCOEF','MAX','version','compiler'])
            valList = ['Regression']
        else:
            # Benchmark table: report raw cycle counts.
            ref['CYCLES']=pd.to_numeric(ref['CYCLES'])

            indexCols=diff(cols,['core','CYCLES','version','compiler'])
            valList = ['CYCLES']

        for name in testNames:
            if args.r:
                # Regression report: three tables per test.
                output.write("#### %s\n" % name)

                output.write("##### Regression\n" )
                regressionTableFor(name,output,ref,toSort,indexCols,'Regression')

                output.write("##### Max cycles\n" )
                regressionTableFor(name,output,ref,toSort,indexCols,'MAX')

                output.write("##### Max Reg Coef\n" )
                regressionTableFor(name,output,ref,toSort,indexCols,'MAXREGCOEF')

            else:
                # Benchmark report: one cycles table per test.
                data=ref.pivot_table(index=indexCols, columns='core',
                    values=valList, aggfunc='first')

                data=data.sort_values(toSort)

                # Pivoted column labels are (value, core) tuples.
                cores = [c[1] for c in list(data.columns)]
                columns = diff(indexCols,['NAME']) + cores

                output.write("#### %s\n" % name)
                writeColumns(output,columns)
                dataForFunc=data.loc[name]
                # Several rows -> DataFrame; a single row -> Series.
                if type(dataForFunc) is pd.DataFrame:
                    for row in dataForFunc.itertuples():
                        row=list(row)
                        # Flatten the itertuples index (scalar or tuple).
                        if type(row[0]) is int:
                            row=[row[0]] + row[1:]
                        else:
                            row=list(row[0]) + row[1:]
                        writeRow(output,row)
                else:
                    writeRow(output,dataForFunc)
|
||||
|
||||
# Add a report for each table
def addReportFor(output,benchName):
    """Write the markdown section for one benchmark table: one sub-section
    per type, then per (compiler, version)."""
    print("Process %s\n" % benchName)
    output.write("# %s\n" % benchName)
    # One section per type found in the table.
    for aTypeID in getExistingTypes(benchName):
        output.write("## %s\n" % getTypeName(aTypeID))
        # One sub-section per compiler/version pair.
        for compiler in getExistingCompiler(benchName,aTypeID):
            output.write("### %s (%s)\n" % compiler)
            cols,vals=getColNamesAndData(benchName,compiler,aTypeID)
            names=getTestNames(benchName,compiler,aTypeID)
            formatTableByCore(output,names,cols,vals)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
# Generate the full report: one top-level section per benchmark table.
try:
    with open(args.o,"w") as output:
        benchtables=getBenchTables()
        for bench in benchtables:
            addReportFor(output,bench)
finally:
    # Always release the sqlite connection, even on error.
    c.close()
|
||||
|
||||
|
||||
@ -1,70 +0,0 @@
|
||||
@ECHO OFF

REM For each benchmark group: generate the test sources with
REM processTests.py, then build, run and parse via the :runBench routine.

echo "Basic Maths"
python processTests.py -e BasicBenchmarks
call:runBench

echo "Complex Maths"
python processTests.py -e ComplexBenchmarks
call:runBench

echo "FIR"
python processTests.py -e FIR
call:runBench

echo "Convolution / Correlation"
python processTests.py -e MISC
call:runBench

echo "Decimation / Interpolation"
python processTests.py -e DECIM
call:runBench

echo "BiQuad"
python processTests.py -e BIQUAD
call:runBench

echo "Controller"
python processTests.py -e Controller
call:runBench

echo "Fast Math"
python processTests.py -e FastMath
call:runBench

echo "Barycenter"
python processTests.py -e SupportBarF32
call:runBench

echo "Support"
python processTests.py -e Support
call:runBench

echo "Unary Matrix"
python processTests.py -e Unary
call:runBench

echo "Binary Matrix"
python processTests.py -e Binary
call:runBench

echo "Transform"
python processTests.py -e Transform
call:runBench

EXIT /B

REM Subroutine: build the selected build folder, run the image on the
REM fast model (stdout captured to result.txt), then parse the results.
REM The commented alternatives select the Cortex-M7 / M0 targets instead
REM of the current Cortex-A5 one.
:runBench
REM pushd build_m7
REM pushd build_m0
pushd build_a5
make
REM "C:\Program Files\ARM\Development Studio 2019.0\sw\models\bin\FVP_MPS2_Cortex-M7.exe" -a Testing > result.txt
REM "C:\Program Files\ARM\Development Studio 2019.0\sw\models\bin\FVP_MPS2_Cortex-M0.exe" -a Testing > result.txt
"C:\Program Files\ARM\Development Studio 2019.0\sw\models\bin\FVP_VE_Cortex-A5x1.exe" -a Testing > result.txt
popd
echo "Parse result"
REM python processResult.py -e -r build_m7\result.txt
REM python processResult.py -e -r build_m0\result.txt
python processResult.py -e -r build_a5\result.txt
goto:eof
|
||||
@ -1,105 +0,0 @@
|
||||
import os
|
||||
import os.path
|
||||
import subprocess
|
||||
import colorama
|
||||
from colorama import init,Fore, Back, Style
|
||||
import argparse
|
||||
|
||||
# Benchmark groups to import into the databases; names must match the
# group names used by the test description / addToDB scripts.
GROUPS = [
    "BasicBenchmarks",
    "ComplexBenchmarks",
    "FIR",
    "MISC",
    "DECIM",
    "BIQUAD",
    "Controller",
    "FastMath",
    "SupportBarF32",
    "Support",
    "Unary",
    "Binary",
    "Transform"
    ]

# Initialize colorama so ANSI color codes work on Windows consoles.
init()
|
||||
|
||||
def msg(t):
    """Print the status text t highlighted in cyan."""
    highlighted = Fore.CYAN + t + Style.RESET_ALL
    print(highlighted)
|
||||
|
||||
def processTest(test):
    """Generate the C sources for one test group via processTests.py."""
    cmd = ["python", "processTests.py", "-e", test]
    subprocess.call(cmd)
|
||||
|
||||
def addToDB(cmd):
    """Run the given DB-import script once for each benchmark group."""
    for group in GROUPS:
        msg("Add group %s" % group)
        subprocess.call(["python", cmd, group])
|
||||
|
||||
def run(build,fvp,custom=None):
    """Build the benchmark in `build`, run it on the `fvp` fast model,
    then parse the captured output and import it into the benchmark and
    regression databases.

    custom: optional list of extra FVP arguments replacing the default
    ["-a", "Testing"] pair.
    """
    result = "results.txt"
    resultPath = os.path.join(build,result)

    current=os.getcwd()
    try:
        msg("Build" )
        os.chdir(build)
        subprocess.call(["make"])
        msg("Run")
        # Capture the model's stdout: it carries the benchmark results.
        with open(result,"w") as results:
            if custom:
                subprocess.call([fvp] + custom,stdout=results)
            else:
                subprocess.call([fvp,"-a","Testing"],stdout=results)
    finally:
        # Always restore the working directory, even if build/run failed.
        os.chdir(current)

    msg("Parse result")
    subprocess.call(["python","processResult.py","-e","-r",resultPath])

    msg("Regression computations")
    # NOTE(review): resultPath is passed after "-r"; confirm summaryBench.py
    # actually expects the result file in that position.
    subprocess.call(["python","summaryBench.py","-r",resultPath])

    msg("Add results to benchmark database")
    addToDB("addToDB.py")

    msg("Add results to regression database")
    addToDB("addToRegDB.py")
|
||||
|
||||
|
||||
|
||||
def processAndRun(buildfolder,fvp,custom=None):
    """Generate the DSP benchmark sources, then build, run and import them."""
    processTest("DSPBenchmarks")
    run(buildfolder,fvp,custom=custom)
|
||||
|
||||
parser = argparse.ArgumentParser(description='Parse test description')
# Build folder, fast-model executable and optional custom FVP arguments.
parser.add_argument('-f', nargs='?',type = str, default="build_benchmark_m7", help="Build folder")
parser.add_argument('-v', nargs='?',type = str, default="C:\\Program Files\\ARM\\Development Studio 2019.0\\sw\\models\\bin\\FVP_MPS2_Cortex-M7.exe", help="Fast Model")
parser.add_argument('-c', nargs='?',type = str, help="Custom args")

args = parser.parse_args()

# NOTE(review): -f and -v always receive argparse defaults, so args.f and
# args.v are never None and the else branches below are effectively dead.
if args.f is not None:
    BUILDFOLDER=args.f
else:
    BUILDFOLDER="build_benchmark_m7"

if args.v is not None:
    FVP=args.v
else:
    FVP="C:\\Program Files\\ARM\\Development Studio 2019.0\\sw\\models\\bin\\FVP_MPS2_Cortex-M7.exe"

# Split the custom FVP arguments (whitespace-separated) into a list.
if args.c:
    custom = args.c.split()
else:
    custom = None

print(Fore.RED + "bench.db and reg.db databases must exist before running this script" + Style.RESET_ALL)

# Preprocess the benchmark description and generate any missing C files
# before building and running.
msg("Process benchmark description file")
subprocess.call(["python", "preprocess.py","-f","bench.txt"])

msg("Generate all missing C files")
subprocess.call(["python","processTests.py", "-e"])

processAndRun(BUILDFOLDER,FVP,custom=custom)
|
||||
Loading…
Reference in New Issue