# Process the benchmark results (regression.csv files)
# and add them to the regression benchmark database

import argparse
import re
import TestScripts.NewParser as parse
import TestScripts.CodeGen
from collections import deque
import os.path
import numpy as np
import pandas as pd
import statsmodels.api as sm
import statsmodels.formula.api as smf
import csv
import TestScripts.Deprecate as d
import sqlite3
import datetime, time

# For sql table creation
MKSTRFIELD=['Regression']
MKBOOLFIELD=['HARDFP', 'FASTMATH', 'NEON', 'HELIUM','UNROLL', 'ROUNDING','OPTIMIZED']
MKINTFIELD=['ID','MAX']
MKREALFIELD=['MAXREGCOEF']
MKDATEFIELD=[]
MKKEYFIELD=['DATE','NAME','CATEGORY', 'PLATFORM', 'CORE', 'COMPILER','TYPE','RUN']
MKKEYFIELDID={'CATEGORY':'categoryid',
              'NAME':'testnameid',
              'DATE':'testdateid',
              'PLATFORM':'platformid',
              'CORE':'coreid',
              'COMPILER':'compilerid',
              'TYPE':'typeid',
              'RUN':'runid'}

# For csv table value extraction
VALSTRFIELD=['TESTNAME','VERSION','Regression']
VALBOOLFIELD=['HARDFP', 'FASTMATH', 'NEON', 'HELIUM','UNROLL', 'ROUNDING','OPTIMIZED']
VALINTFIELD=['ID', 'MAX']
VALREALFIELD=['MAXREGCOEF']
VALDATEFIELD=[]
# Some of those fields may be created by the parsing of other fields
VALKEYFIELD=['DATE','NAME','CATEGORY', 'PLATFORM', 'CORE', 'COMPILER','TYPE']

# Interleave a delimiter between the items of an iterable
def joinit(iterable, delimiter):
    it = iter(iterable)
    yield next(it)
    for x in it:
        yield delimiter
        yield x

def tableExists(c,tableName):
    req=(tableName,)
    r=c.execute("SELECT name FROM sqlite_master WHERE type='table' AND name=?",req)
    return(r.fetchone() != None)

# Items of first which are not in second
def diff(first, second):
    second = set(second)
    return [item for item in first if item not in second]

def getColumns(elem,full):
    colsToKeep=[]
    cols = list(full.columns)
    params=diff(elem.params.full , elem.params.summary)
    common = diff(cols + ["TYPE","RUN"] , ['OLDID'] + params)

    for field in common:
        if field in MKSTRFIELD:
            colsToKeep.append(field)
        if field in MKINTFIELD:
            colsToKeep.append(field)
        if field in MKREALFIELD:
            colsToKeep.append(field)
        if field in MKKEYFIELD:
            colsToKeep.append(field)
        if field in MKDATEFIELD:
            colsToKeep.append(field)
        if field in MKBOOLFIELD:
            colsToKeep.append(field)
    return(colsToKeep)

def createTableIfMissing(conn,elem,tableName,full):
    if not tableExists(conn,tableName):
        sql = "CREATE TABLE %s (" % tableName
        cols = list(full.columns)
        params=diff(elem.params.full , elem.params.summary)
        common = diff(cols + ["TYPE","RUN"] , ['OLDID'] + params)

        sql += "%sid INTEGER PRIMARY KEY" % (tableName)
        start = ","

        for field in params:
            sql += " %s\n  %s INTEGER" % (start,field)
            start = ","

        for field in common:
            if field in MKSTRFIELD:
                sql += "%s\n  %s TEXT" % (start,field)
            if field in MKINTFIELD:
                sql += "%s\n  %s INTEGER" % (start,field)
            if field in MKREALFIELD:
                sql += "%s\n  %s REAL" % (start,field)
            if field in MKKEYFIELD:
                sql += "%s\n  %s INTEGER" % (start,MKKEYFIELDID[field])
            if field in MKDATEFIELD:
                sql += "%s\n  %s TEXT" % (start,field)
            if field in MKBOOLFIELD:
                sql += "%s\n  %s INTEGER" % (start,field)
            start = ","
        # Create foreign keys
        sql += "%sFOREIGN KEY(typeid) REFERENCES TYPE(typeid)," % start
        sql += "FOREIGN KEY(categoryid) REFERENCES CATEGORY(categoryid),"
        sql += "FOREIGN KEY(testnameid) REFERENCES TESTNAME(testnameid),"
        sql += "FOREIGN KEY(testdateid) REFERENCES TESTDATE(testdateid),"
        sql += "FOREIGN KEY(platformid) REFERENCES PLATFORM(platformid),"
        sql += "FOREIGN KEY(coreid) REFERENCES CORE(coreid),"
        sql += "FOREIGN KEY(compilerid) REFERENCES COMPILER(compilerid),"
        sql += "FOREIGN KEY(runid) REFERENCES RUN(runid)"
        sql += " )"
        conn.execute(sql)
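
# For illustration only: the table name, parameters and columns below are
# hypothetical (they depend on the benchmark class and on the regression.csv
# contents). For a class whose non-summary parameters are NB and NUMTAPS,
# createTableIfMissing builds a statement roughly like:
#
#   CREATE TABLE FIR (FIRid INTEGER PRIMARY KEY,
#     NB INTEGER,
#     NUMTAPS INTEGER,
#     ID INTEGER,
#     MAX INTEGER,
#     MAXREGCOEF REAL,
#     Regression TEXT,
#     ...
#     testnameid INTEGER,
#     typeid INTEGER,
#     runid INTEGER,
#     FOREIGN KEY(typeid) REFERENCES TYPE(typeid),
#     ...
#     FOREIGN KEY(runid) REFERENCES RUN(runid) )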

# Find the key or add it in a table
def findInTable(conn,table,keystr,strv,key):
    #print(sql)
    r = conn.execute("select %s from %s where %s=?" % (key,table,keystr),(strv,))
    result=r.fetchone()
    if result != None:
        return(result[0])
    else:
        conn.execute("INSERT INTO %s(%s) VALUES(?)" % (table,keystr),(strv,))
        conn.commit()
        r = conn.execute("select %s from %s where %s=?" % (key,table,keystr),(strv,))
        result=r.fetchone()
        if result != None:
            #print(result)
            return(result[0])
        else:
            return(None)

def findInCompilerTable(conn,kind,version):
    #print(sql)
    r = conn.execute("select compilerid from COMPILER where compilerkindid=? AND version=?" , (kind,version))
    result=r.fetchone()
    if result != None:
        return(result[0])
    else:
        fullDate = datetime.datetime.now()
        dateid = findInTable(conn,"TESTDATE","date",str(fullDate),"testdateid")
        conn.execute("INSERT INTO COMPILER(compilerkindid,version,testdateid) VALUES(?,?,?)" ,(kind,version,dateid))
        conn.commit()
        r = conn.execute("select compilerid from COMPILER where compilerkindid=? AND version=? AND testdateid=?" , (kind,version,dateid))
        result=r.fetchone()
        if result != None:
            #print(result)
            return(result[0])
        else:
            return(None)


def addRows(conn,elem,tableName,full,runid=0):
    # The list of columns we have in the DB is
    # different from the columns in the CSV file
    compilerid = 0
    platformid = 0
    coreid = 0
    keep = getColumns(elem,full)
    cols = list(full.columns)
    params=diff(["TYPE"] + cols , ['OLDID'] + params) if False else diff(elem.params.full , elem.params.summary)
    params=diff(elem.params.full , elem.params.summary)
    common = diff(["TYPE"] + cols , ['OLDID'] + params)
    colNameList = []
    for c in params + keep:
        if c in MKKEYFIELD:
            colNameList.append(MKKEYFIELDID[c])
        else:
            colNameList.append(c)
    colNames = "".join(joinit(colNameList,","))
    #print(colNameList)
    #print(colNames)
    #print(full)
    for index, row in full.iterrows():
        sql = "INSERT INTO %s(%s) VALUES(" % (tableName,colNames)
        keys = {}

        # Get data from columns
        for field in common:
            if field in VALSTRFIELD:
                keys[field]=row[field]
                if field == "NAME":
                    name = row[field]
                if field == "TESTNAME":
                    testname = row[field]
                    # Deduce the sample type from the test name suffix
                    # (if several suffixes match, the last one in the list wins)
                    for suffix in ["f64","f32","f16","q31","q15","q7",
                                   "s8","u8","s16","u16","s32","u32",
                                   "s64","u64"]:
                        if re.match(r'^.*_' + suffix, testname):
                            keys["TYPE"] = suffix
            if field in VALINTFIELD:
                keys[field]=row[field]
            if field in VALREALFIELD:
                keys[field]=row[field]
            if field in VALDATEFIELD:
                keys[field]=row[field]
            if field in VALBOOLFIELD:
                keys[field]=row[field]

        keys['RUN']=runid
        # Get foreign keys and create missing data
        for field in common:
            if field in VALKEYFIELD:
                if field == "CATEGORY":
                    # Remove the type extension to get the category name so that
                    # all types are mapped to the same category, which
                    # helps for post processing.
                    testField=re.sub(r'^(.*)[:]([^:]+)(F16|F32|F64|Q31|Q15|Q7)$',r'\1',row[field])
                    val = findInTable(conn,"CATEGORY","category",testField,"categoryid")
                    keys[field]=val
                if field == "NAME":
                    val = findInTable(conn,"TESTNAME","name",row[field],"testnameid")
                    keys[field]=val
                if field == "DATE":
                    val = findInTable(conn,"TESTDATE","date",str(row[field]),"testdateid")
                    keys[field]=val
                if field == "CORE":
                    val = findInTable(conn,"CORE","coredef",row[field],"coreid")
                    keys[field]=val
                    coreid = val
                if field == "PLATFORM":
                    val = findInTable(conn,"PLATFORM","platform",row[field],"platformid")
                    keys[field]=val
                    platformid = val
                if field == "TYPE":
                    val = findInTable(conn,"TYPE","type",keys["TYPE"],"typeid")
                    keys[field]=val
                if field == "COMPILER":
                    compilerkind = findInTable(conn,"COMPILERKIND","compiler",row[field],"compilerkindid")
                    compiler = findInCompilerTable(conn,compilerkind,keys["VERSION"])
                    keys[field]=compiler
                    compilerid = compiler

        # Generate sql command
        start = ""
        for field in params:
            sql += " %s\n  %d" % (start,row[field])
            start = ","

        for field in keep:
            if field in MKSTRFIELD or field in MKDATEFIELD:
                sql += " %s\n  \"%s\"" % (start,keys[field])
            elif field in VALREALFIELD:
                sql += " %s\n  %f" % (start,keys[field])
            else:
                sql += " %s\n  %d" % (start,keys[field])
            start = ","

        sql += " )"
        #print(sql)
        conn.execute(sql)
    conn.commit()
    return({'compilerid':compilerid,'platformid':platformid,'coreid':coreid})
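
# For illustration only: the table name, column order and values are
# hypothetical (they depend on the regression.csv contents). For one CSV row,
# addRows builds a statement of roughly this shape, where the id columns hold
# keys resolved through findInTable / findInCompilerTable:
#
#   INSERT INTO FIR(NB,NUMTAPS,ID,MAX,MAXREGCOEF,Regression,
#                   categoryid,testnameid,testdateid,platformid,
#                   coreid,compilerid,typeid,runid)
#   VALUES( 128, 32, 4, 1200, 0.950000, "quadratic",
#           3, 17, 1, 1, 1, 2, 5, 0 )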

def addConfig(conn,config,fullDate):
    dateid = findInTable(conn,"TESTDATE","date",str(fullDate),"testdateid")
    conn.execute("INSERT INTO CONFIG(compilerid,platformid,coreid,testdateid) VALUES(?,?,?,?)" ,(config['compilerid'],config['platformid'],config['coreid'],dateid))
    conn.commit()

def getGroup(a):
    return(re.sub(r'^(.+)(F64|F32|F16|Q31|Q15|Q7|U32|U16|U8|S32|S16|S8)$',r'\1',a))

def addOneBenchmark(elem,fullPath,db,group,runid):
    if os.path.isfile(fullPath):
        full=pd.read_csv(fullPath,dtype={'OLDID': str} ,keep_default_na = False)
        fullDate = datetime.datetime.now()
        full['DATE'] = fullDate
        if group:
            tableName = getGroup(group)
        else:
            tableName = getGroup(elem.data["class"])
        conn = sqlite3.connect(db)
        createTableIfMissing(conn,elem,tableName,full)
        config = addRows(conn,elem,tableName,full,runid)
        addConfig(conn,config,fullDate)
        conn.close()


def addToDB(benchmark,dbpath,elem,group,runid):
    if not elem.data["deprecated"]:
        if elem.params:
            benchPath = os.path.join(benchmark,elem.fullPath(),"regression.csv")
            print("Processing %s" % benchPath)
            addOneBenchmark(elem,benchPath,dbpath,group,runid)

        for c in elem.children:
            addToDB(benchmark,dbpath,c,group,runid)
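
# The benchmark tree is loaded from the pickle file; for every node that is not
# deprecated and has parameters, a file <benchmark dir>/<node path>/regression.csv
# is expected and imported into a table named after the benchmark class
# (with the type suffix stripped by getGroup).
# Example invocation (script name and suite class are illustrative):
#   python addToDB.py -f Output.pickle -b FullBenchmark -o reg.db -r 1 DSPBenchmarks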

parser = argparse.ArgumentParser(description='Generate summary benchmarks')

parser.add_argument('-f', nargs='?',type = str, default="Output.pickle", help="Pickle path")
parser.add_argument('-b', nargs='?',type = str, default="FullBenchmark", help="Full Benchmark dir path")
#parser.add_argument('-e', action='store_true', help="Embedded test")
parser.add_argument('-o', nargs='?',type = str, default="reg.db", help="Regression benchmark database")
parser.add_argument('-r', nargs='?',type = int, default=0, help="Run ID")

parser.add_argument('others', nargs=argparse.REMAINDER, help="Suite class")

args = parser.parse_args()

if args.f is not None:
    #p = parse.Parser()
    # Parse the test description file
    #root = p.parse(args.f)
    root=parse.loadRoot(args.f)
    d.deprecate(root,args.others)
    if args.others:
        group=args.others[0]
    else:
        group=None
    addToDB(args.b,args.o,root,group,args.r)

else:
    parser.print_help()
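
# For illustration only: once populated, the database can be inspected with the
# sqlite3 shell. The table name and columns below are hypothetical and assume
# the regression.csv files contained Regression and MAXREGCOEF columns:
#   sqlite3 reg.db "SELECT name, Regression, MAXREGCOEF FROM FIR
#                   JOIN TESTNAME USING(testnameid) WHERE runid=1"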