import os.path
import itertools
import Tools
from sklearn import svm
import random
import numpy as np

# Pattern/benchmark generator for the SVM tests: trains small
# scikit-learn SVM classifiers on synthetic two-cluster data, then dumps
# the classifier description (support vectors, dual coefficients,
# intercept, kernel parameters) together with scikit-learn's reference
# predictions, so the embedded implementation can be validated against
# them.

# Number of vectors to test for each test
NBTESTSAMPLE = 100
# Dimension of the vectors
VECDIM = 10

# Number of vectors for training
NBVECTORS = 10
# Distance between the two centers (training vectors are gaussianly
# distributed around the centers)
CENTER_DISTANCE = 1

# SVM kernel kinds. These integer codes are written into the "Dims"
# pattern and must stay in sync with the consumer of the patterns.
LINEAR = 1
POLY = 2
RBF = 3
SIGMOID = 4

# Centers of the two gaussian clouds used for the training data.
C0 = np.zeros((1, VECDIM))
C1 = np.copy(C0)
C1[0, 0] = C1[0, 0] + CENTER_DISTANCE

# Data for training
X = []      # all training vectors
Xone = []   # class-0 vectors only
            # NOTE(review): Xone is filled but never read — presumably
            # intended for one-class training; confirm before removing.
y = []      # class labels aligned with X

class1 = 0
class2 = 1

for i in range(NBVECTORS):
    v = np.random.randn(1, VECDIM)
    v = v * CENTER_DISTANCE / 2.0 / 10
    # The first two samples are forced into different classes so that
    # both classes are always represented in the training set.
    if i == 0:
        c = 0
    elif i == 1:
        c = 1
    else:
        c = np.random.choice([0, 1])
    if c == 0:
        v = v + C0
        y.append(class1)
    else:
        v = v + C1
        y.append(class2)
    if c == 0:
        Xone.append(v[0].tolist())
    X.append(v[0].tolist())


# Used for benchmark data
def genRandomVector(vecdim):
    """Generate one random sample of dimension vecdim.

    The sample is drawn around one of two cluster centers (chosen at
    random) separated by CENTER_DISTANCE along the first axis.

    Returns:
        (vector, c): vector is a plain list of vecdim floats, c is the
        cluster index (0 or 1).
    """
    c0 = np.zeros((1, vecdim))
    c1 = np.copy(c0)
    c1[0, 0] = c1[0, 0] + CENTER_DISTANCE

    v = np.random.randn(1, vecdim)
    v = v * CENTER_DISTANCE / 2.0 / 10
    c = np.random.choice([0, 1])
    if c == 0:
        v = v + c0
    else:
        v = v + c1

    v = v[0].tolist()
    return (v, c)


def newSVMTest(config, kind, theclass, clf, nb):
    """Write one SVM test case (inputs, dims, params, reference).

    Args:
        config: Tools.Config used to write the pattern files.
        kind: kernel kind code (LINEAR/POLY/RBF/SIGMOID).
        theclass: pair of class labels [classA, classB] recorded in Dims.
        clf: a fitted scikit-learn SVM classifier.
        nb: test number used to name the pattern files.
    """
    # Generate NBTESTSAMPLE random samples and record scikit-learn's
    # prediction for each as the reference output.
    inputs = []
    references = []
    for i in range(NBTESTSAMPLE):
        v = np.random.randn(1, VECDIM)
        v = v * CENTER_DISTANCE / 2.0 / 6.0
        c = np.random.choice([0, 1])
        if c == 0:
            v = v + C0
        else:
            v = v + C1
        inputs.append(v[0].tolist())
        toPredict = [v[0].tolist()]
        references.append(clf.predict(toPredict))
    inputs = np.array(inputs)
    inputs = inputs.reshape(NBTESTSAMPLE * VECDIM)

    config.writeInput(nb, inputs, "Samples")

    references = np.array(references)
    references = references.reshape(NBTESTSAMPLE)

    # Classifier description
    supportShape = clf.support_vectors_.shape

    nbSupportVectors = supportShape[0]
    vectorDimensions = supportShape[1]
    intercept = np.array(clf.intercept_)
    dualCoefs = clf.dual_coef_
    dualCoefs = dualCoefs.reshape(nbSupportVectors)
    supportVectors = clf.support_vectors_
    supportVectors = supportVectors.reshape(nbSupportVectors * VECDIM)

    # Dims layout: kind, the two class labels, number of test samples,
    # vector dimension, number of support vectors (+ degree for POLY).
    if kind == LINEAR:
        dims = np.array([kind, theclass[0], theclass[1], NBTESTSAMPLE, VECDIM, nbSupportVectors])
    elif kind == POLY:
        dims = np.array([kind, theclass[0], theclass[1], NBTESTSAMPLE, VECDIM, nbSupportVectors, clf.degree])
    elif kind == RBF:
        dims = np.array([kind, theclass[0], theclass[1], NBTESTSAMPLE, VECDIM, nbSupportVectors])
    elif kind == SIGMOID:
        dims = np.array([kind, theclass[0], theclass[1], NBTESTSAMPLE, VECDIM, nbSupportVectors])

    config.writeInputS16(nb, dims, "Dims")

    # Params layout depends on the kernel: support vectors, then dual
    # coefficients, intercept, and the kernel-specific parameters.
    # clf._gamma is sklearn's resolved numeric gamma (private attribute).
    if kind == LINEAR:
        params = np.concatenate((supportVectors, dualCoefs, intercept))
    elif kind == POLY:
        coef0 = np.array([clf.coef0])
        gamma = np.array([clf._gamma])
        params = np.concatenate((supportVectors, dualCoefs, intercept, coef0, gamma))
    elif kind == RBF:
        gamma = np.array([clf._gamma])
        params = np.concatenate((supportVectors, dualCoefs, intercept, gamma))
    elif kind == SIGMOID:
        coef0 = np.array([clf.coef0])
        gamma = np.array([clf._gamma])
        params = np.concatenate((supportVectors, dualCoefs, intercept, coef0, gamma))

    config.writeInput(nb, params, "Params")

    config.writeReferenceS32(nb, references, "Reference")


def writeTests(config):
    """Train one classifier per kernel and write tests 1..5."""
    clf = svm.SVC(kernel='linear')
    clf.fit(X, y)
    newSVMTest(config, LINEAR, [class1, class2], clf, 1)

    clf = svm.SVC(kernel='poly', gamma='auto', coef0=1.1)
    clf.fit(X, y)
    newSVMTest(config, POLY, [class1, class2], clf, 2)

    clf = svm.SVC(kernel='rbf', gamma='auto')
    clf.fit(X, y)
    newSVMTest(config, RBF, [class1, class2], clf, 3)

    clf = svm.SVC(kernel='sigmoid', gamma='auto')
    clf.fit(X, y)
    newSVMTest(config, SIGMOID, [class1, class2], clf, 4)

    # One-class SVM uses labels -1/1.
    # NOTE(review): the classifier is trained with a *linear* kernel but
    # emitted with kind RBF, so the reference predictions (linear) and
    # the params layout (RBF) disagree — confirm this is intentional.
    clf = svm.OneClassSVM(kernel="linear")
    clf.fit(X)
    newSVMTest(config, RBF, [-1, 1], clf, 5)


def genSVMBenchmark(vecDim, nbVecs, k):
    """Build the params array for one benchmark configuration.

    We need to enforce a specific number of support vectors, but that
    number is a result of the training and not an input.  So the data
    generated will not make sense, since we force the number of support
    vectors by repeating the first one.  For a benchmark this is OK:
    only the sizes matter, not the values.

    Args:
        vecDim: dimension of the vectors.
        nbVecs: forced number of support vectors.
        k: sklearn kernel name ("linear", "poly", "rbf", "sigmoid").

    Returns:
        A flat list of parameters laid out like in newSVMTest (poly also
        appends the degree); an empty list for an unknown kernel name.
    """
    X = []
    y = []

    for i in range(NBVECTORS):
        v, c = genRandomVector(vecDim)
        X.append(v)
        y.append(c)

    clf = svm.SVC(kernel=k)
    clf.fit(X, y)

    supportShape = clf.support_vectors_.shape

    nbSupportVectors = supportShape[0]
    vectorDimensions = supportShape[1]
    intercept = list(clf.intercept_)
    dualCoefs = clf.dual_coef_
    dualCoefs = dualCoefs.reshape(nbSupportVectors)
    supportVectors = clf.support_vectors_
    supportVectors = supportVectors.reshape(nbSupportVectors * vecDim)

    # Now we force the number of support vectors.  The dims written by
    # the caller promise nbVecs support vectors of vecDim values each,
    # so the first *vector* (vecDim consecutive values of the flattened
    # array) is repeated — repeating only the first scalar would produce
    # a params buffer that is too short and read out of bounds by the
    # benchmark.  Dual coefficients are one scalar per support vector.
    nbSupportVectors = nbVecs
    dualCoefs = [dualCoefs[0]] * nbVecs
    supportVectors = list(supportVectors[0:vecDim]) * nbVecs

    if k == "linear":
        return (list(supportVectors + dualCoefs + intercept))

    if k == "poly":
        coef0 = list(np.array([clf.coef0]))
        gamma = list(np.array([clf._gamma]))
        degree = list(np.array([1.0 * clf.degree]))
        return (list(supportVectors + dualCoefs + intercept + coef0 + gamma + degree))

    if k == "rbf":
        gamma = list(np.array([clf._gamma]))
        return (list(supportVectors + dualCoefs + intercept + gamma))

    if k == "sigmoid":
        coef0 = list(np.array([clf.coef0]))
        gamma = list(np.array([clf._gamma]))
        return (list(supportVectors + dualCoefs + intercept + coef0 + gamma))

    return ([])


def writeBenchmarks(config, format):
    """Write benchmark inputs, dims and params for every kernel.

    For each (vecDim, nbVecs) combination, one random input vector and
    one forced classifier description per kernel are generated.  The
    Dims patterns record the running offsets of each configuration's
    input and params inside the concatenated arrays.

    NOTE(review): the `format` parameter is currently unused; kept for
    interface compatibility with the callers.
    """
    vecDims = [16, 32, 64]
    nbVecs = [8, 16, 32]
    someLists = [vecDims, nbVecs]

    r = np.array([element for element in itertools.product(*someLists)])
    # Each product element contributes 2 values (vecDim, nbVecs).
    nbtests = len(vecDims) * len(nbVecs) * 2
    config.writeParam(6, r.reshape(nbtests))

    paramsLinear = []
    paramsPoly = []
    paramsRBF = []
    paramsSigmoid = []
    inputs = []
    dimsLinear = []
    dimsPoly = []
    dimsRBF = []
    dimsSigmoid = []
    nbin = 0
    nbparamLinear = 0
    nbparamPoly = 0
    nbparamRBF = 0
    nbparamSigmoid = 0

    for vecDim, nbVecs in r:

        v, c = genRandomVector(vecDim)

        # Record the current offsets before appending this config's data.
        dimsLinear += [nbin, nbparamLinear]
        dimsPoly += [nbin, nbparamPoly]
        dimsRBF += [nbin, nbparamRBF]
        dimsSigmoid += [nbin, nbparamSigmoid]

        p = genSVMBenchmark(vecDim, nbVecs, "linear")
        paramsLinear += p
        nbparamLinear = nbparamLinear + len(p)

        p = genSVMBenchmark(vecDim, nbVecs, "poly")
        paramsPoly += p
        nbparamPoly = nbparamPoly + len(p)

        p = genSVMBenchmark(vecDim, nbVecs, "rbf")
        paramsRBF += p
        nbparamRBF = nbparamRBF + len(p)

        p = genSVMBenchmark(vecDim, nbVecs, "sigmoid")
        paramsSigmoid += p
        nbparamSigmoid = nbparamSigmoid + len(p)

        inputs += v
        nbin = nbin + len(v)

    config.writeInput(6, inputs, "InputsBench")

    config.writeInputS16(6, dimsLinear, "DimsLinear")
    config.writeReference(6, paramsLinear, "ParamsLinear")

    config.writeInputS16(6, dimsPoly, "DimsPoly")
    config.writeReference(6, paramsPoly, "ParamsPoly")

    config.writeInputS16(6, dimsRBF, "DimsRBF")
    config.writeReference(6, paramsRBF, "ParamsRBF")

    config.writeInputS16(6, dimsSigmoid, "DimsSigmoid")
    config.writeReference(6, paramsSigmoid, "ParamsSigmoid")


def generatePatterns():
    """Generate all SVM test patterns and benchmarks in f32 and f16."""
    PATTERNDIR = os.path.join("Patterns", "DSP", "SVM", "SVM")
    PARAMDIR = os.path.join("Parameters", "DSP", "SVM", "SVM")

    configf32 = Tools.Config(PATTERNDIR, PARAMDIR, "f32")
    configf16 = Tools.Config(PATTERNDIR, PARAMDIR, "f16")

    writeTests(configf32)
    writeTests(configf16)

    writeBenchmarks(configf32, Tools.F32)
    writeBenchmarks(configf16, Tools.F16)


if __name__ == '__main__':
    generatePatterns()