featweighted-svm (avenkatesan2005) / code/Svmclassification.m
function Svmclassification
% Example multiclass SVM classification ("one against all") for activity
% recognition from two 3-axis accelerometers, evaluated with a
% subject-independent 5-fold cross validation.
clc;
clear all;
close all;

%--------------------------------------------------
% Location of the data (one .mat file per subject, each providing the
% cell array 'actionsample').
subjects = {'../mock-data/wesubj1.mat','../mock-data/wesubj2.mat','../mock-data/wesubj3.mat','../mock-data/wesubj4.mat','../mock-data/wesubj5.mat'};

%% Compute the feature vectors.
% Create the features for each sample of each activity, for each subject.
for subj = 1:5
    load(subjects{subj});
    feat = [];
    label = [];
    for activity = 1:5
        for i = 1:actionsample{activity}.tsamples
            temp = [];

            temp = [temp computestatfeatures([actionsample{activity}.sample{i}.acc1,actionsample{activity}.sample{i}.acc2])];

            % Alternative (histogram-based) features:
            %temp = [temp computefirstderivativehist(actionsample{activity}.sample{i}.acc1)];
            %temp = [temp computerawhist(actionsample{activity}.sample{i}.acc1)];

            feat = [feat; temp];
            label = [label; activity];
        end
    end
    save(sprintf('feat%d.mat',subj),'feat','label');
end

% Perform a subject-independent 5-fold cross validation on the feature
% vectors using SVM.
cfmatrix = zeros(5);      % accumulated confusion matrix
accuracy = zeros(1,5);    % per-fold accuracy

%% SVM classification.
for fold = 1:5
    % Create the train and test data sets; the held-out fold is one subject.
    trdata = [];
    trlabel = [];
    tstdata = [];
    tstlabel = [];
    trdata_norm = [];
    tstdata_norm = [];
    for j = 1:5
        load(sprintf('feat%d.mat',j));
        % Subject independent: test on subject 'fold', train on the rest.
        if fold == j
            tstdata = feat;
            tstlabel = label;
        else
            trdata = [trdata; feat];
            trlabel = [trlabel; label];
        end
    end

    % Min-max normalize trdata and tstdata before SVM training. Test
    % columns are scaled with the training min/max; columns that are
    % constant in the training data are left unscaled.
    for i = 1:size(trdata,2)
        if (max(trdata(:,i)) - min(trdata(:,i))) ~= 0
            trdata_norm(:,i) = (trdata(:,i) - min(trdata(:,i))) / (max(trdata(:,i)) - min(trdata(:,i)));
        else
            trdata_norm(:,i) = trdata(:,i);
        end
    end

    for i = 1:size(tstdata,2)
        if (max(trdata(:,i)) - min(trdata(:,i))) ~= 0
            tstdata_norm(:,i) = (tstdata(:,i) - min(trdata(:,i))) / (max(trdata(:,i)) - min(trdata(:,i)));
        else
            tstdata_norm(:,i) = tstdata(:,i);
        end
    end

    xapp = trdata_norm;
    yapp = trlabel;

    %-----------------------------------------------------
    % Learning parameters (SVM-KM toolbox).
    nbclass = 5;
    c = 1;
    lambda = 1e-7;
    kerneloption = 2;
    kernel = 'gaussian';
    verbose = 0;

    %--------------------- One-against-all training ----------------
    [xsup,w,b,nbsv] = svmmulticlassoneagainstall(xapp,yapp,nbclass,c,lambda,kernel,kerneloption,verbose);
    [ypred_trn] = svmmultival(xapp,xsup,w,b,nbsv,kernel,kerneloption);
    fprintf('\nRate of correct class in training data : %2.2f \n',100*sum(ypred_trn==yapp)/length(yapp));

    %-------------------------------------------------------
    % Testing
    %-------------------------------------------------------
    xtest = tstdata_norm;

    [ypred,maxi] = svmmultival(xtest,xsup,w,b,nbsv,kernel,kerneloption);
    accuracy(fold) = 100*sum(ypred==tstlabel)/length(tstlabel);
    tcmatrix = computeconfusionmatrix(tstlabel,ypred);
    cfmatrix = cfmatrix + tcmatrix;
end

%% Display the results.
% Display accuracy.
disp('Results with SVM');
disp('Accuracy over 5 folds (%):');
disp(accuracy);
fprintf('Overall accuracy over 5 folds: %f\n',sum(accuracy)/5);
% Display confusion matrix.
disp('Confusion matrix (rows: true activity, columns: predicted activity):');
disp(cfmatrix);

function cfmatrix = computeconfusionmatrix(ytest,yest)
    % Compute the confusion matrix, given the actual and estimated labels.
    cfmatrix = zeros(max(ytest),max(ytest));
    for i = 1:size(ytest,1)
        cfmatrix(ytest(i),yest(i)) = cfmatrix(ytest(i),yest(i)) + 1;
    end
return;

function derivhist = computefirstderivativehist(data)
    % Normalized histogram of the first derivative of each axis.
    % Centers for the histogram.
    centers = -50:50;
    l = data(2:end,:);
    d = data(1:end-1,:) - l;
    histx = hist(d(:,1),centers);
    histy = hist(d(:,2),centers);
    histz = hist(d(:,3),centers);
    derivhist = [histx/sum(histx) histy/sum(histy) histz/sum(histz)];
return;

function rawhist = computerawhist(data)
    % Normalized histogram of the raw readings of each axis.
    % Centers for the histograms.
    centersx = 100:450;     % X axis
    centersy = 0:700;       % Y axis
    centersz = 250:600;     % Z axis
    histx = hist(data(:,1),centersx);
    histy = hist(data(:,2),centersy);
    histz = hist(data(:,3),centersz);
    rawhist = [histx/sum(histx) histy/sum(histy) histz/sum(histz)];
return;

function feature = computestatfeatures(data)
    % Statistical features over the 6 accelerometer columns (2 sensors x
    % 3 axes): per-column mean, variance, pairwise correlations, spectral
    % energy and spectral entropy (39 features in total).

    % Mean of the data.
    m = mean(data);

    % Variance of the data.
    v = var(data);

    % Correlation between the axes.
    c = corr(data);

    % Absolute value of the Fourier transform.
    fftdata = abs(fft(data));

    % Remove the DC (mean) component.
    fftdata = fftdata(2:end,:);

    % Energy.
    e = sum(fftdata.^2)/size(fftdata,1);

    % Spectral entropy.
    fftdata = 1 + fftdata/size(data,1);
    temp = [fftdata(:,1)/sum(fftdata(:,1)), fftdata(:,2)/sum(fftdata(:,2)),...
            fftdata(:,3)/sum(fftdata(:,3)),...
            fftdata(:,4)/sum(fftdata(:,4)),...
            fftdata(:,5)/sum(fftdata(:,5)),...
            fftdata(:,6)/sum(fftdata(:,6))];
    ent = -sum(temp.*log(temp),1);

    % Keep the upper triangle of the correlation matrix (15 unique pairs).
    feature = [m v c(1,2:6) c(2,3:6) c(3,4:6) c(4,5:6) c(5,6) e ent];
return;
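%-------------------------------------------------------------------------
% A minimal sketch, not called by the script above, of the min-max
% normalization that the cross-validation loop performs inline. The helper
% name minmaxnormalize is illustrative only and does not appear in the
% original code. Test columns are scaled with the training min/max so that
% no test statistics influence the model; columns that are constant in the
% training data are left unscaled.
function [trn_norm, tst_norm] = minmaxnormalize(trdata, tstdata)
    trn_norm = trdata;
    tst_norm = tstdata;
    for i = 1:size(trdata,2)
        lo = min(trdata(:,i));
        hi = max(trdata(:,i));
        if (hi - lo) ~= 0
            % Scale both sets with the training range for column i.
            trn_norm(:,i) = (trdata(:,i) - lo) / (hi - lo);
            tst_norm(:,i) = (tstdata(:,i) - lo) / (hi - lo);
        end
    end
return;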