/ featweighted-svm --username avenkatesan2005/code/Svmclassification.m

http://featweighted-svm.googlecode.com/ · MATLAB · 195 lines · 166 code · 29 blank · 0 comment · 19 complexity · 6c2625c41f6222f8505b356d378357e6 MD5 · raw file

  1. function Svmclassification
  2. % Example MultiClass SVM Classifiction
  3. % "One against Others"
  4. %
  5. %
  6. %
  7. clc;
  8. clear all;
  9. close all;
  10. %--------------------------------------------------
  11. %n=;
  12. sigma=1;
  13. % Location of the data.
  14. subjects = {'../mock-data/wesubj1.mat','../mock-data/wesubj2.mat','../mock-data/wesubj3.mat','../mock-data/wesubj4.mat','../mock-data/wesubj5.mat'};
  15. %% Compute the Feature Vectors.
  16. % Create the features, for each activity, for
  17. % each sample for each subject.
  18. for subj = 1:5
  19. load(subjects{subj});
  20. feat = [];
  21. label = [];
  22. for activity = 1: 5
  23. for i = 1 : actionsample{activity}.tsamples
  24. temp = [];
  25. temp = [temp computestatfeatures([actionsample{activity}.sample{i}.acc1,actionsample{activity}.sample{i}.acc2])];
  26. %temp = [temp computefirstderivativehist(actionsample{activity}.sample{i}.acc1)];
  27. %temp = [temp computerawhist(actionsample{activity}.sample{i}.acc1)];
  28. feat = [feat;temp];
  29. label = [label; activity];
  30. end
  31. end
  32. save(sprintf('feat%d.mat',subj),'feat','label');
  33. end
  34. % Perform a 5 fold cross validation on the feature vectors (histograms)
  35. % using SVM.
  36. % Variable for storing the confusion matrix.
  37. cfmatrix = zeros(5);
  38. %% SVM Classification.
  39. for fold = 1:5
  40. % if fold == 4
  41. % continue;
  42. % end;
  43. % Create the train and test data set.
  44. trdata = [];
  45. trlabel = [];
  46. tstdata = [];
  47. tstlabel = [];
  48. trdata_norm= [];
  49. tstdata_norm= [];
  50. for j = 1 : 5
  51. load(sprintf('feat%d.mat',j));
  52. % Subject Independent
  53. if fold == j
  54. tstdata = feat;
  55. tstlabel = label;
  56. else
  57. trdata = [trdata;feat];
  58. trlabel = [trlabel;label];
  59. end
  60. end
  61. %Normalize the trdata and tstdata before running SVM training
  62. for i= 1: 39 %size(trdata,2)
  63. trdata_norm(:,1)=(trdata(:,1)-min(trdata(:,1)))/(max(trdata(:,1))-min(trdata(:,1)));
  64. if (max(trdata(:,i))-min(trdata(:,i)))~= 0
  65. trdata_norm(:,i)= (trdata(:,i)-min(trdata(:,i)))/(max(trdata(:,i))-min(trdata(:,i)));
  66. else
  67. trdata_norm(:,i)= trdata(:,i);
  68. end
  69. end
  70. for i= 1: 39 %size(tstdata,2)
  71. if (max(tstdata(:,i))-min(tstdata(:,i)))~= 0
  72. tstdata_norm(:,i)= (tstdata(:,i)-min(trdata(:,i)))/(max(trdata(:,i))-min(trdata(:,i)));
  73. else
  74. tstdata_norm(:,i)= tstdata(:,i);
  75. end
  76. end
  77. %xapp=trdata_norm;
  78. xapp=trdata_norm;
  79. yapp= trlabel;
  80. nbclass=5;
  81. [n1, n2]=size(xapp);
  82. %-----------------------------------------------------
  83. % Learning and Learning Parameters
  84. c = 1;
  85. lambda = 1e-7;
  86. kerneloption= 2;
  87. kernel='gaussian';
  88. verbose =0;
  89. % % %---------------------One Against All algorithms----------------
  90. nbclass=5;
  91. [xsup,w,b,nbsv]=svmmulticlassoneagainstall(xapp,yapp,nbclass,c,lambda,kernel,kerneloption,verbose);
  92. [ypred_trn] = svmmultival(xapp,xsup,w,b,nbsv,kernel,kerneloption);
  93. fprintf( '\nRate of correct class in training data : %2.2f \n',100*sum(ypred_trn==yapp)/length(yapp));
  94. %-------------------------------------------------------
  95. % Testing
  96. %-------------------------------------------------------
  97. xtest=tstdata_norm;
  98. [ypred,maxi] = svmmultival(xtest,xsup,w,b,nbsv,kernel,kerneloption);
  99. accuracy(fold)=100*sum(ypred==tstlabel)/length(tstlabel);
  100. tcmatrix = computeconfusionmatrix(tstlabel,ypred);
  101. cfmatrix = cfmatrix + tcmatrix;
  102. end
  103. %% Display the results.
  104. % Display Accuracy
  105. disp('Results with SVM');
  106. disp(sprintf('Accuracy over 5 folds'));
  107. disp(accuracy);
  108. disp(sprintf('Overall Accuracy over 5 folds %f ',(sum(accuracy)/5)));
  109. % Display Confusion Matrix
  110. disp(sprintf('Confusion Matrix'));
  111. disp(cfmatrix);
  112. function cfmatrix = computeconfusionmatrix(ytest,yest)
  113. % this function computes the confusion matrix, given the estimated and
  114. % actual labels.
  115. cfmatrix = zeros(max(ytest),max(ytest));
  116. for i = 1 : size(ytest,1)
  117. cfmatrix(ytest(i),yest(i)) = cfmatrix(ytest(i),yest(i)) + 1;
  118. end
  119. return;
  120. function derivhist = computefirstderivativehist(data)
  121. % centers for the histogram
  122. centers = -50:50;
  123. l = data(2:end,:);
  124. d = data(1:end-1,:)-l;
  125. histx = hist(d(:,1),centers);
  126. histy = hist(d(:,2),centers);
  127. histz = hist(d(:,3),centers);
  128. derivhist = [histx/sum(histx) histy/sum(histy) histz/sum(histz)];
  129. return;
  130. function rawhist = computerawhist(data)
  131. % centers for the histogram
  132. % X - Axis
  133. centersx = 100:450;
  134. % Y - Axis
  135. centersy = 0:700;
  136. % Z - Axis
  137. centersz = 250:600;
  138. histx = hist(data(:,1),centersx);
  139. histy = hist(data(:,2),centersy);
  140. histz = hist(data(:,3),centersz);
  141. rawhist = [histx/sum(histx) histy/sum(histy) histz/sum(histz)];
  142. return;
  143. function feature = computestatfeatures(data)
  144. fnorm=true;
  145. % mean of the data
  146. m = mean(data);
  147. % variance of the data
  148. v = var(data);
  149. % correlation between the axis.
  150. c = corr(data);
  151. % absolute value of the fourier transform.
  152. fftdata = abs(fft(data));
  153. % remove the mean value.
  154. fftdata = fftdata(2:end,:);
  155. % energy.
  156. e = sum(fftdata.^2)/size(fftdata,1);
  157. % spectral entropy
  158. fftdata = 1 + fftdata/size(data,1);
  159. temp = [fftdata(:,1)/sum(fftdata(:,1)), fftdata(:,2)/sum(fftdata(:,2)),...
  160. fftdata(:,3)/sum(fftdata(:,3)),...
  161. fftdata(:,4)/sum(fftdata(:,4)),...
  162. fftdata(:,5)/sum(fftdata(:,5)),...
  163. fftdata(:,6)/sum(fftdata(:,6))];
  164. ent = -sum(temp.*log(temp),1);
  165. feature = [m v c(1,2:6) c(2,3:6) c(3,4:6) c(4,5:6) c(5,6) e ent];
  166. return;