% Svmclassification.m — multi-class SVM activity-classification example.
% Source: featweighted-svm project (avenkatesan2005), code/Svmclassification.m
function Svmclassification
% Example multi-class SVM classification ("one against all") of activity
% data from 5 subjects.  For each subject a feature matrix is extracted and
% cached to featN.mat, then a 5-fold, subject-independent cross-validation
% is run: in fold k, subject k is the test set and the other four subjects
% form the training set.  Per-fold accuracy and an accumulated confusion
% matrix are reported.
%
% Requires: the SVM-KM toolbox functions svmmulticlassoneagainstall and
% svmmultival, plus the mock data files listed in `subjects`.
clc;
clear all;
close all;

%--------------------------------------------------
% Location of the data (one .mat file per subject; each provides the
% cell array `actionsample` — assumed, confirm against the data files).
subjects = {'../mock-data/wesubj1.mat','../mock-data/wesubj2.mat','../mock-data/wesubj3.mat','../mock-data/wesubj4.mat','../mock-data/wesubj5.mat'};

%% Compute the Feature Vectors.
% Create the features, for each activity, for each sample, for each
% subject, and cache them so the CV loop below can just reload them.
for subj = 1:numel(subjects)
    load(subjects{subj});                  % provides actionsample
    feat = [];
    label = [];
    for activity = 1:5
        for i = 1:actionsample{activity}.tsamples
            % Concatenate both accelerometers (3 axes each -> 6 columns).
            temp = computestatfeatures([actionsample{activity}.sample{i}.acc1, actionsample{activity}.sample{i}.acc2]);

            % Alternative feature sets (kept for experimentation):
            %temp = [temp computefirstderivativehist(actionsample{activity}.sample{i}.acc1)];
            %temp = [temp computerawhist(actionsample{activity}.sample{i}.acc1)];

            feat = [feat; temp];
            label = [label; activity];
        end
    end
    save(sprintf('feat%d.mat', subj), 'feat', 'label');
end

% Perform a 5-fold cross validation on the feature vectors using SVM.
% Accumulated confusion matrix over all folds.
cfmatrix = zeros(5);
accuracy = zeros(1, 5);
%% SVM Classification.
for fold = 1:5
    % Create the train and test data sets (subject-independent split:
    % the fold's subject is held out entirely).
    trdata = [];
    trlabel = [];
    tstdata = [];
    tstlabel = [];
    for j = 1:5
        load(sprintf('feat%d.mat', j));    % provides feat, label
        if fold == j
            tstdata = feat;
            tstlabel = label;
        else
            trdata = [trdata; feat];
            trlabel = [trlabel; label];
        end
    end

    % Min-max normalize both sets using TRAINING-set statistics only,
    % so the test data is scaled exactly as the classifier expects.
    % (The original code checked the test-set range before dividing by
    % the TRAINING-set range, which could divide by zero.)
    trdata_norm  = normalizecolumns(trdata, trdata);
    tstdata_norm = normalizecolumns(tstdata, trdata);

    xapp = trdata_norm;
    yapp = trlabel;

    %-----------------------------------------------------
    % Learning parameters.
    nbclass = 5;
    c = 1;
    lambda = 1e-7;
    kerneloption = 2;
    kernel = 'gaussian';
    verbose = 0;

    % ---------------------One Against All algorithm----------------
    [xsup, w, b, nbsv] = svmmulticlassoneagainstall(xapp, yapp, nbclass, c, lambda, kernel, kerneloption, verbose);
    [ypred_trn] = svmmultival(xapp, xsup, w, b, nbsv, kernel, kerneloption);
    fprintf( '\nRate of correct class in training data : %2.2f \n',100*sum(ypred_trn==yapp)/length(yapp));

    %-------------------------------------------------------
    % Testing on the held-out subject.
    %-------------------------------------------------------
    xtest = tstdata_norm;

    [ypred, maxi] = svmmultival(xtest, xsup, w, b, nbsv, kernel, kerneloption);
    accuracy(fold) = 100*sum(ypred==tstlabel)/length(tstlabel);
    tcmatrix = computeconfusionmatrix(tstlabel, ypred);
    cfmatrix = cfmatrix + tcmatrix;
end
%% Display the results.
% Display Accuracy
disp('Results with SVM');
disp(sprintf('Accuracy over 5 folds'));
disp(accuracy);
disp(sprintf('Overall Accuracy over 5 folds %f ',(sum(accuracy)/5)));
% Display Confusion Matrix
disp(sprintf('Confusion Matrix'));
disp(cfmatrix);

function dnorm = normalizecolumns(d, ref)
% Min-max normalize each column of d using the per-column min/max of ref.
% Columns whose reference range is zero are passed through unchanged to
% avoid division by zero.
dnorm = zeros(size(d));
for i = 1:size(d, 2)
    lo = min(ref(:, i));
    hi = max(ref(:, i));
    if hi - lo ~= 0
        dnorm(:, i) = (d(:, i) - lo) / (hi - lo);
    else
        dnorm(:, i) = d(:, i);
    end
end
return;
-
function cfmatrix = computeconfusionmatrix(ytest, yest)
% Compute the confusion matrix given actual (ytest) and estimated (yest)
% integer labels.  Rows index the true label, columns the predicted label.
% The matrix is sized by the largest label seen in EITHER vector — the
% original sized it by max(ytest) only, so a predicted label larger than
% any true label indexed out of bounds.
nclass = max([ytest(:); yest(:)]);
cfmatrix = zeros(nclass, nclass);
for i = 1:numel(ytest)
    cfmatrix(ytest(i), yest(i)) = cfmatrix(ytest(i), yest(i)) + 1;
end
return;
-
function derivhist = computefirstderivativehist(data)
% Normalized histograms of the first differences of each of the three
% axes of `data` (rows = samples, columns = x/y/z).  The three per-axis
% histograms (each summing to 1) are concatenated into one row vector.
centers = -50:50;
% Backward difference: sample(k) - sample(k+1), i.e. the negative of diff.
delta = data(1:end-1, :) - data(2:end, :);
hx = hist(delta(:, 1), centers);
hy = hist(delta(:, 2), centers);
hz = hist(delta(:, 3), centers);
derivhist = [hx/sum(hx) hy/sum(hy) hz/sum(hz)];
return;
-
function rawhist = computerawhist(data)
% Normalized histograms of the raw samples of each axis, using per-axis
% bin centers, concatenated into a single row vector (each per-axis
% histogram sums to 1).
centersx = 100:450;   % X axis
centersy = 0:700;     % Y axis
centersz = 250:600;   % Z axis
hx = hist(data(:, 1), centersx);
hy = hist(data(:, 2), centersy);
hz = hist(data(:, 3), centersz);
rawhist = [hx/sum(hx) hy/sum(hy) hz/sum(hz)];
return;
-
function feature = computestatfeatures(data)
% Statistical features of a multi-axis sample window `data`
% (rows = time samples, columns = sensor axes).  Returns a row vector:
%   [per-column mean, per-column variance,
%    upper-triangle pairwise correlations (row-major),
%    per-column spectral energy, per-column spectral entropy].
% For p columns the length is 2*p + p*(p-1)/2 + 2*p (= 39 for p = 6).
%
% Generalized from the original, which hard-coded 6 columns in the
% correlation extraction and spectral-entropy normalization; output is
% identical for 6-column input.  `corr` requires the Statistics Toolbox.
[nsamp, ncol] = size(data);

% Mean and variance of each axis.
m = mean(data);
v = var(data);

% Pairwise correlations: upper triangle of the correlation matrix,
% flattened row by row (c(1,2..p), c(2,3..p), ..., c(p-1,p)).
c = corr(data);
cvec = [];
for i = 1:ncol-1
    cvec = [cvec, c(i, i+1:ncol)];
end

% Magnitude spectrum, with the DC (mean) component removed.
fftdata = abs(fft(data));
fftdata = fftdata(2:end, :);

% Spectral energy per axis.
e = sum(fftdata.^2) / size(fftdata, 1);

% Spectral entropy per axis.  The +1 offset keeps every probability
% strictly positive so log() is finite.
fftdata = 1 + fftdata / nsamp;
p = bsxfun(@rdivide, fftdata, sum(fftdata, 1));
ent = -sum(p .* log(p), 1);

feature = [m v cvec e ent];
return;