# train features using svm
display("Train Features")

# load all features
# features = [sampleLabel, classLabel, binMeans, rms, psd, windowMean, windowSTD, windowVariance, windowKurtosis, windowIQR];
load "features.txt";  # the loaded matrix is also named features

# split the features into training and test sets using the leave-one-out method
# class idx:
#   idx 1 -> forwardbend
#   idx 2 -> kneebend
#   idx 3 -> pushups
#   idx 4 -> situps
#   idx 5 -> jumpingjack

# define which sample set is used as the test set and excluded from training
leaveOut = find(features(:,1) == 3 & features(:,2) == 2);  # sample set 3, class 2 (kneebend)
testFeatures = features(leaveOut, :);                      # keep these rows as the test features
features(leaveOut, :) = [];                                # remove the test rows from the training data
features(:,1) = [];                                        # remove the sampleLabel column

# bring the feature matrix into libsvm format:
# 1. the label vector
trainLabel = features(:,1);
# 2. a sparse matrix with one feature per column
features(:,1) = [];                                        # remove the classLabel column
trainFeatures = sparse(features);

# before training, scale the feature values to [0, 1]
minimums = min(trainFeatures);
ranges = max(trainFeatures) - minimums;
trainFeatures = (trainFeatures - repmat(minimums, size(trainFeatures, 1), 1)) ./ repmat(ranges, size(trainFeatures, 1), 1);

# training: svm with default settings
model = svmtrain(trainLabel, trainFeatures);

display("Classify Features")

# scale the test features with the same minimums/ranges as the training set
testLabel = testFeatures(:,2);
testFeatures(:,1:2) = [];                                  # remove the sampleLabel and classLabel columns
testFeatures = (testFeatures - repmat(minimums, size(testFeatures, 1), 1)) ./ repmat(ranges, size(testFeatures, 1), 1);

# classification
[predict_label, accuracy, dec_values] = svmpredict(testLabel, testFeatures, model);
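
# --- Optional extension (sketch, not part of the original script) ---
# The split above leaves out one hard-coded sampleSet/class pair. The loop below
# sketches how the same train/scale/predict steps could be repeated for every
# pair to get an averaged leave-one-out accuracy. The counts numSampleSets = 5
# and numClasses = 5 are assumptions; adjust them to the data actually recorded.
# The ranges(ranges == 0) guard is also an addition to avoid division by zero
# for constant features.
allFeatures = load("features.txt");   # reload the full [sampleLabel, classLabel, ...] matrix
numSampleSets = 5;                    # assumption: number of recording sessions
numClasses = 5;                       # forwardbend ... jumpingjack
accuracies = [];
for s = 1:numSampleSets
  for c = 1:numClasses
    idx = find(allFeatures(:,1) == s & allFeatures(:,2) == c);
    if isempty(idx)
      continue;                       # skip combinations that were not recorded
    end
    testF  = allFeatures(idx, :);
    trainF = allFeatures;
    trainF(idx, :) = [];
    trainLabel = trainF(:,2);
    trainData  = trainF(:,3:end);
    # scale to [0, 1] using the training minima/ranges only
    minimums = min(trainData);
    ranges   = max(trainData) - minimums;
    ranges(ranges == 0) = 1;
    trainData = (trainData - repmat(minimums, size(trainData, 1), 1)) ./ repmat(ranges, size(trainData, 1), 1);
    model = svmtrain(trainLabel, sparse(trainData));
    testLabel = testF(:,2);
    testData  = (testF(:,3:end) - repmat(minimums, size(testF, 1), 1)) ./ repmat(ranges, size(testF, 1), 1);
    [pred, acc, dec] = svmpredict(testLabel, sparse(testData), model);
    accuracies(end+1) = acc(1);       # acc(1) is the classification accuracy in percent
  end
end
printf("mean leave-one-out accuracy: %.2f%%\n", mean(accuracies));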