HandyGames/toni/octave/training.m

#train an SVM classifier (libsvm) on the extracted features
disp("Train Features");
#disable output pagination so progress messages appear immediately
page_screen_output(0);
page_output_immediately(1);
more off;
#load all features
# features = [sampleLabel, classLabel, binMeans, rms, psd, windowMean, windowSTD, windowVariance, windowKurtosis, windowIQR];
load "eval/512/features_512_xyz_nomag_psd18.txt"; #matrix is also called features
# split features into training and test sets (the leave-one-out split below is commented out; a random subset is used instead)
# class idx:
# idx 1 -> forwardbend
# idx 2 -> kneebend
# idx 3 -> pushups
# idx 4 -> situps
# idx 5 -> jumpingjack
#optionally remove feature groups to evaluate their influence
#features(:, 3:7) = []; #remove binMeans
#features(:, 8:25) = []; #remove psd
#features(:, 26) = []; #remove rms
#features(:, 27:31) = []; #remove statistical stuff
size(features, 2) #echo the number of feature columns (intentionally unsuppressed)
#samples_class = features(:,1:2);
#for numClass = 1 : max(samples_class(:,2))
# for numSample = 1 : max(samples_class(find(samples_class(:,2)==1),1))
t = cputime;
# define which samples are used as the test set and excluded from training.
# here 1000 rows are chosen at random; the commented line below would instead
# leave out one sample set of one class.
leaveOut = randperm(size(features, 1), 1000);
numSample = 2;
numClass = 5;
#leaveOut = find(features(:,1) == numSample & features(:,2) == numClass); #leave out sample set numSample of class numClass
testFeatures = features(leaveOut, :); #extract the held-out rows as test features
features(leaveOut,:) = []; #remove the testFeatures
features(:,1) = []; #remove the sampleLabel
# bring the feature matrix into libsvm format.
# 1. the label vector:
trainLabel = features(:,1);
# 2. sparse matrix with every feature in one column:
features(:,1) = []; #remove the classLabel
trainFeatures = sparse(features);
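# note (added, not in the original script): the libsvm Octave interface accepts
# both dense and sparse instance matrices; sparse storage mainly saves memory
# when many feature values are zero.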
#write out libsvm file
#libsvmwrite(strcat("trainFeatures_512_", num2str(numClass), "_", num2str(numSample), ".txt"), trainLabel, trainFeatures);
# before training, scale every feature column to the range [0, 1] (min-max scaling)
minimums = min(trainFeatures);
ranges = max(trainFeatures) - minimums;
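# added safeguard (not in the original script): a constant feature column has a
# range of 0 and the division below would then produce NaN values; give such
# columns a range of 1 so they simply scale to 0.
ranges(ranges == 0) = 1;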
trainFeatures = (trainFeatures - repmat(minimums, size(trainFeatures, 1), 1)) ./ repmat(ranges, size(trainFeatures, 1), 1);
# training: C-SVC with the default RBF kernel, cost 2^15 and gamma 8 (shrinking disabled, quiet mode)
model = svmtrain(trainLabel, trainFeatures, '-h 0 -c 32768 -g 8 -q');
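# hedged sketch (not in the original script): the cost/gamma values above look
# like the outcome of a grid search; libsvm's built-in n-fold cross validation
# (-v) could be used to compare candidates, e.g.:
# bestAcc = 0;
# for log2c = -5:2:15
#   for log2g = -15:2:3
#     acc = svmtrain(trainLabel, trainFeatures, sprintf('-v 5 -h 0 -c %g -g %g -q', 2^log2c, 2^log2g));
#     if (acc > bestAcc) bestAcc = acc; bestC = 2^log2c; bestG = 2^log2g; end
#   end
# end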
disp("Classify Features");
# scale the test features with the training set's minimums and ranges so both sets use the same scaling
testLabel = testFeatures(:,2);
testFeatures(:,1:2) = []; #remove the labels
testFeatures = sparse(testFeatures);
#write out libsvm file
#libsvmwrite(strcat("testFeatures_512_", num2str(numClass), "_", num2str(numSample), ".txt"), testLabel, testFeatures);
testFeatures = (testFeatures - repmat(minimums, size(testFeatures, 1), 1)) ./ repmat(ranges, size(testFeatures, 1), 1);
# classification
[predict_label, accuracy, dec_values] = svmpredict(testLabel, testFeatures, model);
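# note (added): svmpredict returns 'accuracy' as a 3-element vector
# [classification accuracy in %, mean squared error, squared correlation];
# only the first entry is meaningful for classification.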
# build the 5x5 confusion matrix (rows: true class, columns: predicted class)
evaluation = zeros(5, 5);
for i = 1:5
  for j = 1:5
    evaluation(i,j) = sum(testLabel == i & predict_label == j);
  end
end
disp(evaluation);
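# added sketch (not in the original script): per-class recall and precision
# derived from the confusion matrix built above; assumes every class appears
# at least once in the random test subset.
recallPerClass = diag(evaluation) ./ sum(evaluation, 2);
precisionPerClass = diag(evaluation)' ./ sum(evaluation, 1);
disp(recallPerClass');
disp(precisionPerClass);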
save evaluation_512.txt evaluation accuracy;
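# note (added): the Octave text file written above can be reloaded later with,
# e.g., results = load("evaluation_512.txt"); giving results.evaluation and results.accuracy.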
printf('Total cpu time: %f seconds\n', cputime-t);
# end #would close the commented-out per-sample loop above
#end #would close the commented-out per-class loop above