Added current C++ code

This commit is contained in:
2016-01-02 17:40:22 +01:00
parent b58fb8f27b
commit 7ce2718306
11 changed files with 2611 additions and 454 deletions

View File

@@ -34,7 +34,6 @@ FILE(GLOB HEADERS
FILE(GLOB SOURCES
./*.cpp
../KLib/inc/tinyxml/tinyxml2.cpp
)

View File

@@ -1,55 +1,55 @@
#include "sensors/SensorReader.h"
#include "Interpolator.h"
#include <sstream>
//#include "sensors/SensorReader.h"
//#include "Interpolator.h"
//#include <sstream>
/** the step size to use for interpolating the output (in ms) */
static constexpr int stepSizeMS = 10;
///** the step size to use for interpolating the output (in ms) */
//static constexpr int stepSizeMS = 10;
/** interpolate and convert the readings for one sensor to a matLab matrix */
/** interpolate and convert the readings for one sensor to a matLab matrix */
template <typename T> std::string toMatLab(const SensorReadings<T>& values) {

	// feed every timed reading into an interpolator and switch to relative timestamps
	K::Interpolator<uint64_t, T> interpol;
	for (const auto& reading : values.values) {interpol.add(reading.ts, reading.val);}
	interpol.makeRelative();

	// emit one interpolated "x y z" row per time step
	const int endMS = interpol.values.back().key;
	std::stringstream matrix;
	matrix << "[" << std::endl;
	for (int ms = stepSizeMS; ms < endMS; ms += stepSizeMS) {
		const T sample = interpol.get(ms);
		matrix << sample.x << " " << sample.y << " " << sample.z << std::endl;
	}
	matrix << "];" << std::endl;

	return matrix.str();

}
int main(const int argc, const char** argv) {
//int main(const int argc, const char** argv) {
std::cout << "converting " << (argc-1) << " files" << std::endl;
// std::cout << "converting " << (argc-1) << " files" << std::endl;
for (int i = 1; i < argc; ++i) {
// for (int i = 1; i < argc; ++i) {
std::string fileIn = argv[i];
std::string fileOut = fileIn + ".m";
// std::string fileIn = argv[i];
// std::string fileOut = fileIn + ".m";
// read all sensor values within the input file
Recording rec = SensorReader::read(fileIn);
// // read all sensor values within the input file
// Recording rec = SensorReader::read(fileIn);
// convert them to MatLab matrices
std::ofstream out(fileOut);
out << "Accel = " << toMatLab(rec.accel);
out << "Gyro = " << toMatLab(rec.gyro);
out << "Magnet = " << toMatLab(rec.magField);
out.close();
// // convert them to MatLab matrices
// std::ofstream out(fileOut);
// out << "Accel = " << toMatLab(rec.accel);
// out << "Gyro = " << toMatLab(rec.gyro);
// out << "Magnet = " << toMatLab(rec.magField);
// out.close();
}
// }
return 0;
// return 0;
}
//}

View File

@@ -1,431 +1,135 @@
//#include "usingneuralnet.h"
#include "usingpca.h"
#include <omp.h>
#include "pca/TrainPCA.h"
#include <KLib/misc/gnuplot/Gnuplot.h>
#include <KLib/misc/gnuplot/GnuplotSplot.h>
#include <KLib/misc/gnuplot/GnuplotSplotElementLines.h>
#include "pca/KNN.h"
#include "pca/aKNN.h"
#include <vector>
std::vector<std::string> COLORS = {"#000000", "#0000ff", "#00ff00", "#ff0000", "#00ffff"};
std::string getClass(const std::vector<ClassifiedFeature>& nns) {
std::unordered_map<std::string, int> map;
for(const ClassifiedFeature& nn : nns) { map[nn.className] += 1; }
for (auto& it : map) {
if (it.second > nns.size() * 0.75) {return it.first;}
}
return "";
}
/** confusion counters accumulated during the classification test run */
struct Stats {
	int match;		// classified correctly
	int error;		// classified as a wrong class
	int unknown;	// no class reached the majority threshold
	Stats() : match(0), error(0), unknown(0) {;}
	/** total number of classified samples. returned as float so callers can build ratios */
	float getSum() const {return (float)(match + error + unknown);}
};
/**
 * PCA-based exercise classification demo:
 *  1) train PCA / inter-class / intra-class transforms on classified sensor patterns
 *  2) project the training patterns into the 3D feature space and index them with a k-NN
 *  3) classify held-out test patterns and print running accuracy statistics
 *  4) plot the feature space (one color per true class) via gnuplot
 */
int main(void) {

	// fixed thread count for the OpenMP-parallelized parts
	omp_set_dynamic(false);
	omp_set_num_threads(3);

	// reduce every raw pattern to this many features
	const int numFeatures = 3;

	// train the linear transforms on the training patterns
	std::vector<ClassifiedPattern> patTrain = TrainPCA::getTrainData();
	TrainPCA::Matrices m = TrainPCA::getMatrices(patTrain, numFeatures);

	std::vector<ClassifiedPattern> patTest = TrainPCA::getTestData();

	// construct knn over the PCA-projected training patterns
	aKNN<ClassifiedFeature, 3> knn;
	for (const ClassifiedPattern& pat : patTrain) {
		K::DynColVector<float> vec = m.A1 * K::PCAHelper<float>::toVector(pat.pattern);
		const std::vector<float> arr = {vec(0), vec(1), vec(2)};
		knn.add(ClassifiedFeature(pat.className, arr));
	}
	knn.build();

	K::Gnuplot gp;
	K::GnuplotSplot splot;
	K::GnuplotSplotElementLines lines[5];	// one point-set per class

	Stats stats;
	int xx = 0;
	for (const ClassifiedPattern& pat : patTest) {

		const int idx = Settings::classToInt(pat.className);

		// project the test pattern into the feature space
		K::DynColVector<float> vec = m.A1 * K::PCAHelper<float>::toVector(pat.pattern);

		// get KNN's answer: majority vote over the 10 nearest neighbors
		std::vector<float> arr = {vec(0), vec(1), vec(2)};
		std::vector<ClassifiedFeature> neighbors = knn.get(arr.data(), 10);
		std::string gotClass = getClass(neighbors);

		// update the confusion counters
		if (pat.className == gotClass) {stats.match++;}
		else if (gotClass == "") {stats.unknown++;}
		else {stats.error++;}

		// print running statistics for every 16th sample
		if (++xx % 16 == 0) {
			std::cout << pat.className << " -> " << gotClass << std::endl;
			std::cout << stats.getSum() << ":" << stats.match << ":" << stats.error << ":" << stats.unknown << std::endl;
			std::cout << stats.match/stats.getSum() << ":" << stats.error/stats.getSum() << ":" << stats.unknown/stats.getSum() << std::endl;
		}

		// remember the feature-space position, grouped by the TRUE class
		K::GnuplotPoint3 p3(vec(0), vec(1), vec(2));
		lines[idx].add(p3);

	}

	for (int i = 0; i < 5; ++i) {lines[i].setColorHex(COLORS[i]);}
	for (int i = 0; i < 5; ++i) {splot.add(&lines[i]);}
	gp.setDebugOutput(false);
	gp.draw(splot);
	gp.flush();

	// keep the process (and thereby the gnuplot window) alive
	sleep(10000);

	// NOTE(review): the large commented-out neural-net experiment that was kept
	// below this point has been removed. it is preserved in the version history.
}

112
workspace/pca/Data.h Normal file
View File

@@ -0,0 +1,112 @@
#ifndef TRAINDATA_H
#define TRAINDATA_H
#include "Settings.h"
#include <KLib/fs/File.h>
#include "../Interpolator.h"
#include "../sensors/SensorReader.h"
/** one raw sample window (flattened sensor values) tagged with its class name */
struct ClassifiedPattern {
	std::string className;	// the class this pattern belongs to
	std::vector<float> pattern;	// the raw (not yet feature-reduced) sample values
	/** ctor */
	ClassifiedPattern(const std::string& className, const std::vector<float>& pattern) : className(className), pattern(pattern) {;}
};
/** one (feature-reduced) vector tagged with its class name */
struct ClassifiedFeature {

	std::string className;		// the class this feature belongs to
	std::vector<float> feature;	// the reduced feature values

	/** ctor */
	ClassifiedFeature(const std::string& className, const std::vector<float>& feature) : className(className), feature(feature) {;}

	/** ctor: placeholder with an obviously-invalid class name */
	ClassifiedFeature() : className("??????") {;}

	/** get the l2- distance to the given vector. throws if the dimensions differ */
	float getDistance(const std::vector<float>& vec) const {
		if (vec.size() != feature.size()) {throw "error!";}
		float dist = 0;
		for (size_t i = 0; i < vec.size(); ++i) {
			const float delta = vec[i] - feature[i];
			dist += delta * delta;
		}
		return std::sqrt(dist);
	}

};
/** one raw data-file on disk tagged with the class it contains recordings for */
struct ClassifiedDataFile {
	std::string className;	// the class recorded within the file
	std::string fileName;	// absolute path of the data-file
	/** ctor */
	ClassifiedDataFile(const std::string& className, const std::string& fileName) : className(className), fileName(fileName) {;}
};
/** access to the classified raw data-files and to sample windows extracted from them */
class Data {

public:

	/** get (up to) filesPerClass data-files for each configured class */
	static std::vector<ClassifiedDataFile> getDataFiles(const int filesPerClass) {
		Settings s;
		std::vector<ClassifiedDataFile> files;
		K::File folder(s.path);
		for (const std::string& className : s.classNames) {
			K::File classFolder(folder, className);
			int i = 0;
			for (const K::File classFile : classFolder.listFiles()) {
				const std::string fileName = classFile.getAbsolutePath();
				// skip previously exported matlab scripts (*.m)
				if (fileName[fileName.length()-1] == 'm') {continue;}
				if (++i > filesPerClass) {break;}
				files.push_back(ClassifiedDataFile(className, fileName));
			}
		}
		return files;
	}

	/**
	 * get sample windows from the given data-file.
	 * windows of windowSize_ms are taken every stepSize_ms, starting at
	 * regionStart_ms and ending after regionPercent of the recording.
	 */
	static std::vector<std::vector<float>> getSamples(const std::string fileName, const int windowSize_ms, const int regionStart_ms, const float regionPercent, const int stepSize_ms) {

		// read all sensor-values within the given data-file
		Recording rec = SensorReader::read(fileName);

		// build an interpolator (relative timestamps) over the accelerometer values
		K::Interpolator<uint64_t, SensorAccelerometer> intAccel;
		for (const auto& val : rec.accel.values) {intAccel.add(val.ts, val.val);}
		intAccel.makeRelative();

		// only use the first regionPercent of the recording
		const int regionEnd_ms = intAccel.values.back().key * regionPercent;

		// construct all sample windows
		std::vector<std::vector<float>> samples;
		for (int center = regionStart_ms; center < regionEnd_ms; center += stepSize_ms) {
			samples.push_back(getSampleWindow(intAccel, center, windowSize_ms, stepSize_ms));
		}
		return samples;

	}

	/** extract one flat (x,y,z)* window of windowSize_ms centered around center_ms */
	template <typename T> static std::vector<float> getSampleWindow(K::Interpolator<uint64_t, T>& interpol, const int center_ms, const int windowSize_ms, const int stepSize_ms) {
		std::vector<float> window;
		const int start = center_ms - windowSize_ms/2;
		const int end = center_ms + windowSize_ms/2;
		// signed loop counter: the previous uint64_t counter wrapped around for a
		// negative start (center_ms < windowSize_ms/2) and silently produced an
		// empty window instead of sampling anything
		for (int ms = start; ms < end; ms += stepSize_ms) {
			const T val = interpol.get(ms);
			window.push_back(val.x);
			window.push_back(val.y);
			window.push_back(val.z);
		}
		return window;
	}

};
#endif // TRAINDATA_H

41
workspace/pca/KNN.h Normal file
View File

@@ -0,0 +1,41 @@
#ifndef KNN_H
#define KNN_H
#include <vector>
#include <algorithm>
/**
 * brute-force k-nearest-neighbor search.
 * T must provide getDistance(query) returning a comparable distance.
 */
template <typename T, int dim> class KNN {

private:

	/** all added elements */
	std::vector<T> elems;

public:

	/** add a new element */
	void add(const T& elem) {
		elems.push_back(elem);
	}

	/** nothing to pre-compute for the brute-force variant */
	void build() {;}

	/** get the (up to) num nearest elements to src, nearest first */
	template <typename T2> std::vector<T> get(const T2& src, const int num) {
		// never read past the end when fewer than num elements were added
		const int cnt = std::min(num, (int) elems.size());
		auto closer = [&] (const T& e1, const T& e2) {
			return e1.getDistance(src) < e2.getDistance(src);
		};
		// only the cnt nearest elements are needed in order: partial_sort
		// instead of sorting the whole container
		std::partial_sort(elems.begin(), elems.begin() + cnt, elems.end(), closer);
		return std::vector<T>(elems.begin(), elems.begin() + cnt);
	}

};
#endif // KNN_H

25
workspace/pca/Settings.h Normal file
View File

@@ -0,0 +1,25 @@
#ifndef SETTINGS_H
#define SETTINGS_H
#include <string>
#include <vector>
/** global settings: data location and the supported class names */
class Settings {

public:

	/** base folder containing one sub-folder per class */
	std::string path = "/mnt/firma/kunden/HandyGames/daten";

	/** all supported class names (= the sub-folder names below path) */
	std::vector<std::string> classNames = {"forwardbend", "jumpingjack", "kneebend", "pushups", "situps"};

	/** map a class name to its fixed numeric index. throws for unknown names */
	static int classToInt(const std::string& className) {
		if ("forwardbend" == className) {return 0;}
		if ("jumpingjack" == className) {return 1;}
		if ("kneebend" == className) {return 2;}
		if ("pushups" == className) {return 3;}
		if ("situps" == className) {return 4;}
		throw "error";
	}

};
#endif // SETTINGS_H

108
workspace/pca/TrainPCA.h Normal file
View File

@@ -0,0 +1,108 @@
#ifndef TRAINPCA_H
#define TRAINPCA_H
#include "Data.h"
#include "Settings.h"
#include <KLib/math/linearTransform/PCA.h>
class TrainPCA {
private:
public:
struct Matrices {
K::DynMatrix<float> A1;
K::DynMatrix<float> A2;
K::DynMatrix<float> A3;
};
static std::vector<ClassifiedPattern> getTestData() {
const int windowSize_ms = 1000;
const int regionStart_ms = 1500 + 25; // worst case: half-window-size offset
const float regionPercent = 0.85;
const int stepSize_ms = 50;
// get 10 data-files per class
std::vector<ClassifiedDataFile> files = Data::getDataFiles(30);
// get patterns for each class
std::vector<ClassifiedPattern> patterns;
for (ClassifiedDataFile cdf : files) {
std::cout << cdf.fileName << std::endl;
std::vector<std::vector<float>> samples = Data::getSamples(cdf.fileName, windowSize_ms, regionStart_ms, regionPercent, stepSize_ms);
for (const std::vector<float> vec : samples) {
patterns.push_back(ClassifiedPattern(cdf.className, vec));
}
}
return patterns;
}
/** train PCA features */
static std::vector<ClassifiedPattern> getTrainData() {
const int windowSize_ms = 1000;
const int regionStart_ms = 1500;
const float regionPercent = 0.4;
const int stepSize_ms = 50;
// get 5 data-files per class
std::vector<ClassifiedDataFile> files = Data::getDataFiles(30);
// get patterns for each class
std::vector<ClassifiedPattern> patterns;
for (ClassifiedDataFile cdf : files) {
std::cout << cdf.fileName << std::endl;
std::vector<std::vector<float>> samples = Data::getSamples(cdf.fileName, windowSize_ms, regionStart_ms, regionPercent, stepSize_ms);
std::cout << "\tgot" << samples.size() << " samples, each " << samples[0].size() << " values" << std::endl;
for (const std::vector<float> vec : samples) {
patterns.push_back(ClassifiedPattern(cdf.className, vec));
}
}
return patterns;
}
/** get the A1,A2,A3 matrices for the given training data */
static Matrices getMatrices(const std::vector<ClassifiedPattern>& data, const int numFeatures) {
K::LinearTransform<float>::PCA pca;
K::LinearTransform<float>::MaxInterClassDistance<std::string> inter;
K::LinearTransform<float>::MinIntraClassDistance<std::string> intra;
for (const ClassifiedPattern& pat : data) {
pca.add(pat.pattern);
inter.add(pat.className, pat.pattern);
intra.add(pat.className, pat.pattern);
}
Matrices m;
m.A1 = pca.getA(numFeatures);
m.A2 = inter.getA(numFeatures);
m.A3 = intra.getA(numFeatures);
std::cout << "A1: " << std::endl << m.A1 << std::endl;
std::cout << "A2: " << std::endl << m.A2 << std::endl;
std::cout << "A3: " << std::endl << m.A3 << std::endl;
return m;
}
};
#endif // TRAINPCA_H

73
workspace/pca/aKNN.h Normal file
View File

@@ -0,0 +1,73 @@
#ifndef AKNN_H
#define AKNN_H
#include "nanoflann.hpp"
using namespace nanoflann;
/**
 * approximate k-nearest-neighbor search backed by a nanoflann KD-tree.
 * T must expose an indexable member 'feature' holding at least dim floats.
 */
template <typename T, int dim> class aKNN {

	/** adapter exposing the stored elements to nanoflann */
	struct DataSet {
		std::vector<T> elems;
		inline size_t kdtree_get_point_count() const {return elems.size();}
		/** squared L2 distance between query p1 and stored point idxP2 */
		inline float kdtree_distance(const float* p1, const size_t idxP2, size_t) const {
			float dist = 0;
			for (int i = 0; i < dim; ++i) {
				const float delta = (p1[i] - kdtree_get_pt(idxP2, i));
				dist += delta*delta;
			}
			return dist;
		}
		inline float kdtree_get_pt(const size_t idx, int pos) const {
			return elems[idx].feature[pos];
		}
		template <class BBOX> bool kdtree_get_bbox(BBOX&) const {return false;}
	} data;

	typedef KDTreeSingleIndexAdaptor<L2_Simple_Adaptor<float, DataSet>, DataSet, dim> MyTree;

	/** owned KD-tree, (re)created by build() */
	MyTree* tree = nullptr;

public:

	/** ctor */
	aKNN() = default;

	/** dtor: release the KD-tree (previously leaked) */
	~aKNN() {delete tree;}

	/** non-copyable: the raw owning tree pointer must have exactly one owner */
	aKNN(const aKNN&) = delete;
	aKNN& operator=(const aKNN&) = delete;

	/** add a new element (before calling build()) */
	void add(const T& elem) {
		data.elems.push_back(elem);
	}

	/** build the KD-Tree. safe to call again: a previous tree is released first */
	void build() {
		delete tree;
		tree = new MyTree(dim, data, KDTreeSingleIndexAdaptorParams(10) );
		tree->buildIndex();
	}

	/** get the nearest numResults elements to the given query point */
	template <typename T2> std::vector<T> get(const T2* query, const int numResults) {
		// heap buffers instead of the previous (non-standard) variable-length arrays
		std::vector<float> distances(numResults);
		std::vector<size_t> indices(numResults);
		KNNResultSet<float> res(numResults);
		res.init(indices.data(), distances.data());
		tree->knnSearch(query, numResults, indices.data(), distances.data());
		std::vector<T> vec;
		for (int i = 0; i < numResults; ++i) {
			vec.push_back(data.elems[indices[i]]);
		}
		return vec;
	}

};
#endif // AKNN_H

1397
workspace/pca/nanoflann.hpp Normal file

File diff suppressed because it is too large Load Diff

585
workspace/usingneuralnet.h Normal file
View File

@@ -0,0 +1,585 @@
#ifndef USINGNEURALNET_H
#define USINGNEURALNET_H
#include <vector>
#include "sensors/SensorReader.h"
#include "Interpolator.h"
#include <KLib/misc/gnuplot/Gnuplot.h>
#include <KLib/misc/gnuplot/GnuplotPlot.h>
#include <KLib/misc/gnuplot/GnuplotPlotElementLines.h>
#include <KLib/misc/gnuplot/GnuplotMultiplot.h>
#include <KLib/math/neuralnet/FeedForwardNeuralNet.h>
#include <KLib/math/optimization/NumOptAlgoGenetic.h>
#include <KLib/math/optimization/NumOptAlgoDownhillSimplex.h>
/** the supported practice (exercise) types. the numeric value doubles as the network's output index */
enum class PracticeType {
	//REST,
	JUMPING_JACK,
	SITUPS,
	PUSHUPS,
	KNEEBEND,
	FORWARDBEND,
};
/**
 * printable name per PracticeType (same order as the enum).
 * 'static': this is a header; a non-static definition here would violate the
 * ODR (multiple-definition link error) once the header is included twice.
 */
static std::string NAMES[] = {
	"JUMPING_JACK",
	"SITUPS",
	"PUSHUPS",
	"KNEEBEND",
	"FORWARDBEND"
};

/** plot color per PracticeType (with one spare entry at the end) */
static std::string COLORS[] = {
	"#ff0000",
	"#00ff00",
	"#0000ff",
	"#ffff00",
	"#000000",
	"#666666"
};
/** one recorded practice session: its type, the raw recording and hand-labeled key timestamps */
struct Practice {
	// the exercise performed within the recording
	PracticeType type;
	// the full sensor recording of the session
	Recording rec;
	// hand-picked timestamps (ms, relative) of key gyro positions within the recording
	std::vector<uint64_t> keyGyro;
	//Practice(const PracticeType p, const Recording& rec, const std::vector<uint64_t>& keyGyro) : p(p), rec(rec), keyGyro(keyGyro) {;}
	/** build a time->gyro interpolator over the recording, using relative timestamps */
	K::Interpolator<uint64_t, SensorGyro> getInterpol() const {
		K::Interpolator<uint64_t, SensorGyro> interpol;
		for (auto it : rec.gyro.values) {interpol.add(it.ts, it.val);}
		interpol.makeRelative();
		return interpol;
	}
};
/** number of network input neurons (16 gyro samples x 3 axes, see getNetworkInput) */
static constexpr int NUM_IN = 48;
/** number of hidden-layer neurons */
static constexpr int NUM_HID = 15;
/** number of output neurons (one per PracticeType) */
static constexpr int NUM_OUT = 5;
/** total number of network weights to optimize */
static constexpr int NUM_ARGS = NUM_IN*NUM_HID + NUM_HID*NUM_OUT;
class UsingNeuralNet {
public:
///** interpolate the output for the given position using the provided range */
//template <typename T> T blur(K::Interpolator<uint64_t, T>& interpol, const uint64_t ms, const int s = 3) {
// return interpol.get(ms-s*2) * 0.1 +
// interpol.get(ms-s) * 0.2 +
// interpol.get(ms) * 0.4 +
// interpol.get(ms+s) * 0.2 +
// interpol.get(ms+s*2) * 0.1;
//}
/**
 * get the input vector for the neural network: 16 interpolated gyro samples
 * (x,y,z each) taken every 50ms within +-400ms around pos -> 48 = NUM_IN values.
 * (a stray, unused 'float points = 0;' member that had been fused onto this
 * comment by a botched merge was removed)
 */
static std::vector<float> getNetworkInput(K::Interpolator<uint64_t, SensorGyro>& interpol, const uint64_t pos) {
	std::vector<float> val;
	val.resize(NUM_IN);
	int idx = 0;
	for (int offset = -400; offset < 400; offset += 50) {
		const SensorGyro gyro = interpol.get(pos + offset);
		val[idx++] = gyro.x;
		val[idx++] = gyro.y;
		val[idx++] = gyro.z;
		assert(idx <= NUM_IN);	// guard against a mismatch between loop bounds and NUM_IN
	}
	return val;
}
/** get the index of the largest element within vec */
/**
 * get the index of the largest element within vec (0 for an empty vector).
 * iterates the actual vector size: the previous version always read NUM_OUT
 * entries and thereby overran vectors shorter than NUM_OUT.
 */
static int getMaxIdx(const std::vector<float>& vec) {
	float max = 0;
	int idx = 0;
	for (int i = 0; i < (int) vec.size(); ++i) {
		if (vec[i] > max) {
			max = vec[i];
			idx = i;
		}
	}
	return idx;
}
/** helper: an output index together with its activation value */
struct TMP {int index; float value;};

/**
 * sort the given activations descending by value, keeping their original indices.
 * (cleaned up: removed half-merged commented duplicates of this function and
 * iterate the actual vector size instead of the fixed NUM_OUT)
 */
static std::vector<TMP> getSorted(const std::vector<float>& vec) {
	std::vector<TMP> tmp;
	for (int i = 0; i < (int) vec.size(); ++i) {tmp.push_back( TMP{i, vec[i]} );}
	auto comp = [] (const TMP& t1, const TMP& t2) {return t2.value < t1.value;};
	std::sort(tmp.begin(), tmp.end(), comp);
	return tmp;
}
/** print the practice's true type next to the network's answer vector */
static void debug(Practice& p, std::vector<float>& res) {
	const int best = getMaxIdx(res);
	// only report a definite answer when its activation exceeds 0.5
	const char answer = (res[best] > 0.5) ? (best + '0') : ('?');
	std::cout << "practice was: " << (int)p.type;
	std::cout << " network says: " << answer << "\t";
	std::cout << "[";
	for (int i = 0; i < NUM_OUT; ++i) {std::cout << res[i] << ", ";}
	std::cout << "]" << std::endl;
}
static void debugPlot(Practice p) {
static K::Gnuplot gp;
K::GnuplotPlot plot;
K::GnuplotPlotElementLines line[3];
line[0].setColorHex("#ff0000"); line[0].setTitle("x");
line[1].setColorHex("#00ff00"); line[1].setTitle("y");
line[2].setColorHex("#0000ff"); line[2].setTitle("z");
plot.add(&line[0]);
plot.add(&line[1]);
plot.add(&line[2]);
K::Interpolator<uint64_t, SensorGyro> interpol = p.getInterpol();
volatile int len = p.rec.gyro.values.back().ts - p.rec.gyro.values.front().ts;
for (int ms = 0; ms < len; ms += 50) {
SensorGyro s = interpol.get(ms);
line[0].add(K::GnuplotPoint2(ms, s.x));
line[1].add(K::GnuplotPoint2(ms, s.y));
line[2].add(K::GnuplotPoint2(ms, s.z));
}
gp.setDebugOutput(true);
gp.draw(plot);
for (uint64_t ts : p.keyGyro) {
gp << "set arrow from " << ts << ",-10 to " << ts << ",+10\n";
}
gp.flush();
}
/**
 * Fitness function for the numerical optimizer:
 * interprets the NUM_ARGS parameter vector as the weights of the
 * feed-forward network and rates how well the resulting network
 * classifies the labeled practices.
 * The optimizer minimizes, therefore the (higher-is-better) score is negated.
 */
class MyOpt : public K::NumOptFunction<NUM_ARGS> {

public:

    /** all labeled training recordings (referenced, not owned) */
    std::vector<Practice>& practices;

    /** the network to configure and evaluate (referenced, not owned) */
    K::FeedForwardNeuralNet<float, K::FeedForwardNeuralNetOPLogistic>& net;

    /** ctor */
    MyOpt(std::vector<Practice>& practices, K::FeedForwardNeuralNet<float, K::FeedForwardNeuralNetOPLogistic>& net) : practices(practices), net(net) {
        ;
    }

    /**
     * score one network response against the desired class:
     * reward (+2 plus the margin to the runner-up) when the strongest
     * output is the desired one, penalize (-3) otherwise.
     */
    static float getScore(const int shouldBe, const std::vector<float>& values) {

        float points = 0;

        // outputs sorted by descending activation
        std::vector<TMP> resSort = getSorted(values);

        if (resSort[0].index == shouldBe) {
            // strongest output matches the desired class -> reward,
            // additionally reward a large margin to the 2nd strongest
            points += 2;
            points += resSort[0].value;
            points -= resSort[1].value;
        } else {
            points -= 3;    // higher penalty seems to work better
        }

        return points;

    }

    /**
     * score for a position where class 'shouldBe' is expected:
     * +1 when the expected output fires (> 0.5),
     * -1 for every other output that fires.
     */
    static float getScorePos(const int shouldBe, const std::vector<float>& values) {
        float points = 0;
        for (size_t idx = 0; idx < values.size(); ++idx) {
            const float v = values[idx];
            if ((int) idx == shouldBe) {
                points += (v > 0.5) ? 1 : 0;
            } else {
                points -= (v > 0.5) ? 1 : 0;
            }
        }
        return points;
    }

    /** score for a position where NO class should fire: -1 per firing output */
    static float getScoreReject(const std::vector<float>& values) {
        float points = 0;
        for (float v : values) {
            points -= (v > 0.5) ? 1 : 0;
        }
        return points;
    }

    /**
     * evaluate one candidate weight vector:
     * apply the weights to the network, then rate it at positions around
     * the labeled key timestamps (one class must fire) and midway between
     * them (no class may fire). Returns the negated average score.
     */
    double getValue(const K::NumOptVector<NUM_ARGS>& args) const {

        // configure the network with the candidate weights
        std::vector<float> vals;
        vals.reserve(NUM_ARGS);
        for (int i = 0; i < NUM_ARGS; ++i) {vals.push_back(args[i]);}
        net.setFactors(vals);

        // accumulated score and number of rated positions
        float points = 0;
        int cnt = 0;

        // process every practice
        for (const Practice& p : practices) {

            // get the values for the neural-net-input
            K::Interpolator<uint64_t, SensorGyro> interpol = p.getInterpol();

            // examine the first 3 pairs of neighboring key timestamps
            for (int key = 0; key < 3; ++key) {

                const uint64_t steps = 100;

                // positive window: +/- 20% of the key distance around the key itself
                // NOTE(review): assumes keyGyro[key] >= diff/5 — holds for the data used here
                const uint64_t k1 = p.keyGyro[key];
                const uint64_t k2 = p.keyGyro[key+1];
                const uint64_t diff = k2 - k1;
                uint64_t tFrom = k1 - diff/5;
                uint64_t tTo = k1 + diff/5;
                for (uint64_t o = tFrom; o <= tTo; o += steps) {
                    const std::vector<float> values = getNetworkInput(interpol, o);
                    const std::vector<float> res = net.get(values, true);
                    points += getScorePos((int)p.type, res);
                    ++cnt;
                }

                // negative window: midway between the two keys -> nothing should fire
                tFrom = k1 + diff/2;
                tTo = k2 - diff/2;
                for (uint64_t o = tFrom; o <= tTo; o += steps) {
                    const std::vector<float> values = getNetworkInput(interpol, o);
                    const std::vector<float> res = net.get(values, true);
                    points += getScoreReject(res);
                    ++cnt;
                }

            }

        }

        // average score (guard against zero rated positions)
        if (cnt > 0) {points /= cnt;}

        // progress output: print only newly reached best scores
        static float max = -999999;
        if (points > max) {
            max = points;
            std::cout << points << std::endl;
        }

        // the optimizer minimizes -> negate
        return -points;

    }

};
/**
 * End-to-end training + visualization of the exercise classifier:
 *  1) load the labeled sensor recordings (one Practice per exercise)
 *  2) genetically optimize the network weights, using MyOpt as fitness
 *  3) replay every practice: plot the raw gyro signal (plot1) and the
 *     thresholded network outputs (plot2) as a slow-motion animation
 * NOTE(review): blocks at the end (sleep) to keep the gnuplot windows open.
 */
static void run() {

    // the labeled training data; absolute paths -> runs on the dev machine only
    std::vector<Practice> practices;

//    practices.push_back(
//        Practice {
//            PracticeType::REST,
//            SensorReader::read("/mnt/firma/kunden/HandyGames/daten/idle/restposition_gl_24.txt"),
//            {1000, 2000, 3000, 4000, 5000, 6000, 7000, 8000, 9000}
//        }
//    );

    practices.push_back(
        Practice {
            PracticeType::JUMPING_JACK,
            SensorReader::read("/mnt/firma/kunden/HandyGames/daten/jumpingjack/jumpingjack_gl_5_subject_3_left.txt"),
            {1950, 2900, 3850, 4850, 5850, 6850, 7850, 8850, 9800, 10800, 11850}
        }
    );

//    practices.push_back(
//        Practice {
//            PracticeType::JUMPING_JACK,
//            SensorReader::read("/mnt/firma/kunden/HandyGames/daten/jumpingjack/jumpingjack_gl_6_subject_4_right.txt"),
//            {2750, 3850, 4850, 5900, 7000, 7950, 9100 }
//        }
//    );

//    practices.push_back(
//        Practice {
//            PracticeType::JUMPING_JACK,
//            SensorReader::read("/mnt/firma/kunden/HandyGames/daten/jumpingjack/jumpingjack_sw_5_subject_2_right.txt"),
//            {1700, 2850, 4050, 5250, 6450, 7600, 8800}
//        }
//    );

    practices.push_back(
        Practice {
            PracticeType::SITUPS,
            SensorReader::read("/mnt/firma/kunden/HandyGames/daten/situps/situps_gl_12_subject_1_left.txt"),
            {1850, 3250, 4750, 6150, 7550, 8950, 10350, 11600, 13000}
        }
    );

    practices.push_back(
        Practice {
            PracticeType::PUSHUPS,
            SensorReader::read("/mnt/firma/kunden/HandyGames/daten/pushups/pushups_gl_8_subject_4_right.txt"),
            //{2750, 4200, 5850, 7400, 9000, 10650}
            {3700, 5200, 6850, 8450, 10050, 11750}
        }
    );

    practices.push_back(
        Practice {
            PracticeType::KNEEBEND,
            SensorReader::read("/mnt/firma/kunden/HandyGames/daten/kneebend/kneebend_gl_0_subject_0_right.txt"),
            {2650, 4750, 6750, 8800, 10800, 12800}
            //{3500, 5000, 8300, 9900, 11550}
        }
    );

    practices.push_back(
        Practice {
            PracticeType::FORWARDBEND,
            SensorReader::read("/mnt/firma/kunden/HandyGames/daten/forwardbend/forwardbend_gl_3_subject_1_left.txt"),
            {3500, 9000, 14150, 19300}
            //{3500, 5000, 8300, 9900, 11550}
        }
    );

    // REST candidates: timestamps shifted in-between the keys of other recordings
//    practices.push_back(
//        Practice {
//            PracticeType::REST,
//            SensorReader::read("/mnt/firma/kunden/HandyGames/daten/jumpingjack/jumpingjack_gl_5_subject_3_left.txt"),
//            {1950+500, 2900+500, 3850+500, 4850+500, 5850+500, 6850+500, 7850+500, 8850+500, 9800+500, 10800+500, 11850+500}
//        }
//    );

//    // practices.push_back(
//    //     Practice {
//    //         PracticeType::REST,
//    //         SensorReader::read("/mnt/firma/kunden/HandyGames/daten/pushups/pushups_gl_8_subject_4_right.txt"),
//    //         //{2750, 4200, 5850, 7400, 9000, 10650}
//    //         {3500, 5000, 8300, 9900, 11550}
//    //     }
//    // );

//    practices.push_back(
//        Practice {
//            PracticeType::REST,
//            SensorReader::read("/mnt/firma/kunden/HandyGames/daten/situps/situps_gl_12_subject_1_left.txt"),
//            {1850+600, 3250+600, 4750+600, 6150+600, 7550+600, 8950+600, 10350+600, 11600+600, 13000+600}
//        }
//    );

//    debugPlot(practices.back());
//    sleep(100);

    // the parameter vector to optimize (= all network weights)
    K::NumOptVector<NUM_ARGS> vec;

    // the classifier network: NUM_IN inputs, one hidden layer, NUM_OUT outputs
    K::FeedForwardNeuralNet<float, K::FeedForwardNeuralNetOPLogistic> net;
    net.setLayers({NUM_IN, NUM_HID, NUM_OUT});

    // the fitness function rating a weight vector against all practices
    MyOpt func(practices, net);

    // (alternative) downhill-simplex optimization
//    K::NumOptAlgoDownhillSimplex<NUM_ARGS> opt;
//    opt.setMaxIterations(100);
//    opt.setNumRestarts(2);
//    opt.calculateOptimum(func, vec);

    // genetic optimization of the network weights
    K::NumOptAlgoGenetic<NUM_ARGS> opt;
    opt.setElitism(0.07f);
    opt.setPopulationSize(100);
    opt.setMaxIterations(200);
    opt.setMutation(0.20f);
    opt.setValRange(0.25);
    // NOTE(review): after this call 'net' holds the weights of the last
    // evaluated candidate -- presumably the optimum; confirm before reuse
    opt.calculateOptimum(func, vec);

//    // process every practice
//    for (Practice& p : practices) {
//        // get the values for the neural-net-input
//        K::Interpolator<uint64_t, SensorGyro> interpol = p.getInterpol();
//        // process every (positive) occurence within the practice
//        for (uint64_t ts : p.keyGyro) {
//            std::vector<float> values = getNetworkInput(interpol, ts);
//            K::NeuralNetResultIHO<NUM_OUT> res = net.getOutput(values.data());
//            debug(p, res);
//            {
//                std::vector<float> values = getNetworkInput(interpol, ts+500);
//                K::NeuralNetResultIHO<NUM_OUT> res = net.getOutput(values.data());
//                std::cout << "###"; debug(p, res); debugPlot(practices.back());
//            }
//        }
//    }

    // visualization: raw gyro signal (plot1) and network outputs (plot2)
    // NOTE(review): the multiplot 'plot' is assembled but never drawn;
    // gp1/gp2 draw plot1/plot2 individually instead
    K::Gnuplot gp1;
    K::Gnuplot gp2;
    K::GnuplotPlot plot1;
    K::GnuplotPlot plot2;
    K::GnuplotMultiplot plot(2,1);
    plot.add(&plot1);
    plot.add(&plot2);

    // one line per gyro axis
    K::GnuplotPlotElementLines line[3];
    line[0].setColorHex("#ff0000"); line[0].setTitle("x");
    line[1].setColorHex("#00ff00"); line[1].setTitle("y");
    line[2].setColorHex("#0000ff"); line[2].setTitle("z");
    plot1.add(&line[0]);
    plot1.add(&line[1]);
    plot1.add(&line[2]);

    // one line per network output (= exercise class)
    K::GnuplotPlotElementLines netLines[NUM_OUT];
    for (int i = 0; i < NUM_OUT; ++i) {
        netLines[i].setColorHex(COLORS[i]);
        netLines[i].setTitle(NAMES[i]);
        netLines[i].setLineWidth(2);
    }
    for (int i = 0; i < NUM_OUT; ++i) {
        plot2.add(&netLines[i]);
    }

    // process every practice
    for (Practice& p : practices) {

        // get the values for the neural-net-input
        K::Interpolator<uint64_t, SensorGyro> interpol = p.getInterpol();

        // reset all plot lines for this practice
        line[0].clear();
        line[1].clear();
        line[2].clear();
        for (int i = 0; i < NUM_OUT; ++i) {
            netLines[i].clear();
        }

        // sample the raw gyro signal every 50 ms (fixed 20 s span)
        for (int ms = 0; ms < 20000; ms += 50) { // K::Gnuplot gp;
//            K::GnuplotPlot plot;
//            K::GnuplotPlotElementLines line[3];
//            line[0].setColorHex("#ff0000"); line[0].setTitle("x");
//            line[1].setColorHex("#00ff00"); line[1].setTitle("y");
//            line[2].setColorHex("#0000ff"); line[2].setTitle("z");
//            Practice p1 = practices[0];
//            auto interpol = p1.getInterpol();
//            for (int ms = 0; ms < 20000; ms += 50) {
//                SensorGyro s = blur(interpol, ms, 10);
//                line[0].add(K::GnuplotPoint2(ms, s.x));
//                line[1].add(K::GnuplotPoint2(ms, s.y));
//                line[2].add(K::GnuplotPoint2(ms, s.z));
//            }
//            plot.add(&line[0]);
//            plot.add(&line[1]);
//            plot.add(&line[2]);
//            gp.draw(plot);
//            for (uint64_t ts : p1.keyGyro) {
//                gp << "set arrow from " << ts << ",-10 to " << ts << ",+10\n";
//            }
//            gp.flush();
            SensorGyro s = interpol.get(ms);
            line[0].add(K::GnuplotPoint2(ms, s.x));
            line[1].add(K::GnuplotPoint2(ms, s.y));
            line[2].add(K::GnuplotPoint2(ms, s.z));
        }

        // slide the classification over the practice (animated replay)
        for (int ts = 1000; ts < 10000; ts += 50) {
            std::vector<float> values = getNetworkInput(interpol, ts);
            std::vector<float> res = net.get(values);
            debug(p, res);
            // threshold each output at 0.5 -> binary "class fires" lines
            for (int i = 0; i < NUM_OUT; ++i) {
                float val = res[i];
                val = (val < 0.5) ? 0 : 1;
                netLines[i].add(K::GnuplotPoint2(ts, val));
            }
            // mark the current position (ts +/- 500 ms) within the raw signal
            gp1 << "set arrow 1 from " << ts-500 << ",-10 to " << ts-500 << ",+10\n";
            gp1 << "set arrow 2 from " << ts+500 << ",-10 to " << ts+500 << ",+10\n";
            gp1.draw(plot1);
            gp1.flush();
            gp2.draw(plot2);
            gp2.flush();
            usleep(1000*50);    // ~50 ms per step -> slow-motion replay
        }

    }

    // keep the gnuplot windows open
    sleep(1000);

}
};
#endif // USINGNEURALNET_H

// ===========================================================================
// File boundary (from git web view): workspace/usingpca.h — new file, 113 lines
// ===========================================================================
#ifndef USINGPCA_H
#define USINGPCA_H
#include <vector>
#include "sensors/SensorReader.h"
#include "Interpolator.h"
#include <eigen3/Eigen/Dense>
/** the exercise classes to distinguish */
enum class PracticeType {
    //REST,
    JUMPING_JACK,
    SITUPS,
    PUSHUPS,
    KNEEBEND,
    FORWARDBEND,
};
/**
 * One recorded exercise: its type, the raw sensor recording and the
 * labeled key timestamps within the gyro stream.
 */
struct Practice {

    /** which exercise was performed */
    PracticeType type;

    /** the raw sensor recording */
    Recording rec;

    /** labeled key timestamps within the gyro data */
    std::vector<uint64_t> keyGyro;

    /**
     * build a time->gyro interpolator from the recording's gyro samples;
     * timestamps are made relative (makeRelative) to the first sample.
     */
    K::Interpolator<uint64_t, SensorGyro> getInterpol() const {
        K::Interpolator<uint64_t, SensorGyro> interpol;
        // const& avoids copying every timed sensor reading
        for (const auto& it : rec.gyro.values) {interpol.add(it.ts, it.val);}
        interpol.makeRelative();
        return interpol;
    }

};
class UsingPCA {
public:
static Eigen::VectorXf getWindow(Practice& p, uint64_t pos) {
K::Interpolator<uint64_t, SensorGyro> interpol = p.getInterpol();
Eigen::VectorXf vec(600/50*3, 1);
int idx = 0;
for (int offset = -300; offset < 300; offset += 50) {
SensorGyro gyro = interpol.get(pos + offset);
vec(idx++,0) = (gyro.x);
vec(idx++,0) = (gyro.y);
vec(idx++,0) = (gyro.z);
}
std::cout << vec << std::endl;
return vec;
}
static std::vector<Eigen::VectorXf> getClassWindows(Practice& p) {
std::vector<Eigen::VectorXf> windows;
for (uint64_t pos = 1000; pos < 5000; pos += 500) {
Eigen::VectorXf window = getWindow(p, pos);
windows.push_back(window);
}
return windows;
}
static Eigen::MatrixXf getR(std::vector<Eigen::VectorXf>& vecs) {
Eigen::MatrixXf mat = Eigen::MatrixXf::Zero(vecs[0].rows(), vecs[0].rows());
for (const Eigen::VectorXf& vec : vecs) {
mat += vec * vec.transpose();
}
mat /= vecs.size();
return mat;
}
static Eigen::VectorXf getM(std::vector<Eigen::VectorXf>& vecs) {
Eigen::MatrixXf mat = Eigen::MatrixXf::Zero(vecs[0].rows(), vecs[0].cols());
for (const Eigen::VectorXf& vec : vecs) {
mat += vec;
}
mat /= vecs.size();
return mat;
}
static void run() {
std::vector<Practice> practices;
practices.push_back(
Practice {
PracticeType::JUMPING_JACK,
SensorReader::read("/mnt/firma/kunden/HandyGames/daten/jumpingjack/jumpingjack_gl_5_subject_3_left.txt"),
{1950, 2900, 3850, 4850, 5850, 6850, 7850, 8850, 9800, 10800, 11850}
}
);
std::vector<Eigen::VectorXf> windows = getClassWindows(practices.back());
Eigen::MatrixXf R = getR(windows);
Eigen::MatrixXf m = getM(windows);
Eigen::MatrixXf Q = R - (m * m.transpose());
Eigen::SelfAdjointEigenSolver<Eigen::MatrixXf> es;
es.compute(Q);
int i = 0;
}
};
#endif // USINGPCA_H