added activity recognition to smoothing transition
@@ -1,6 +1,6 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <!DOCTYPE QtCreatorProject>
-<!-- Written by QtCreator 3.6.0, 2016-04-20T09:34:12. -->
+<!-- Written by QtCreator 3.6.0, 2016-04-21T09:32:26. -->
 <qtcreator>
 <data>
 <variable>EnvironmentId</variable>
@@ -245,12 +245,12 @@ public:
 break;
 }
 
 case s_accel: {
 float acc[3];
 SensorReaderAccel sre; sre.read(se, acc);
 actDet.addAccel(acc);
 break;
 }
 
 // case s_linearAcceleration:{
 // baroSensorReader.readVerticalAcceleration(se);
@@ -284,9 +284,10 @@ public:
 
 // currently detected activity
 // TODO: feed sensor values!
 ctrl.currentActivitiy = actDet.getCurrentActivity();
 
+// this is just for testing purposes
+obs.currentActivity = actDet.getCurrentActivity();
 
 // time for a transition?
 if (se.ts - lastTransitionTS > MiscSettings::timeSteps) {
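The two hunks above are where activity recognition is fed: every accelerometer event is pushed into the detector (actDet.addAccel(acc)), and at each transition step the detected activity is copied into the control input (ctrl.currentActivitiy) and, for testing, into the observation (obs.currentActivity). A minimal standalone sketch of that pattern, assuming a hypothetical ActivityDetector that classifies by acceleration-magnitude variance; the project's real detector is not part of this diff:

#include <cmath>
#include <deque>
#include <numeric>

enum class Activity { UNKNOWN, STANDING, WALKING };

// Hypothetical detector: classifies by the variance of the acceleration
// magnitude over a short sliding window. Thresholds are illustrative only.
class ActivityDetector {
    std::deque<double> window;  // recent |acc| samples
public:
    void addAccel(const float acc[3]) {
        const double mag = std::sqrt(acc[0]*acc[0] + acc[1]*acc[1] + acc[2]*acc[2]);
        window.push_back(mag);
        if (window.size() > 50) { window.pop_front(); }
    }
    Activity getCurrentActivity() const {
        if (window.size() < 10) { return Activity::UNKNOWN; }
        const double mean = std::accumulate(window.begin(), window.end(), 0.0) / window.size();
        double var = 0.0;
        for (double m : window) { var += (m - mean) * (m - mean); }
        var /= window.size();
        return (var > 0.5) ? Activity::WALKING : Activity::STANDING;
    }
};

struct Control     { Activity currentActivity = Activity::UNKNOWN; };
struct Observation { Activity currentActivity = Activity::UNKNOWN; };

int main() {
    ActivityDetector actDet;
    Control ctrl; Observation obs;
    const float acc[3] = {0.1f, 0.2f, 9.8f};
    actDet.addAccel(acc);                                // per accelerometer event
    ctrl.currentActivity = actDet.getCurrentActivity();  // per transition step
    obs.currentActivity  = actDet.getCurrentActivity();  // testing only, as in the diff
    return 0;
}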
@@ -51,37 +51,44 @@ public:
 
 
 //create the backward smoothing filter
-//bf = new K::BackwardSimulation<MyState>(50);
+bf = new K::BackwardSimulation<MyState>(500);
-bf = new K::CondensationBackwardFilter<MyState>;
+//bf = new K::CondensationBackwardFilter<MyState>;
-//bf->setSampler( std::unique_ptr<K::CumulativeSampler<MyState>>(new K::CumulativeSampler<MyState>()));
+bf->setSampler( std::unique_ptr<K::CumulativeSampler<MyState>>(new K::CumulativeSampler<MyState>()));
 
 
 }
 
 
-void fixedIntervallSimpleTransPath1() {
+void fixedIntervallSimpleTransPath1(){
 
 runName = "fixedIntervallSimpleTransPath1";
-bool smoothing_resample = false;
-smoothing_time_delay = 1;
 
 BarometerEvaluation::barometerSigma = 0.10;
-sr = new SensorReader("./measurements/bergwerk/path1/nexus/vor/1454775984079.csv"); // forward
+sr = new SensorReader("./measurements/bergwerk/path3/nexus/vor/1454782562231.csv"); // forward
-srt = new SensorReaderTurn("./measurements/bergwerk/path1/nexus/vor/Turns.txt");
+srt = new SensorReaderTurn("./measurements/bergwerk/path3/nexus/vor/Turns.txt");
-srs = new SensorReaderStep("./measurements/bergwerk/path1/nexus/vor/Steps2.txt");
+srs = new SensorReaderStep("./measurements/bergwerk/path3/nexus/vor/Steps2.txt");
-gtw = getGroundTruthWay(*sr, floors.gtwp, path1dbl);
+gtw = getGroundTruthWay(*sr, floors.gtwp, path3dbl);
 
-MyGridNode& end = (MyGridNode&)grid.getNodeFor( conv(floors.gtwp[path1dbl.back()]) );
+MyGridNode& end = (MyGridNode&)grid.getNodeFor( conv(floors.gtwp[path4dbl.back()]) );
 GridWalkPathControl<MyGridNode>* walk = new GridWalkPathControl<MyGridNode>(grid, DijkstraMapper(grid), end);
 pf->setTransition( std::unique_ptr<MyTransition>( new MyTransition(grid, *walk)) );
 
 
+//Smoothing Variables
+smoothing_walk_mu = 0.7;
+smoothing_walk_sigma = 0.5;
+smoothing_heading_sigma = 5.0;
+smoothing_baro_sigma = 0.05;
+
+bool smoothing_resample = false;
+smoothing_time_delay = 1;
 
 //Smoothing using Simple Trans
-bf->setEstimation(std::unique_ptr<K::ParticleFilterEstimationWeightedAverage<MyState>>(new K::ParticleFilterEstimationWeightedAverage<MyState>()));
+bf->setEstimation(std::unique_ptr<K::ParticleFilterEstimationWeightedAverageWithAngle<MyState>>(new K::ParticleFilterEstimationWeightedAverageWithAngle<MyState>()));
 if(smoothing_resample)
 bf->setResampling( std::unique_ptr<K::ParticleFilterResamplingSimple<MyState>>(new K::ParticleFilterResamplingSimple<MyState>()) );
-bf->setTransition(std::unique_ptr<MySmoothingTransitionSimple>( new MySmoothingTransitionSimple()) );
+bf->setTransition(std::unique_ptr<MySmoothingTransitionExperimental>( new MySmoothingTransitionExperimental) );
 
 }
 
 void fixedIntervallSimpleTransPath4(){
 
@@ -111,7 +118,7 @@ public:
 bf->setEstimation(std::unique_ptr<K::ParticleFilterEstimationWeightedAverageWithAngle<MyState>>(new K::ParticleFilterEstimationWeightedAverageWithAngle<MyState>()));
 if(smoothing_resample)
 bf->setResampling( std::unique_ptr<K::ParticleFilterResamplingSimple<MyState>>(new K::ParticleFilterResamplingSimple<MyState>()) );
-bf->setTransition(std::unique_ptr<MySmoothingTransitionExperimental>( new MySmoothingTransitionExperimental) );
+bf->setTransition(std::unique_ptr<MySmoothingTransitionSimple>( new MySmoothingTransitionSimple) );
 }
 
 // ============================================================ Dijkstra ============================================== //
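The Path1 run also swaps its smoothing estimation from ParticleFilterEstimationWeightedAverage to ParticleFilterEstimationWeightedAverageWithAngle. A plausible motivation (an assumption, not stated in the diff) is that headings are circular quantities: a plain weighted average of 350 and 10 degrees gives 180 degrees, while averaging weighted unit vectors and taking atan2 gives roughly 0. A standalone sketch of a weighted circular mean:

#include <cmath>
#include <cstdio>
#include <vector>

// Weighted circular mean of headings (radians). A plain weighted average breaks
// at the 0/360 wrap-around; summing weighted unit vectors and taking atan2 does not.
double weightedCircularMean(const std::vector<double>& headingRad,
                            const std::vector<double>& weight) {
    double sumSin = 0.0, sumCos = 0.0;
    for (std::size_t i = 0; i < headingRad.size(); ++i) {
        sumSin += weight[i] * std::sin(headingRad[i]);
        sumCos += weight[i] * std::cos(headingRad[i]);
    }
    return std::atan2(sumSin, sumCos);  // result in (-pi, pi]
}

int main() {
    const double kPi = 3.14159265358979323846;
    const std::vector<double> headings = {350.0 * kPi / 180.0, 10.0 * kPi / 180.0};
    const std::vector<double> weights  = {0.5, 0.5};
    std::printf("circular mean: %.1f deg\n",
                weightedCircularMean(headings, weights) * 180.0 / kPi);  // ~0.0
    return 0;
}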
@@ -72,7 +72,7 @@ void testModelWalk() {
 
 while(true) {
 for (GridWalkState<MyGridNode>& state : states) {
-state = walk.getDestination(grid, state, std::abs(wDist.draw()), wHead.draw());
+state = walk.getDestination(grid, state, std::abs(wDist.draw()), wHead.draw(), Activity::UNKNOWN);
 }
 usleep(1000*80);
 vis.showStates(states);
@@ -88,9 +88,18 @@ void testModelWalk() {
 int main(void) {
 
 // testModelWalk();
-// SmoothingEval1 eval;
-// eval.fixedIntervallSimpleTransPath4();
-// eval.run();
+{SmoothingEval1 eval;
+eval.fixedIntervallSimpleTransPath4();
+eval.run();}
+{SmoothingEval1 eval;
+eval.fixedIntervallSimpleTransPath4();
+eval.run();}
+{SmoothingEval1 eval;
+eval.fixedIntervallSimpleTransPath4();
+eval.run();}
+{SmoothingEval1 eval;
+eval.fixedIntervallSimpleTransPath4();
+eval.run();}
 
 //Eval1 eval;
 //eval.bergwerk_path4_nexus_multi();
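main() now executes fixedIntervallSimpleTransPath4 four times in separate scoped blocks, presumably to gather several samples of the stochastic smoothing run. The same effect as a loop, sketched with a stubbed SmoothingEval1 (the real class is the one configured in the evaluation hunks above):

// Sketch only: SmoothingEval1 is stubbed here; in the project it configures and runs one evaluation.
struct SmoothingEval1 {
    void fixedIntervallSimpleTransPath4() { /* configure the run */ }
    void run()                            { /* execute the run  */ }
};

int main() {
    for (int i = 0; i < 4; ++i) {   // same effect as four scoped { ... } blocks
        SmoothingEval1 eval;        // fresh state per repetition
        eval.fixedIntervallSimpleTransPath4();
        eval.run();
    }
    return 0;
}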
@@ -98,41 +107,41 @@ int main(void) {
 
 //{SmoothingEval1 eval; eval.bergwerk_path1_nexus_simple(); eval.run();}
 //{SmoothingEval1 eval; eval.bergwerk_path1_nexus_imp(); eval.run();}
-{SmoothingEval1 eval; eval.bergwerk_path1_nexus_multi(); eval.run();}
+// {SmoothingEval1 eval; eval.bergwerk_path1_nexus_multi(); eval.run();}
-{SmoothingEval1 eval; eval.bergwerk_path1_nexus_shortest(); eval.run();}
+// {SmoothingEval1 eval; eval.bergwerk_path1_nexus_shortest(); eval.run();}
 
-{SmoothingEval1 eval; eval.bergwerk_path2_nexus_simple(); eval.run();}
+// {SmoothingEval1 eval; eval.bergwerk_path2_nexus_simple(); eval.run();}
-//{SmoothingEval1 eval; eval.bergwerk_path2_nexus_imp(); eval.run();}
+// //{SmoothingEval1 eval; eval.bergwerk_path2_nexus_imp(); eval.run();}
-{SmoothingEval1 eval; eval.bergwerk_path2_nexus_multi(); eval.run();}
+// {SmoothingEval1 eval; eval.bergwerk_path2_nexus_multi(); eval.run();}
-{SmoothingEval1 eval; eval.bergwerk_path2_nexus_shortest(); eval.run();}
+// {SmoothingEval1 eval; eval.bergwerk_path2_nexus_shortest(); eval.run();}
 
-{SmoothingEval1 eval; eval.bergwerk_path3_nexus_simple(); eval.run();}
+// {SmoothingEval1 eval; eval.bergwerk_path3_nexus_simple(); eval.run();}
-//{SmoothingEval1 eval; eval.bergwerk_path3_nexus_imp(); eval.run();}
+// //{SmoothingEval1 eval; eval.bergwerk_path3_nexus_imp(); eval.run();}
-{SmoothingEval1 eval; eval.bergwerk_path3_nexus_multi(); eval.run();}
+// {SmoothingEval1 eval; eval.bergwerk_path3_nexus_multi(); eval.run();}
-{SmoothingEval1 eval; eval.bergwerk_path3_nexus_shortest(); eval.run();}
+// {SmoothingEval1 eval; eval.bergwerk_path3_nexus_shortest(); eval.run();}
 
-{SmoothingEval1 eval; eval.bergwerk_path4_nexus_simple(); eval.run();}
+// {SmoothingEval1 eval; eval.bergwerk_path4_nexus_simple(); eval.run();}
-{SmoothingEval1 eval; eval.bergwerk_path4_nexus_imp(); eval.run();}
+// {SmoothingEval1 eval; eval.bergwerk_path4_nexus_imp(); eval.run();}
-{SmoothingEval1 eval; eval.bergwerk_path4_nexus_multi(); eval.run();}
+// {SmoothingEval1 eval; eval.bergwerk_path4_nexus_multi(); eval.run();}
-{SmoothingEval1 eval; eval.bergwerk_path4_nexus_shortest(); eval.run();}
+// {SmoothingEval1 eval; eval.bergwerk_path4_nexus_shortest(); eval.run();}
 
 
 
-{SmoothingEval1 eval; eval.bergwerk_path1_galaxy_simple(); eval.run();}
+//// {SmoothingEval1 eval; eval.bergwerk_path1_galaxy_simple(); eval.run();}
-{SmoothingEval1 eval; eval.bergwerk_path1_galaxy_multi(); eval.run();}
+// {SmoothingEval1 eval; eval.bergwerk_path1_galaxy_multi(); eval.run();}
-{SmoothingEval1 eval; eval.bergwerk_path1_galaxy_shortest(); eval.run();}
+//// {SmoothingEval1 eval; eval.bergwerk_path1_galaxy_shortest(); eval.run();}
 
-{SmoothingEval1 eval; eval.bergwerk_path2_galaxy_simple(); eval.run();}
+//// {SmoothingEval1 eval; eval.bergwerk_path2_galaxy_simple(); eval.run();}
-{SmoothingEval1 eval; eval.bergwerk_path2_galaxy_multi(); eval.run();}
+// {SmoothingEval1 eval; eval.bergwerk_path2_galaxy_multi(); eval.run();}
-{SmoothingEval1 eval; eval.bergwerk_path2_galaxy_shortest(); eval.run();}
+//// {SmoothingEval1 eval; eval.bergwerk_path2_galaxy_shortest(); eval.run();}
 
-{SmoothingEval1 eval; eval.bergwerk_path3_galaxy_simple(); eval.run();}
+//// {SmoothingEval1 eval; eval.bergwerk_path3_galaxy_simple(); eval.run();}
-{SmoothingEval1 eval; eval.bergwerk_path3_galaxy_multi(); eval.run();}
+// {SmoothingEval1 eval; eval.bergwerk_path3_galaxy_multi(); eval.run();}
-{SmoothingEval1 eval; eval.bergwerk_path3_galaxy_shortest(); eval.run();}
+//// {SmoothingEval1 eval; eval.bergwerk_path3_galaxy_shortest(); eval.run();}
 
-{SmoothingEval1 eval; eval.bergwerk_path4_galaxy_simple(); eval.run();}
+//// {SmoothingEval1 eval; eval.bergwerk_path4_galaxy_simple(); eval.run();}
-{SmoothingEval1 eval; eval.bergwerk_path4_galaxy_multi(); eval.run();}
+// {SmoothingEval1 eval; eval.bergwerk_path4_galaxy_multi(); eval.run();}
-{SmoothingEval1 eval; eval.bergwerk_path4_galaxy_shortest(); eval.run();}
+//// {SmoothingEval1 eval; eval.bergwerk_path4_galaxy_shortest(); eval.run();}
 
 
 
@@ -72,17 +72,19 @@ public:
 }
 
 // CONTROL!
 // if (useStep) {
-// weight *= stepEval.getProbability(p.state, observation.step);
+// //weight *= stepEval.getProbability(p.state, observation.step);
 // }
 
 // CONTROL!
-// if (useTurn) {
+if (useTurn) {
-// weight *= turnEval.getProbability(p.state, observation.turn, true);
+//weight *= turnEval.getProbability(p.state, observation.turn, true);
 
-// //set
+//set
-// p.state.angularHeadingChange = observation.turn->delta_heading;
+p.state.angularHeadingChange = observation.turn->delta_heading;
-// }
+}
 
+p.state.currentActivity = observation.currentActivity;
 
 // set and accumulate
 p.weight = weight;
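The hunk above re-enables the turn branch: when useTurn is set, the particle's angularHeadingChange is taken from the observed turn's delta_heading (the turnEval weighting itself stays commented out), and the activity carried by the observation is copied into the particle state before the weight is stored. A condensed standalone sketch with simplified types; the null check on observation.turn is added here for safety and is not present in the diff:

enum class Activity { UNKNOWN, STANDING, WALKING };

struct TurnObservation { double delta_heading = 0.0; };

struct State {
    double   angularHeadingChange = 0.0;
    Activity currentActivity      = Activity::UNKNOWN;
};

struct Particle { State state; double weight = 1.0; };

struct Observation {
    TurnObservation* turn = nullptr;               // optional turn measurement
    Activity currentActivity = Activity::UNKNOWN;  // detected activity, per this commit
};

// Sketch of the per-particle update performed in the evaluation hunk above.
void updateParticle(Particle& p, const Observation& observation, bool useTurn) {
    double weight = p.weight;
    if (useTurn && observation.turn != nullptr) {
        // the probability weighting via turnEval remains disabled in the diff;
        // only the measured heading change is written into the state
        p.state.angularHeadingChange = observation.turn->delta_heading;
    }
    p.state.currentActivity = observation.currentActivity;
    p.weight = weight;  // set and accumulate
}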
@@ -9,6 +9,8 @@
 #include "../lukas/StepObservation.h"
 #include "../lukas/TurnObservation.h"
 
+#include "Indoor/grid/walk/GridWalk.h"
+
 /**
 * all available sensor readings
 */
@@ -31,6 +33,10 @@ struct MyObservation {
 /** turn observation data (if any) */
 TurnObservation* turn = nullptr;
 
+/** get the activity into the observation. just for testing in smoothing */
+Activity currentActivity = Activity::UNKNOWN;
+
 /** timestamp of the youngest sensor data that resides within this observation. used to detect the age of all other observations! */
 uint64_t latestSensorDataTS = 0;
 
@@ -5,6 +5,7 @@
 #include <KLib/math/optimization/NumOptVector.h>
 
 #include <Indoor/grid/walk/GridWalkState.h>
+#include <Indoor/grid/walk/GridWalk.h>
 
 #include "../MyGridNode.h"
 
@@ -34,6 +35,9 @@ struct MyState {
 
 double avgAngle;
 
+//the current Activity
+Activity currentActivity;
+
 //int distanceWalkedCM;
 
 
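MyObservation and MyState both gain an Activity currentActivity member and now include Indoor/grid/walk/GridWalk.h, which appears to be where the Activity type is declared. The declaration below is an assumption reconstructed from the enumerators used elsewhere in this commit; the real header may define it differently or with more values:

// Assumed shape of the Activity type referenced by MyState and MyObservation.
// Only these enumerators appear in this commit.
enum class Activity {
    UNKNOWN,
    STANDING,
    WALKING,
    STAIRS_UP,
    STAIRS_DOWN,
    ELEVATOR
};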
@@ -96,22 +96,8 @@ public:
 
 auto p2 = &particles_new[j];
 
-// find the node (square) the particle is within
-// just to be safe, we round z to the nearest floor
-
-//TODO:: Nullptr check! sometimes src/dst can be nullptr
-//const Node3* dst = graph->getNearestNode(p1->state.x_cm, p1->state.y_cm, std::round(p1->state.z_nr));
-//const Node3* src = graph->getNearestNode(p2->state.x_cm, p2->state.y_cm, std::round(p2->state.z_nr));
 
 const MyGridNode* src = grid->getNodePtrFor(GridPoint(p2->state.pCur.x, p2->state.pCur.y, p2->state.pCur.z));
 
-// Dijkstra<MyGridNode> dijkstra;
-// dijkstra.build(src, dst, DijkstraMapper(*grid));
-
-// double distDijkstra_m = dijkstra.getNode(*src)->cumWeight;
 
 double distDijkstra_m = 0;
-//std::vector<const MyGridNode*> shortestPath;
 
 // check if this shortestPath was already calculated
 std::map<my_key_type, double>::iterator it;
@@ -121,16 +107,8 @@ public:
 }
 else{
 
-//Dijkstra/A* for shortest path
-//shortestPath = aStar.get(src, dst, dm);
-
 distDijkstra_m = aStar.get(src, dst, dm);
 
-//get distance walked and getProb using the walking model
-// for(int i = 0; i < shortestPath.size() - 1; ++i){
-// distDijkstra_m += dm.getWeightBetween(*shortestPath[i], *shortestPath[i+1]);
-// }
-
 if(distDijkstra_m != distDijkstra_m) {throw "detected NaN";}
 
 //save distance and nodes in lookup map
@@ -140,22 +118,12 @@ public:
 
 const double distProb = distWalk.getProbability(distDijkstra_m * 0.01);
 
-//getProb using the angle(heading) between src and dst
+//heading change prob
-// double angle = 0.0;
+double diffRad = Angle::getDiffRAD_2PI_PI(p2->state.walkState.heading.getRAD(), p1->state.walkState.heading.getRAD());
-// if(!(p2->state.pCur.x == p1->state.pCur.x) && !(p2->state.pCur.y == p1->state.pCur.y)){
+double diffDeg = Angle::radToDeg(diffRad);
-// angle = Angle::getDEG_360(p2->state.pCur.x, p2->state.pCur.y, p1->state.pCur.x, p1->state.pCur.y);
+double angularChangeZeroToPi = std::fmod(std::abs(p1->state.angularHeadingChange), 360.0);
-// }
-// const double headingProb = K::NormalDistribution::getProbability(p1->state.cumulativeHeading, smoothing_heading_sigma, angle);
 
-// is the heading change similiar to the measurement?
+const double headingProb = K::NormalDistribution::getProbability(angularChangeZeroToPi, smoothing_heading_sigma, diffDeg);
-double p2AngleDeg = p2->state.walkState.heading.getRAD() * 180/3.14159265359;
-double p1AngleDeg = p1->state.walkState.heading.getRAD() * 180/3.14159265359;
-
-double diffDeg = p2AngleDeg - p1AngleDeg;
-const double headingProb = K::NormalDistribution::getProbability(p1->state.angularHeadingChange, smoothing_heading_sigma, diffDeg);
 
-//assert(headingProb != 0.0);
-//assert(distProb != 0.0);
 
 //check how near we are to the measurement
 double floorProb = K::NormalDistribution::getProbability(p1->state.measurement_pressure, smoothing_baro_sigma, p2->state.hPa);
@@ -167,10 +135,10 @@ public:
 //if(distance_m != distance_m) {throw "detected NaN";}
 //if(distProb != distProb) {throw "detected NaN";}
 //if(angle != angle) {throw "detected NaN";}
-if(headingProb != headingProb) {throw "detected NaN";}
+//if(headingProb != headingProb) {throw "detected NaN";}
-if(floorProb != floorProb) {throw "detected NaN";}
+//if(floorProb != floorProb) {throw "detected NaN";}
-if(floorProb == 0) {throw "detected NaN";}
+//if(floorProb == 0) {throw "detected NaN";}
-if(prob != prob) {throw "detected NaN";}
+//if(prob != prob) {throw "detected NaN";}
 
 //assert(prob != 0.0);
 
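The Dijkstra-based smoothing transition now scores heading agreement the same way as the simple transition: the signed difference between the two particles' headings is wrapped into (-pi, pi] via Angle::getDiffRAD_2PI_PI, converted to degrees, and compared under a normal distribution (sigma = smoothing_heading_sigma) against the particle's accumulated |angularHeadingChange| folded into [0, 360). A standalone sketch of that computation; the behavior of the Angle helpers is assumed from their names, and normalPdf stands in for K::NormalDistribution::getProbability:

#include <cmath>

const double kPi = 3.14159265358979323846;

// Wrap a signed angle difference into (-pi, pi]; assumed to match Angle::getDiffRAD_2PI_PI.
double angleDiffRad(double aRad, double bRad) {
    double d = std::fmod(aRad - bRad, 2.0 * kPi);
    if (d <= -kPi) d += 2.0 * kPi;
    if (d >   kPi) d -= 2.0 * kPi;
    return d;
}

double radToDeg(double rad) { return rad * 180.0 / kPi; }

// Normal pdf, standing in for K::NormalDistribution::getProbability(mu, sigma, x).
double normalPdf(double mu, double sigma, double x) {
    const double z = (x - mu) / sigma;
    return std::exp(-0.5 * z * z) / (sigma * std::sqrt(2.0 * kPi));
}

// Heading score between a smoothed particle p1 (t+1) and a forward particle p2 (t),
// mirroring the updated hunk: signed heading difference in degrees vs. the
// accumulated |angularHeadingChange| folded into [0, 360).
double headingProbability(double p1HeadingRad, double p2HeadingRad,
                          double p1AngularHeadingChangeDeg, double headingSigmaDeg) {
    const double diffDeg = radToDeg(angleDiffRad(p2HeadingRad, p1HeadingRad));
    const double measuredDeg = std::fmod(std::abs(p1AngularHeadingChangeDeg), 360.0);
    return normalPdf(measuredDeg, headingSigmaDeg, diffDeg);
}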
@@ -52,8 +52,9 @@ public:
 
 /**
 * smoothing transition starting at T with t, t-1,...0
-* @param particles_new p_t (Forward Filter)
+* @param particles_new p_t (Forward Filter) p2
-* @param particles_old p_t+1 (Smoothed Particles from Step before)
+* @param particles_old p_t+1 (Smoothed Particles from Step before) p1
+* q(p1 | p2) is calculated
 */
 std::vector<std::vector<double>> transition(std::vector<K::Particle<MyState>>const& particles_new,
 std::vector<K::Particle<MyState>>const& particles_old ) override {
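The updated comment pins down the convention: particles_new are the forward-filter particles at t (p2), particles_old are the already smoothed particles at t+1 (p1), and the returned matrix holds q(p1 | p2). For reference, this is how such a matrix is typically consumed in forward-filtering backward-smoothing reweighting, sketched standalone (textbook formula, not the K:: library's actual implementation):

#include <cstddef>
#include <vector>

// Given forward weights w_t, smoothed weights w_{t+1|T} and the matrix
// q[i][j] = q(smoothed particle i at t+1 | forward particle j at t),
// compute smoothed weights at t:
//   w_{t|T}(j) proportional to w_t(j) * sum_i w_{t+1|T}(i) * q[i][j] / sum_k w_t(k) * q[i][k]
std::vector<double> smoothWeights(const std::vector<double>& forwardW,
                                  const std::vector<double>& smoothedNextW,
                                  const std::vector<std::vector<double>>& q) {
    const std::size_t M = smoothedNextW.size();   // particles at t+1
    const std::size_t N = forwardW.size();        // particles at t
    std::vector<double> out(N, 0.0);
    for (std::size_t i = 0; i < M; ++i) {
        double denom = 0.0;
        for (std::size_t k = 0; k < N; ++k) { denom += forwardW[k] * q[i][k]; }
        if (denom <= 0.0) { continue; }           // all-zero row: skip instead of dividing by zero
        for (std::size_t j = 0; j < N; ++j) { out[j] += smoothedNextW[i] * q[i][j] / denom; }
    }
    double sum = 0.0;
    for (std::size_t j = 0; j < N; ++j) { out[j] *= forwardW[j]; sum += out[j]; }
    if (sum > 0.0) { for (double& w : out) { w /= sum; } }  // normalize
    return out;
}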
@@ -76,59 +77,76 @@ public:
 
 auto p2 = &particles_new[j];
 
-// !!! the distance can get too large here !!!
 const double distance_m = p2->state.pCur.getDistance(p1->state.pCur) / 100.0;
 
+double muDistance = 1.0;
+double sigmaDistance = 0.5;
+
-//get distance walked and getProb using the walking model
+switch (p2->state.currentActivity) {
-//double distDijkstra_m = ((GRID_DISTANCE_CM / 100.0) * (8 - 1));
+case Activity::ELEVATOR:
-const double distProb = distWalk.getProbability(distance_m);
+muDistance = 0.0;
+sigmaDistance = 0.3;
+break;
+
+case Activity::STAIRS_DOWN:
+muDistance = 0.5;
+sigmaDistance = 0.3;
+break;
+
-//getProb using the angle(heading) between src and dst
+case Activity::STAIRS_UP:
-// double angle = 0.0;
+muDistance = 0.4;
-// if(!(p2->state.pCur.x == p1->state.pCur.x) && !(p2->state.pCur.y == p1->state.pCur.y)){
+sigmaDistance = 0.2;
-// angle = Angle::getDEG_360(p2->state.pCur.x, p2->state.pCur.y, p1->state.pCur.x, p1->state.pCur.y);
+break;
-// }
 
-// const double headingProb = K::NormalDistribution::getProbability(p1->state.cumulativeHeading, smoothing_heading_sigma, angle);
+case Activity::STANDING:
+muDistance = 0.0;
+sigmaDistance = 0.2;
+break;
+
+case Activity::WALKING:
+muDistance = 1.0;
+sigmaDistance = 0.5;
+break;
+
+default:
+muDistance = 1.0;
+sigmaDistance = 0.5;
+break;
+}
+
+const double distProb = K::NormalDistribution::getProbability(muDistance, sigmaDistance, distance_m);
 
 // is the heading change similiar to the measurement?
-double p2AngleDeg = p2->state.walkState.heading.getRAD() * 180/3.14159265359;
+double diffRad = Angle::getDiffRAD_2PI_PI(p2->state.walkState.heading.getRAD(), p1->state.walkState.heading.getRAD());
-double p1AngleDeg = p1->state.walkState.heading.getRAD() * 180/3.14159265359;
+double diffDeg = Angle::radToDeg(diffRad);
+double angularChangeZeroToPi = std::fmod(std::abs(p1->state.angularHeadingChange), 360.0);
-double diffDeg = p2AngleDeg - p1AngleDeg;
-const double headingProb = K::NormalDistribution::getProbability(p1->state.angularHeadingChange, smoothing_heading_sigma, diffDeg);
 
-//assert(headingProb != 0.0);
-//assert(distProb != 0.0);
 
+const double headingProb = K::NormalDistribution::getProbability(angularChangeZeroToPi, smoothing_heading_sigma, diffDeg);
 
 
 //check how near we are to the measurement
-double floorProb = K::NormalDistribution::getProbability(p1->state.measurement_pressure, smoothing_baro_sigma, p2->state.hPa);
+const double floorProb = K::NormalDistribution::getProbability(p1->state.measurement_pressure, smoothing_baro_sigma, p2->state.hPa);
 
 
 //combine the probabilities
 double prob = distProb * headingProb * floorProb;
 innerVector.push_back(prob);
 
-if(distance_m != distance_m) {throw "detected NaN";}
+//error checks
-if(distProb != distProb) {throw "detected NaN";}
+// if(distance_m != distance_m) {throw "detected NaN";}
-// if(angle != angle) {throw "detected NaN";}
+// if(distProb != distProb) {throw "detected NaN";}
-if(headingProb != headingProb) {throw "detected NaN";}
+// if(headingProb != headingProb) {throw "detected NaN";}
-if(floorProb != floorProb) {throw "detected NaN";}
+// if(floorProb != floorProb) {throw "detected NaN";}
-if(floorProb == 0) {throw "detected NaN";}
+// if(floorProb == 0) {throw "detected zero prob in floorprob";}
-if(prob != prob) {throw "detected NaN";}
+// if(prob != prob) {throw "detected NaN";}
+//if(prob == 0) {++zeroCounter;}
-//assert(prob != 0.0);
 
 
 }
 #pragma omp critical
 predictionProbabilities.push_back(innerVector);
 }
 
 return predictionProbabilities;
 
 }
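This hunk is the core of the new experimental transition: the displacement between the forward particle p2 and the smoothed particle p1 is no longer scored against a single walking distribution but against an activity-dependent normal distribution, with mean 0 m for ELEVATOR and STANDING, 0.4 to 0.5 m for stairs, and 1.0 m for WALKING and the default case. The same mapping as a standalone sketch; the mu/sigma values are copied from the hunk, and normalPdf stands in for K::NormalDistribution::getProbability:

#include <cmath>

enum class Activity { UNKNOWN, STANDING, WALKING, STAIRS_UP, STAIRS_DOWN, ELEVATOR };

struct DistanceModel { double mu; double sigma; };  // metres per transition step

// Parameters exactly as set in the switch statement of the hunk above.
DistanceModel distanceModelFor(Activity a) {
    switch (a) {
        case Activity::ELEVATOR:    return {0.0, 0.3};
        case Activity::STAIRS_DOWN: return {0.5, 0.3};
        case Activity::STAIRS_UP:   return {0.4, 0.2};
        case Activity::STANDING:    return {0.0, 0.2};
        case Activity::WALKING:     return {1.0, 0.5};
        default:                    return {1.0, 0.5};  // UNKNOWN falls back to the walking model
    }
}

double normalPdf(double mu, double sigma, double x) {
    const double kPi = 3.14159265358979323846;
    const double z = (x - mu) / sigma;
    return std::exp(-0.5 * z * z) / (sigma * std::sqrt(2.0 * kPi));
}

// Distance score between two particles whose positions are distance_m metres apart.
double distanceProbability(Activity activityOfForwardParticle, double distance_m) {
    const DistanceModel m = distanceModelFor(activityOfForwardParticle);
    return normalPdf(m.mu, m.sigma, distance_m);
}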
@@ -17,37 +17,38 @@ class MySmoothingTransitionSimple : public K::BackwardFilterTransition<MyState>
 
 private:
 
 /** a simple normal distribution */
 K::NormalDistribution distWalk;
 
 
 public:
 
 /**
 * ctor
 * @param choice the choice to use for randomly drawing nodes
 * @param fp the underlying floorplan
 */
 MySmoothingTransitionSimple() :
-distWalk(smoothing_walk_mu, smoothing_walk_sigma) {
+distWalk(smoothing_walk_mu, smoothing_walk_sigma)
+{
 distWalk.setSeed(4321);
 }
 
 public:
 
 uint64_t ts = 0;
 uint64_t deltaMS = 0;
 
 /** set the current time in millisconds */
 void setCurrentTime(const uint64_t ts) {
 if (this->ts == 0) {
 this->ts = ts;
 deltaMS = 0;
 } else {
 deltaMS = this->ts - ts;
 this->ts = ts;
 }
 }
 
 /**
 * smoothing transition starting at T with t, t-1,...0
@@ -64,52 +65,44 @@ public:
 // p(q_490(1)|q_489(1)); p(q_490(2)|q_489(1)) ... p(q_490(M)|q_489(1))
 std::vector<std::vector<double>> predictionProbabilities;
 
-auto p1 = particles_old.begin(); //smoothed / backward filter p_t+1
-auto p2 = particles_new.begin(); //forward filter p_t
-#pragma omp parallel for private(p2) shared(predictionProbabilities)
-for (p1 = particles_old.begin(); p1 < particles_old.end(); ++p1) {
+omp_set_dynamic(0); // Explicitly disable dynamic teams
+omp_set_num_threads(6);
+#pragma omp parallel for shared(predictionProbabilities)
+for (int i = 0; i < particles_old.size(); ++i) {
 
 std::vector<double> innerVector;
-for(p2 = particles_new.begin(); p2 < particles_new.end(); ++p2){
+auto p1 = &particles_old[i];
 
-// !!! the distance can get too large here !!!
+for(int j = 0; j < particles_new.size(); ++j){
 
+auto p2 = &particles_new[j];
+
+//distance can be pretty big here
 const double distance_m = p2->state.pCur.getDistance(p1->state.pCur) / 100.0;
 
 //get distance walked and getProb using the walking model
-//double distDijkstra_m = ((GRID_DISTANCE_CM / 100.0) * (8 - 1));
 const double distProb = distWalk.getProbability(distance_m);
 
+//get proba for heading change
-//getProb using the angle(heading) between src and dst
+double diffRad = Angle::getDiffRAD_2PI_PI(p2->state.walkState.heading.getRAD(), p1->state.walkState.heading.getRAD());
-double angle = 0.0;
+double diffDeg = Angle::radToDeg(diffRad);
-if(!(p2->state.pCur.x == p1->state.pCur.x) && !(p2->state.pCur.y == p1->state.pCur.y)){
+double angularChangeZeroToPi = std::fmod(std::abs(p1->state.angularHeadingChange), 360.0);
-angle = Angle::getDEG_360(p2->state.pCur.x, p2->state.pCur.y, p1->state.pCur.x, p1->state.pCur.y);
+const double headingProb = K::NormalDistribution::getProbability(angularChangeZeroToPi, smoothing_heading_sigma, diffDeg);
-}
 
-const double headingProb = K::NormalDistribution::getProbability(p1->state.cumulativeHeading, smoothing_heading_sigma, angle);
 
-//assert(headingProb != 0.0);
-//assert(distProb != 0.0);
 
 
 //check how near we are to the measurement
 double floorProb = K::NormalDistribution::getProbability(p1->state.measurement_pressure, smoothing_baro_sigma, p2->state.hPa);
 
 
 //combine the probabilities
 double prob = distProb * headingProb * floorProb;
 innerVector.push_back(prob);
 
+//error checks
 if(distance_m != distance_m) {throw "detected NaN";}
 if(distProb != distProb) {throw "detected NaN";}
-if(angle != angle) {throw "detected NaN";}
 if(headingProb != headingProb) {throw "detected NaN";}
 if(floorProb != floorProb) {throw "detected NaN";}
-if(floorProb == 0) {throw "detected NaN";}
+if(floorProb == 0) {throw "detected zero prob in floorprob";}
 if(prob != prob) {throw "detected NaN";}
+//if(prob == 0) {throw "detected zero prob in smoothing transition";}
-//assert(prob != 0.0);
 
 
 }
 #pragma omp critical
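The simple transition now parallelizes over plain integer indices instead of pre-declared iterators, pins the thread count with omp_set_num_threads(6), and pushes each finished row into the shared matrix inside an #pragma omp critical section. A standalone sketch of the pattern; note that push_back under a critical section appends rows in completion order rather than index order, so the sketch instead pre-sizes the result and writes each row by index, which is one way to keep row i aligned with particles_old[i] (a deviation from the diff, flagged here):

#include <omp.h>
#include <cstddef>
#include <vector>

// Placeholder pairwise score; the real code combines distance, heading and barometer terms.
static double someScore(double a, double b) { return 1.0 / (1.0 + (a - b) * (a - b)); }

// Pairwise score matrix between two particle sets, one row per "old" particle,
// computed row-parallel with OpenMP (compile with -fopenmp).
std::vector<std::vector<double>> pairwiseScores(const std::vector<double>& oldP,
                                                const std::vector<double>& newP) {
    std::vector<std::vector<double>> result(oldP.size());  // pre-sized: row i belongs to oldP[i]
    omp_set_dynamic(0);      // fixed team size, as in the diff
    omp_set_num_threads(6);
    #pragma omp parallel for shared(result)
    for (int i = 0; i < static_cast<int>(oldP.size()); ++i) {
        std::vector<double> row;
        row.reserve(newP.size());
        for (std::size_t j = 0; j < newP.size(); ++j) {
            row.push_back(someScore(oldP[static_cast<std::size_t>(i)], newP[j]));
        }
        result[static_cast<std::size_t>(i)] = std::move(row);  // index write: rows stay ordered, no critical section needed
    }
    return result;
}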
@@ -122,4 +115,4 @@ public:
 
 };
 
-#endif // MYTRANSITION_H
+#endif // MYTRANSITIONSIMPLE_H
Binary file not shown.
@@ -2,6 +2,17 @@
 
 ddd \cite{Ville09} dddd
 
+Evaluation:
+\begin{itemize}
+\item The filter is always the same, using MultiPathPrediction and importance factors
+\item FBS interval with 500 and 7500 particles on 4 paths with SimpleSmoothingTrans
+\item BS interval with 500-to-50 and 7500-to-500 particles on 4 paths with SimpleSmoothingTrans
+\item FBS lag = 5 with 500 and 7500 particles on 4 paths with SimpleSmoothingTrans
+\item BS lag = 5 with 500-to-50 and 7500-to-500 particles on 4 paths with SimpleSmoothingTrans
+\item BS lag vs. error plot: lag from 0 to 100, how does the error behave? Preferably on path 4 with SimpleSmoothingTrans.
+\item BS lag = 5 with 500 particles on a manually adjusted path (I will do this) with DijkstraTrans.
+\end{itemize}
+
 
 \begin{itemize}
 \item Describe the results and problems of the forward step: temporal delay, etc.
@@ -15,3 +26,5 @@ ddd \cite{Ville09} dddd
 \item Fixed-lag gap
 \subitem an offset (gap) in the smoothing: what does it gain, and is it sensible?
 \end{itemize}
+
+
@@ -1,7 +1,7 @@
 \section{Smoothing}
 \label{sec:smoothing}
 
-Consider a situation given all observations until a time step T...
+
 
 