// IPIN2017/code/filter/KLB.h
#ifndef KLB_H
#define KLB_H
#include <chrono>
#include <Indoor/math/divergence/KullbackLeibler.h>
#include <Indoor/grid/factory/v2/GridFactory.h>
#include <Indoor/floorplan/v2/Floorplan.h>
#include <Indoor/floorplan/v2/FloorplanReader.h>
#include <Indoor/grid/factory/v2/Importance.h>
#include <Indoor/geo/Heading.h>
#include <Indoor/geo/Point2.h>
#include <Indoor/sensors/offline/FileReader.h>
#include <KLib/math/statistics/Statistics.h>
#include <Indoor/sensors/imu/TurnDetection.h>
#include <Indoor/sensors/imu/StepDetection.h>
#include <Indoor/sensors/imu/MotionDetection.h>
#include <Indoor/sensors/pressure/RelativePressure.h>
#include <Indoor/sensors/radio/WiFiGridEstimator.h>
#include <Indoor/sensors/beacon/model/BeaconModelLogDistCeiling.h>
#include <Indoor/math/MovingAVG.h>
#include <Indoor/math/FixedFrequencyInterpolator.h>
#include <Indoor/math/divergence/JensenShannon.h>
#include <Indoor/data/Timestamp.h>
#include <KLib/math/filter/particles/Particle.h>
#include <KLib/math/filter/particles/ParticleFilterMixing.h>
#include <KLib/math/filter/particles/ParticleFilterInitializer.h>
#include <KLib/math/filter/particles/estimation/ParticleFilterEstimationWeightedAverage.h>
#include <KLib/math/filter/particles/estimation/ParticleFilterEstimationRegionalWeightedAverage.h>
#include <KLib/math/filter/particles/estimation/ParticleFilterEstimationOrderedWeightedAverage.h>
//#include <KLib/math/filter/particles/estimation/ParticleFilterEstimationKernelDensity.h>
#include <KLib/math/filter/particles/resampling/ParticleFilterResamplingSimple.h>
#include <KLib/math/filter/particles/resampling/ParticleFilterResamplingPercent.h>
#include <KLib/math/filter/particles/resampling/ParticleFilterResamplingDivergence.h>
#include <KLib/math/filter/merging/MarkovTransitionProbability.h>
#include <KLib/math/filter/merging/mixing/MixingSamplerDivergency.h>
#include <KLib/math/filter/merging/estimation/JointEstimationPosteriorOnly.h>
#include "Structs.h"
#include "../Plotti.h"
#include "Logic.h"
#include "../Settings.h"
// global used only for debugging/plotting: the most recent KL divergence
double __KLD = 0.0;

// TODO: this function should return the transition probability matrix for the Markov chain.
// getKernelDensityProbability() should work fine for a first shot; however, it requires
// two kernel density estimations, one for each filter.
struct ModeProbabilityTransition : public K::MarkovTransitionProbability<MyState, MyControl, MyObs> {

    Grid<MyNode>& grid;
    const double lambda;

    ModeProbabilityTransition(Grid<MyNode>& grid, double lambda) : grid(grid), lambda(lambda) {}
    virtual Eigen::MatrixXd update(std::vector<K::ParticleFilterMixing<MyState, MyControl, MyObs>>& modes) override {

        std::vector<double> probsWifiV;
        std::vector<double> probsParticleV;

        // evaluate both modes on the grid nodes: modes[0] -> fused posterior, modes[1] -> Wi-Fi only
        for (MyNode node : grid.getNodes()) {
            double probParzenPosterior = calcKernelDensity(node, modes[0].getParticles());
            probsParticleV.push_back(probParzenPosterior);
            double probParzenWifi = calcKernelDensity(node, modes[1].getParticles());
            probsWifiV.push_back(probParzenWifi);
        }

        // wrap the std::vectors as Eigen vectors (no copy)
        Eigen::Map<Eigen::VectorXd> probsWifi(&probsWifiV[0], probsWifiV.size());
        Eigen::Map<Eigen::VectorXd> probsParticle(&probsParticleV[0], probsParticleV.size());

        // KL divergence between the posterior and the Wi-Fi distribution
        double kld = Divergence::KullbackLeibler<double>::getGeneralFromSamples(probsParticle, probsWifi, Divergence::LOGMODE::NATURALIS);

        // keep the value in the debugging global
        __KLD = kld;

        // map the divergence to (0,1] via an exponential decay
        double expKld = std::exp(-lambda * kld);

        // build the 2x2 mode-transition matrix (each row sums to 1)
        Eigen::MatrixXd m(2, 2);
        m << 1 - expKld, expKld,
             0,          1;
        return m;
    }
    // Parzen-window estimate: weighted sum of Gaussian kernels (sigma = 100 cm) around each particle
    double calcKernelDensity(const MyNode& node, const std::vector<K::Particle<MyState>>& particles) {
        const int size = (int) particles.size();
        double prob = 0;
        #pragma omp parallel for reduction(+:prob) num_threads(6)
        for (int i = 0; i < size; ++i) {
            const double distance = particles[i].state.position.getDistanceInCM(node);
            prob += Distribution::Normal<double>::getProbability(0, 100, distance) * particles[i].weight;
        }
        return prob;
    }

};
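
// A minimal, illustrative sketch (not part of the original filter code) of the mapping
// that update() above applies: the KL divergence between the fused posterior and the
// Wi-Fi-only mode is squashed through an exponential, e = exp(-lambda * kld), and placed
// into a row-stochastic 2x2 matrix
//     [ 1-e   e ]
//     [  0    1 ]
// so that each row sums to 1. For kld -> 0 (the two modes agree) e -> 1, and for large
// kld (the modes disagree) e -> 0; lambda controls how fast the off-diagonal entry decays.
// The helper name below is hypothetical and the function is not used by the filter.
static inline Eigen::MatrixXd sketchTransitionFromKLD(const double kld, const double lambda) {
    const double e = std::exp(-lambda * kld);
    Eigen::MatrixXd m(2, 2);
    m << 1.0 - e, e,
         0.0,     1.0;
    return m;
}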
static double getKernelDensityProbability(std::vector<K::Particle<MyState>>& particles, MyState state, std::vector<K::Particle<MyState>>& samplesWifi) {

    // Parzen-window density over the filter particles (Gaussian kernel, sigma = 100 cm);
    // note: the 'state' parameter is currently unused.
    Distribution::KernelDensity<double, MyState> parzen([&](MyState query) {
        const int size = (int) particles.size();
        double prob = 0;
        #pragma omp parallel for reduction(+:prob) num_threads(6)
        for (int i = 0; i < size; ++i) {
            const double distance = particles[i].state.position.getDistanceInCM(query.position);
            prob += Distribution::Normal<double>::getProbability(0, 100, distance) * particles[i].weight;
        }
        return prob;
    });

    std::vector<double> probsWifiV;
    std::vector<double> probsParticleV;

    // just for plotting
    std::vector<K::Particle<MyState>> samplesParticles;

    // evaluate the Parzen density at every 4th Wi-Fi sample and collect both distributions
    const int step = 4;
    int i = 0;
    for (K::Particle<MyState> particle : samplesWifi) {
        if (++i % step != 0) { continue; }
        MyState sampleState(GridPoint(particle.state.position.x_cm, particle.state.position.y_cm, particle.state.position.z_cm));
        double probiParticle = parzen.getProbability(sampleState);
        probsParticleV.push_back(probiParticle);
        double probiWifi = particle.weight;
        probsWifiV.push_back(probiWifi);
        //samplesParticles.push_back(K::Particle<MyState>(sampleState, probiParticle));
    }

    // wrap the std::vectors as Eigen vectors (no copy)
    Eigen::Map<Eigen::VectorXd> probsWifi(&probsWifiV[0], probsWifiV.size());
    Eigen::Map<Eigen::VectorXd> probsParticle(&probsParticleV[0], probsParticleV.size());

    // divergence between the particle distribution and the Wi-Fi distribution
    double kld = Divergence::KullbackLeibler<double>::getGeneralFromSamples(probsParticle, probsWifi, Divergence::LOGMODE::NATURALIS);
    //double kld = Divergence::JensenShannon<double>::getGeneralFromSamples(probsParticle, probsWifi, Divergence::LOGMODE::NATURALIS);

    // plotting (disabled)
    //plot.debugDistribution1(samplesWifi);
    //plot.debugDistribution1(samplesParticles);

    // estimate the mean (disabled)
    //K::ParticleFilterEstimationOrderedWeightedAverage<MyState> estimateWifi(0.95);
    //const MyState estWifi = estimateWifi.estimate(samplesWifi);
    //plot.addEstimationNodeSmoothed(estWifi.position.inMeter());

    return kld;
}
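
// For reference, a self-contained sketch of a discrete Kullback-Leibler divergence between
// two sample vectors, roughly what Divergence::KullbackLeibler<double>::getGeneralFromSamples()
// is used for above. The actual implementation lives in Indoor/math/divergence and may differ
// (e.g. in how it handles zero entries); this version normalizes both vectors to sum to 1 and
// evaluates D(P||Q) = sum_i p_i * ln(p_i / q_i) with a small epsilon guard. The helper name is
// hypothetical and the function is not used by the filter.
static inline double sketchDiscreteKLD(const std::vector<double>& p, const std::vector<double>& q) {
    double sumP = 0.0, sumQ = 0.0;
    for (double v : p) { sumP += v; }
    for (double v : q) { sumQ += v; }
    double kld = 0.0;
    const size_t n = (p.size() < q.size()) ? p.size() : q.size();
    for (size_t i = 0; i < n; ++i) {
        const double pi = p[i] / sumP;
        double qi = q[i] / sumQ;
        if (qi < 1e-12) { qi = 1e-12; }    // avoid division by zero inside the logarithm
        if (pi > 0.0) { kld += pi * std::log(pi / qi); }
    }
    return kld;
}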
static double kldFromMultivariatNormal(std::vector<K::Particle<MyState>>& particles, MyState state, std::vector<K::Particle<MyState>>& particleWifi) {

    // KLD: take the particles that survived resampling, fit a normal distribution to them,
    // and compare that distribution with the Wi-Fi distribution.
    // TODO: move this into DepletionHelper.h
    Point3 estPos = state.position.inMeter();

    // this is a hack: the sigma of z can become 0, in which case the rank of the covariance
    // drops to 2 and no inverse matrix exists, so a tiny uniform jitter is added to every sample
    std::mt19937_64 rng;
    // initialize the random number generator with a time-dependent seed
    uint64_t timeSeed = std::chrono::high_resolution_clock::now().time_since_epoch().count();
    std::seed_seq ss{uint32_t(timeSeed & 0xffffffff), uint32_t(timeSeed >> 32)};
    rng.seed(ss);
    // uniform jitter between -0.0001 and 0.0001 (meters)
    std::uniform_real_distribution<double> unif(-0.0001, 0.0001);

    // fit a Gaussian to the current particle approximation
    Eigen::MatrixXd m(particles.size(), 3);
    for (int i = 0; i < (int) particles.size(); ++i) {
        m(i, 0) = (particles[i].state.position.x_cm / 100.0) + unif(rng);
        m(i, 1) = (particles[i].state.position.y_cm / 100.0) + unif(rng);
        m(i, 2) = (particles[i].state.position.z_cm / 100.0) + unif(rng);
    }
    Eigen::VectorXd mean(3);
    mean << estPos.x, estPos.y, estPos.z;
    Distribution::NormalDistributionN normParticle = Distribution::NormalDistributionN::getNormalNFromSamplesAndMean(m, mean);

    // create a Gaussian for the Wi-Fi estimate
    Eigen::MatrixXd covWifi(3, 3);
    covWifi << Settings::WiFiModel::sigma, 0, 0,
               0, Settings::WiFiModel::sigma, 0,
               0, 0, 0.01;

    // // calculate the Wi-Fi probability for every node and derive the mean vector
    // WiFiObserverFree wiFiProbability(Settings::WiFiModel::sigma, model);
    // const WiFiMeasurements wifiObs = Settings::WiFiModel::vg_eval.group(obs.wifi);
    // std::vector<MyNode> allNodes = grid.getNodes();
    // std::vector<K::Particle<MyState>> particleWifi;
    // // problem: by using only the grid nodes, the mean is of course pulled towards the center
    // // of the building instead of its border, so the distribution has to be built from more
    // // nodes or via sampling -- otherwise the mean is biased!
    // //#pragma omp parallel for num_threads(6)
    // for (MyNode node : allNodes) {
    //     double prob = wiFiProbability.getProbability(node, ts, wifiObs);
    //     K::Particle<MyState> tmp(MyState(GridPoint(node.x_cm, node.y_cm, node.z_cm)), prob);
    //     //#pragma omp critical
    //     particleWifi.push_back(tmp);
    // }
    // std::vector<double> floors;
    // floors.push_back(0.0);
    // floors.push_back(4.0);
    // floors.push_back(7.4);
    // floors.push_back(10.8);
    // #pragma omp parallel for num_threads(6)
    // for (int x = -20; x < 100; ++x) {
    //     for (int y = -20; y < 75; ++y) {
    //         for (double z : floors) {
    //             double X = x;// / 10.0;
    //             double Y = y;// / 10.0;
    //             double Z = z;// / 10.0;
    //             Point3 pt(X, Y, Z);
    //             double prob = wiFiProbability.getProbability(pt + Point3(0, 0, 1.3), ts, wifiObs);
    //             K::Particle<MyState> tmp(MyState(GridPoint(X * 100.0, Y * 100.0, Z * 100.0)), prob);
    //             #pragma omp critical
    //             particleWifi.push_back(tmp);
    //         }
    //     }
    // }

    // estimate the Wi-Fi mean
    K::ParticleFilterEstimationOrderedWeightedAverage<MyState> estimateWifi(0.95);
    const MyState estWifi = estimateWifi.estimate(particleWifi);

    // build the matrix with the Wi-Fi particles (disabled)
    // Eigen::MatrixXd mW(particleWifi.size(), 3);
    // for (int i = 0; i < particleWifi.size(); ++i) {
    //     mW(i, 0) = particleWifi[i].state.position.x_cm / 100.0;
    //     mW(i, 1) = particleWifi[i].state.position.y_cm / 100.0;
    //     mW(i, 2) = estWifi.position.z_cm / 100.0;
    // }
    Eigen::VectorXd meanWifi(3);
    meanWifi << estWifi.position.x_cm / 100.0, estWifi.position.y_cm / 100.0, estWifi.position.z_cm / 100.0;
    Distribution::NormalDistributionN normWifi(meanWifi, covWifi);
    //Distribution::NormalDistributionN normWifi = Distribution::NormalDistributionN::getNormalNFromSamplesAndMean(mW, meanWifi);

    // closed-form KL divergence between the two Gaussians
    double kld = Divergence::KullbackLeibler<double>::getMultivariateGauss(normParticle, normWifi);

    //plot.debugDistribution1(particleWifi);
    //plot.drawNormalN1(normParticle);
    //plot.drawNormalN2(normWifi);
    //plot.addEstimationNodeSmoothed(estWifi.position.inMeter());

    return kld;
}
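
// For reference, the closed-form KL divergence between two multivariate normals
// N0(mu0, S0) and N1(mu1, S1), which is presumably what
// Divergence::KullbackLeibler<double>::getMultivariateGauss() evaluates above:
//     D(N0 || N1) = 0.5 * ( tr(S1^-1 S0) + (mu1-mu0)^T S1^-1 (mu1-mu0) - k + ln(det S1 / det S0) )
// with k the dimension (3 here). The sketch below works directly on Eigen vectors/matrices
// instead of Distribution::NormalDistributionN, assumes the Eigen headers pulled in above
// provide inverse()/determinant(), and is illustrative only; its name is hypothetical.
static inline double sketchGaussKLD(const Eigen::VectorXd& mu0, const Eigen::MatrixXd& S0,
                                    const Eigen::VectorXd& mu1, const Eigen::MatrixXd& S1) {
    const double k = (double) mu0.size();
    const Eigen::MatrixXd S1inv = S1.inverse();
    const Eigen::VectorXd diff = mu1 - mu0;
    const double trace = (S1inv * S0).trace();
    const double maha = diff.dot(S1inv * diff);                        // Mahalanobis term
    const double logDet = std::log(S1.determinant() / S0.determinant());
    return 0.5 * (trace + maha - k + logDet);
}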
#endif // KLB_H