256
smc/filtering/ParticleFilter.h
Normal file
@@ -0,0 +1,256 @@
|
||||
/*
|
||||
* ParticleFilter.h
|
||||
*
|
||||
* Created on: Sep 17, 2013
|
||||
* Author: Frank Ebner
|
||||
*/
|
||||
|
||||
#ifndef PARTICLEFILTER_H_
|
||||
#define PARTICLEFILTER_H_
|
||||
|
||||
#include <vector>
|
||||
#include <memory>
|
||||
|
||||
#include "../Particle.h"
|
||||
|
||||
#include "resampling/ParticleFilterResampling.h"
|
||||
#include "estimation/ParticleFilterEstimation.h"
|
||||
#include "ParticleFilterTransition.h"
|
||||
#include "ParticleFilterEvaluation.h"
|
||||
#include "ParticleFilterInitializer.h"
|
||||
|
||||
#include "../../Assertions.h"
|
||||
|
||||
namespace SMC {
|
||||
|
||||
/**
|
||||
* the main-class for the particle filter
|
||||
* @param State the (user-defined) state for each particle
* @param Control the (user-defined) control / input data
* @param Observation the observation (sensor) data
|
||||
*/
|
||||
template <typename State, typename Control, typename Observation>
|
||||
|
||||
class ParticleFilter {
|
||||
|
||||
private:
|
||||
|
||||
/** all used particles */
|
||||
std::vector<Particle<State>> particles;
|
||||
|
||||
/** the resampler to use */
|
||||
std::unique_ptr<ParticleFilterResampling<State>> resampler;
|
||||
|
||||
/** the estimation function to use */
|
||||
std::unique_ptr<ParticleFilterEstimation<State>> estimation;
|
||||
|
||||
/** the transition function to use */
|
||||
std::unique_ptr<ParticleFilterTransition<State, Control>> transition;
|
||||
|
||||
/** the evaluation function to use */
|
||||
std::unique_ptr<ParticleFilterEvaluation<State, Observation>> evaluation;
|
||||
|
||||
/** the initialization function to use */
|
||||
std::unique_ptr<ParticleFilterInitializer<State>> initializer;
|
||||
|
||||
/** the percentage-of-efficient-particles-threshold for resampling */
|
||||
double nEffThresholdPercent = 0.25;
|
||||
|
||||
|
||||
|
||||
public:
|
||||
|
||||
/** ctor */
|
||||
ParticleFilter(const uint32_t numParticles, std::unique_ptr<ParticleFilterInitializer<State>> initializer) {
|
||||
particles.resize(numParticles);
|
||||
setInitializier(std::move(initializer));
|
||||
init();
|
||||
}
|
||||
|
||||
/** dtor */
|
||||
~ParticleFilter() {
|
||||
;
|
||||
}
|
||||
|
||||
/** access to all particles */
|
||||
const std::vector<Particle<State>>& getParticles() {
|
||||
return particles;
|
||||
}
|
||||
|
||||
/** initialize/re-start the particle filter */
|
||||
void init() {
|
||||
Assert::isNotNull(initializer, "initializer MUST not be null! call setInitializer() first!");
|
||||
initializer->initialize(particles);
|
||||
}
|
||||
|
||||
|
||||
/** set the resampling method to use */
|
||||
void setResampling(std::unique_ptr<ParticleFilterResampling<State>> resampler) {
|
||||
Assert::isNotNull(resampler, "setResampling() MUST not be called with a nullptr!");
|
||||
this->resampler = std::move(resampler);
|
||||
}
|
||||
|
||||
/** set the estimation method to use */
|
||||
void setEstimation(std::unique_ptr<ParticleFilterEstimation<State>> estimation) {
|
||||
Assert::isNotNull(estimation, "setEstimation() MUST not be called with a nullptr!");
|
||||
this->estimation = std::move(estimation);
|
||||
}
|
||||
|
||||
/** set the transition method to use */
|
||||
void setTransition(std::unique_ptr<ParticleFilterTransition<State, Control>> transition) {
|
||||
Assert::isNotNull(transition, "setTransition() MUST not be called with a nullptr!");
|
||||
this->transition = std::move(transition);
|
||||
}
|
||||
|
||||
/** get the used transition method */
|
||||
ParticleFilterTransition<State, Control>* getTransition() {
|
||||
return this->transition.get();
|
||||
}
|
||||
|
||||
/** set the evaluation method to use */
|
||||
void setEvaluation(std::unique_ptr<ParticleFilterEvaluation<State, Observation>> evaluation) {
|
||||
Assert::isNotNull(evaluation, "setEvaluation() MUST not be called with a nullptr!");
|
||||
this->evaluation = std::move(evaluation);
|
||||
}
|
||||
|
||||
/** set the initialization method to use */
|
||||
void setInitializier(std::unique_ptr<ParticleFilterInitializer<State>> initializer) {
|
||||
Assert::isNotNull(initializer, "setInitializer() MUST not be called with a nullptr!");
|
||||
this->initializer = std::move(initializer);
|
||||
}
|
||||
|
||||
/** set the resampling threshold as the percentage of efficient particles */
|
||||
void setNEffThreshold(const double thresholdPercent) {
|
||||
this->nEffThresholdPercent = thresholdPercent;
|
||||
}
|
||||
|
||||
/** N_eff from the previous update; initialized to a huge value so the very first update() never triggers resampling */
double lastNEff = 9999999999999;
|
||||
|
||||
/** perform resampling -> transition -> evaluation -> estimation */
|
||||
State update(const Control* control, const Observation& observation) {
|
||||
|
||||
// sanity checks (if enabled)
|
||||
Assert::isNotNull(resampler, "resampler MUST not be null! call setResampler() first!");
|
||||
Assert::isNotNull(transition, "transition MUST not be null! call setTransition() first!");
|
||||
Assert::isNotNull(evaluation, "evaluation MUST not be null! call setEvaluation() first!");
|
||||
Assert::isNotNull(estimation, "estimation MUST not be null! call setEstimation() first!");
|
||||
|
||||
// if the number of efficient particles is too low, perform resampling
|
||||
if (lastNEff < particles.size() * nEffThresholdPercent) {resampler->resample(particles); }
|
||||
|
||||
// perform the transition step
|
||||
transition->transition(particles, control);
|
||||
|
||||
// perform the evaluation step and calculate the sum of all particle weights
|
||||
evaluation->evaluation(particles, observation);
|
||||
|
||||
// normalize the particle weights and thereby calculate N_eff
|
||||
lastNEff = normalize();
|
||||
|
||||
//std::cout << "normalized. n_eff is " << lastNEff << std::endl;
|
||||
|
||||
// estimate the current state
|
||||
const State est = estimation->estimate(particles);
|
||||
|
||||
// done
|
||||
return est;
|
||||
|
||||
}
|
||||
|
||||
/** perform only the transition step */
|
||||
void updateTransitionOnly(const Control* control) {
|
||||
|
||||
// sanity checks (if enabled)
|
||||
Assert::isNotNull(transition, "transition MUST not be null! call setTransition() first!");
|
||||
|
||||
// perform the transition step
|
||||
transition->transition(particles, control);
|
||||
|
||||
}
|
||||
|
||||
/** perform only the evaluation step */
|
||||
State updateEvaluationOnly(const Observation& observation) {
|
||||
|
||||
// sanity checks (if enabled)
|
||||
Assert::isNotNull(resampler, "resampler MUST not be null! call setResampler() first!");
|
||||
Assert::isNotNull(evaluation, "evaluation MUST not be null! call setEvaluation() first!");
|
||||
Assert::isNotNull(estimation, "estimation MUST not be null! call setEstimation() first!");
|
||||
|
||||
// perform the evaluation step and calculate the sum of all particle weights
|
||||
evaluation->evaluation(particles, observation);
|
||||
|
||||
// no longer needed: returning and checking the weight-sum here was too easy to get wrong
|
||||
// sanity check
|
||||
//Assert::isNotNaN(weightSum, "sum of all particle weights (returned from eval) is NAN!");
|
||||
//Assert::isNotNull(weightSum, "sum of all particle weights (returned from eval) is 0.0!");
|
||||
|
||||
// normalize the particle weights and thereby calculate N_eff
|
||||
const double neff = normalize();
|
||||
|
||||
// estimate the current state
|
||||
const State est = estimation->estimate(particles);
|
||||
|
||||
// if the number of efficient particles is too low, perform resampling
|
||||
if (neff < particles.size() * nEffThresholdPercent) { resampler->resample(particles); }
|
||||
|
||||
// done
|
||||
return est;
|
||||
|
||||
}
|
||||
|
||||
/** estimate the current state without performing an update or transition, based only on the current weights */
|
||||
State estimate() {
|
||||
|
||||
// sanity checks (if enabled)
|
||||
Assert::isNotNull(estimation, "estimation MUST not be null! call setEstimation() first!");
|
||||
|
||||
return estimation->estimate(particles);
|
||||
|
||||
}
|
||||
|
||||
private:
|
||||
|
||||
/** normalize the particle weights so they sum to 1.0, perform some sanity checks, and return N_eff */
|
||||
double normalize() {
|
||||
|
||||
// calculate sum(weights)
|
||||
//double min1 = 9999999;
|
||||
double weightSum = 0.0;
|
||||
for (const auto& p : particles) {
|
||||
weightSum += p.weight;
|
||||
//if (p.weight < min1) {min1 = p.weight;}
|
||||
}
|
||||
|
||||
// sanity check. always!
|
||||
if (weightSum != weightSum) {
|
||||
throw Exception("sum of particle-weights is NaN");
}
if (weightSum == 0) {
throw Exception("sum of particle-weights is 0.0");
|
||||
}
|
||||
|
||||
// normalize and calculate N_eff
|
||||
double sum2 = 0.0;
|
||||
//double min2 = 9999999;
|
||||
for (auto& p : particles) {
|
||||
p.weight /= weightSum;
|
||||
//if (p.weight < min2) {min2 = p.weight;}
|
||||
sum2 += (p.weight * p.weight);
|
||||
}
|
||||
|
||||
// N_eff
|
||||
return 1.0 / sum2;
|
||||
|
||||
}
|
||||
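// note on N_eff: for normalized weights w_i (sum w_i = 1) the effective sample size
// is N_eff = 1 / sum_i(w_i^2). It equals the number of particles when all weights are
// equal and drops towards 1 when a single particle dominates, which is why update()
// compares it against particles.size() * nEffThresholdPercent before resampling.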
|
||||
// /** calculate the number of efficient particles (N_eff) */
|
||||
// double getNeff() const {
|
||||
// double sum = 0.0;
|
||||
// for (auto& p : particles) {sum += (p.weight * p.weight);}
|
||||
// return 1.0 / sum;
|
||||
// }
|
||||
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
#endif /* PARTICLEFILTER_H_ */
|
||||
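To illustrate how the pieces above fit together, here is a minimal, self-contained usage sketch. It is not part of this change: the 1-D demo types, the Demo* component classes, the include paths (repository root assumed on the include path) and the use of C++14 std::make_unique are all assumptions made purely for illustration. Real applications would plug in their own State/Control/Observation types and the resamplers/estimators from the files further below.

#include <algorithm>
#include <cmath>
#include <iostream>
#include <memory>
#include <random>
#include <vector>

#include "smc/filtering/ParticleFilter.h"
#include "smc/filtering/estimation/ParticleFilterEstimationMax.h"

// toy 1-D types, for illustration only
struct DemoState       { double x = 0; };
struct DemoControl     { double shift = 0; };
struct DemoObservation { double measuredX = 0; };

// p(q_0): scatter the particles uniformly over the state space
class DemoInitializer : public SMC::ParticleFilterInitializer<DemoState> {
	std::mt19937 gen{42};
public:
	void initialize(std::vector<SMC::Particle<DemoState>>& particles) override {
		std::uniform_real_distribution<double> dist(0.0, 100.0);
		for (auto& p : particles) { p.state.x = dist(gen); p.weight = 1.0 / particles.size(); }
	}
};

// p(q_t | q_t-1): move every particle by the control input plus some process noise
class DemoTransition : public SMC::ParticleFilterTransition<DemoState, DemoControl> {
	std::mt19937 gen{43};
public:
	void transition(std::vector<SMC::Particle<DemoState>>& particles, const DemoControl* control) override {
		std::normal_distribution<double> noise(0.0, 0.5);
		for (auto& p : particles) { p.state.x += (control ? control->shift : 0.0) + noise(gen); }
	}
};

// p(o_t | q_t): weight every particle by a Gaussian likelihood and return the weight sum
class DemoEvaluation : public SMC::ParticleFilterEvaluation<DemoState, DemoObservation> {
public:
	double evaluation(std::vector<SMC::Particle<DemoState>>& particles, const DemoObservation& obs) override {
		double sum = 0;
		for (auto& p : particles) {
			const double d = p.state.x - obs.measuredX;
			p.weight *= std::exp(-0.5 * d * d / (2.0 * 2.0));	// sensor sigma = 2
			sum += p.weight;
		}
		return sum;	// the interface requires the sum of all weights
	}
};

// minimal multinomial resampling; real code would use one of the resamplers further below
class DemoResampling : public SMC::ParticleFilterResampling<DemoState> {
	std::mt19937 gen{44};
public:
	void resample(std::vector<SMC::Particle<DemoState>>& particles) override {
		std::vector<SMC::Particle<DemoState>> copy = particles;
		double cum = 0;
		for (auto& p : copy) { cum += p.weight; p.weight = cum; }	// cumulative weights
		std::uniform_real_distribution<double> dist(0.0, cum);
		const auto comp = [] (const SMC::Particle<DemoState>& p, const double d) { return p.weight < d; };
		for (auto& p : particles) {					// draw with replacement
			p = *std::lower_bound(copy.begin(), copy.end(), dist(gen), comp);
			p.weight = 1.0 / particles.size();
		}
	}
};

int main() {

	// construct the filter and wire up the components
	SMC::ParticleFilter<DemoState, DemoControl, DemoObservation> pf(1000, std::make_unique<DemoInitializer>());
	pf.setTransition(std::make_unique<DemoTransition>());
	pf.setEvaluation(std::make_unique<DemoEvaluation>());
	pf.setEstimation(std::make_unique<SMC::ParticleFilterEstimationMax<DemoState>>());
	pf.setResampling(std::make_unique<DemoResampling>());
	pf.setNEffThreshold(0.25);

	// one update() per time-step: transition, evaluation, estimation (and resampling when N_eff drops)
	for (int t = 0; t < 10; ++t) {
		DemoControl control;     control.shift = 1.0;		// e.g. odometry: moved +1 per step
		DemoObservation obs;     obs.measuredX = 10.0 + 1.0 * t;	// e.g. a noisy position sensor
		const DemoState est = pf.update(&control, obs);
		std::cout << "t=" << t << " estimated x=" << est.x << std::endl;
	}
	return 0;
}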
29
smc/filtering/ParticleFilterEvaluation.h
Normal file
@@ -0,0 +1,29 @@
#ifndef K_MATH_FILTERS_PARTICLE_PARTICLEFILTEREVALUATION_H
#define K_MATH_FILTERS_PARTICLE_PARTICLEFILTEREVALUATION_H

#include <vector>

#include "../Particle.h"

namespace SMC {

	/**
	 * interface for the user-defined particle-evaluation.
	 * the evaluation weighs the particle by comparing its state with the observation p(o_t | q_t)
	 */
	template <typename State, typename Observation>
	class ParticleFilterEvaluation {

	public:

		/**
		 * evaluate all particles (update p.weight) depending on their state and the current observation.
		 * this method MUST return the sum of all weights (used for normalization)
		 */
		virtual double evaluation(std::vector<Particle<State>>& particles, const Observation& observation) = 0;

	};

}

#endif // K_MATH_FILTERS_PARTICLE_PARTICLEFILTEREVALUATION_H
283
smc/filtering/ParticleFilterHistory.h
Normal file
@@ -0,0 +1,283 @@
|
||||
/*
|
||||
* ParticleFilterHistory.h
|
||||
*
|
||||
* Created on: Jul 13, 2015
|
||||
* Author: Toni Fetzer
|
||||
*/
|
||||
|
||||
#ifndef PARTICLEFILTERHISTORY_H_
|
||||
#define PARTICLEFILTERHISTORY_H_
|
||||
|
||||
#include <vector>
|
||||
#include <memory>
|
||||
|
||||
#include "../Particle.h"
|
||||
|
||||
#include "resampling/ParticleFilterResampling.h"
|
||||
#include "estimation/ParticleFilterEstimation.h"
|
||||
#include "ParticleFilterTransition.h"
|
||||
#include "ParticleFilterEvaluation.h"
|
||||
#include "ParticleFilterInitializer.h"
|
||||
|
||||
#include "../../Assertions.h"
|
||||
|
||||
namespace SMC {
|
||||
|
||||
/**
|
||||
* a particle filter variant that additionally keeps a copy of the particle
* set from before resampling (see getNonResamplingParticles())
* @param State the (user-defined) state for each particle
* @param Control the (user-defined) control / input data
* @param Observation the observation (sensor) data
|
||||
*/
|
||||
template <typename State, typename Control, typename Observation>
|
||||
|
||||
class ParticleFilterHistory {
|
||||
|
||||
private:
|
||||
|
||||
/** all used particles */
|
||||
std::vector<Particle<State>> particles;
|
||||
|
||||
/** all non resampled particles */
|
||||
std::vector<Particle<State>> particlesNoResampling;
|
||||
|
||||
/** the resampler to use */
|
||||
std::unique_ptr<ParticleFilterResampling<State>> resampler;
|
||||
|
||||
/** the estimation function to use */
|
||||
std::unique_ptr<ParticleFilterEstimation<State>> estimation;
|
||||
|
||||
/** the transition function to use */
|
||||
std::unique_ptr<ParticleFilterTransition<State, Control>> transition;
|
||||
|
||||
/** the evaluation function to use */
|
||||
std::unique_ptr<ParticleFilterEvaluation<State, Observation>> evaluation;
|
||||
|
||||
/** the initialization function to use */
|
||||
std::unique_ptr<ParticleFilterInitializer<State>> initializer;
|
||||
|
||||
/** the percentage-of-efficient-particles-threshold for resampling */
|
||||
double nEffThresholdPercent = 0.25;
|
||||
|
||||
|
||||
|
||||
public:
|
||||
|
||||
/** ctor */
|
||||
ParticleFilterHistory(const uint32_t numParticles, std::unique_ptr<ParticleFilterInitializer<State>> initializer) {
|
||||
particles.resize(numParticles);
|
||||
particlesNoResampling.resize(numParticles);
|
||||
setInitializier(std::move(initializer));
|
||||
init();
|
||||
}
|
||||
|
||||
/** dtor */
|
||||
~ParticleFilterHistory() {
|
||||
;
|
||||
}
|
||||
|
||||
/** access to all particles */
|
||||
const std::vector<Particle<State>>& getParticles() {
|
||||
return particles;
|
||||
}
|
||||
|
||||
/** access to all non resample particles */
|
||||
const std::vector<Particle<State>>& getNonResamplingParticles() {
|
||||
return particlesNoResampling;
|
||||
}
|
||||
|
||||
/** initialize/re-start the particle filter */
|
||||
void init() {
|
||||
Assert::isNotNull(initializer, "initializer MUST not be null! call setInitializer() first!");
|
||||
initializer->initialize(particles);
|
||||
}
|
||||
|
||||
/** set the heading of all particles to the given angle (assumes State provides walkState.heading) */
void setAngle(const double angle){
|
||||
for(SMC::Particle<State>& p : particles){
|
||||
p.state.walkState.heading = angle;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/** set the resampling method to use */
|
||||
void setResampling(std::unique_ptr<ParticleFilterResampling<State>> resampler) {
|
||||
Assert::isNotNull(resampler, "setResampling() MUST not be called with a nullptr!");
|
||||
this->resampler = std::move(resampler);
|
||||
}
|
||||
|
||||
/** set the estimation method to use */
|
||||
void setEstimation(std::unique_ptr<ParticleFilterEstimation<State>> estimation) {
|
||||
Assert::isNotNull(estimation, "setEstimation() MUST not be called with a nullptr!");
|
||||
this->estimation = std::move(estimation);
|
||||
}
|
||||
|
||||
/** set the transition method to use */
|
||||
void setTransition(std::unique_ptr<ParticleFilterTransition<State, Control>> transition) {
|
||||
Assert::isNotNull(transition, "setTransition() MUST not be called with a nullptr!");
|
||||
this->transition = std::move(transition);
|
||||
}
|
||||
|
||||
/** get the used transition method */
|
||||
ParticleFilterTransition<State, Control>* getTransition() {
|
||||
return this->transition.get();
|
||||
}
|
||||
|
||||
/** set the evaluation method to use */
|
||||
void setEvaluation(std::unique_ptr<ParticleFilterEvaluation<State, Observation>> evaluation) {
|
||||
Assert::isNotNull(evaluation, "setEvaluation() MUST not be called with a nullptr!");
|
||||
this->evaluation = std::move(evaluation);
|
||||
}
|
||||
|
||||
/** set the initialization method to use */
|
||||
void setInitializier(std::unique_ptr<ParticleFilterInitializer<State>> initializer) {
|
||||
Assert::isNotNull(initializer, "setInitializer() MUST not be called with a nullptr!");
|
||||
this->initializer = std::move(initializer);
|
||||
}
|
||||
|
||||
/** set the resampling threshold as the percentage of efficient particles */
|
||||
void setNEffThreshold(const double thresholdPercent) {
|
||||
this->nEffThresholdPercent = thresholdPercent;
|
||||
}
|
||||
|
||||
/** perform transition -> evaluation -> estimation -> resampling (if needed) */
|
||||
State update(const Control* control, const Observation& observation) {
|
||||
|
||||
// sanity checks (if enabled)
|
||||
Assert::isNotNull(resampler, "resampler MUST not be null! call setResampler() first!");
|
||||
Assert::isNotNull(transition, "transition MUST not be null! call setTransition() first!");
|
||||
Assert::isNotNull(evaluation, "evaluation MUST not be null! call setEvaluation() first!");
|
||||
Assert::isNotNull(estimation, "estimation MUST not be null! call setEstimation() first!");
|
||||
|
||||
// perform the transition step
|
||||
transition->transition(particles, control);
|
||||
|
||||
// perform the evaluation step and calculate the sum of all particle weights
|
||||
const double weightSum = evaluation->evaluation(particles, observation);
|
||||
|
||||
// normalize the particle weights and thereby calculate N_eff
|
||||
const double neff = normalize(weightSum);
|
||||
|
||||
// estimate the current state
|
||||
const State est = estimation->estimate(particles);
|
||||
|
||||
// save the particles before resampling to preserve their weights at this time step
|
||||
particlesNoResampling = particles;
|
||||
|
||||
// if the number of efficient particles is too low, perform resampling
|
||||
if (neff < particles.size() * nEffThresholdPercent) { resampler->resample(particles); }
|
||||
|
||||
// done
|
||||
return est;
|
||||
|
||||
}
|
||||
|
||||
/** perform transition -> evaluation -> estimation -> KLD computation -> resampling (if needed) */
|
||||
State update(const Control* control, const Observation& observation, std::vector<Particle<State>>& particlesWifi,
|
||||
std::function<double(std::vector<Particle<State>>&, State, std::vector<Particle<State>>&)> calcKLD, double& kld) {
|
||||
|
||||
// sanity checks (if enabled)
|
||||
Assert::isNotNull(resampler, "resampler MUST not be null! call setResampler() first!");
|
||||
Assert::isNotNull(transition, "transition MUST not be null! call setTransition() first!");
|
||||
Assert::isNotNull(evaluation, "evaluation MUST not be null! call setEvaluation() first!");
|
||||
Assert::isNotNull(estimation, "estimation MUST not be null! call setEstimation() first!");
|
||||
//Assert::isNotNull(kld, "kld MUST not be null! call setEstimation() first!");
|
||||
|
||||
// perform the transition step
|
||||
transition->transition(particles, control);
|
||||
|
||||
// perform the evaluation step and calculate the sum of all particle weights
|
||||
const double weightSum = evaluation->evaluation(particles, observation);
|
||||
|
||||
// normalize the particle weights and thereby calculate N_eff
|
||||
const double neff = normalize(weightSum);
|
||||
|
||||
// estimate the current state
|
||||
const State est = estimation->estimate(particles);
|
||||
|
||||
// save the particles before resampling to preserve their weights at this time step
|
||||
particlesNoResampling = particles;
|
||||
|
||||
// calculate the current divergence between the wifi and the particle probability distributions
|
||||
kld = calcKLD(particles, est, particlesWifi);
|
||||
|
||||
// if the number of efficient particles is too low, perform resampling
|
||||
if (neff < particles.size() * nEffThresholdPercent) { resampler->resample(particles, kld, particlesWifi); }
|
||||
|
||||
// done
|
||||
return est;
|
||||
|
||||
}
|
||||
|
||||
|
||||
/** perform only the transition step */
|
||||
void updateTransitionOnly(const Control* control) {
|
||||
|
||||
// sanity checks (if enabled)
|
||||
Assert::isNotNull(transition, "transition MUST not be null! call setTransition() first!");
|
||||
|
||||
// perform the transition step
|
||||
transition->transition(particles, control);
|
||||
|
||||
}
|
||||
|
||||
/** perform only the evaluation step */
|
||||
State updateEvaluationOnly(const Observation& observation) {
|
||||
|
||||
// sanity checks (if enabled)
|
||||
Assert::isNotNull(resampler, "resampler MUST not be null! call setResampler() first!");
|
||||
Assert::isNotNull(evaluation, "evaluation MUST not be null! call setEvaluation() first!");
|
||||
Assert::isNotNull(estimation, "estimation MUST not be null! call setEstimation() first!");
|
||||
|
||||
// perform the evaluation step and calculate the sum of all particle weights
|
||||
const double weightSum = evaluation->evaluation(particles, observation);
|
||||
|
||||
// sanity check
|
||||
Assert::isNotNaN(weightSum, "sum of all particle weights (returned from eval) is NAN!");
|
||||
Assert::isNotNull(weightSum, "sum of all particle weights (returned from eval) is 0.0!");
|
||||
|
||||
// normalize the particle weights and thereby calculate N_eff
|
||||
const double neff = normalize(weightSum);
|
||||
|
||||
// estimate the current state
|
||||
const State est = estimation->estimate(particles);
|
||||
|
||||
// if the number of efficient particles is too low, perform resampling
|
||||
if (neff < particles.size() * nEffThresholdPercent) { resampler->resample(particles); }
|
||||
|
||||
// done
|
||||
return est;
|
||||
|
||||
}
|
||||
|
||||
/** estimate the current state without performing an update or transition, based only on the current weights */
|
||||
State estimate() {
|
||||
|
||||
// sanity checks (if enabled)
|
||||
Assert::isNotNull(estimation, "estimation MUST not be null! call setEstimation() first!");
|
||||
|
||||
return estimation->estimate(particles);
|
||||
|
||||
}
|
||||
|
||||
private:
|
||||
|
||||
/** normalize the particle weights so they sum to 1.0 and return N_eff */
|
||||
double normalize(const double weightSum) {
|
||||
double sum = 0.0;
|
||||
for (auto& p : particles) {
|
||||
p.weight /= weightSum;
|
||||
sum += (p.weight * p.weight);
|
||||
}
|
||||
return 1.0 / sum;
|
||||
}
|
||||
|
||||
/** calculate the number of efficient particles (N_eff) */
|
||||
double getNeff() const {
|
||||
double sum = 0.0;
|
||||
for (auto& p : particles) {sum += (p.weight * p.weight);}
|
||||
return 1.0 / sum;
|
||||
}
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
#endif /* PARTICLEFILTERHISTORY_H_ */
|
||||
25
smc/filtering/ParticleFilterInitializer.h
Normal file
@@ -0,0 +1,25 @@
#ifndef K_MATH_FILTER_PARTICLES_PARTICLEFILTERINITIALIZER_H
#define K_MATH_FILTER_PARTICLES_PARTICLEFILTERINITIALIZER_H

#include <vector>
#include "../Particle.h"

namespace SMC {

	/**
	 * interface for particle filter initializers.
	 * an initializer "configures" all particles before the filter starts: p(q_0)
	 */
	template <typename State>
	class ParticleFilterInitializer {

	public:

		/** the initializer must set up each particle within the given vector */
		virtual void initialize(std::vector<Particle<State>>& particles) = 0;

	};

}

#endif // K_MATH_FILTER_PARTICLES_PARTICLEFILTERINITIALIZER_H
241
smc/filtering/ParticleFilterMixing.h
Normal file
@@ -0,0 +1,241 @@
|
||||
#ifndef PARTICLEFILTERMIXING_H
|
||||
#define PARTICLEFILTERMIXING_H
|
||||
|
||||
#include <vector>
|
||||
#include <memory>
|
||||
|
||||
#include "../Particle.h"
|
||||
|
||||
#include "resampling/ParticleFilterResampling.h"
|
||||
#include "estimation/ParticleFilterEstimation.h"
|
||||
#include "ParticleFilterTransition.h"
|
||||
#include "ParticleFilterEvaluation.h"
|
||||
#include "ParticleFilterInitializer.h"
|
||||
|
||||
#include "../../Assertions.h"
|
||||
|
||||
namespace SMC {
|
||||
|
||||
/**
|
||||
* a particle filter variant intended to serve as one mode-filter within an
* interacting-multiple-model particle filter (IMMPF); it additionally tracks
* the unnormalized weight sum and the mode probabilities
* @param State the (user-defined) state for each particle
* @param Control the (user-defined) control / input data
* @param Observation the observation (sensor) data
|
||||
*/
|
||||
template <typename State, typename Control, typename Observation>
|
||||
class ParticleFilterMixing {
|
||||
|
||||
private:
|
||||
|
||||
/** all used particles */
|
||||
std::vector<Particle<State>> particles;
|
||||
|
||||
/** the current calculated estimation */
|
||||
State estimation;
|
||||
|
||||
/** the resampler to use */
|
||||
std::shared_ptr<ParticleFilterResampling<State>> resampler;
|
||||
|
||||
/** the estimation function to use */
|
||||
std::shared_ptr<ParticleFilterEstimation<State>> estimator;
|
||||
|
||||
/** the transition function to use */
|
||||
std::shared_ptr<ParticleFilterTransition<State, Control>> transition;
|
||||
|
||||
/** the evaluation function to use */
|
||||
std::shared_ptr<ParticleFilterEvaluation<State, Observation>> evaluation;
|
||||
|
||||
/** the initialization function to use */
|
||||
std::shared_ptr<ParticleFilterInitializer<State>> initializer;
|
||||
|
||||
/** the percentage-of-efficient-particles-threshold for resampling */
|
||||
double nEffThresholdPercent = 0.25;
|
||||
|
||||
/** the current sum of all weights NOT normalized*/
|
||||
double weightSum = 1.0;
|
||||
|
||||
/** the predicted mode probability P(m_t|Z_t-1) */
|
||||
double predictedModeProbability = 1.0;
|
||||
|
||||
/** the posterior probability of the mode p(m_t | Z_t)*/
|
||||
double modePosteriorProbability = 1.0;
|
||||
|
||||
/** the transition mode probability P(m_t-1 | m_t, Z_t-1)*/
|
||||
double transitionModeProbability = 1.0;
|
||||
|
||||
public:
|
||||
|
||||
/** ctor
|
||||
* NOTE: the modePosteriorProbability needs to be normalized depending on the number of filters within the IMMPF!
|
||||
*/
|
||||
ParticleFilterMixing(const uint32_t numParticles, std::shared_ptr<ParticleFilterInitializer<State>> initializer, double modePosteriorProbability) {
|
||||
this->modePosteriorProbability = modePosteriorProbability;
|
||||
|
||||
particles.resize(numParticles);
|
||||
setInitializier(std::move(initializer));
|
||||
init();
|
||||
}
|
||||
|
||||
/** dtor */
|
||||
~ParticleFilterMixing() {
|
||||
;
|
||||
}
|
||||
|
||||
/** access to all particles */
|
||||
const std::vector<Particle<State>>& getParticles() const {
|
||||
return this->particles;
|
||||
}
|
||||
|
||||
void setParticles(const std::vector<Particle<State>>& newParticles){
|
||||
this->particles = newParticles;
|
||||
}
|
||||
|
||||
/** get the current estimation */
|
||||
const State getEstimation() const {
|
||||
return estimation;
|
||||
}
|
||||
|
||||
/** initialize/re-start the particle filter */
|
||||
void init() {
|
||||
Assert::isNotNull(initializer, "initializer MUST not be null! call setInitializer() first!");
|
||||
initializer->initialize(particles);
|
||||
}
|
||||
|
||||
|
||||
/** set the resampling method to use */
|
||||
void setResampling(std::shared_ptr<ParticleFilterResampling<State>> resampler) {
|
||||
Assert::isNotNull(resampler, "setResampling() MUST not be called with a nullptr!");
|
||||
this->resampler = std::move(resampler);
|
||||
}
|
||||
|
||||
/** set the estimation method to use */
|
||||
void setEstimator(std::shared_ptr<ParticleFilterEstimation<State>> estimator) {
|
||||
Assert::isNotNull(estimator, "setEstimation() MUST not be called with a nullptr!");
|
||||
this->estimator = std::move(estimator);
|
||||
}
|
||||
|
||||
/** set the transition method to use */
|
||||
void setTransition(std::shared_ptr<ParticleFilterTransition<State, Control>> transition) {
|
||||
Assert::isNotNull(transition, "setTransition() MUST not be called with a nullptr!");
|
||||
this->transition = std::move(transition);
|
||||
}
|
||||
|
||||
/** get the used transition method */
|
||||
ParticleFilterTransition<State, Control>* getTransition() {
|
||||
return this->transition.get();
|
||||
}
|
||||
|
||||
/** set the evaluation method to use */
|
||||
void setEvaluation(std::shared_ptr<ParticleFilterEvaluation<State, Observation>> evaluation) {
|
||||
Assert::isNotNull(evaluation, "setEvaluation() MUST not be called with a nullptr!");
|
||||
this->evaluation = std::move(evaluation);
|
||||
}
|
||||
|
||||
/** set the initialization method to use */
|
||||
void setInitializier(std::shared_ptr<ParticleFilterInitializer<State>> initializer) {
|
||||
Assert::isNotNull(initializer, "setInitializer() MUST not be called with a nullptr!");
|
||||
this->initializer = std::move(initializer);
|
||||
}
|
||||
|
||||
/** set the resampling threshold as the percentage of efficient particles */
|
||||
void setNEffThreshold(const double thresholdPercent) {
|
||||
this->nEffThresholdPercent = thresholdPercent;
|
||||
}
|
||||
|
||||
/** get the unnormalized weight sum of all particles */
|
||||
double getWeightSum() const
|
||||
{
|
||||
return this->weightSum;
|
||||
}
|
||||
|
||||
/** get the predicted mode probability P(m_t|Z_t-1)*/
|
||||
double getPredictedModeProbability() const
|
||||
{
|
||||
return this->predictedModeProbability;
|
||||
}
|
||||
|
||||
/** set the predicted mode probability P(m_t|Z_t-1)*/
|
||||
void setPredictedModeProbability(const double val) {
|
||||
this->predictedModeProbability = val;
|
||||
}
|
||||
|
||||
/** get the posterior mode probability P(m_t|Z_t)*/
|
||||
double getModePosteriorProbability() const
|
||||
{
|
||||
return this->modePosteriorProbability;
|
||||
}
|
||||
|
||||
/** set the posterior mode probability P(m_t|Z_t)*/
|
||||
void setModePosteriorProbability(const double likelihoodSum) {
|
||||
|
||||
Assert::isNotNull(likelihoodSum, "likelihoodSum is zero... that's not possible");
Assert::isNotNull(this->weightSum, "weightSum is zero... that's not possible");
|
||||
//Assert::isNotNull(this->predictedModeProbability, "predictedModeProbability is zero.. thats not possible");
|
||||
|
||||
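// IMM-style Bayes update: the unnormalized weight sum of this filter acts as the mode
// likelihood, so p(m_t | Z_t) = likelihood * P(m_t | Z_t-1) / likelihoodSum, where
// likelihoodSum normalizes over all mode filters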
this->modePosteriorProbability = (this->weightSum * this->predictedModeProbability) / likelihoodSum;
|
||||
|
||||
//Assert::isNotNull(this->modePosteriorProbability, "modePosteriorProbability is zero.. thats not possible");
|
||||
}
|
||||
|
||||
/** get the transition mode probability P(m_t-1 | m_t, Z_t-1)
* NOTE: don't use this value! It is only needed for nicer mixed sampling!
|
||||
*/
|
||||
double getTransitionModeProbability() const
|
||||
{
|
||||
return this->transitionModeProbability;
|
||||
}
|
||||
|
||||
/** set the transition mode probability P(m_t-1 | m_t, Z_t-1) */
|
||||
void setTransitionModeProbability(const double val) {
|
||||
this->transitionModeProbability = val;
|
||||
}
|
||||
|
||||
/** perform transition -> evaluation -> estimation -> resampling (if needed) */
|
||||
void update(const Control* control, const Observation& observation) {
|
||||
|
||||
// sanity checks (if enabled)
|
||||
Assert::isNotNull(resampler, "resampler MUST not be null! call setResampler() first!");
|
||||
Assert::isNotNull(transition, "transition MUST not be null! call setTransition() first!");
|
||||
Assert::isNotNull(evaluation, "evaluation MUST not be null! call setEvaluation() first!");
|
||||
Assert::isNotNull(estimator, "estimation MUST not be null! call setEstimation() first!");
|
||||
|
||||
// perform the transition step
|
||||
transition->transition(particles, control);
|
||||
|
||||
// perform the evaluation step and calculate the sum of all particle weights
|
||||
this->weightSum = evaluation->evaluation(particles, observation);
|
||||
|
||||
// normalize the particle weights and thereby calculate N_eff
|
||||
const double neff = normalize(weightSum);
|
||||
|
||||
// estimate the current state
|
||||
this->estimation = estimator->estimate(particles);
|
||||
|
||||
// if the number of efficient particles is too low, perform resampling
|
||||
if (neff < particles.size() * nEffThresholdPercent) { resampler->resample(particles); }
|
||||
|
||||
// done
|
||||
}
|
||||
|
||||
private:
|
||||
|
||||
/** normalize the particle weights so they sum to 1.0 and return N_eff */
|
||||
double normalize(const double weightSum) {
|
||||
double sum = 0.0;
|
||||
for (auto& p : particles) {
|
||||
p.weight /= weightSum;
|
||||
sum += (p.weight * p.weight);
|
||||
}
|
||||
return 1.0 / sum;
|
||||
}
|
||||
|
||||
/** calculate the number of efficient particles (N_eff) */
|
||||
double getNeff() const {
|
||||
double sum = 0.0;
|
||||
for (auto& p : particles) {sum += (p.weight * p.weight);}
|
||||
return 1.0 / sum;
|
||||
}
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
#endif // PARTICLEFILTERMIXING_H
|
||||
26
smc/filtering/ParticleFilterTransition.h
Normal file
@@ -0,0 +1,26 @@
#ifndef K_MATH_FILTER_PARTICLES_PARTICLEFILTERTRANSITION_H
#define K_MATH_FILTER_PARTICLES_PARTICLEFILTERTRANSITION_H

#include <vector>
#include "../Particle.h"

namespace SMC {

	/**
	 * interface for the user-defined particle transition.
	 * the transition describes how the particles change during the transition phase p(q_t | q_t-1),
	 * depending on the control data (if any)
	 */
	template <typename State, typename Control>
	class ParticleFilterTransition {

	public:

		/** perform the transition p(q_t | q_t-1) for all particles based on the given control data */
		virtual void transition(std::vector<Particle<State>>& particles, const Control* control) = 0;

	};

}

#endif // K_MATH_FILTER_PARTICLES_PARTICLEFILTERTRANSITION_H
24
smc/filtering/estimation/ParticleFilterEstimation.h
Normal file
@@ -0,0 +1,24 @@
#ifndef PARTICLEFILTERESTIMATION_H
#define PARTICLEFILTERESTIMATION_H

#include "../../Particle.h"
#include <vector>

namespace SMC {

	/** interface for estimating a single state from the current particle set */
	template <typename State>
	class ParticleFilterEstimation {

	public:

		/** dtor */
		virtual ~ParticleFilterEstimation() {;}

		/** get the current state estimation for the given particle set */
		virtual State estimate(const std::vector<Particle<State>>& particles) = 0;

	};

}

#endif // PARTICLEFILTERESTIMATION_H
160
smc/filtering/estimation/ParticleFilterEstimationKernelDensity.h
Normal file
@@ -0,0 +1,160 @@
|
||||
#if FIXME
|
||||
|
||||
#ifndef K_MATH_FILTER_PARTICLES_PARTICLEFILTERESTIMATIONKERNELDENSITY_H
|
||||
#define K_MATH_FILTER_PARTICLES_PARTICLEFILTERESTIMATIONKERNELDENSITY_H
|
||||
|
||||
|
||||
#include "ParticleFilterEstimation.h"
|
||||
#include <algorithm>
|
||||
#include <vector>
|
||||
#include "../../Particle.h"
|
||||
|
||||
#include "../../../../misc/gnuplot/Gnuplot.h"
|
||||
#include "../../../optimization/NumOptVector.h"
|
||||
#include "../../../optimization/NumOptAlgoDownhillSimplex.h"
|
||||
|
||||
|
||||
|
||||
namespace SMC {
|
||||
|
||||
template <typename State, int numParams>
|
||||
class ParticleFilterEstimationKernelDensity : public ParticleFilterEstimation<State> {
|
||||
|
||||
Gnuplot gp;
|
||||
|
||||
K::NumOptAlgoDownhillSimplex<float, numParams> simplex;
|
||||
|
||||
private:
|
||||
|
||||
class OptFunc {
|
||||
|
||||
private:
|
||||
|
||||
/** the particles to work on */
|
||||
const std::vector<Particle<State>>& particles;
|
||||
|
||||
public:
|
||||
|
||||
/** ctor */
|
||||
OptFunc(const std::vector<Particle<State>>& particles) : particles(particles) {;}
|
||||
|
||||
float operator () (const float* params) const {
|
||||
|
||||
double prob = 0;
|
||||
const int size = particles.size();
|
||||
|
||||
//#pragma omp parallel for
|
||||
for (int i = 0; i < size; i+=10) {
|
||||
const Particle<State>& p = particles[i];
|
||||
prob += p.state.getKernelDensityProbability(params) * p.weight;
|
||||
}
|
||||
|
||||
// convert probability to "error"
|
||||
return -prob;
|
||||
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
|
||||
public:
|
||||
|
||||
State estimate(std::vector<Particle<State>>& particles) override {
|
||||
|
||||
|
||||
// comparator
|
||||
auto comp = [] (const Particle<State>& p1, const Particle<State>& p2) { return p1.weight < p2.weight; };
|
||||
|
||||
// // find max state
|
||||
// auto el = std::max_element(particles.begin(), particles.end(), comp);
|
||||
// State max = el->state;
|
||||
|
||||
|
||||
// // region to check
|
||||
// BBox2 bbox;
|
||||
// bbox.add(Point2f(-50,-50));
|
||||
// bbox.add(Point2f(100,100));
|
||||
|
||||
// const float stepSize = 1.0f;
|
||||
|
||||
// const int pxX = (bbox.getMax().x - bbox.getMin().x) / stepSize;
|
||||
// const int pxY = (bbox.getMax().y - bbox.getMin().y) / stepSize;
|
||||
|
||||
// // optimize using simplex
|
||||
OptFunc func(particles);
|
||||
|
||||
// // calculate the optimum
|
||||
// simplex.calculateOptimum(func, params);
|
||||
// std::cout << params << std::endl;
|
||||
|
||||
|
||||
float params[numParams];
|
||||
getGlobalMax(particles, params);
|
||||
|
||||
// create output state from optimized params
|
||||
State res(params);
|
||||
|
||||
|
||||
gp << "splot '-' with lines\n";
|
||||
|
||||
int x1 = 0;//params[0]-2500;
|
||||
int x2 = 100*100;//params[0]+2500;
|
||||
int y1 = 0;//params[1]-2500;
|
||||
int y2 = 60*100;//params[1]+2500;
|
||||
|
||||
for (int x = x1; x < x2; x += 400) {
|
||||
for (int y = y1; y < y2; y += 400) {
|
||||
params[0] = x;
|
||||
params[1] = y;
|
||||
params[2] = 0;
|
||||
gp << x << " " << y << " " << -func(params) << "\n";
|
||||
}
|
||||
gp << "\n";
|
||||
}
|
||||
gp << "e\n";
|
||||
gp.flush();
|
||||
|
||||
return res;
|
||||
|
||||
}
|
||||
|
||||
void getGlobalMax(const std::vector<Particle<State>>& particles, float* startParams) {
|
||||
|
||||
OptFunc func(particles);
|
||||
double bestP = 0;
|
||||
float bestParams[numParams];
|
||||
|
||||
simplex.setMaxIterations(10);
|
||||
simplex.setNumRestarts(0);
|
||||
|
||||
|
||||
for (int i = 0; i < 15; ++i) {
|
||||
|
||||
// start at a random particle
|
||||
const int idx = rand() % particles.size();
|
||||
|
||||
// start optimization at this particle's parameters
|
||||
particles[idx].state.fillKernelDenstityParameters(startParams);
|
||||
|
||||
simplex.calculateOptimum(func, startParams);
|
||||
|
||||
const float prob = -func(startParams);
|
||||
if (prob > bestP) {
|
||||
bestP = prob;
|
||||
memcpy(bestParams, startParams, numParams*sizeof(float));
|
||||
//std::cout << bestParams << std::endl;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
memcpy(startParams, bestParams, numParams*sizeof(float));
|
||||
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
#endif // K_MATH_FILTER_PARTICLES_PARTICLEFILTERESTIMATIONKERNELDENSITY_H
|
||||
|
||||
#endif
|
||||
37
smc/filtering/estimation/ParticleFilterEstimationMax.h
Normal file
@@ -0,0 +1,37 @@
|
||||
#ifndef PARTICLEFILTERESTIMATIONMAX_H
|
||||
#define PARTICLEFILTERESTIMATIONMAX_H
|
||||
|
||||
#include "ParticleFilterEstimation.h"
|
||||
#include <algorithm>
|
||||
#include <vector>
|
||||
#include "../../Particle.h"
|
||||
|
||||
namespace SMC {
|
||||
|
||||
/**
|
||||
* just use the particle with the maximum weight
|
||||
* as the currently estimated state
|
||||
*/
|
||||
template <typename State>
|
||||
class ParticleFilterEstimationMax : public ParticleFilterEstimation<State> {
|
||||
|
||||
public:
|
||||
|
||||
State estimate(const std::vector<Particle<State>>& particles) override {
|
||||
|
||||
// comparator
|
||||
auto comp = [] (const Particle<State>& p1, const Particle<State>& p2) {
|
||||
return p1.weight < p2.weight;
|
||||
};
|
||||
|
||||
// find max element
|
||||
auto el = std::max_element(particles.begin(), particles.end(), comp);
|
||||
return el->state;
|
||||
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
#endif // PARTICLEFILTERESTIMATIONMAX_H
|
||||
@@ -0,0 +1,78 @@
|
||||
#ifndef PARTICLEFILTERESTIMATIONORDEREDWEIGHTEDAVERAGE_H
|
||||
#define PARTICLEFILTERESTIMATIONORDEREDWEIGHTEDAVERAGE_H
|
||||
|
||||
|
||||
#include <vector>
|
||||
#include "../../Particle.h"
|
||||
#include "../../ParticleAssertions.h"
|
||||
#include "ParticleFilterEstimation.h"
|
||||
|
||||
#include "../../../Assertions.h"
|
||||
|
||||
namespace SMC {
|
||||
|
||||
/**
|
||||
* calculate the (weighted) average state using
|
||||
* the X% best weighted particles
|
||||
*/
|
||||
template <typename State>
|
||||
class ParticleFilterEstimationOrderedWeightedAverage : public ParticleFilterEstimation<State> {
|
||||
|
||||
private:
|
||||
|
||||
const float percent;
|
||||
|
||||
public:
|
||||
|
||||
/** ctor */
|
||||
ParticleFilterEstimationOrderedWeightedAverage(const float percent) : percent(percent) {;}
|
||||
|
||||
State estimate(const std::vector<Particle<State>>& particles) override {
|
||||
|
||||
// compile-time sanity checks
|
||||
static_assert( HasOperatorPlusEq<State>::value, "your state needs a += operator!" );
|
||||
static_assert( HasOperatorDivEq<State>::value, "your state needs a /= operator!" );
|
||||
static_assert( HasOperatorMul<State>::value, "your state needs a * operator!" );
|
||||
|
||||
// comparator (highest first)
|
||||
auto comp = [] (const Particle<State>& p1, const Particle<State>& p2) {
|
||||
return p1.weight > p2.weight;
|
||||
};
|
||||
|
||||
// create a copy
|
||||
std::vector<Particle<State>> copy;
|
||||
copy.insert(copy.begin(), particles.begin(), particles.end());
|
||||
|
||||
// sort the copy (highest weight first)
|
||||
std::sort (copy.begin(), copy.end(), comp);
|
||||
|
||||
State tmp;
|
||||
|
||||
// calculate weighted average
|
||||
const int numBest = copy.size() * percent;
|
||||
double weightSum = 0;
|
||||
for (int i = 0; i < numBest; ++i) {
|
||||
const Particle<State>& p = copy[i];
|
||||
tmp += p.state * p.weight;
|
||||
weightSum += p.weight;
|
||||
}
|
||||
|
||||
Assert::isTrue( (weightSum == weightSum), "the sum of particle weights is NaN!");
Assert::isTrue( (weightSum != 0), "the sum of particle weights is zero!");
|
||||
|
||||
// normalize
|
||||
tmp /= weightSum;
|
||||
|
||||
return tmp;
|
||||
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
#endif // PARTICLEFILTERESTIMATIONORDEREDWEIGHTEDAVERAGE_H
|
||||
@@ -0,0 +1,60 @@
|
||||
#ifndef PARTICLEFILTERESTIMATIONREGIONALWEIGHTEDAVERAGE_H
|
||||
#define PARTICLEFILTERESTIMATIONREGIONALWEIGHTEDAVERAGE_H
|
||||
|
||||
#include "ParticleFilterEstimation.h"
|
||||
#include "../../Particle.h"
|
||||
#include "../../ParticleAssertions.h"
|
||||
|
||||
#include <algorithm>
|
||||
|
||||
namespace SMC {
|
||||
|
||||
/**
|
||||
* this implementation estimates the current state
|
||||
* by using the most probable particle and some near particles
|
||||
* combining them by their weight (weighted average)
|
||||
*
|
||||
* the checker, whether a particle is near or not, uses a special,
|
||||
* user-defined metric "belongsToRegion()". This user-method must
|
||||
* return a boolean, whether to include the provided particle
|
||||
* within the region around the maximum particle, or not.
|
||||
*/
|
||||
template <typename State>
|
||||
class ParticleFilterEstimationRegionalWeightedAverage : public ParticleFilterEstimation<State> {
|
||||
|
||||
public:
|
||||
|
||||
State estimate(const std::vector<Particle<State>>& particles) override {
|
||||
|
||||
// compile-time sanity checks
|
||||
static_assert( HasOperatorPlusEq<State>::value, "your state needs a += operator!" );
|
||||
static_assert( HasOperatorDivEq<State>::value, "your state needs a /= operator!" );
|
||||
static_assert( HasOperatorMul<State>::value, "your state needs a * operator!" );
|
||||
|
||||
//1) get the most probable particle
|
||||
const auto comp = [] (const Particle<State>& p1, const Particle<State>& p2) {return p1.weight < p2.weight;};
|
||||
const Particle<State>& max = *std::max_element(particles.begin(), particles.end(), comp);
|
||||
|
||||
//2) calculate the regional weighted average
|
||||
double cumWeight = 0;
|
||||
State res;
|
||||
for (const Particle<State>& p : particles) {
|
||||
if (!p.state.belongsToRegion(max.state)) {continue;}
|
||||
cumWeight += p.weight;
|
||||
res += p.state * p.weight;
|
||||
}
|
||||
|
||||
// 3) normalize
|
||||
res /= cumWeight;
|
||||
|
||||
// done
|
||||
return res;
|
||||
|
||||
}
|
||||
|
||||
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
#endif // PARTICLEFILTERESTIMATIONREGIONALWEIGHTEDAVERAGE_H
|
||||
@@ -0,0 +1,52 @@
|
||||
#ifndef PARTICLEFILTERESTIMATIONWEIGHTEDAVERAGE_H
|
||||
#define PARTICLEFILTERESTIMATIONWEIGHTEDAVERAGE_H
|
||||
|
||||
#include <vector>
|
||||
#include "../../Particle.h"
|
||||
#include "../../ParticleAssertions.h"
|
||||
#include "ParticleFilterEstimation.h"
|
||||
|
||||
#include "../../../Assertions.h"
|
||||
|
||||
namespace SMC {
|
||||
|
||||
/**
|
||||
* calculate the (weighted) average state using
|
||||
* all particles and their weight
|
||||
*/
|
||||
template <typename State>
|
||||
class ParticleFilterEstimationWeightedAverage : public ParticleFilterEstimation<State> {
|
||||
|
||||
public:
|
||||
|
||||
State estimate(const std::vector<Particle<State>>& particles) override {
|
||||
|
||||
// compile-time sanity checks
|
||||
static_assert( HasOperatorPlusEq<State>::value, "your state needs a += operator!" );
|
||||
static_assert( HasOperatorDivEq<State>::value, "your state needs a /= operator!" );
|
||||
static_assert( HasOperatorMul<State>::value, "your state needs a * operator!" );
|
||||
|
||||
State tmp;
|
||||
|
||||
// calculate weighted average
|
||||
double weightSum = 0;
|
||||
for (const Particle<State>& p : particles) {
|
||||
tmp += p.state * p.weight;
|
||||
weightSum += p.weight;
|
||||
}
|
||||
|
||||
Assert::isTrue( (weightSum == weightSum), "the sum of particle weights is NaN!");
|
||||
Assert::isTrue( (weightSum != 0), "the sum of particle weights is zero!");
|
||||
|
||||
// normalize
|
||||
tmp /= weightSum;
|
||||
|
||||
return tmp;
|
||||
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
#endif // PARTICLEFILTERESTIMATIONWEIGHTEDAVERAGE_H
|
||||
@@ -0,0 +1,66 @@
|
||||
#ifndef PARTICLEFILTERESTIMATIONWEIGHTEDAVERAGEWITHANGLE_H
|
||||
#define PARTICLEFILTERESTIMATIONWEIGHTEDAVERAGEWITHANGLE_H
|
||||
|
||||
#include <vector>
|
||||
#include <cmath>
|
||||
#include <math.h>
|
||||
#include "../../Particle.h"
|
||||
#include "../../ParticleAssertions.h"
|
||||
#include "ParticleFilterEstimation.h"
|
||||
|
||||
#include "../../../Assertions.h"
|
||||
|
||||
namespace SMC {
|
||||
|
||||
/**
|
||||
* calculate the (weighted) average state using
|
||||
* all particles and their weight
|
||||
*/
|
||||
template <typename State>
|
||||
class ParticleFilterEstimationWeightedAverageWithAngle : public ParticleFilterEstimation<State> {
|
||||
|
||||
public:
|
||||
|
||||
State estimate(std::vector<Particle<State>>& particles) override {
|
||||
|
||||
// compile-time sanity checks
|
||||
static_assert( HasOperatorPlusEq<State>::value, "your state needs a += operator!" );
|
||||
static_assert( HasOperatorDivEq<State>::value, "your state needs a /= operator!" );
|
||||
static_assert( HasOperatorMul<State>::value, "your state needs a * operator!" );
|
||||
|
||||
State tmp;
|
||||
|
||||
// calculate weighted average
|
||||
double weightSum = 0;
|
||||
|
||||
double xAngle = 0;
|
||||
double yAngle = 0;
|
||||
for (const Particle<State>& p : particles) {
|
||||
tmp += p.state * p.weight;
|
||||
weightSum += p.weight;
|
||||
|
||||
xAngle += std::cos(p.state.walkState.heading.getRAD());
|
||||
yAngle += std::sin(p.state.walkState.heading.getRAD());
|
||||
}
|
||||
|
||||
Assert::isTrue( (weightSum == weightSum), "the sum of particle weights is NaN!");
|
||||
Assert::isTrue( (weightSum != 0), "the sum of particle weights is zero!");
|
||||
|
||||
// normalize
|
||||
tmp /= weightSum;
|
||||
|
||||
// calculate the average angle (circular mean of the particle headings)
|
||||
tmp.avgAngle = std::fmod((std::atan2(yAngle / particles.size(), xAngle / particles.size()) * 180.0/PI) + 720, 360.0);
|
||||
|
||||
// sanity check: the fmod above should keep the average angle within [0, 360)
if (tmp.avgAngle > 360.0) { std::cout << "warning: average angle out of range: " << tmp.avgAngle << std::endl; }
|
||||
|
||||
return tmp;
|
||||
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
#endif // PARTICLEFILTERESTIMATIONWEIGHTEDAVERAGEWITHANGLE_H
|
||||
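For reference, the heading average computed by ParticleFilterEstimationWeightedAverageWithAngle above is the (unweighted) circular mean of the particle headings, mapped to degrees in [0, 360):

    \bar{\theta} = \operatorname{atan2}\!\left(\tfrac{1}{N}\sum_{i=1}^{N}\sin\theta_i,\; \tfrac{1}{N}\sum_{i=1}^{N}\cos\theta_i\right),
    \qquad
    \text{avgAngle} = \left(\bar{\theta}\cdot\tfrac{180}{\pi} + 720\right) \bmod 360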
43
smc/filtering/resampling/ParticleFilterResampling.h
Normal file
@@ -0,0 +1,43 @@
/*
 * ParticleFilterResampling.h
 *
 * Created on: Sep 18, 2013
 * Author: Frank Ebner
 */

#ifndef PARTICLEFILTERRESAMPLING_H_
#define PARTICLEFILTERRESAMPLING_H_

#include "../../Particle.h"
#include <vector>
#include <chrono>

/**
 * interface for all available resampling methods
 * within the particle filter
 * @param State the (user-defined) state
 */

namespace SMC {

	template <typename State> class ParticleFilterResampling {

	public:

		/**
		 * perform resampling on the given particle-vector
		 * @param particles the vector of all particles to resample
		 */
		virtual void resample(std::vector<Particle<State>>& particles) = 0;

		/**
		 * perform resampling on the given particle-vector, additionally taking a
		 * divergence value and a second (wifi) particle-set into account
		 * @param particles the vector of all particles to resample
		 * @param kld the current divergence between the filter's distribution and the wifi distribution
		 * @param particlesWifi the wifi particle-set to (partially) draw from
		 */
		virtual void resample(std::vector<Particle<State>>& particles, double kld, std::vector<Particle<State>>& particlesWifi) {}

	};

}

#endif /* PARTICLEFILTERRESAMPLING_H_ */
157
smc/filtering/resampling/ParticleFilterResamplingDivergence.h
Normal file
@@ -0,0 +1,157 @@
|
||||
#ifndef PARTICLEFILTERRESAMPLINGDIVERGENCE_H
|
||||
#define PARTICLEFILTERRESAMPLINGDIVERGENCE_H
|
||||
|
||||
#include <algorithm>
|
||||
#include <random>
|
||||
|
||||
#include "ParticleFilterResampling.h"
|
||||
#include "../../ParticleAssertions.h"
|
||||
|
||||
/** counts resample() calls; used to skip the wifi-mixing for the first few observations */
static int divergenceResampleCalls = 0;
|
||||
|
||||
|
||||
namespace SMC {
|
||||
|
||||
/**
|
||||
* draws new particles depending on the current divergence between two
* probability distributions, based on the Jensen–Shannon divergence
|
||||
*/
|
||||
template <typename State>
|
||||
class ParticleFilterResamplingDivergence : public ParticleFilterResampling<State> {
|
||||
|
||||
private:
|
||||
|
||||
/** this is a copy of the particle-set to draw from it */
|
||||
std::vector<Particle<State>> particlesCopy;
|
||||
|
||||
/** this is a copy of the wifi particle-set */
|
||||
std::vector<Particle<State>> particlesWifiCopy;
|
||||
|
||||
/** random number generator */
|
||||
std::minstd_rand gen;
|
||||
|
||||
public:
|
||||
|
||||
/** ctor */
|
||||
ParticleFilterResamplingDivergence() {
|
||||
gen.seed(1234);
|
||||
}
|
||||
|
||||
|
||||
/** not used: this resampler requires the divergence and the wifi particle-set (see the overload below) */
void resample(std::vector<Particle<State>>& particles) override{
|
||||
|
||||
}
|
||||
|
||||
void resample(std::vector<Particle<State>>& particles, double kld, std::vector<Particle<State>>& particlesWifi) override {
|
||||
|
||||
// compile-time sanity checks
|
||||
// TODO: this solution requires EXPLICIT overloading which is bad...
|
||||
//static_assert( HasOperatorAssign<State>::value, "your state needs an assignment operator!" );
|
||||
|
||||
const uint32_t cnt = (uint32_t) particles.size();
|
||||
const uint32_t cntWifi = (uint32_t) particlesWifi.size();
|
||||
|
||||
// equal weight for all particles. sums up to 1.0
|
||||
const double equalWeight = 1.0 / (double) cnt;
|
||||
|
||||
// ensure the copy vector has the same size as the real particle vector
|
||||
particlesCopy.resize(cnt);
|
||||
particlesWifiCopy.resize(cntWifi);
|
||||
|
||||
// swap both vectors
|
||||
particlesCopy.swap(particles);
|
||||
particlesWifiCopy = particlesWifi;
|
||||
|
||||
// calculate cumulative weight
|
||||
double cumWeight = 0;
|
||||
for (uint32_t i = 0; i < cnt; ++i) {
|
||||
cumWeight += particlesCopy[i].weight;
|
||||
particlesCopy[i].weight = cumWeight;
|
||||
}
|
||||
|
||||
double cumWeightWifi = 0;
|
||||
for (uint32_t i = 0; i < cntWifi; ++i) {
|
||||
cumWeightWifi += particlesWifiCopy[i].weight;
|
||||
particlesWifiCopy[i].weight = cumWeightWifi;
|
||||
}
|
||||
|
||||
double maxKld = 250;
|
||||
double minKld = 20;
|
||||
|
||||
double diffKld = kld - minKld;
|
||||
if(diffKld < 0){
|
||||
diffKld = 0;
|
||||
}
|
||||
|
||||
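// map the divergence above minKld linearly onto the number of particles
// that will be re-drawn from the wifi distribution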
int numWifiParticel = (diffKld / maxKld) * cnt;
|
||||
|
||||
//skip the first observations
|
||||
static const int skipStart = 5;
|
||||
if(divergenceResampleCalls++ < skipStart){
|
||||
numWifiParticel = 0;
|
||||
}
|
||||
|
||||
|
||||
// now draw from the copy vector and fill the original one
|
||||
// with the resampled particle-set keep the number of particles.
|
||||
for (uint32_t i = 0; i < cnt; ++i) {
|
||||
if(i < numWifiParticel){
|
||||
|
||||
//we draw a particle from posterior and change to position based on the wifi distribution
|
||||
Particle<State> posteriorParticle = draw(cumWeight);
|
||||
Particle<State> wifiParticle = drawWifi(cumWeightWifi);
|
||||
|
||||
posteriorParticle.state.position = wifiParticle.state.position;
|
||||
particles[i] = posteriorParticle;
|
||||
} else {
|
||||
particles[i] = draw(cumWeight);
|
||||
}
|
||||
particles[i].weight = equalWeight;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private:
|
||||
|
||||
/** draw one particle according to its weight from the copy vector */
|
||||
const Particle<State>& draw(const double cumWeight) {
|
||||
|
||||
// generate random values between [0:cumWeight]
|
||||
std::uniform_real_distribution<float> dist(0, cumWeight);
|
||||
|
||||
// draw a random value between [0:cumWeight]
|
||||
const float rand = dist(gen);
|
||||
|
||||
// search comparator (cumWeight is ordered -> use binary search)
|
||||
auto comp = [] (const Particle<State>& s, const float d) {return s.weight < d;};
|
||||
auto it = std::lower_bound(particlesCopy.begin(), particlesCopy.end(), rand, comp);
|
||||
return *it;
|
||||
|
||||
}
|
||||
|
||||
|
||||
/** draw one particle according to its weight from the copy vector */
|
||||
const Particle<State>& drawWifi(const double cumWeight) {
|
||||
|
||||
// generate random values between [0:cumWeight]
|
||||
std::uniform_real_distribution<float> dist(0, cumWeight);
|
||||
|
||||
// draw a random value between [0:cumWeight]
|
||||
const float rand = dist(gen);
|
||||
|
||||
// search comparator (cumWeight is ordered -> use binary search)
|
||||
auto comp = [] (const Particle<State>& s, const float d) {return s.weight < d;};
|
||||
auto it = std::lower_bound(particlesWifiCopy.begin(), particlesWifiCopy.end(), rand, comp);
|
||||
return *it;
|
||||
|
||||
}
|
||||
|
||||
|
||||
};
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
#endif // PARTICLEFILTERRESAMPLINGDIVERGENCE_H
|
||||
130
smc/filtering/resampling/ParticleFilterResamplingKLD.h
Normal file
@@ -0,0 +1,130 @@
|
||||
#ifndef PARTICLEFILTERRESAMPLINGKLD_H
|
||||
#define PARTICLEFILTERRESAMPLINGKLD_H
|
||||
|
||||
#include <algorithm>
|
||||
#include <random>
|
||||
|
||||
#include <Indoor/math/random/RandomGenerator.h>
|
||||
#include "ParticleFilterResampling.h"
|
||||
#include "../../ParticleAssertions.h"
|
||||
|
||||
#include <Indoor/math/distribution/NormalCDF.h>
|
||||
#include <Indoor/misc/Binning.h>
|
||||
|
||||
namespace SMC {
|
||||
|
||||
/**
|
||||
* "Adapting sample size in particle filters through KLD-resampling" T.Li et al.
|
||||
* Resamples a dynamic number of new particles; for drawing we use the simple (multinomial) version.
* We can guarantee, with probability 1 - delta, that the KLD between the sample-based posterior
* and the true posterior is less than epsilon.
|
||||
*
|
||||
*
|
||||
* Note: the State template parameter needs to implement a float getBinValue(const int dim) const {..} function
|
||||
*/
|
||||
template <typename State>
|
||||
class ParticleFilterResamplingKLD : public ParticleFilterResampling<State> {
|
||||
|
||||
private:
|
||||
|
||||
/** this is a copy of the particle-set to draw from it */
|
||||
std::vector<Particle<State>> particlesCopy;
|
||||
|
||||
/** random number generator */
|
||||
std::minstd_rand gen;
|
||||
|
||||
/** upper bound epsilon on the KLD between the sample-based posterior and the true posterior */
double epsilon;

/** delta: the KLD bound holds with probability 1 - delta (via the upper 1 - delta quantile of the normal distribution). something like 0.01 */
double delta;
|
||||
|
||||
/** the bins */
|
||||
Binning<State> bins;
|
||||
|
||||
/** max particle size */
|
||||
uint32_t N_max;
|
||||
|
||||
public:
|
||||
|
||||
/** ctor */
|
||||
ParticleFilterResamplingKLD(double delta = 0.01, double epsilon = 0.13, uint32_t N_max = 25000) : delta(delta), epsilon(epsilon), N_max(N_max) {
|
||||
gen.seed(RANDOM_SEED);
|
||||
|
||||
bins.setBinSizes({0.01, 0.01, 0.2, 0.3});
|
||||
bins.setRanges({BinningRange(-1,100), BinningRange(-10,60), BinningRange(-1,15), BinningRange(0, 2 * M_PI)});
|
||||
}
|
||||
|
||||
void resample(std::vector<Particle<State>>& particles) override {
|
||||
|
||||
const uint32_t cnt = (uint32_t) particles.size();
|
||||
|
||||
// equal weight for all particles. sums up to 1.0
|
||||
const double equalWeight = 1.0 / (double) cnt;
|
||||
|
||||
// ensure the copy vector has the same size as the real particle vector
|
||||
particlesCopy.resize(cnt);
|
||||
|
||||
// swap both vectors
|
||||
particlesCopy.swap(particles);
|
||||
|
||||
// calculate cumulative weight
|
||||
double cumWeight = 0;
|
||||
for (uint32_t i = 0; i < cnt; ++i) {
|
||||
cumWeight += particlesCopy[i].weight;
|
||||
particlesCopy[i].weight = cumWeight;
|
||||
}
|
||||
|
||||
//clean the bins and particles
|
||||
bins.clearUsed();
|
||||
particles.clear();
|
||||
|
||||
// draw a new particle and check if it is within a bin or not
|
||||
uint32_t k = 1;
|
||||
double N = 0;
|
||||
int i = 0;
|
||||
while(i <= N && i <= N_max){
|
||||
particles.push_back(draw(cumWeight));
|
||||
particles.back().weight = equalWeight;
|
||||
|
||||
//is bin free?
|
||||
if(bins.isFree(particles[i].state)){
|
||||
k++;
|
||||
bins.markUsed(particles[i].state);
|
||||
|
||||
//calculate the new N
|
||||
double z_delta = Distribution::NormalCDF<double>::getProbit(1 - delta);
|
||||
double front = (k - 1) / (2 * epsilon);
|
||||
double back = 1 - (2 / (9 * (k - 1))) + (std::sqrt(2 / (9 * (k - 1))) * z_delta );
|
||||
|
||||
N = front * std::pow(back, 3.0);
|
||||
}
|
||||
++i;
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private:
|
||||
|
||||
/** draw one particle according to its weight from the copy vector */
|
||||
const Particle<State>& draw(const double cumWeight) {
|
||||
|
||||
// generate random values between [0:cumWeight]
|
||||
std::uniform_real_distribution<float> dist(0, cumWeight);
|
||||
|
||||
// draw a random value between [0:cumWeight]
|
||||
const float rand = dist(gen);
|
||||
|
||||
// search comparator (cumWeight is ordered -> use binary search)
|
||||
auto comp = [] (const Particle<State>& s, const float d) {return s.weight < d;};
|
||||
auto it = std::lower_bound(particlesCopy.begin(), particlesCopy.end(), rand, comp);
|
||||
return *it;
|
||||
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
}
|
||||
|
||||
#endif // PARTICLEFILTERRESAMPLINGKLD_H
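For reference, the bound updated inside resample() above is the Wilson-Hilferty (chi-square quantile) approximation used by KLD-sampling/KLD-resampling. A minimal standalone sketch of just that formula; the helper name kldBound and the hard-coded z value are illustrative only:

#include <cmath>
#include <cstdio>

// KLD bound on the required number of samples:
//   N = dof / (2*eps) * (1 - 2/(9*dof) + sqrt(2/(9*dof)) * z)^3
// `dof` corresponds to the (k - 1) term in resample(), `eps` to epsilon and
// `z` to the upper (1 - delta) quantile of the standard normal distribution.
static double kldBound(const double dof, const double eps, const double z) {
	if (dof < 1.0) { return 0.0; }	// not meaningful before the second occupied bin
	const double front = dof / (2.0 * eps);
	const double back = 1.0 - 2.0 / (9.0 * dof) + std::sqrt(2.0 / (9.0 * dof)) * z;
	return front * back * back * back;
}

int main() {
	// dof = 19, eps = 0.13, delta = 0.01 (z ~= 2.33) -> roughly 140 particles required
	std::printf("%.1f\n", kldBound(19.0, 0.13, 2.33));
	return 0;
}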
31
smc/filtering/resampling/ParticleFilterResamplingLog.h
Normal file
@@ -0,0 +1,31 @@
#ifndef K_MATH_FILTER_PARTICLES_PARTICLEFILTERRESAMPLINGLOG_H
#define K_MATH_FILTER_PARTICLES_PARTICLEFILTERRESAMPLINGLOG_H

#include <cmath>

#include "ParticleFilterResamplingSimple.h"

namespace SMC {

	/**
	 * flattens the particle weights (currently via square root; the log-based
	 * variant is commented out) and then draws particles using the simple resampler
	 */
	template <typename State>
	class ParticleFilterResamplingLog : public ParticleFilterResamplingSimple<State> {

	public:

		void resample(std::vector<Particle<State>>& particles) override {

			for (Particle<State>& p : particles) {
				//p.weight = - 1.0 / std::log(p.weight);
				p.weight = std::pow(p.weight, 0.5);
			}

			ParticleFilterResamplingSimple<State>::resample(particles);

		}

	};

}

#endif // K_MATH_FILTER_PARTICLES_PARTICLEFILTERRESAMPLINGLOG_H
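A note on the flattening above: taking the square root compresses the ratio between heavy and light weights before the multinomial draw, which keeps more low-weight particles alive at the cost of a biased selection. A small standalone illustration (the numbers are arbitrary):

#include <cmath>
#include <cstdio>

int main() {
	// two particles with a 100:1 weight ratio
	const double w1 = 0.99;
	const double w2 = 0.0099;

	// after sqrt-flattening the ratio drops to 10:1, so the lighter particle
	// survives the subsequent resampling step far more often
	std::printf("raw ratio:       %.1f\n", w1 / w2);
	std::printf("flattened ratio: %.1f\n", std::sqrt(w1) / std::sqrt(w2));
	return 0;
}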
163
smc/filtering/resampling/ParticleFilterResamplingNEff.h
Normal file
@@ -0,0 +1,163 @@
#ifndef PARTICLEFILTERRESAMPLINGNEFF_H
#define PARTICLEFILTERRESAMPLINGNEFF_H

#include <algorithm>
#include <cmath>
#include <functional>
#include <random>

#include "ParticleFilterResampling.h"
#include "../ParticleAssertions.h"

namespace SMC {

	/**
	 * resamples only when the normalized effective sample size (N_eff) drops below
	 * a desired fraction: the lowest-weighted particles (at most maxAdapt percent
	 * per call) are replaced by splitting randomly drawn higher-weighted ones
	 */
	template <typename State> class ParticleFilterResamplingNEff : public ParticleFilterResampling<State> {

	public:

		using DrawCallback = std::function<void(Particle<State>& p)>;

	private:

		/** random number generator */
		std::minstd_rand gen;

		/** resample only if the normalized N_eff falls below this fraction */
		const float desiredNEff;

		/** replace at most this fraction of the particles per call */
		const float maxAdapt = 0.05;

		DrawCallback drawCallback;

	public:

		/** ctor */
		ParticleFilterResamplingNEff(const float desiredNEff = 0.75, const float maxAdapt = 0.05) : desiredNEff(desiredNEff), maxAdapt(maxAdapt) {
			gen.seed(1234);
		}

		/** callback to inform about redrawn particles */
		void setDrawCallback(const DrawCallback& callback) {this->drawCallback = callback;}

		void resample(std::vector<Particle<State>>& particles) override {

			// comparator (highest first)
			static auto comp = [] (const Particle<State>& p1, const Particle<State>& p2) {
				return p1.weight > p2.weight;
			};

			// ensure all particle-weights sum up to 1.0
			normalize(particles);

			// calculate the current normalized N_eff
			const double curNEff = getNEff(particles);

			// cur N_eff is > desired N_eff -> nothing to do
			if (curNEff > desiredNEff) {return;}

			const size_t cnt = particles.size();

			// how many particles to discard and resample based on N_eff
			// example: N_eff = 75% -> 25% to discard/resample
			// cur N_eff < desired N_eff -> calculate adaption
			float adapt = desiredNEff - curNEff;
			if (adapt > maxAdapt) {adapt = maxAdapt;}
			const size_t toResample = cnt * adapt;

			// sort orig particles by weight (highest first)
			std::sort(particles.begin(), particles.end(), comp);

			// to-be-removed region [at the end of the vector]
			const size_t start = particles.size() - toResample;

			// zero the weight of the to-be-removed region
			for (size_t i = start; i < cnt; ++i) {
				particles[i].weight = 0;
			}

			// renormalize the remaining set
			normalize(particles);

			// replace the to-be-removed region [start:cnt]
			for (size_t i = start; i < cnt; ++i) {

				// track the cumulative weight for each particle-index
				std::vector<float> cumWeights;

				// calculate the cumulative weight for [0:start] = the to-be-kept particles
				double cumWeight = 0;
				for (size_t j = 0; j < start; ++j) {
					cumWeight += particles[j].weight;
					cumWeights.push_back(cumWeight);
				}

				// get a random particle between [0:start]
				const size_t idx = draw(cumWeights);

				// split
				particles[i] = particles[idx];
				particles[i].weight /= 2;
				particles[idx].weight /= 2;

				// callback?
				if (drawCallback) {drawCallback(particles[i]);}

			}

			// sanity check. the sum of weights must still be 1.0!
			double weightSum = 0;
			for (const auto& p : particles) {weightSum += p.weight;}

			if (std::abs(weightSum - 1.0) > 0.01) {
				throw Exception("particle weights do not sum up to 1.0");
			}

		}

	private:

		void normalize(std::vector<Particle<State>>& particles) const {
			double sum = 0.0;
			for (const auto& p : particles) {sum += p.weight;}	// calculate sum
			for (auto& p : particles) {p.weight /= sum;}		// normalize
		}

		double getNEff(const std::vector<Particle<State>>& particles) const {
			double sum = 0.0;
			for (const auto& p : particles) {
				sum += (p.weight * p.weight);
			}
			return 1.0 / sum / particles.size();
		}

		size_t draw(const std::vector<float>& cumWeights) {

			// get the cumulative weight [= last entry]
			const double cumWeight = cumWeights.back();

			// generate random values between [0:cumWeight]
			std::uniform_real_distribution<float> dist(0, cumWeight);

			// draw a random value between [0:cumWeight]
			const float rand = dist(gen);

			// the cumulative weights are ordered -> use binary search
			auto it = std::lower_bound(cumWeights.begin(), cumWeights.end(), rand);

			// convert iterator to index
			const size_t idx = it - cumWeights.begin();

			return idx;

		}

	};

}

#endif // PARTICLEFILTERRESAMPLINGNEFF_H
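Note that getNEff() above returns the effective sample size divided by the particle count, so desiredNEff is a fraction in [0, 1] rather than an absolute particle count. A minimal standalone check of that quantity (the helper name normalizedNEff is illustrative; the weights are assumed to sum to 1):

#include <cstdio>
#include <vector>

// normalized effective sample size: (1 / sum(w_i^2)) / n
static double normalizedNEff(const std::vector<double>& w) {
	double sumSq = 0.0;
	for (const double wi : w) { sumSq += wi * wi; }
	return 1.0 / sumSq / (double) w.size();
}

int main() {
	std::printf("%.2f\n", normalizedNEff({0.25, 0.25, 0.25, 0.25}));	// uniform weights    -> 1.00
	std::printf("%.2f\n", normalizedNEff({0.97, 0.01, 0.01, 0.01}));	// degenerate weights -> ~0.27
	return 0;
}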
34
smc/filtering/resampling/ParticleFilterResamplingNone.h
Normal file
@@ -0,0 +1,34 @@
/*
 * ParticleResamplingNone.h
 *
 *  Created on: Sep 18, 2013
 *      Author: Frank Ebner
 */

#ifndef K_MATH_FILTER_PARTICLES_PARTICLEFILTERRESAMPLINGNONE_H_
#define K_MATH_FILTER_PARTICLES_PARTICLEFILTERRESAMPLINGNONE_H_

#include "ParticleFilterResampling.h"

namespace SMC {

	/**
	 * dummy resampler that simply does nothing
	 */
	template <typename State>
	class ParticleFilterResamplingNone : public ParticleFilterResampling<State> {

	public:

//		/** dtor */
//		~ParticleFilterResamplingNone() {;}

		void resample(std::vector<Particle<State>>& particles) override {
			(void) particles;
		}

	};

}

#endif /* K_MATH_FILTER_PARTICLES_PARTICLEFILTERRESAMPLINGNONE_H_ */
94
smc/filtering/resampling/ParticleFilterResamplingPercent.h
Normal file
@@ -0,0 +1,94 @@
#ifndef PARTICLEFILTERRESAMPLINGPERCENT_H
#define PARTICLEFILTERRESAMPLINGPERCENT_H

#include <algorithm>
#include <random>

#include "ParticleFilterResampling.h"
#include "../../ParticleAssertions.h"

namespace SMC {

	/**
	 * replaces the lowest-weighted fraction (percent) of the particles by
	 * splitting randomly chosen particles from the remaining, higher-weighted set
	 */
	template <typename State> class ParticleFilterResamplingPercent : public ParticleFilterResampling<State> {

	private:

		/** random number generator */
		std::minstd_rand gen;

		/** fraction of the particles to replace on each resampling step */
		float percent;

	public:

		/** ctor */
		ParticleFilterResamplingPercent(const float percent) : percent(percent) {
			gen.seed(1234);
		}

		void resample(std::vector<Particle<State>>& particles) override {

			// comparator (highest first)
			static auto comp = [] (const Particle<State>& p1, const Particle<State>& p2) {
				return p1.weight > p2.weight;
			};

			const uint32_t cnt = (uint32_t) particles.size();

			// sort particles by weight (highest first)
			std::sort(particles.begin(), particles.end(), comp);

			// to-be-removed region
			const int start = cnt * (1 - percent);
			const int end = cnt;
			std::uniform_int_distribution<int> dist(0, start - 1);

			// remove by re-drawing
			for (int i = start; i < end; ++i) {
				const int rnd = dist(gen);
				particles[i] = particles[rnd];
				particles[i].weight /= 2;
				particles[rnd].weight /= 2;
			}

			// calculate the weight-sum
			double weightSum = 0;
			for (const auto& p : particles) {
				weightSum += p.weight;
			}

			// renormalize so the weights sum up to 1.0 again
			for (auto& p : particles) {
				p.weight /= weightSum;
			}

		}

	private:

		/** draw one particle according to its weight from the given copy vector */
		const Particle<State>& draw(std::vector<Particle<State>>& copy, const double cumWeight) {

			// generate random values between [0:cumWeight]
			std::uniform_real_distribution<float> dist(0, cumWeight);

			// draw a random value between [0:cumWeight]
			const float rand = dist(gen);

			// search comparator (cumWeight is ordered -> use binary search)
			auto comp = [] (const Particle<State>& s, const float d) {return s.weight < d;};
			auto it = std::lower_bound(copy.begin(), copy.end(), rand, comp);
			return *it;

		}

	};

}

#endif // PARTICLEFILTERRESAMPLINGPERCENT_H
102
smc/filtering/resampling/ParticleFilterResamplingSimple.h
Normal file
@@ -0,0 +1,102 @@
/*
 * ParticleResamplingSimple.h
 *
 *  Created on: Sep 18, 2013
 *      Author: Frank Ebner
 */

#ifndef K_MATH_FILTER_PARTICLES_PARTICLEFILTERRESAMPLINGSIMPLE_H_
#define K_MATH_FILTER_PARTICLES_PARTICLEFILTERRESAMPLINGSIMPLE_H_

#include <algorithm>
#include <random>

#include "ParticleFilterResampling.h"
#include "../../ParticleAssertions.h"

namespace SMC {

	/**
	 * uses simple probability resampling by drawing particles according
	 * to their current weight.
	 * O(log(n)) per particle
	 */
	template <typename State>
	class ParticleFilterResamplingSimple : public ParticleFilterResampling<State> {

	private:

		/** this is a copy of the particle-set to draw from it */
		std::vector<Particle<State>> particlesCopy;

		/** random number generator */
		std::minstd_rand gen;

	public:

		/** ctor */
		ParticleFilterResamplingSimple() {
			gen.seed(1234);
		}

		void resample(std::vector<Particle<State>>& particles) override {

			// compile-time sanity checks
			// TODO: this solution requires EXPLICIT overloading which is bad...
			// static_assert( HasOperatorAssign<State>::value, "your state needs an assignment operator!" );

			const uint32_t cnt = (uint32_t) particles.size();

			// equal weight for all particles. sums up to 1.0
			const double equalWeight = 1.0 / (double) cnt;

			// ensure the copy vector has the same size as the real particle vector
			particlesCopy.resize(cnt);

			// swap both vectors
			particlesCopy.swap(particles);

			// calculate cumulative weight
			double cumWeight = 0;
			for (uint32_t i = 0; i < cnt; ++i) {
				cumWeight += particlesCopy[i].weight;
				particlesCopy[i].weight = cumWeight;
			}

			// now draw from the copy vector and fill the original one
			// with the resampled particle-set
			for (uint32_t i = 0; i < cnt; ++i) {
				particles[i] = draw(cumWeight);
				particles[i].weight = equalWeight;
			}

		}

	private:

		/** draw one particle according to its weight from the copy vector */
		const Particle<State>& draw(const double cumWeight) {

			// generate random values between [0:cumWeight]
			std::uniform_real_distribution<float> dist(0, cumWeight);

			// draw a random value between [0:cumWeight]
			const float rand = dist(gen);

			// search comparator (cumWeight is ordered -> use binary search)
			auto comp = [] (const Particle<State>& s, const float d) {return s.weight < d;};
			auto it = std::lower_bound(particlesCopy.begin(), particlesCopy.end(), rand, comp);
			return *it;

		}

	};

}

#endif /* K_MATH_FILTER_PARTICLES_PARTICLEFILTERRESAMPLINGSIMPLE_H_ */
95
smc/filtering/resampling/ParticleFilterResamplingWheel.h
Normal file
@@ -0,0 +1,95 @@
#ifndef K_MATH_FILTER_PARTICLES_PARTICLEFILTERRESAMPLINGWHEEL_H_
#define K_MATH_FILTER_PARTICLES_PARTICLEFILTERRESAMPLINGWHEEL_H_

#include <algorithm>
#include <iostream>
#include <vector>

#include "ParticleFilterResampling.h"
#include "../ParticleAssertions.h"
#include "../../../distribution/Uniform.h"
#include "../../../../os/Time.h"

/**
 * sources:
 * https://www.udacity.com/course/viewer#!/c-cs373/l-48704330/e-48748082/m-48740082
 * https://www.youtube.com/watch?list=PLpUPoM7Rgzi_7YWn14Va2FODh7LzADBSm&feature=player_detailpage&v=kZhOJgooMxI#t=567
 */

namespace SMC {

	/**
	 * uses simple probability resampling by drawing particles according
	 * to their current weight using a "wheel"
	 * -> O(n) for all particles
	 */
	template <typename State>
	class ParticleFilterResamplingWheel : public ParticleFilterResampling<State> {

	private:

		/** this is a copy of the particle-set to draw from it */
		std::vector<Particle<State>> particlesCopy;

	public:

		void resample(std::vector<Particle<State>>& particles) override {

			// (debug) measure how long resampling takes
			const uint64_t start = K::Time::getTimeMS();

			// compile-time sanity checks
			// TODO: this solution requires EXPLICIT overloading which is bad...
			//static_assert( HasOperatorAssign<State>::value, "your state needs an assignment operator!" );

			const uint32_t cnt = (uint32_t) particles.size();

			// equal weight for all particles. sums up to 1.0
			const double equalWeight = 1.0 / (double) cnt;

			// get the weight of the "heaviest" particle
			const auto lambda = [] (const Particle<State>& p1, const Particle<State>& p2) {return p1.weight < p2.weight;};
			const Particle<State> max = *std::max_element(particles.begin(), particles.end(), lambda);

			// draw from the input particle set by treating it as a wheel
			K::UniformDistribution dist(0, 2 * max.weight);
			int curIdx = K::UniformDistribution::draw( (int)0, (int)particles.size() - 1);
			double curOffset = dist.draw();

			// ensure the copy vector has the same size as the real particle vector
			particlesCopy.resize(cnt);

			// draw the new set of particles
			for (uint32_t i = 0; i < cnt; ) {

				// found a suitable particle: use it and draw the next random offset
				if (particles[curIdx].weight >= curOffset) {
					particlesCopy[i] = particles[curIdx];
					particlesCopy[i].weight = equalWeight;
					curOffset += dist.draw();
					++i;

				// weight too small: subtract the particle's weight and move on to the next particle
				} else {
					curOffset -= particles[curIdx].weight;
					curIdx = (curIdx + 1) % (particles.size());
				}

			}

			// swap both vectors
			particlesCopy.swap(particles);

			// (debug) print the elapsed time in milliseconds
			const uint64_t end = K::Time::getTimeMS();
			std::cout << (end - start) << std::endl;

		}

	};

}

#endif /* K_MATH_FILTER_PARTICLES_PARTICLEFILTERRESAMPLINGWHEEL_H_ */
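Since K::UniformDistribution and K::Time come from an external library, the same resampling-wheel idea can be sketched with the standard library alone; the snippet below works on plain weights instead of Particle objects and is an illustration of the algorithm, not a drop-in replacement:

#include <algorithm>
#include <cstdio>
#include <random>
#include <vector>

int main() {
	const std::vector<double> weights = {0.1, 0.4, 0.2, 0.3};
	const size_t n = weights.size();

	std::minstd_rand gen(1234);
	const double maxW = *std::max_element(weights.begin(), weights.end());
	std::uniform_real_distribution<double> offsetDist(0.0, 2.0 * maxW);
	std::uniform_int_distribution<int> idxDist(0, (int) n - 1);

	// resampling wheel: start at a random index, then advance while the
	// remaining offset still exceeds the current particle's weight
	size_t idx = (size_t) idxDist(gen);
	double offset = 0.0;
	std::vector<size_t> drawn;
	for (size_t i = 0; i < n; ++i) {
		offset += offsetDist(gen);
		while (offset > weights[idx]) {
			offset -= weights[idx];
			idx = (idx + 1) % n;
		}
		drawn.push_back(idx);	// index of the particle that would be copied
	}

	for (const size_t d : drawn) { std::printf("%zu ", d); }
	std::printf("\n");
	return 0;
}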