#ifndef KLB_H
#define KLB_H

#include <vector>
#include <random>
#include <chrono>
#include <cstdint>
#include <cmath>

#include <Eigen/Dense>

#include "Structs.h"
#include "../Plotti.h"
#include "Logic.h"
#include "../Settings.h"

//compares the particle distribution against the wi-fi distribution:
//a Parzen-window (kernel density) estimate is built from the particles, evaluated
//at the wi-fi sample positions, and both probability vectors are then compared
//via the Jensen-Shannon divergence
static double getKernelDensityProbability(std::vector<K::Particle<MyState>>& particles, MyState state, std::vector<K::Particle<MyState>>& samplesWifi){

	//Parzen window estimate: weighted sum of Gaussian kernels (sigma = 100cm) centered on each particle
	Distribution::KernelDensity<MyState> parzen([&](MyState state){
		int size = particles.size();
		double prob = 0;
		#pragma omp parallel for reduction(+:prob) num_threads(6)
		for(int i = 0; i < size; ++i){
			double distance = particles[i].state.position.getDistanceInCM(state.position);
			prob += Distribution::Normal::getProbability(0, 100, distance) * particles[i].weight;
		}
		return prob;
	});

	std::vector<double> probsWifiV;
	std::vector<double> probsParticleV;

	//just for plotting stuff
	std::vector<K::Particle<MyState>> samplesParticles;

	//only evaluate every 4th wi-fi sample to keep the kernel density evaluation cheap
	const int step = 4;
	int i = 0;
	for(K::Particle<MyState> particle : samplesWifi){
		if(++i % step != 0){continue;}
		MyState state(GridPoint(particle.state.position.x_cm, particle.state.position.y_cm, particle.state.position.z_cm));
		double probiParticle = parzen.getProbability(state);
		probsParticleV.push_back(probiParticle);
		double probiWifi = particle.weight;
		probsWifiV.push_back(probiWifi);
		//samplesParticles.push_back(K::Particle<MyState>(state, probiParticle));
	}

	//wrap the probability vectors for Eigen
	Eigen::Map<Eigen::VectorXd> probsWifi(&probsWifiV[0], probsWifiV.size());
	Eigen::Map<Eigen::VectorXd> probsParticle(&probsParticleV[0], probsParticleV.size());

	//get divergence
	//double kld = Divergence::KullbackLeibler::getGeneralFromSamples(probsParticle, probsWifi, Divergence::LOGMODE::NATURALIS);
	double kld = Divergence::JensenShannon::getGeneralFromSamples(probsParticle, probsWifi, Divergence::LOGMODE::NATURALIS);

	//plotti
	//plot.debugDistribution1(samplesWifi);
	//plot.debugDistribution1(samplesParticles);

	//estimate the mean
	//K::ParticleFilterEstimationOrderedWeightedAverage<MyState> estimateWifi(0.95);
	//const MyState estWifi = estimateWifi.estimate(samplesWifi);
	//plot.addEstimationNodeSmoothed(estWifi.position.inMeter());

	return kld;
}
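//--- illustrative sketch, not part of the original pipeline -----------------
//Divergence::JensenShannon::getGeneralFromSamples is used above as a black box.
//Assuming it normalizes both inputs and uses the natural logarithm
//(LOGMODE::NATURALIS), it presumably computes
//  JSD(P||Q) = 0.5 * KL(P||M) + 0.5 * KL(Q||M)  with  M = 0.5 * (P + Q).
//The hypothetical helper below spells that computation out by hand:
static double jensenShannonSketch(Eigen::VectorXd p, Eigen::VectorXd q){
	p /= p.sum(); //normalize to proper probability distributions
	q /= q.sum();
	const Eigen::VectorXd mix = 0.5 * (p + q); //the mixture M
	double klPM = 0;
	double klQM = 0;
	for(Eigen::Index i = 0; i < p.size(); ++i){
		if(p(i) > 0){klPM += p(i) * std::log(p(i) / mix(i));} //KL(P||M)
		if(q(i) > 0){klQM += q(i) * std::log(q(i) / mix(i));} //KL(Q||M)
	}
	return 0.5 * klPM + 0.5 * klQM;
}
//----------------------------------------------------------------------------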
//compares the particle distribution against the wi-fi distribution by fitting
//a multivariate normal to the particle cloud and measuring the Kullback-Leibler
//divergence to a Gaussian placed at the wi-fi estimate
static double kldFromMultivariatNormal(std::vector<K::Particle<MyState>>& particles, MyState state, std::vector<K::Particle<MyState>>& particleWifi){

	//kld: take the particles that were resampled, estimate a normal distribution from them, and compare it with wi-fi
	//todo put this in depletionhelper.h

	Point3 estPos = state.position.inMeter();

	//this is a hack! the sigma of z can become 0, in which case the rank of the
	//covariance drops to 2 and no inverse matrix exists, so jitter every sample slightly
	std::mt19937_64 rng;
	//initialize the random number generator with a time-dependent seed
	uint64_t timeSeed = std::chrono::high_resolution_clock::now().time_since_epoch().count();
	std::seed_seq ss{uint32_t(timeSeed & 0xffffffff), uint32_t(timeSeed>>32)};
	rng.seed(ss);
	//initialize a uniform distribution between -0.0001 and 0.0001
	std::uniform_real_distribution<double> unif(-0.0001, 0.0001);

	//create a gauss dist for the current particle approx.
	Eigen::MatrixXd m(particles.size(), 3);
	for(int i = 0; i < (int) particles.size(); ++i){
		m(i,0) = (particles[i].state.position.x_cm / 100.0) + unif(rng);
		m(i,1) = (particles[i].state.position.y_cm / 100.0) + unif(rng);
		m(i,2) = (particles[i].state.position.z_cm / 100.0) + unif(rng);
	}

	Eigen::VectorXd mean(3);
	mean << estPos.x, estPos.y, estPos.z;

	Distribution::NormalDistributionN normParticle = Distribution::NormalDistributionN::getNormalNFromSamplesAndMean(m, mean);

	//create a gauss dist for wifi
	Eigen::MatrixXd covWifi(3,3);
	covWifi << Settings::WiFiModel::sigma, 0, 0,
	           0, Settings::WiFiModel::sigma, 0,
	           0, 0, 0.01;

//	//calc wi-fi prob for every node and get mean vector
//	WiFiObserverFree wiFiProbability(Settings::WiFiModel::sigma, model);
//	const WiFiMeasurements wifiObs = Settings::WiFiModel::vg_eval.group(obs.wifi);
//	std::vector<MyNode> allNodes = grid.getNodes();
//	std::vector<K::Particle<MyState>> particleWifi;
//	//problem! by using only the grid nodes, the mean naturally shifts towards the center of the building and not to the edge
//	//so the distribution has to be built over more nodes or via sampling!! mean error!!!!
//	//#pragma omp parallel for num_threads(6)
//	for(MyNode node : allNodes){
//		double prob = wiFiProbability.getProbability(node, ts, wifiObs);
//		K::Particle<MyState> tmp (MyState(GridPoint(node.x_cm, node.y_cm, node.z_cm)), prob);
//		//#pragma omp critical
//		particleWifi.push_back(tmp);
//	}

//	std::vector<double> floors;
//	floors.push_back(0.0);
//	floors.push_back(4.0);
//	floors.push_back(7.4);
//	floors.push_back(10.8);

//	#pragma omp parallel for num_threads(6)
//	for(int x = -20; x < 100; ++x){
//		for(int y = -20; y < 75; ++y){
//			for(double z : floors){
//				double X = x;// / 10.0;
//				double Y = y;// / 10.0;
//				double Z = z;// / 10.0;
//				Point3 pt(X,Y,Z);
//				double prob = wiFiProbability.getProbability(pt + Point3(0,0,1.3), ts, wifiObs);
//				K::Particle<MyState> tmp (MyState(GridPoint(X * 100.0, Y * 100.0, Z * 100.0)), prob);
//				#pragma omp critical
//				particleWifi.push_back(tmp);
//			}
//		}
//	}

	//estimate the mean
	K::ParticleFilterEstimationOrderedWeightedAverage<MyState> estimateWifi(0.95);
	const MyState estWifi = estimateWifi.estimate(particleWifi);

	//get matrix with wifi particles
//	Eigen::MatrixXd mW(particleWifi.size(), 3);
//	for(int i = 0; i < particleWifi.size(); ++i){
//		mW(i,0) = particleWifi[i].state.position.x_cm / 100.0;
//		mW(i,1) = particleWifi[i].state.position.y_cm / 100.0;
//		mW(i,2) = estWifi.position.z_cm / 100.0;
//	}

	Eigen::VectorXd meanWifi(3);
	meanWifi << estWifi.position.x_cm / 100.0, estWifi.position.y_cm / 100.0, estWifi.position.z_cm / 100.0;

	Distribution::NormalDistributionN normWifi(meanWifi, covWifi);
	//Distribution::NormalDistributionN normWifi = Distribution::NormalDistributionN::getNormalNFromSamplesAndMean(mW, meanWifi);

	//get the kld distance
	double kld = Divergence::KullbackLeibler::getMultivariateGauss(normParticle, normWifi);

	//plot.debugDistribution1(particleWifi);
	//plot.drawNormalN1(normParticle);
	//plot.drawNormalN2(normWifi);
	//plot.addEstimationNodeSmoothed(estWifi.position.inMeter());

	return kld;
}

#endif // KLB_H
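//--- background note, not part of the original code -------------------------
//Divergence::KullbackLeibler::getMultivariateGauss above presumably evaluates
//the standard closed form for two k-dimensional Gaussians N0(mu0,S0), N1(mu1,S1):
//  KL(N0||N1) = 0.5 * ( trace(S1^-1 * S0)
//                     + (mu1 - mu0)^T * S1^-1 * (mu1 - mu0)
//                     - k
//                     + ln(det(S1) / det(S0)) )
//A hand-rolled Eigen version (hypothetical helper, shown as a comment since it
//sits after the include guard) would look roughly like this:
//	static double klMultivariateGaussSketch(const Eigen::VectorXd& mu0, const Eigen::MatrixXd& s0,
//	                                        const Eigen::VectorXd& mu1, const Eigen::MatrixXd& s1){
//		const Eigen::MatrixXd s1inv = s1.inverse();
//		const Eigen::VectorXd d = mu1 - mu0;
//		const double k = (double) mu0.size();
//		return 0.5 * ((s1inv * s0).trace()
//		            + d.dot(s1inv * d)
//		            - k
//		            + std::log(s1.determinant() / s0.determinant()));
//	}
//----------------------------------------------------------------------------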