diff --git a/data/HistoryTS.h b/data/HistoryTS.h
new file mode 100755
index 0000000..87e4291
--- /dev/null
+++ b/data/HistoryTS.h
@@ -0,0 +1,68 @@
+#ifndef HISTORYTS_H
+#define HISTORYTS_H
+
+#include <vector>
+#include "Timestamp.h"
+#include
+
+/**
+ * keep the history of values for a given amount of time
+ */
+template <typename T> class HistoryTS {
+
+private:
+
+    /** timestamp -> value combination */
+    struct Entry {
+        Timestamp ts;
+        T value;
+        Entry(const Timestamp ts, const T& value) : ts(ts), value(value) {;}
+    };
+
+    /** the time-window to keep */
+    Timestamp window;
+
+    /** the value history for the window-size */
+    std::vector<Entry> history;
+
+public:
+
+
+    /** ctor with the time-window to keep */
+    HistoryTS(const Timestamp window) : window(window) {
+
+    }
+
+    /** add a new entry */
+    void add(const Timestamp ts, const T& data) {
+
+        // append to history
+        history.push_back(Entry(ts, data));
+
+        // remove too-old history entries
+        const Timestamp oldest = ts - window;
+        while (history.front().ts < oldest) {
+
+            // remove from history
+            history.erase(history.begin());
+
+        }
+
+    }
+
+    /** get the most recent entry */
+    T getMostRecent() const {
+        return history.back().value;
+    }
+
+    /** get the oldest entry available */
+    T getOldest() const {
+        return history.front().value;
+    }
+
+
+
+};
+
+
+#endif // HISTORYTS_H
diff --git a/floorplan/v2/FloorplanCeilings.h b/floorplan/v2/FloorplanCeilings.h
index 3e8e2d6..5a8699f 100644
--- a/floorplan/v2/FloorplanCeilings.h
+++ b/floorplan/v2/FloorplanCeilings.h
@@ -73,7 +73,7 @@ namespace Floorplan {
             const float myDistToCeil = (ap.z < person.z) ? (person.z - z) : (z - person.z);
             if ( std::abs(myDistToCeil) < near ) {
-                cnt += sigmoid(myDistToCeil*6);
+                cnt += sigmoid(myDistToCeil * 6);
             } else if (ap.z < z && person.z >= z+near) {    // AP below celing, me above ceiling
                 cnt += 1;
             } else if (ap.z > z && person.z <= z-near) {    // AP above ceiling, me below ceiling
@@ -86,6 +86,22 @@ namespace Floorplan {
 
         }
 
+        float numCeilingsBetweenLinearInt(const Point3 ap, const Point3 person) const {
+
+            // sanity checks
+            Assert::isNot0(ceilingsAtHeight_m.size(), "no ceilings available for testing! incorrect map?");
+
+            int cnt = 0;
+            float sum = 0;
+            for (float z = -1.0; z <= +1.0; z += 0.25) {
+                sum += numCeilingsBetween(ap, person + Point3(0,0,z));
+                ++cnt;
+            }
+
+            return sum/cnt;
+
+        }
+
         /** get the number of ceilings between z1 and z2 */
         int numCeilingsBetween(const Point3 pos1, const Point3 pos2) const {
 
@@ -98,8 +114,9 @@ namespace Floorplan {
 
             #ifdef WITH_ASSERTIONS
-            static size_t numNear = 0;
-            static size_t numFar = 0;
+            static uint64_t numNear = 0;
+            static uint64_t numFar = 0;
+
             for (const float z : ceilingsAtHeight_m) {
                 const float diff = std::min( std::abs(z-zMin), std::abs(z-zMax) );
                 if (diff < 0.1) {++numNear;} else {++numFar;}
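
Illustration (not part of the patch): the new numCeilingsBetweenLinearInt() above smooths the hard integer ceiling count by averaging numCeilingsBetween() over nine vertical offsets, -1.0 m to +1.0 m in 0.25 m steps, around the person. A minimal standalone sketch of that averaging; the callback and function name below are hypothetical stand-ins for the member functions above.

// Illustration only: 'countCeilingsAtOffset(dz)' stands in for
// numCeilingsBetween(ap, person + Point3(0,0,dz)).
#include <functional>

float smoothedCeilingCount(const std::function<float(float dz)>& countCeilingsAtOffset) {
    int cnt = 0;
    float sum = 0;
    for (float dz = -1.0f; dz <= +1.0f; dz += 0.25f) {
        sum += countCeilingsAtOffset(dz);   // ceiling count at this vertical offset
        ++cnt;
    }
    return sum / cnt;                       // mean over the 9 samples
}
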
incorrect map?"); + + int cnt = 0; + float sum = 0; + for (float z = -1.0; z <= +1.0; z+= 0.25) { + sum += numCeilingsBetween(ap, person + Point3(0,0,z)); + ++cnt; + } + + return sum/cnt; + + } + /** get the number of ceilings between z1 and z2 */ int numCeilingsBetween(const Point3 pos1, const Point3 pos2) const { @@ -98,8 +114,9 @@ namespace Floorplan { #ifdef WITH_ASSERTIONS - static size_t numNear = 0; - static size_t numFar = 0; + static uint64_t numNear = 0; + static uint64_t numFar = 0; + for (const float z : ceilingsAtHeight_m) { const float diff = std::min( std::abs(z-zMin), std::abs(z-zMax) ); if (diff < 0.1) {++numNear;} else {++numFar;} diff --git a/main.cpp b/main.cpp index 2808d16..46f1192 100755 --- a/main.cpp +++ b/main.cpp @@ -30,7 +30,7 @@ int main(int argc, char** argv) { //::testing::GTEST_FLAG(filter) = "*Offline.readWrite*"; - ::testing::GTEST_FLAG(filter) = "*Angle*"; + ::testing::GTEST_FLAG(filter) = "*Jensen*"; //::testing::GTEST_FLAG(filter) = "*Barometer*"; diff --git a/math/MovingAVG.h b/math/MovingAVG.h index d3c0ee3..95d1488 100644 --- a/math/MovingAVG.h +++ b/math/MovingAVG.h @@ -11,7 +11,7 @@ private: std::vector values; /** track the current sum of the vector's values */ - T curSum = 0; + T curSum; /** the number of elements to average */ int size; @@ -19,7 +19,7 @@ private: public: /** ctor */ - MovingAVG(const int size) : size(size) {;} + MovingAVG(const int size) : curSum(), size(size) {;} /** add a new value */ void add(const T val) { diff --git a/math/Random.h b/math/Random.h index 6dd8f8f..ae8d8c5 100644 --- a/math/Random.h +++ b/math/Random.h @@ -18,10 +18,10 @@ class RandomGenerator : public std::minstd_rand { public: /** ctor with default seed */ - RandomGenerator() : std::minstd_rand(RANDOM_SEED) {;} + RandomGenerator() : std::minstd_rand(RANDOM_SEED) {;} /** ctor with custom seed */ - RandomGenerator(result_type seed) : std::minstd_rand(seed) {;} + RandomGenerator(result_type seed) : std::minstd_rand(seed) {;} }; diff --git a/math/distribution/NormalN.h b/math/distribution/NormalN.h index bb332c4..5075dea 100644 --- a/math/distribution/NormalN.h +++ b/math/distribution/NormalN.h @@ -91,7 +91,7 @@ namespace Distribution { Assert::notEqual(numElements, 0, "data is empty, thats not enough for getting the distribution!"); const Eigen::MatrixXd centered = data.rowwise() - mean.transpose(); - const Eigen::MatrixXd cov = (centered.adjoint() * centered) / double(data.rows() - 1); + Eigen::MatrixXd cov = (centered.adjoint() * centered) / double(data.rows() - 1); return NormalDistributionN(mean, cov); } diff --git a/math/divergence/JensenShannon.h b/math/divergence/JensenShannon.h index 5c9bbb3..df64088 100644 --- a/math/divergence/JensenShannon.h +++ b/math/divergence/JensenShannon.h @@ -18,6 +18,13 @@ public: * @param Q is a vector containg the densities of the same samples set then P */ static inline Scalar getGeneralFromSamples(Eigen::VectorXd P, Eigen::VectorXd Q, LOGMODE mode){ + // normalize + P /= P.sum(); + Q /= Q.sum(); + + Assert::isNear((double)P.sum(), 1.0, 0.01,"Normalization failed.. this shouldn't happen"); + Assert::isNear((double)Q.sum(), 1.0, 0.01, "Normalization failed.. 
this shouldn't happen"); + Eigen::VectorXd M = 0.5 * (P + Q); return (0.5 * KullbackLeibler::getGeneralFromSamples(P, M, mode)) + (0.5 * KullbackLeibler::getGeneralFromSamples(Q, M, mode)); diff --git a/sensors/activity/ActivityDetector.h b/sensors/activity/ActivityDetector.h index 195d087..b85ed4b 100644 --- a/sensors/activity/ActivityDetector.h +++ b/sensors/activity/ActivityDetector.h @@ -11,13 +11,20 @@ #include "../../math/MovingAverageTS.h" #include "../../math/MovingStdDevTS.h" +#include "../../data/HistoryTS.h" + #include "../activity/Activity.h" +//#define ACT_DET_DEBUG_PLOT + +#ifdef ACT_DET_DEBUG_PLOT #include #include #include #include #include +#endif + /** * simple step detection based on accelerometer magnitude. @@ -37,50 +44,49 @@ private: MovingAverageTS baroAvgSlow; MovingAverageTS baroAvgFast; + MovingAverageTS baroAvg; + HistoryTS baroHistory; + + Activity current; public: +#ifdef ACT_DET_DEBUG_PLOT K::Gnuplot gp; K::GnuplotPlot gplot; K::GnuplotPlotElementLines l1; K::GnuplotPlotElementLines l2; +#endif /** ctor */ ActivityDetector() : avgLong(Timestamp::fromMS(1500), 0), avgShort(Timestamp::fromMS(500), 0), - stdDev(Timestamp::fromMS(200), 0), stdDev2(Timestamp::fromMS(2000), 0) { + stdDev(Timestamp::fromMS(150), 0), stdDev2(Timestamp::fromMS(2000), 0), + baroAvg(Timestamp::fromMS(500), 0), baroHistory(Timestamp::fromMS(4000)) { ; - gplot.add(&l1); - gplot.add(&l2); l2.getStroke().getColor().setHexStr("#ff0000"); + + #ifdef ACT_DET_DEBUG_PLOT + gplot.add(&l1); + gplot.add(&l2); l2.getStroke().getColor().setHexStr("#ff0000"); + #endif + } + //int xx = 0; + /** add barometer data */ void add(const Timestamp ts, const BarometerData& baro) { if (baro.isValid()) { - baroAvgSlow.add(ts, baro.hPa); - baroAvgFast.add(ts, baro.hPa); + baroAvg.add(ts, baro.hPa); + const float avg = baroAvg.get(); + baroHistory.add(ts, avg); + //l1.add(K::GnuplotPoint2(xx, avg)); + update(); } } - Activity get() { - - // delta in acceleration - const float delta_acc = std::abs(avgLong.get() - avgShort.get()); - - if (delta_acc < 0.3) { - return Activity::STANDING; - } - - // delta in pressure - const float delta_hPa = baroAvgFast.get() - baroAvgSlow.get(); - - if (std::abs(delta_hPa) < 0.1) { - return Activity::WALKING; - } else if (delta_hPa > 0) { - return Activity::WALKING_DOWN; - } else { - return Activity::WALKING_UP; - } - + /** get the currently detected activity */ + Activity get() const { + return current; } /** does the given data indicate a step? 
diff --git a/sensors/radio/VAPGrouper.h b/sensors/radio/VAPGrouper.h
index b8a7545..d12cd7a 100644
--- a/sensors/radio/VAPGrouper.h
+++ b/sensors/radio/VAPGrouper.h
@@ -50,18 +50,27 @@ private:
 
     /** the signal-strength aggregation algorithm to use */
     const Aggregation agg;
 
+    /** keep only VAP-groups with at least X occurrences of one physical hardware [can be used to prevent issues] */
+    int minOccurences;
+
 public:
 
     /** ctor */
-    VAPGrouper(const Mode mode, const Aggregation agg) : mode(mode), agg(agg) {
-
+    VAPGrouper(const Mode mode, const Aggregation agg, const int minOccurences = 2) :
+        mode(mode), agg(agg), minOccurences(minOccurences) {
+        ;
     }
 
+    /** set the number of occurrences needed for a VAP-group to be accepted */
+    void setMinOccurences(const int min) {
+        this->minOccurences = min;
+    }
 
     /** get a vap-grouped version of the given input */
     WiFiMeasurements group(const WiFiMeasurements& original) const {
 
         // first, group all VAPs into a vector [one vector per VAP-group]
+        // by using the modified MAC address that all VAPs have in common
         std::unordered_map<MACAddress, std::vector<WiFiMeasurement>> grouped;
         for (const WiFiMeasurement& m : original.entries) {
@@ -72,8 +81,9 @@
 
         }
 
-        // output
+        // to-be-constructed output
         WiFiMeasurements result;
+        int skipped = 0;
 
         // perform aggregation on each VAP-group
         for (auto it : grouped) {
@@ -81,6 +91,9 @@
             const MACAddress& base = it.first;
             const std::vector<WiFiMeasurement>& vaps = it.second;
 
+            // skip VAP-groups that do not satisfy the min-occurences threshold
+            if ((int)vaps.size() < minOccurences) {++skipped; continue;}
+
             // group all VAPs into one measurement
             const WiFiMeasurement groupedMeasurement = groupVAPs(base, vaps);
 
@@ -89,8 +102,12 @@
 
         }
 
-        Log::add(name, "grouped " + std::to_string(original.entries.size()) + " measurements into " + std::to_string(result.entries.size()), true);
-
+        // debug
+        Log::add(name,
+            "grouped " + std::to_string(original.entries.size()) + " measurements " +
+            "into " + std::to_string(result.entries.size()) + " [omitted: " + std::to_string(skipped) + "]",
+            true
+        );
 
         // done
         return result;
diff --git a/sensors/radio/WiFiMeasurements.h b/sensors/radio/WiFiMeasurements.h
index d7d2f04..412883a 100644
--- a/sensors/radio/WiFiMeasurements.h
+++ b/sensors/radio/WiFiMeasurements.h
@@ -32,6 +32,46 @@ struct WiFiMeasurements {
         return nullptr;
     }
 
+    /** remove the entry for the given MAC (if any) */
+    void remove(const MACAddress& mac) {
+        for (size_t i = 0; i < entries.size(); ++i) {
+            if (entries[i].getAP().getMAC() == mac) {
+                entries.erase(entries.begin() + i);
+                break;
+            }
+        }
+    }
+
+
+    /** create a combination of two scans, keeping only entries within 'sec' seconds of the newest timestamp */
+    static WiFiMeasurements mix(const WiFiMeasurements& a, const WiFiMeasurements& b, float sec = 3) {
+
+        Timestamp max;
+        WiFiMeasurements res;
+
+        for (const WiFiMeasurement& m : a.entries) {
+            res.entries.push_back(m);
+            if (m.getTimestamp() > max) {max = m.getTimestamp();}
+        }
+
+        for (const WiFiMeasurement& m : b.entries) {
+            res.entries.push_back(m);
+            if (m.getTimestamp() > max) {max = m.getTimestamp();}
+        }
+
+        std::vector<WiFiMeasurement> tmp;
+        std::swap(res.entries, tmp);
+
+        for (const WiFiMeasurement& m : tmp) {
+            if ((max - m.getTimestamp()).sec() < sec) {
+                res.entries.push_back(m);
+            }
+        }
+
+        return res;
+
+    }
+
 };
 
 #endif // WIFIMEASUREMENTS_H
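
Possible usage of the pieces added above, combining WiFiMeasurements::mix() with the VAPGrouper min-occurrence filter. The include paths and this helper function are assumptions for illustration, not code taken from the repository.

// Illustration only: blend two scans, then group VAPs per physical AP.
#include "sensors/radio/VAPGrouper.h"
#include "sensors/radio/WiFiMeasurements.h"

WiFiMeasurements groupTwoScans(const VAPGrouper& grouper,
                               const WiFiMeasurements& scanPrev,
                               const WiFiMeasurements& scanNow) {
    // blend both scans, keeping only entries within 3 s of the newest timestamp
    const WiFiMeasurements merged = WiFiMeasurements::mix(scanPrev, scanNow, 3);
    // aggregate VAPs per physical AP; groups seen fewer than minOccurences times
    // are skipped and show up as "[omitted: N]" in the log line above
    return grouper.group(merged);
}
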
diff --git a/sensors/radio/WiFiProbabilityFree.h b/sensors/radio/WiFiProbabilityFree.h
index 19a89ec..c8f79e0 100644
--- a/sensors/radio/WiFiProbabilityFree.h
+++ b/sensors/radio/WiFiProbabilityFree.h
@@ -45,7 +45,6 @@ public:
 
         double prob = 1.0;
         //double prob = 0;
-        double error = 0;
 
         int numMatchingAPs = 0;
 
@@ -77,14 +76,13 @@ public:
             const float sigma = this->sigma + this->sigmaPerSecond * age.sec();
 
             // probability for this AP
-            //double local = Distribution::Normal::getProbability(modelRSSI, sigma, scanRSSI);
-            double local = Distribution::Exponential::getProbability(0.1, std::abs(modelRSSI-scanRSSI));
+            double local = Distribution::Normal::getProbability(modelRSSI, sigma, scanRSSI);
+            //double local = Distribution::Exponential::getProbability(0.1, std::abs(modelRSSI-scanRSSI));
 
             // also add the error value? [location is OK but model is wrong]
             if (useError) {
                 local = 0.95 * local + 0.05 * (1.0-local);
                 //local = 0.95 * local + 0.05;
-#warning "TODO"
             }
 
             // update probability
diff --git a/sensors/radio/WiFiQualityAnalyzer.h b/sensors/radio/WiFiQualityAnalyzer.h
index 994b2ec..522b8cf 100644
--- a/sensors/radio/WiFiQualityAnalyzer.h
+++ b/sensors/radio/WiFiQualityAnalyzer.h
@@ -68,8 +68,8 @@ private:
         const float stdDev = std::sqrt(avg2 - avg*avg);
 
         // avg rssi score
-        const float minRSSI = -90;
-        const float maxRSSI = -65;
+        const float minRSSI = -85;
+        const float maxRSSI = -70;
         float score1 = (avg-minRSSI) / (maxRSSI-minRSSI);        // min = 0; max = 1
         if (score1 > 1) {score1 = 1;}
         if (score1 < 0) {score1 = 0;}
diff --git a/sensors/radio/model/WiFiModelLogDistCeiling.h b/sensors/radio/model/WiFiModelLogDistCeiling.h
index 3251f13..f024251 100644
--- a/sensors/radio/model/WiFiModelLogDistCeiling.h
+++ b/sensors/radio/model/WiFiModelLogDistCeiling.h
@@ -149,6 +149,7 @@ public:
         // WAF loss (params.waf is a negative value!) -> WAF loss is also a negative value
         //const float wafLoss = params.waf * ceilings.numCeilingsBetween(position_m, params.position_m);
         const float wafLoss = params.waf * ceilings.numCeilingsBetweenFloat(position_m, params.position_m);
+        //const float wafLoss = params.waf * ceilings.numCeilingsBetweenLinearInt(position_m, params.position_m);
 
         // combine
         const float res = rssiLOS + wafLoss;
diff --git a/sensors/radio/model/WiFiModelPerBBox.h b/sensors/radio/model/WiFiModelPerBBox.h
index 8a32899..1ae5ffc 100644
--- a/sensors/radio/model/WiFiModelPerBBox.h
+++ b/sensors/radio/model/WiFiModelPerBBox.h
@@ -52,15 +52,18 @@ public:
 
     /** get a list of all APs known to the model */
     std::vector<AccessPoint> getAllAPs() const override {
+
+        // combine all submodels
         std::vector<AccessPoint> res;
         for (const ModelForBBoxes& sub : models) {
             for (const AccessPoint& ap : sub.mdl->getAllAPs()) {
-                if (std::find(res.begin(), res.end(), ap) == res.end()) {
+                if (std::find(res.begin(), res.end(), ap) == res.end()) {    // TODO use map instead?
                     res.push_back(ap);
                 }
             }
         }
         return res;
+
     }
 
     void add(WiFiModel* mdl, const BBoxes3 bboxes) {
diff --git a/sensors/radio/model/WiFiModelPerFloor.h b/sensors/radio/model/WiFiModelPerFloor.h
index e5aa2b1..f14dd7e 100644
--- a/sensors/radio/model/WiFiModelPerFloor.h
+++ b/sensors/radio/model/WiFiModelPerFloor.h
@@ -49,7 +49,18 @@ public:
 
     /** get a list of all APs known to the model */
     std::vector<AccessPoint> getAllAPs() const override {
-        return models.front().mdl->getAllAPs();
+
+        // combine all submodels
+        std::vector<AccessPoint> res;
+        for (const ModelForFloor& sub : models) {
+            for (const AccessPoint& ap : sub.mdl->getAllAPs()) {
+                if (std::find(res.begin(), res.end(), ap) == res.end()) {    // TODO use map instead?
+                    res.push_back(ap);
+                }
+            }
+        }
+        return res;
+
     }
 
     void add(WiFiModel* mdl, const Floorplan::Floor* floor) {
diff --git a/tests/math/divergence/TestJensenShannon.cpp b/tests/math/divergence/TestJensenShannon.cpp
new file mode 100644
index 0000000..4bf8cf3
--- /dev/null
+++ b/tests/math/divergence/TestJensenShannon.cpp
@@ -0,0 +1,94 @@
+#ifdef WITH_TESTS
+
+#include "../../Tests.h"
+#include "../../../math/divergence/JensenShannon.h"
+#include "../../../math/Distributions.h"
+
+#include <random>
+
+
+TEST(JensenShannon, generalFromSamples) {
+
+    // means and covariances
+    Eigen::VectorXd mu1(2);
+    mu1[0] = 1.0;
+    mu1[1] = 1.0;
+
+    Eigen::VectorXd mu2(2);
+    mu2[0] = 1.0;
+    mu2[1] = 1.0;
+
+    Eigen::VectorXd mu3(2);
+    mu3[0] = 1.0;
+    mu3[1] = 1.0;
+
+    Eigen::VectorXd mu4(2);
+    mu4[0] = 9.0;
+    mu4[1] = 9.0;
+
+    Eigen::MatrixXd cov1(2,2);
+    cov1(0,0) = 1.0;
+    cov1(0,1) = 0.0;
+    cov1(1,0) = 0.0;
+    cov1(1,1) = 1.0;
+
+    Eigen::MatrixXd cov2(2,2);
+    cov2(0,0) = 1.0;
+    cov2(0,1) = 0.0;
+    cov2(1,0) = 0.0;
+    cov2(1,1) = 1.0;
+
+    Eigen::MatrixXd cov3(2,2);
+    cov3(0,0) = 1.0;
+    cov3(0,1) = 0.0;
+    cov3(1,0) = 0.0;
+    cov3(1,1) = 1.0;
+
+    Eigen::MatrixXd cov4(2,2);
+    cov4(0,0) = 3.0;
+    cov4(0,1) = 0.0;
+    cov4(1,0) = 0.0;
+    cov4(1,1) = 13.0;
+
+    Distribution::NormalDistributionN norm1(mu1, cov1);
+    Distribution::NormalDistributionN norm2(mu2, cov2);
+    Distribution::NormalDistributionN norm3(mu3, cov3);
+    Distribution::NormalDistributionN norm4(mu4, cov4);
+
+    int size = 10000;
+    Eigen::VectorXd samples1(size);
+    Eigen::VectorXd samples2(size);
+    Eigen::VectorXd samples3(size);
+    Eigen::VectorXd samples4(size);
+
+    // random numbers
+    std::mt19937_64 rng;
+    // initialize the random number generator with a time-dependent seed
+    uint64_t timeSeed = std::chrono::high_resolution_clock::now().time_since_epoch().count();
+    std::seed_seq ss{uint32_t(timeSeed & 0xffffffff), uint32_t(timeSeed>>32)};
+    rng.seed(ss);
+    // initialize a uniform distribution between -9 and 10
+    std::uniform_real_distribution<double> unif(-9, 10);
+
+    // generate samples
+    for (int i = 0; i < size; ++i) {
+
+        double r1 = unif(rng);
+        double r2 = unif(rng);
+        Eigen::VectorXd v(2);
+        v << r1, r2;
+
+        samples1[i] = norm1.getProbability(v);
+        samples2[i] = norm2.getProbability(v);
+        samples3[i] = norm3.getProbability(v);
+        samples4[i] = norm4.getProbability(v);
+    }
+
+    double jsd12 = Divergence::JensenShannon::getGeneralFromSamples(samples1, samples2, Divergence::LOGMODE::NATURALIS);
+    double jsd34 = Divergence::JensenShannon::getGeneralFromSamples(samples3, samples4, Divergence::LOGMODE::NATURALIS);
+    std::cout << jsd34 << " > " << jsd12 << std::endl;
+
+    ASSERT_GE(jsd34, jsd12);
+
+}
+
+#endif
diff --git a/tests/math/divergence/TestJensenShannon.h b/tests/math/divergence/TestJensenShannon.h
new file mode 100644
index 0000000..e69de29