added Kullback-Leibler divergence for Gaussian cases

toni
2017-03-09 18:57:47 +01:00
parent 62087fe072
commit e48d3bafcd
9 changed files with 374 additions and 8 deletions
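
For reference, these are the standard closed forms the tests below are consistent with. It is an assumption here that Distribution::Normal<T>(mu, sigma) takes a mean and a standard deviation, and that the get*Gauss functions compute KL(p || q) of their first argument against their second; KullbackLeibler.h itself is not shown in this excerpt.

\[
\mathrm{KL}\big(\mathcal{N}(\mu_1,\sigma_1^2)\,\|\,\mathcal{N}(\mu_2,\sigma_2^2)\big)
  = \ln\frac{\sigma_2}{\sigma_1} + \frac{\sigma_1^2 + (\mu_1-\mu_2)^2}{2\sigma_2^2} - \frac{1}{2}
\]

\[
\mathrm{KL}\big(\mathcal{N}_0(\mu_0,\Sigma_0)\,\|\,\mathcal{N}_1(\mu_1,\Sigma_1)\big)
  = \tfrac{1}{2}\Big(\operatorname{tr}(\Sigma_1^{-1}\Sigma_0)
  + (\mu_1-\mu_0)^\top\Sigma_1^{-1}(\mu_1-\mu_0) - k + \ln\frac{\det\Sigma_1}{\det\Sigma_0}\Big)
\]

Both vanish exactly when the two distributions coincide and grow with the gap in mean or covariance, which is all the assertions below rely on. The Symmetric variants are presumably a symmetrised combination such as KL(p || q) + KL(q || p), which shares those properties.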

@@ -0,0 +1,214 @@
#ifdef WITH_TESTS
#include "../../Tests.h"
#include "../../../math/divergence/KullbackLeibler.h"
#include "../../../math/Distributions.h"
#include <cstdlib>   // rand()
#include <iostream>  // std::cout in the multivariate tests
TEST(KullbackLeibler, univariateGaussEQ) {
    // if the distributions are equal, the KLD is 0
    Distribution::Normal<float> norm1(0, 1);
    Distribution::Normal<float> norm2(0, 1);
    ASSERT_EQ(0.0f, Divergence::KullbackLeibler<float>::getUnivariateGauss(norm1, norm2));
    ASSERT_EQ(0.0f, Divergence::KullbackLeibler<float>::getUnivariateGaussSymmetric(norm1, norm2));
}
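// Sanity check for univariateGaussEQ above: with identical parameters the standard
// closed form gives ln(1) + (1 + 0)/2 - 1/2 = 0, so comparing against exactly 0 is
// reasonable here (assuming the implementation uses that closed form).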
TEST(KullbackLeibler, univariateGaussGEmu) {
    // a larger difference in mu means a greater KLD
    Distribution::Normal<float> norm1(0, 1);
    Distribution::Normal<float> norm2(0, 1);
    Distribution::Normal<float> norm3(0, 1);
    Distribution::Normal<float> norm4(1, 1);
    ASSERT_GE(Divergence::KullbackLeibler<float>::getUnivariateGauss(norm3, norm4),
              Divergence::KullbackLeibler<float>::getUnivariateGauss(norm1, norm2));
    ASSERT_GE(Divergence::KullbackLeibler<float>::getUnivariateGaussSymmetric(norm3, norm4),
              Divergence::KullbackLeibler<float>::getUnivariateGaussSymmetric(norm1, norm2));
}
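// Worked value for univariateGaussGEmu above: with sigma1 = sigma2 = 1 the standard
// closed form reduces to (mu1 - mu2)^2 / 2, i.e. 0.5 for norm3/norm4 versus 0 for the
// identical pair norm1/norm2 (assuming Normal<T>(mu, sigma) is mean / standard deviation).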
TEST(KullbackLeibler, univariateGaussGEsigma) {
    // a larger difference in sigma means a greater KLD
    Distribution::Normal<float> norm1(0, 1);
    Distribution::Normal<float> norm2(0, 1);
    Distribution::Normal<float> norm5(0, 1);
    Distribution::Normal<float> norm6(0, 3);
    ASSERT_GE(Divergence::KullbackLeibler<float>::getUnivariateGauss(norm5, norm6),
              Divergence::KullbackLeibler<float>::getUnivariateGauss(norm1, norm2));
    ASSERT_GE(Divergence::KullbackLeibler<float>::getUnivariateGaussSymmetric(norm5, norm6),
              Divergence::KullbackLeibler<float>::getUnivariateGaussSymmetric(norm1, norm2));
}
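// Worked value for univariateGaussGEsigma above: with equal means the KLD depends only
// on the sigma ratio; treating the 3 as a standard deviation, the standard closed form
// gives ln(3) + 1/18 - 1/2 ≈ 0.65 in one direction and 9/2 - 1/2 - ln(3) ≈ 2.90 in the
// other, either of which exceeds the 0 of the identical pair.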
TEST(KullbackLeibler, univariateGaussRAND) {
    // randomised means: the pair with the larger mean gap must have the larger KLD
    for (int i = 0; i < 20; i++) {
        auto randMu1 = rand() % 100;        // [0, 99]
        auto randMu2 = rand() % 100 + 100;  // [100, 199] -> gap to randMu1 in [1, 199]
        auto randMu3 = rand() % 100;        // [0, 99]
        auto randMu4 = rand() % 100 + 300;  // [300, 399] -> gap to randMu3 in [201, 399]
        Distribution::Normal<float> norm7(randMu1, 1);
        Distribution::Normal<float> norm8(randMu2, 1);
        Distribution::Normal<float> norm9(randMu3, 1);
        Distribution::Normal<float> norm10(randMu4, 1);
        ASSERT_GE(Divergence::KullbackLeibler<float>::getUnivariateGauss(norm9, norm10),
                  Divergence::KullbackLeibler<float>::getUnivariateGauss(norm8, norm7));
        ASSERT_GE(Divergence::KullbackLeibler<float>::getUnivariateGaussSymmetric(norm9, norm10),
                  Divergence::KullbackLeibler<float>::getUnivariateGaussSymmetric(norm8, norm7));
    }
}
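// Note on univariateGaussRAND above: with unit sigmas the standard closed form reduces
// to (mu difference)^2 / 2, so the comparison only holds on every draw if the two
// mean-gap ranges do not overlap.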
TEST(KullbackLeibler, multivariateGaussEQ) {
    // if the distributions are equal, the KLD is 0
    Eigen::VectorXd mu1(2);
    mu1 << 1.0, 1.0;
    Eigen::VectorXd mu2(2);
    mu2 << 1.0, 1.0;
    Eigen::MatrixXd cov1 = Eigen::MatrixXd::Identity(2, 2);
    Eigen::MatrixXd cov2 = Eigen::MatrixXd::Identity(2, 2);
    Distribution::NormalDistributionN norm1(mu1, cov1);
    Distribution::NormalDistributionN norm2(mu2, cov2);
    ASSERT_EQ(0.0f, Divergence::KullbackLeibler<float>::getMultivariateGauss(norm1, norm2));
    ASSERT_EQ(0.0f, Divergence::KullbackLeibler<float>::getMultivariateGaussSymmetric(norm1, norm2));
}
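// Sanity check for multivariateGaussEQ above: with equal means and identity covariances
// the standard closed form gives 0.5 * (tr(I) + 0 - 2 + ln(1)) = 0, so exact equality
// with 0 is reasonable here as well.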
TEST(KullbackLeibler, multivariateGaussGeMu) {
    // a larger difference in mu means a greater KLD
    Eigen::VectorXd mu1(2);
    mu1 << 1.0, 1.0;
    Eigen::VectorXd mu2(2);
    mu2 << 1.0, 1.0;
    Eigen::VectorXd mu3(2);
    mu3 << 1.0, 1.0;
    Eigen::VectorXd mu4(2);
    mu4 << 1.0, 3.0;
    Eigen::MatrixXd cov1 = Eigen::MatrixXd::Identity(2, 2);
    Eigen::MatrixXd cov2 = Eigen::MatrixXd::Identity(2, 2);
    Eigen::MatrixXd cov3 = Eigen::MatrixXd::Identity(2, 2);
    Eigen::MatrixXd cov4 = Eigen::MatrixXd::Identity(2, 2);
    Distribution::NormalDistributionN norm1(mu1, cov1);
    Distribution::NormalDistributionN norm2(mu2, cov2);
    Distribution::NormalDistributionN norm3(mu3, cov3);
    Distribution::NormalDistributionN norm4(mu4, cov4);
    double kld12 = Divergence::KullbackLeibler<float>::getMultivariateGauss(norm1, norm2);
    double kld34 = Divergence::KullbackLeibler<float>::getMultivariateGauss(norm3, norm4);
    std::cout << kld34 << " > " << kld12 << std::endl;
    double kld12sym = Divergence::KullbackLeibler<float>::getMultivariateGaussSymmetric(norm1, norm2);
    double kld34sym = Divergence::KullbackLeibler<float>::getMultivariateGaussSymmetric(norm3, norm4);
    std::cout << kld34sym << " > " << kld12sym << std::endl;
    ASSERT_GE(kld34, kld12);
    ASSERT_GE(kld34sym, kld12sym);
}
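// Worked value for multivariateGaussGeMu above: only mu4 differs ((1, 3) vs (1, 1));
// with identity covariances the standard closed form reduces to the squared mean
// distance over 2, i.e. 4 / 2 = 2 for norm3/norm4 versus 0 for norm1/norm2.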
TEST(KullbackLeibler, multivariateGaussGeCov) {
    // a larger difference in covariance means a greater KLD
    Eigen::VectorXd mu1(2);
    mu1 << 1.0, 1.0;
    Eigen::VectorXd mu2(2);
    mu2 << 1.0, 1.0;
    Eigen::VectorXd mu3(2);
    mu3 << 1.0, 1.0;
    Eigen::VectorXd mu4(2);
    mu4 << 1.0, 1.0;
    Eigen::MatrixXd cov1 = Eigen::MatrixXd::Identity(2, 2);
    Eigen::MatrixXd cov2 = Eigen::MatrixXd::Identity(2, 2);
    Eigen::MatrixXd cov3 = Eigen::MatrixXd::Identity(2, 2);
    Eigen::MatrixXd cov4(2, 2);
    cov4 << 3.0, 0.0,
            0.0, 1.0;
    Distribution::NormalDistributionN norm1(mu1, cov1);
    Distribution::NormalDistributionN norm2(mu2, cov2);
    Distribution::NormalDistributionN norm3(mu3, cov3);
    Distribution::NormalDistributionN norm4(mu4, cov4);
    double kld12 = Divergence::KullbackLeibler<float>::getMultivariateGauss(norm1, norm2);
    double kld34 = Divergence::KullbackLeibler<float>::getMultivariateGauss(norm3, norm4);
    std::cout << kld34 << " > " << kld12 << std::endl;
    double kld12sym = Divergence::KullbackLeibler<float>::getMultivariateGaussSymmetric(norm1, norm2);
    double kld34sym = Divergence::KullbackLeibler<float>::getMultivariateGaussSymmetric(norm3, norm4);
    std::cout << kld34sym << " > " << kld12sym << std::endl;
    ASSERT_GE(kld34, kld12);
    ASSERT_GE(kld34sym, kld12sym);
}
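// Worked value for multivariateGaussGeCov above: means are equal and cov4 = diag(3, 1);
// depending on the argument order the standard closed form gives
// 0.5 * (1/3 + 1 - 2 + ln(3)) ≈ 0.22 or 0.5 * (3 + 1 - 2 - ln(3)) ≈ 0.45, either of
// which exceeds the 0 of the identical pair norm1/norm2.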
#endif
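
This excerpt only contains the tests; the KullbackLeibler.h implementation is not part of the diff shown above. As a point of reference, here is a minimal sketch of the univariate closed form that the assertions are consistent with, assuming a (mean, standard deviation) parameterisation; the function names univariateGaussKld and univariateGaussKldSymmetric are illustrative and not taken from the repository.

#include <cmath>

// Illustrative sketch (not the repository's implementation):
// KL(N(mu1, sigma1^2) || N(mu2, sigma2^2)) in closed form.
template <typename T>
T univariateGaussKld(T mu1, T sigma1, T mu2, T sigma2) {
    const T diff = mu1 - mu2;
    return std::log(sigma2 / sigma1)
         + (sigma1 * sigma1 + diff * diff) / (T(2) * sigma2 * sigma2)
         - T(0.5);
}

// Symmetrised variant in the same spirit: KL(p || q) + KL(q || p).
template <typename T>
T univariateGaussKldSymmetric(T mu1, T sigma1, T mu2, T sigma2) {
    return univariateGaussKld(mu1, sigma1, mu2, sigma2)
         + univariateGaussKld(mu2, sigma2, mu1, sigma1);
}

For identical parameters both functions return exactly 0, matching the ASSERT_EQ checks; for the (0, 1) vs (1, 1) case they return 0.5 and 1.0 respectively, matching the ordering the ASSERT_GE checks rely on.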