[aGrUM/pyAgrum] API : hybridApprox->loopySampling + sampling inference API in pyAgrum

parent d91a0d55
#include <agrum/BN/inference/hybridApproxInference.h>
template class gum::HybridApproxInference<float, gum::WeightedSampling>;
template class gum::HybridApproxInference<double, gum::WeightedSampling>;
template class gum::HybridApproxInference<float, gum::ImportanceSampling>;
template class gum::HybridApproxInference<double, gum::ImportanceSampling>;
template class gum::HybridApproxInference<float, gum::GibbsSampling>;
template class gum::HybridApproxInference<double, gum::GibbsSampling>;
template class gum::HybridApproxInference<float, gum::MonteCarloSampling>;
template class gum::HybridApproxInference<double, gum::MonteCarloSampling>;
......@@ -41,7 +41,7 @@ namespace gum {
/**
* Default constructor
*/
LoopyBeliefPropagation( const IBayesNet<GUM_SCALAR>* BN );
LoopyBeliefPropagation( const IBayesNet<GUM_SCALAR>* bn );
/**
* Destructor.
......
......@@ -43,8 +43,8 @@ namespace gum {
/// default constructor
template <typename GUM_SCALAR>
LoopyBeliefPropagation<GUM_SCALAR>::LoopyBeliefPropagation(
const IBayesNet<GUM_SCALAR>* BN )
: ApproximateInference<GUM_SCALAR>( BN ) {
const IBayesNet<GUM_SCALAR>* bn )
: ApproximateInference<GUM_SCALAR>( bn ) {
// for debugging purposes
GUM_CONSTRUCTOR( LoopyBeliefPropagation );
......
#include <agrum/BN/inference/loopySamplingInference.h>
template class gum::LoopySamplingInference<float, gum::WeightedSampling>;
template class gum::LoopySamplingInference<double, gum::WeightedSampling>;
template class gum::LoopySamplingInference<float, gum::ImportanceSampling>;
template class gum::LoopySamplingInference<double, gum::ImportanceSampling>;
template class gum::LoopySamplingInference<float, gum::GibbsSampling>;
template class gum::LoopySamplingInference<double, gum::GibbsSampling>;
template class gum::LoopySamplingInference<float, gum::MonteCarloSampling>;
template class gum::LoopySamplingInference<double, gum::MonteCarloSampling>;
......@@ -26,8 +26,8 @@
* @author Paul ALAM & Pierre-Henri WUILLEMIN
*/
#ifndef GUM_HYBRID_INFERENCE_H
#define GUM_HYBRID_INFERENCE_H
#ifndef GUM_LOOPY_INFERENCE_H
#define GUM_LOOPY_INFERENCE_H
#include <agrum/BN/inference/GibbsSampling.h>
#include <agrum/BN/inference/MonteCarloSampling.h>
#include <agrum/BN/inference/importanceSampling.h>
......@@ -38,37 +38,33 @@
namespace gum {
/**
* @class HybridApproxInference hybridApproxInference.h
*<agrum/BN/inference/hybridApproxInference.h>
* @class LoopySamplingInference LoopySamplingInference.h
*<agrum/BN/inference/loopySamplingInference.h>
* @brief class for making hybrid sampling inference with loopy belief propagation
*and
* an approximation inference method in bayesian networks.
* and an approximation inference method in bayesian networks.
* @ingroup bn_approximation
*
 * This class inherits from the template class APPROX, which SHOULD be one of the 4
*approximate
* inference methods (MonteCarlo, Weighted, Importance, Gibbs).
* approximate inference methods (MonteCarlo, Weighted, Importance, Gibbs).
* It makes the inference with respect to the inherited class' method, after
*having
* initialized the estimators with the posteriors obtained by running
*LoopyBeliefPropagation
* algorithm.
* having initialized the estimators with the posteriors obtained by running
* LoopyBeliefPropagation algorithm.
*
*/
template < typename GUM_SCALAR, template < typename > class APPROX >
class HybridApproxInference : public APPROX< GUM_SCALAR > {
class LoopySamplingInference : public APPROX< GUM_SCALAR > {
public:
/**
* Default constructor
*/
HybridApproxInference(const IBayesNet< GUM_SCALAR >* bn);
LoopySamplingInference(const IBayesNet< GUM_SCALAR >* bn);
/**
* destructor
*/
virtual ~HybridApproxInference();
virtual ~LoopySamplingInference();
/// makes the inference by generating samples w.r.t the mother class' sampling
/// method after initializing estimators with loopy belief propagation
......@@ -82,30 +78,30 @@ namespace gum {
GUM_SCALAR _virtualLBPSize;
};
extern template class HybridApproxInference< float, WeightedSampling >;
extern template class HybridApproxInference< double, WeightedSampling >;
extern template class LoopySamplingInference< float, WeightedSampling >;
extern template class LoopySamplingInference< double, WeightedSampling >;
extern template class HybridApproxInference< float, ImportanceSampling >;
extern template class HybridApproxInference< double, ImportanceSampling >;
extern template class LoopySamplingInference< float, ImportanceSampling >;
extern template class LoopySamplingInference< double, ImportanceSampling >;
extern template class HybridApproxInference< float, MonteCarloSampling >;
extern template class HybridApproxInference< double, MonteCarloSampling >;
extern template class LoopySamplingInference< float, MonteCarloSampling >;
extern template class LoopySamplingInference< double, MonteCarloSampling >;
extern template class HybridApproxInference< float, GibbsSampling >;
extern template class HybridApproxInference< double, GibbsSampling >;
extern template class LoopySamplingInference< float, GibbsSampling >;
extern template class LoopySamplingInference< double, GibbsSampling >;
template < typename GUM_SCALAR >
using HybridMonteCarloSampling =
HybridApproxInference< GUM_SCALAR, MonteCarloSampling >;
LoopySamplingInference< GUM_SCALAR, MonteCarloSampling >;
template < typename GUM_SCALAR >
using HybridWeightedSampling =
HybridApproxInference< GUM_SCALAR, WeightedSampling >;
LoopySamplingInference< GUM_SCALAR, WeightedSampling >;
template < typename GUM_SCALAR >
using HybridImportanceSampling =
HybridApproxInference< GUM_SCALAR, ImportanceSampling >;
LoopySamplingInference< GUM_SCALAR, ImportanceSampling >;
template < typename GUM_SCALAR >
using HybridGibbsSampling = HybridApproxInference< GUM_SCALAR, GibbsSampling >;
using HybridGibbsSampling = LoopySamplingInference< GUM_SCALAR, GibbsSampling >;
}
#include <agrum/BN/inference/hybridApproxInference_tpl.h>
#include <agrum/BN/inference/loopySamplingInference_tpl.h>
#endif
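As the doxygen comment above describes, LoopySamplingInference first runs LoopyBeliefPropagation to seed the estimators, then draws samples with the inherited APPROX method. A minimal, hedged sketch of the same class through its pyAgrum binding (LoopyGibbsSampling wraps LoopySamplingInference<double, GibbsSampling>; the toy network and tolerance are illustrative, not part of this commit):

import pyAgrum as gum

# illustrative rain/wet-grass style network
bn = gum.BayesNet()
r = bn.add(gum.LabelizedVariable("r", "rain", 2))
w = bn.add(gum.LabelizedVariable("w", "wet grass", 2))
bn.addArc(r, w)
bn.cpt(r).fillWith([0.8, 0.2])
bn.cpt(w)[:] = [[0.9, 0.1], [0.05, 0.95]]

ie = gum.LoopyGibbsSampling(bn)   # LBP-seeded Gibbs sampling
ie.setEvidence({"w": 1})          # observe wet grass
ie.setEpsilon(1e-2)               # stopping criterion, illustrative value
ie.makeInference()
print(ie.posterior(r))            # P(rain | wet grass)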
......@@ -27,7 +27,7 @@
*/
#include <agrum/BN/inference/hybridApproxInference.h>
#include <agrum/BN/inference/loopySamplingInference.h>
#define DEFAULT_VIRTUAL_LBP_SIZE 5000
......@@ -35,23 +35,23 @@ namespace gum {
template < typename GUM_SCALAR, template < typename > class APPROX >
HybridApproxInference< GUM_SCALAR, APPROX >::HybridApproxInference(
LoopySamplingInference< GUM_SCALAR, APPROX >::LoopySamplingInference(
const IBayesNet< GUM_SCALAR >* BN)
: APPROX< GUM_SCALAR >(BN)
, _virtualLBPSize(DEFAULT_VIRTUAL_LBP_SIZE) {
GUM_CONSTRUCTOR(HybridApproxInference);
GUM_CONSTRUCTOR(LoopySamplingInference);
}
template < typename GUM_SCALAR, template < typename > class APPROX >
HybridApproxInference< GUM_SCALAR, APPROX >::~HybridApproxInference() {
LoopySamplingInference< GUM_SCALAR, APPROX >::~LoopySamplingInference() {
GUM_DESTRUCTOR(HybridApproxInference);
GUM_DESTRUCTOR(LoopySamplingInference);
}
template < typename GUM_SCALAR, template < typename > class APPROX >
void HybridApproxInference< GUM_SCALAR, APPROX >::_makeInference() {
void LoopySamplingInference< GUM_SCALAR, APPROX >::_makeInference() {
LoopyBeliefPropagation< GUM_SCALAR > lbp(&this->BN());
for (const auto x : this->hardEvidence()) {
......
......@@ -964,6 +964,13 @@ namespace gum_tests {
TS_ASSERT_THROWS( bn = gum::BayesNet<int>::fastPrototype( "a->b->c->a" ),
gum::InvalidDirectedCycle );
bn = gum::BayesNet<char>::fastPrototype("a{yes|maybe|no}->b->c;a->c");
TS_ASSERT_EQUALS( bn.size(), gum::Size( 3 ) );
TS_ASSERT_EQUALS( bn.sizeArcs(), gum::Size( 3 ) );
TS_ASSERT_EQUALS(
bn.dim(),
gum::Size( ( 3 - 1 ) + ( 3 * ( 2 - 1 ) ) + ( 3 * 2 * ( 2 - 1 ) ) ) );
}
};
......
......@@ -26,7 +26,7 @@
#include <agrum/BN/BayesNet.h>
#include <agrum/BN/inference/ShaferShenoyInference.h>
#include <agrum/BN/inference/hybridApproxInference.h>
#include <agrum/BN/inference/loopySamplingInference.h>
#include <agrum/BN/inference/lazyPropagation.h>
#include <agrum/BN/inference/variableElimination.h>
#include <agrum/BN/io/BIF/BIFReader.h>
......
......@@ -24,7 +24,7 @@
#include <cxxtest/testsuite_utils.h>
#include <agrum/BN/BayesNet.h>
#include <agrum/BN/inference/hybridApproxInference.h>
#include <agrum/BN/inference/loopySamplingInference.h>
#include <agrum/BN/inference/lazyPropagation.h>
#include <agrum/BN/inference/loopyBeliefPropagation.h>
#include <agrum/multidim/multiDimArray.h>
......@@ -65,7 +65,7 @@ namespace gum_tests {
std::string getMess() { return __mess; }
};
class HybridApproxTestSuite : public CxxTest::TestSuite {
class loopySamplingInferenceTestSuite : public CxxTest::TestSuite {
public:
void testHybridBinaryTreeWithoutEvidence() {
auto bn = gum::BayesNet< float >::fastPrototype(
......@@ -80,7 +80,7 @@ namespace gum_tests {
compareInference(__FILE__, __LINE__, bn, lazy, lbp);
GUM_APPROX_TEST_BEGIN_ITERATION
gum::HybridApproxInference< float, gum::WeightedSampling > inf(&bn);
gum::LoopySamplingInference< float, gum::WeightedSampling > inf(&bn);
inf.setEpsilon(EPSILON_FOR_HYBRID);
inf.setVerbosity(false);
inf.makeInference();
......@@ -105,7 +105,7 @@ namespace gum_tests {
lazy.makeInference();
GUM_APPROX_TEST_BEGIN_ITERATION
gum::HybridApproxInference< float, gum::ImportanceSampling > inf(&bn);
gum::LoopySamplingInference< float, gum::ImportanceSampling > inf(&bn);
inf.addEvidence(bn.idFromName(ev), 0);
inf.setEpsilon(EPSILON_FOR_HYBRID);
inf.makeInference();
......@@ -128,7 +128,7 @@ namespace gum_tests {
lazy.makeInference();
GUM_APPROX_TEST_BEGIN_ITERATION
gum::HybridApproxInference< float, gum::MonteCarloSampling > inf(&bn);
gum::LoopySamplingInference< float, gum::MonteCarloSampling > inf(&bn);
inf.addEvidence(bn.idFromName(ev), 0);
inf.setEpsilon(EPSILON_FOR_HYBRID);
inf.setVerbosity(false);
......@@ -153,7 +153,7 @@ namespace gum_tests {
lazy.makeInference();
GUM_APPROX_TEST_BEGIN_ITERATION
gum::HybridApproxInference< float, gum::WeightedSampling > inf(&bn);
gum::LoopySamplingInference< float, gum::WeightedSampling > inf(&bn);
inf.addEvidence(bn.idFromName(ev), 0);
inf.setEpsilon(EPSILON_FOR_HYBRID);
inf.setVerbosity(false);
......@@ -179,7 +179,7 @@ namespace gum_tests {
lazy.makeInference();
GUM_APPROX_TEST_BEGIN_ITERATION
gum::HybridApproxInference< float, gum::ImportanceSampling > inf(&bn);
gum::LoopySamplingInference< float, gum::ImportanceSampling > inf(&bn);
inf.addEvidence(bn.idFromName("e"), 0);
inf.addEvidence(bn.idFromName("b"), 1);
inf.addEvidence(bn.idFromName("h"), 0);
......@@ -209,7 +209,7 @@ namespace gum_tests {
lazy.makeInference();
GUM_APPROX_TEST_BEGIN_ITERATION
gum::HybridApproxInference< float, gum::ImportanceSampling > inf(&bn);
gum::LoopySamplingInference< float, gum::ImportanceSampling > inf(&bn);
inf.addEvidence(bn.idFromName("e"), 0);
inf.addEvidence(bn.idFromName("b"), 1);
inf.addEvidence(bn.idFromName("h"), 0);
......@@ -235,7 +235,7 @@ namespace gum_tests {
lazy.makeInference();
GUM_APPROX_TEST_BEGIN_ITERATION;
gum::HybridApproxInference< float, gum::GibbsSampling > inf(&bn);
gum::LoopySamplingInference< float, gum::GibbsSampling > inf(&bn);
inf.setVerbosity(false);
inf.setEpsilon(EPSILON_FOR_HYBRID);
inf.makeInference();
......@@ -254,7 +254,7 @@ namespace gum_tests {
lazy.makeInference();
GUM_APPROX_TEST_BEGIN_ITERATION;
gum::HybridApproxInference< float, gum::ImportanceSampling > inf(&bn);
gum::LoopySamplingInference< float, gum::ImportanceSampling > inf(&bn);
inf.addEvidence(bn.idFromName("a"), 0);
inf.setVerbosity(false);
inf.setEpsilon(EPSILON_FOR_HYBRID);
......@@ -274,7 +274,7 @@ namespace gum_tests {
lazy.makeInference();
GUM_APPROX_TEST_BEGIN_ITERATION;
gum::HybridApproxInference< float, gum::ImportanceSampling > inf(&bn);
gum::LoopySamplingInference< float, gum::ImportanceSampling > inf(&bn);
inf.addEvidence(bn.idFromName("d"), 0);
inf.setVerbosity(false);
inf.setEpsilon(EPSILON_FOR_HYBRID);
......@@ -299,7 +299,7 @@ namespace gum_tests {
lazy.makeInference();
GUM_APPROX_TEST_BEGIN_ITERATION;
gum::HybridApproxInference< float, gum::WeightedSampling > inf(&bn);
gum::LoopySamplingInference< float, gum::WeightedSampling > inf(&bn);
inf.setVerbosity(false);
inf.setEpsilon(EPSILON_FOR_HYBRID);
inf.makeInference();
......@@ -318,7 +318,7 @@ namespace gum_tests {
lazy.makeInference();
GUM_APPROX_TEST_BEGIN_ITERATION;
gum::HybridApproxInference< float, gum::ImportanceSampling > inf(&bn);
gum::LoopySamplingInference< float, gum::ImportanceSampling > inf(&bn);
inf.addEvidence(bn.idFromName("a"), 0);
inf.setVerbosity(false);
inf.setEpsilon(EPSILON_FOR_HYBRID);
......@@ -338,7 +338,7 @@ namespace gum_tests {
lazy.makeInference();
GUM_APPROX_TEST_BEGIN_ITERATION;
gum::HybridApproxInference< float, gum::GibbsSampling > inf(&bn);
gum::LoopySamplingInference< float, gum::GibbsSampling > inf(&bn);
inf.addEvidence(bn.idFromName("d"), 0);
inf.setVerbosity(false);
inf.setEpsilon(EPSILON_FOR_HARD_HYBRID);
......@@ -366,7 +366,7 @@ namespace gum_tests {
lazy.makeInference();
GUM_APPROX_TEST_BEGIN_ITERATION;
gum::HybridApproxInference< float, gum::GibbsSampling > inf(&bn);
gum::LoopySamplingInference< float, gum::GibbsSampling > inf(&bn);
inf.setVerbosity(false);
inf.setEpsilon(EPSILON_FOR_HARD_HYBRID);
inf.makeInference();
......@@ -396,7 +396,7 @@ namespace gum_tests {
GUM_APPROX_TEST_BEGIN_ITERATION;
gum::HybridApproxInference< float, gum::WeightedSampling > inf(&bn);
gum::LoopySamplingInference< float, gum::WeightedSampling > inf(&bn);
inf.setVerbosity(false);
inf.setEpsilon(EPSILON_FOR_HARD_HYBRID);
inf.makeInference();
......
......@@ -45,7 +45,7 @@ from .pyAgrum import Potential, Instantiation, UtilityTable
from .pyAgrum import BruteForceKL, GibbsSampling
from .pyAgrum import LazyPropagation, ShaferShenoyInference, VariableElimination
from .pyAgrum import LoopyBeliefPropagation, GibbsSampling, MonteCarloSampling, ImportanceSampling, WeightedSampling
from .pyAgrum import HybridImportanceSampling,HybridGibbsSampling,HybridWeightedSampling
from .pyAgrum import LoopyImportanceSampling,LoopyGibbsSampling,LoopyWeightedSampling
from .pyAgrum import PythonApproximationListener, PythonBNListener, PythonLoadListener
from .pyAgrum import BNGenerator, IDGenerator, JTGenerator
from .pyAgrum import BNLearner
......@@ -69,7 +69,7 @@ __all__=[
'Potential','Instantiation','UtilityTable',
'BruteForceKL','GibbsKL',
'LoopyBeliefPropagation','GibbsSampling','MonteCarloSampling', 'ImportanceSampling', 'WeightedSampling',
'HybridImportanceSampling','HybridGibbsSampling','HybridWeightedSampling',
'LoopyImportanceSampling','LoopyGibbsSampling','LoopyWeightedSampling',
'LazyPropagation','ShaferShenoyInference','VariableElimination',
'PythonApproximationListener','PythonBNListener','PythonLoadListener',
'BNGenerator','IDGenerator','JTGenerator',
......
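For pyAgrum users, the visible effect of this rename is in the exported class names: the Hybrid* samplers become Loopy* samplers. A hedged migration sketch (the two-node network is illustrative):

import pyAgrum as gum

# illustrative two-node network
bn = gum.BayesNet()
a = bn.add(gum.LabelizedVariable("a", "a", 2))
b = bn.add(gum.LabelizedVariable("b", "b", 2))
bn.addArc(a, b)
bn.cpt(a).fillWith([0.3, 0.7])
bn.cpt(b)[:] = [[0.9, 0.1], [0.2, 0.8]]

# before this commit: gum.HybridWeightedSampling / HybridImportanceSampling / HybridGibbsSampling
# after this commit:  gum.LoopyWeightedSampling  / LoopyImportanceSampling  / LoopyGibbsSampling
ie = gum.LoopyWeightedSampling(bn)
ie.setEpsilon(1e-2)
ie.makeInference()
print(ie.posterior(b))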
......@@ -56,9 +56,9 @@ IMPROVE_INFERENCE_API(ImportanceSampling<double>)
IMPROVE_INFERENCE_API(WeightedSampling<double>)
IMPROVE_INFERENCE_API(MonteCarloSampling<double>)
IMPROVE_INFERENCE_API(LoopyBeliefPropagation<double>)
IMPROVE_INFERENCE_API(HybridApproxInference<double,gum::GibbsSampling>)
IMPROVE_INFERENCE_API(HybridApproxInference<double,gum::ImportanceSampling>)
IMPROVE_INFERENCE_API(HybridApproxInference<double,gum::WaeightedSampling>)
IMPROVE_INFERENCE_API(LoopySamplingInference<double,gum::GibbsSampling>)
IMPROVE_INFERENCE_API(LoopySamplingInference<double,gum::ImportanceSampling>)
IMPROVE_INFERENCE_API(LoopySamplingInference<double,gum::WeightedSampling>)
%define IMPROVE_JOINT_INFERENCE_API(classname)
......@@ -123,6 +123,6 @@ IMPROVE_APPROX_INFERENCE_API(ImportanceSampling,ImportanceSampling<double>)
IMPROVE_APPROX_INFERENCE_API(WeightedSampling,WeightedSampling<double>)
IMPROVE_APPROX_INFERENCE_API(MonteCarloSampling,MonteCarloSampling<double>)
IMPROVE_APPROX_INFERENCE_API(LoopyBeliefPropagation,LoopyBeliefPropagation<double>)
IMPROVE_APPROX_INFERENCE_API(HybridApproxInference,HybridApproxInference<double,gum::GibbsSampling>)
IMPROVE_APPROX_INFERENCE_API(HybridApproxInference,HybridApproxInference<double,gum::ImportanceSampling>)
IMPROVE_APPROX_INFERENCE_API(HybridApproxInference,HybridApproxInference<double,gum::WaeightedSampling>)
IMPROVE_APPROX_INFERENCE_API(LoopySamplingInference,LoopySamplingInference<double,gum::GibbsSampling>)
IMPROVE_APPROX_INFERENCE_API(LoopySamplingInference,LoopySamplingInference<double,gum::ImportanceSampling>)
IMPROVE_APPROX_INFERENCE_API(LoopySamplingInference,LoopySamplingInference<double,gum::WeightedSampling>)
......@@ -85,9 +85,9 @@ GibbsSampling = GibbsSampling_double
ImportanceSampling=ImportanceSampling_double
WeightedSampling=WeightedSampling_double
MonteCarloSampling=MonteCarloSampling_double
HybridImportanceSampling=HybridImportanceSampling_double
HybridGibbsSampling=HybridGibbsSampling_double
HybridWeightedSampling=HybridWeightedSampling_double
LoopyImportanceSampling=LoopyImportanceSampling_double
LoopyGibbsSampling=LoopyGibbsSampling_double
LoopyWeightedSampling=LoopyWeightedSampling_double
LoopyBeliefPropagation = LoopyBeliefPropagation_double
......
......@@ -5,7 +5,7 @@ import platform
import sys
from sys import platform as os_platform
os.chdir(os.path.dirname('./' + __file__))
os.chdir(os.path.dirname( __file__))
libagrum = os.path.abspath("../../../build/release/wrappers")
sys.path.insert(0, libagrum) # to force to use local pyAgrum for the tests (and not installed one)
......
......@@ -27,7 +27,7 @@ class GibbsTestCase(pyAgrumTestCase):
[0.9, 0.1]]
self.bn.cpt(self.r)[:] = [[0.8, 0.2],
[0.2, 0.8]]
self.bn.cpt(self.w)[0, 0, :] = [1, 0]
self.bn.cpt(self.w)[0, 0, :] = [0.9, 0.1]
self.bn.cpt(self.w)[0, 1, :] = [0.1, 0.9]
self.bn.cpt(self.w)[1, 0, :] = [0.1, 0.9]
self.bn.cpt(self.w)[1, 1, :] = [0.01, 0.99]
......@@ -78,45 +78,62 @@ class GibbsTestCase(pyAgrumTestCase):
class TestDictFeature(GibbsTestCase):
def testDictOfSequences(self):
ie = gum.GibbsSampling(self.bn)
proto = gum.LazyPropagation(self.bn)
proto.addEvidence('s', 1)
proto.addEvidence('w', 0)
proto.makeInference()
ie = gum.ImportanceSampling(self.bn)
ie.setVerbosity(False)
ie.setEpsilon(0.01)
ie.setMinEpsilonRate(0.01)
ie.setMinEpsilonRate(0.0001)
ie.setEvidence({'s': [0, 1], 'w': (1, 0)})
print(ie.evidence(0))
ie.makeInference()
result = ie.posterior(self.r)
self.assertGreatEqual(0.01, (proto - result).abs().max())
ie2 = gum.GibbsSampling(self.bn)
ie2 = gum.ImportanceSampling(self.bn)
ie2.setVerbosity(False)
ie2.setEpsilon(0.01)
ie2.setMinEpsilonRate(0.01)
ie2.setMinEpsilonRate(0.0001)
ie2.setEvidence({'s': 1, 'w': 0})
ie2.makeInference()
result2 = ie2.posterior(self.r)
self.assertDelta(result.tolist(), result2.tolist(),2)
self.assertGreatEqual(0.01, (proto - result2).abs().max())
def testDictOfLabels(self):
ie = gum.GibbsSampling(self.bn)
protoie = gum.LazyPropagation(self.bn)
protoie.addEvidence('s', 0)
protoie.addEvidence('w', 1)
protoie.makeInference()
proto=protoie.posterior(self.r)
print(proto.tolist())
ie = gum.LoopyGibbsSampling(self.bn)
ie.setVerbosity(False)
ie.setEpsilon(0.01)
ie.setMinEpsilonRate(0.001)
ie.setMinEpsilonRate(0.0001)
ie.setEvidence({'s': 0, 'w': 1})
ie.makeInference()
result = ie.posterior(self.r).tolist()
result = ie.posterior(self.r)
print(result.tolist())
print((proto - result).abs().max())
self.assertGreatEqual(0.01, (proto - result).abs().max())
ie2 = gum.GibbsSampling(self.bn)
ie2 = gum.LoopyGibbsSampling(self.bn)
ie2.setVerbosity(False)
ie2.setEpsilon(0.01)
ie2.setMinEpsilonRate(0.001)
ie2.setMinEpsilonRate(0.0001)
ie2.setEvidence({'s': 'no', 'w': 'yes'})
ie2.makeInference()
result2 = ie2.posterior(self.r).tolist()
self.assertListsAlmostEqual(result, result2, 2)
result2 = ie2.posterior(self.r)
print(result2.tolist())
(proto - result2).abs().max()
self.assertGreatEqual(0.01, (proto - result2).abs().max())
def testDictOfLabelsWithId(self):
ie = gum.GibbsSampling(self.bn)
ie = gum.LoopyGibbsSampling(self.bn)
ie.setVerbosity(False)
ie.setEpsilon(0.05)
ie.setMinEpsilonRate(0.01)
......@@ -124,7 +141,7 @@ class TestDictFeature(GibbsTestCase):
ie.makeInference()
result = ie.posterior(self.r)
ie2 = gum.GibbsSampling(self.bn)
ie2 = gum.LoopyGibbsSampling(self.bn)
ie2.setVerbosity(False)
ie2.setEpsilon(0.05)
ie2.setMinEpsilonRate(0.01)
......@@ -137,7 +154,7 @@ class TestDictFeature(GibbsTestCase):
self.assertListsAlmostEqual(result.tolist(), result2.tolist())
def testWithDifferentVariables(self):
ie = gum.GibbsSampling(self.bn)
ie = gum.LoopyGibbsSampling(self.bn)
ie.setVerbosity(False)
ie.setEpsilon(0.1)
ie.setMinEpsilonRate(0.01)
......@@ -145,7 +162,7 @@ class TestDictFeature(GibbsTestCase):
ie.makeInference()
result = ie.posterior(self.s).tolist()
ie = gum.GibbsSampling(self.bni)
ie = gum.LoopyGibbsSampling(self.bni)
ie.setVerbosity(False)
ie.setEpsilon(0.1)
ie.setMinEpsilonRate(0.01)
......@@ -154,7 +171,7 @@ class TestDictFeature(GibbsTestCase):
result2 = ie.posterior(self.si).tolist()
self.assertDelta(result, result2)
ie = gum.GibbsSampling(self.bn)
ie = gum.LoopyGibbsSampling(self.bn)
ie.setVerbosity(False)
ie.setEpsilon(0.1)
ie.setMinEpsilonRate(0.01)
......@@ -163,7 +180,7 @@ class TestDictFeature(GibbsTestCase):
result = ie.posterior(self.s).tolist()
self.assertDelta(result, result2)
ie = gum.GibbsSampling(self.bni)
ie = gum.LoopyGibbsSampling(self.bni)
ie.setVerbosity(False)
ie.setEpsilon(0.1)
ie.setMinEpsilonRate(0.01)
......@@ -175,14 +192,14 @@ class TestDictFeature(GibbsTestCase):
class TestInferenceResults(GibbsTestCase):
def testOpenBayesSiteExamples(self):
ie = gum.GibbsSampling(self.bn)
ie = gum.LoopyGibbsSampling(self.bn)
ie.setVerbosity(False)
ie.setEpsilon(0.1)
ie.setMinEpsilonRate(0.01)
result = ie.posterior(self.w)
self.assertDelta(result.tolist(), [0.3529, 0.6471])
ie = gum.GibbsSampling(self.bn)
ie = gum.LoopyGibbsSampling(self.bn)
ie.setVerbosity(False)
ie.setEpsilon(0.1)
ie.setMinEpsilonRate(0.01)
......@@ -192,7 +209,7 @@ class TestInferenceResults(GibbsTestCase):
self.assertDelta(result.tolist(), [0.082, 0.918])
def testWikipediaExample(self):
ie = gum.GibbsSampling(self.bn2)
ie = gum.LoopyGibbsSampling(self.bn2)
ie.setVerbosity(False)
ie.setEpsilon(0.1)
ie.setMinEpsilonRate(0.001)
......
......@@ -90,7 +90,7 @@
#include <agrum/BN/inference/importanceSampling.h>
#include <agrum/BN/inference/weightedSampling.h>
#include <agrum/BN/inference/MonteCarloSampling.h>
#include <agrum/BN/inference/hybridApproxInference.h>
#include <agrum/BN/inference/loopySamplingInference.h>
#include <agrum/BN/inference/loopyBeliefPropagation.h>
......@@ -177,9 +177,9 @@ ADD_APPROXIMATIONSCHEME_API(gum::ApproximationScheme,gum::GibbsSampling<double>)
ADD_APPROXIMATIONSCHEME_API(gum::ApproximationScheme,gum::ImportanceSampling<double>)
ADD_APPROXIMATIONSCHEME_API(gum::ApproximationScheme,gum::WeightedSampling<double>)
ADD_APPROXIMATIONSCHEME_API(gum::ApproximationScheme,gum::MonteCarloSampling<double>)
ADD_APPROXIMATIONSCHEME_API(gum::ApproximationScheme,gum::HybridApproxInference<double,gum::ImportanceSampling>)
ADD_APPROXIMATIONSCHEME_API(gum::ApproximationScheme,gum::HybridApproxInference<double,gum::WeightedSampling>)
ADD_APPROXIMATIONSCHEME_API(gum::ApproximationScheme,gum::HybridApproxInference<double,gum::GibbsSampling>)
ADD_APPROXIMATIONSCHEME_API(gum::ApproximationScheme,gum::LoopySamplingInference<double,gum::ImportanceSampling>)
ADD_APPROXIMATIONSCHEME_API(gum::ApproximationScheme,gum::LoopySamplingInference<double,gum::WeightedSampling>)
ADD_APPROXIMATIONSCHEME_API(gum::ApproximationScheme,gum::LoopySamplingInference<double,gum::GibbsSampling>)
ADD_APPROXIMATIONSCHEME_API(gum::ApproximationScheme,gum::LoopyBeliefPropagation<double>)
......@@ -318,7 +318,7 @@ ADD_APPROXIMATIONSCHEME_API(gum::learning::genericBNLearner,gum::learning::BNLea
%include <agrum/BN/inference/importanceSampling.h>
%include <agrum/BN/inference/weightedSampling.h>
%include <agrum/BN/inference/MonteCarloSampling.h>
%include <agrum/BN/inference/hybridApproxInference.h>
%include <agrum/BN/inference/loopySamplingInference.h>
%include <agrum/BN/inference/loopyBeliefPropagation.h>
......@@ -372,9 +372,9 @@ ADD_APPROXIMATIONSCHEME_API(gum::learning::genericBNLearner,gum::learning::BNLea
%template ( ImportanceSampling_double ) gum::ImportanceSampling<double>;
%template ( WeightedSampling_double ) gum::WeightedSampling<double>;
%template ( MonteCarloSampling_double ) gum::MonteCarloSampling<double>;
%template ( HybridImportanceSampling_double ) gum::HybridApproxInference<double,gum::ImportanceSampling>;
%template ( HybridWeightedSampling_double ) gum::HybridApproxInference<double,gum::WeightedSampling>;
%template ( HybridGibbsSampling_double ) gum::HybridApproxInference<double,gum::GibbsSampling>;
%template ( LoopyImportanceSampling_double ) gum::LoopySamplingInference<double,gum::ImportanceSampling>;
%template ( LoopyWeightedSampling_double ) gum::LoopySamplingInference<double,gum::WeightedSampling>;
%template ( LoopyGibbsSampling_double ) gum::LoopySamplingInference<double,gum::GibbsSampling>;
%template ( LoopyBeliefPropagation_double ) gum::LoopyBeliefPropagation<double>;
......
......@@ -349,8 +349,9 @@ ADD_INFERENCE_API(gum::MonteCarloSampling<double>)
ADD_INFERENCE_API(gum::WeightedSampling<double>)
ADD_INFERENCE_API(gum::ImportanceSampling<double>)
ADD_INFERENCE_API(gum::LoopyBeliefPropagation<double>)
ADD_INFERENCE_API(gum::HybridApproxInference<double,gum::ImportanceSampling>)
ADD_INFERENCE_API(gum::HybridApproxInference<double,gum::GibbsSampling>)
ADD_INFERENCE_API(gum::LoopySamplingInference<double,gum::ImportanceSampling>)
ADD_INFERENCE_API(gum::LoopySamplingInference<double,gum::GibbsSampling>)
ADD_INFERENCE_API(gum::LoopySamplingInference<double,gum::WeightedSampling>)
%define ADD_JOINT_INFERENCE_API(classname)
%extend classname {
......@@ -403,10 +404,10 @@ ADD_JOINT_INFERENCE_API(gum::ShaferShenoyInference<double>)
}
%enddef
ADD_GIBBS_OPERATOR_API(gum::GibbsSampling<double>)
ADD_GIBBS_OPERATOR_API(gum::HybridApproxInference<double,gum::GibbsSampling>)
ADD_GIBBS_OPERATOR_API(gum::LoopySamplingInference<double,gum::GibbsSampling>)
ADD_GIBBS_OPERATOR_API(gum::GibbsKL<double>)
%extend gum::HybridApproxInference<double,gum::GibbsSampling> {
%extend gum::LoopySamplingInference<double,gum::GibbsSampling> {
gum::Size burnIn() const { return self->gum::GibbsSampling<double>::burnIn();}
void setBurnIn(gum::Size b) { self->gum::GibbsSampling<double>::setBurnIn(b);}
}
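The %extend block above forwards GibbsSampling's burn-in accessors to the loopy Gibbs variant, so from Python (hedged sketch; the one-node network is illustrative):

import pyAgrum as gum

bn = gum.BayesNet()
n = bn.add(gum.LabelizedVariable("n", "n", 2))
bn.cpt(n).fillWith([0.5, 0.5])

ie = gum.LoopyGibbsSampling(bn)
ie.setBurnIn(300)            # forwarded to GibbsSampling<double>::setBurnIn
assert ie.burnIn() == 300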
......