[aGrUM] final refactoring for sampling inference

parent 9f808e31
/***************************************************************************
* Copyright (C) 2005 by Pierre-Henri WUILLEMIN et Christophe GONZALES *
* {prenom.nom}_at_lip6.fr *
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
* This program is distributed in the hope that it will be useful, *
* but WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
* GNU General Public License for more details. *
* *
* You should have received a copy of the GNU General Public License *
* along with this program; if not, write to the *
* Free Software Foundation, Inc., *
* 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *
***************************************************************************/
// Explicit instantiation of GibbsInference for the two scalar types
// supported by aGrUM (float and double).  This keeps the template code
// compiled once here instead of in every client translation unit, and
// matches the `extern template` declarations in GibbsInference.h.
#include <agrum/BN/inference/GibbsInference.h>
template class gum::GibbsInference<float>;
template class gum::GibbsInference<double>;
/***************************************************************************
* Copyright (C) 2005 by Pierre-Henri WUILLEMIN et Christophe GONZALES *
* {prenom.nom}_at_lip6.fr *
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
* This program is distributed in the hope that it will be useful, *
* but WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
* GNU General Public License for more details. *
* *
* You should have received a copy of the GNU General Public License *
* along with this program; if not, write to the *
* Free Software Foundation, Inc., *
* 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *
***************************************************************************/
/**
* @file
* @brief This file contains gibbs sampling (for BNs) class definitions.
* @author Pierre-Henri WUILLEMIN and Christophe GONZALES
*/
#ifndef GUM_GIBBS_INFERENCE_H
#define GUM_GIBBS_INFERENCE_H
#include <agrum/BN/inference/tools/marginalTargetedInference.h>
#include <agrum/BN/samplers/GibbsSampler.h>
#include <agrum/core/approximations/approximationScheme.h>
namespace gum {
/**
* @class GibbsInference GibbsInference.h
* <agrum/BN/inference/GibbsInference.h>
* @brief Class for making Gibbs sampling inference in Bayesian networks.
* @ingroup bn_inference
*
* Combines three bases: the generic ApproximationScheme (stopping
* criteria, burn-in, period size), MarginalTargetedInference (target and
* evidence bookkeeping) and samplers::GibbsSampler (particle drawing).
* Each run of _makeInference() draws particles and accumulates, per node,
* how often each modality was sampled; the normalized counters are the
* marginal posteriors.
*/
template <typename GUM_SCALAR>
class GibbsInference : public ApproximationScheme,
public MarginalTargetedInference<GUM_SCALAR>,
public samplers::GibbsSampler<GUM_SCALAR> {
public:
/**
* Default constructor.
* @param BN the Bayesian network used for inference; as everywhere in
* aGrUM, it is only referenced, never copied.
*/
GibbsInference( const IBayesNet<GUM_SCALAR>* BN );
/**
* Destructor.
*/
virtual ~GibbsInference();
// Re-export the sampling primitives of GibbsSampler so that the
// inference code (and clients) can call them unqualified.
using samplers::GibbsSampler<GUM_SCALAR>::particle;
using samplers::GibbsSampler<GUM_SCALAR>::initParticle;
using samplers::GibbsSampler<GUM_SCALAR>::nextParticle;
using samplers::GibbsSampler<GUM_SCALAR>::bn;
using samplers::GibbsSampler<GUM_SCALAR>::addSoftEvidenceSampler;
using samplers::GibbsSampler<GUM_SCALAR>::addHardEvidenceSampler;
using samplers::GibbsSampler<GUM_SCALAR>::eraseSoftEvidenceSampler;
using samplers::GibbsSampler<GUM_SCALAR>::eraseHardEvidenceSampler;
using samplers::GibbsSampler<GUM_SCALAR>::eraseAllEvidenceSampler;
protected:
// Hooks called by MarginalTargetedInference when evidence changes; they
// keep the underlying Gibbs sampler synchronized with the evidence set.
virtual void _onEvidenceAdded( const NodeId id, bool isHardEvidence );
virtual void _onEvidenceErased( const NodeId id, bool isHardEvidence );
virtual void _onAllEvidenceErased( bool contains_hard_evidence );
virtual void _onEvidenceChanged( const NodeId id, bool hasChangedSoftHard );
virtual void _onBayesNetChanged( const IBayesNet<GUM_SCALAR>* bn );
// Sampling needs no structural precomputation: nothing to update.
virtual void _updateOutdatedBNStructure(){};
virtual void _updateOutdatedBNPotentials(){};
/// runs the Gibbs sampling loop until the approximation scheme stops
virtual void _makeInference();
// Target notifications are no-ops: every node's counters are maintained.
virtual void _onMarginalTargetAdded( const NodeId id ){};
virtual void _onMarginalTargetErased( const NodeId id ){};
virtual void _onAllMarginalTargetsAdded(){};
virtual void _onAllMarginalTargetsErased(){};
/// asks derived classes for the posterior of a given variable
/** @param id The variable's id.
* @return the counting potential of the node (normalized at the end of
* _makeInference()). */
virtual const Potential<GUM_SCALAR>& _posterior( const NodeId id );
/// the actual number of sampling for each modality by node
// one Potential per node, over that node's variable only; filled by
// __updateStats_* and normalized into a posterior by _makeInference()
NodeProperty<Potential<GUM_SCALAR>> __sampling_nbr;
/// resets all counters of __sampling_nbr to zero
void __initStats();
/// updates the counters for the current particle (no error computed)
void __updateStats_without_err();
/// updates the counters and returns the entropy-based stopping error
/** @param nbr the number of samples drawn so far (before this one) */
double __updateStats_with_err( Size nbr );
};
extern template class GibbsInference<float>;
extern template class GibbsInference<double>;
} /* namespace gum */
#include <agrum/BN/inference/GibbsInference_tpl.h>
#endif /* GUM_GIBBS_INFERENCE_H */
/***************************************************************************
* Copyright (C) 2005 by Christophe GONZALES et Pierre-Henri WUILLEMIN *
* {prenom.nom}_at_lip6.fr *
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
* This program is distributed in the hope that it will be useful, *
* but WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
* GNU General Public License for more details. *
* *
* You should have received a copy of the GNU General Public License *
* along with this program; if not, write to the *
* Free Software Foundation, Inc., *
* 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *
***************************************************************************/
/**
* @file
* @brief Implementation of Gibbs sampling for inference in Bayesian Networks.
*/
#ifndef DOXYGEN_SHOULD_SKIP_THIS
#include <algorithm>
#include <sstream>
#include <string>
#define GIBBS_DEFAULT_MAXITER 10000000
#define GIBBS_DEFAULT_EPSILON 1e-4 * std::log( 2 )
#define GIBBS_DEFAULT_MIN_EPSILON_RATE 1e-4
#define GIBBS_DEFAULT_PERIOD_SIZE 500
#define GIBBS_DEFAULT_VERBOSITY false
#define GIBBS_DEFAULT_BURNIN 10000
// to ease parsing for IDE
#include <agrum/BN/inference/tools/BayesNetInference.h>
#include <agrum/BN/inference/GibbsInference.h>
#include <agrum/BN/samplers/GibbsSampler.h>
namespace gum {
/// default constructor
/** Initializes the three bases (approximation scheme, targeted inference,
* Gibbs sampler), installs the default Gibbs stopping parameters, and
* creates one zero-dimensional-per-variable counting potential per node
* of the network in __sampling_nbr. */
template <typename GUM_SCALAR>
GibbsInference<GUM_SCALAR>::GibbsInference( const IBayesNet<GUM_SCALAR>* BN )
: ApproximationScheme()
, MarginalTargetedInference<GUM_SCALAR>( BN )
, samplers::GibbsSampler<GUM_SCALAR>( *BN ) {
// for debugging purposes
GUM_CONSTRUCTOR( GibbsInference );
// default approximation-scheme parameters (see the GIBBS_DEFAULT_* macros)
setEpsilon( GIBBS_DEFAULT_EPSILON );
setMinEpsilonRate( GIBBS_DEFAULT_MIN_EPSILON_RATE );
setMaxIter( GIBBS_DEFAULT_MAXITER );
setVerbosity( GIBBS_DEFAULT_VERBOSITY );
setBurnIn( GIBBS_DEFAULT_BURNIN );
setPeriodSize( GIBBS_DEFAULT_PERIOD_SIZE );
// one counting potential per node, over that node's variable only
for ( auto node : bn().dag().nodes() ) {
__sampling_nbr.insert( node, Potential<GUM_SCALAR>() );
__sampling_nbr[node].add( BN->variable( node ) );
}
}
/// destructor
template <typename GUM_SCALAR>
INLINE GibbsInference<GUM_SCALAR>::~GibbsInference() {
// for debugging purposes (pairs with GUM_CONSTRUCTOR in the constructor)
GUM_DESTRUCTOR( GibbsInference );
}
/// Returns the posterior probability of a variable.
/** @param id the variable's id.
* @return a const reference to the node's counting potential, which
* _makeInference() normalizes into a posterior distribution. */
template <typename GUM_SCALAR>
INLINE const Potential<GUM_SCALAR>&
GibbsInference<GUM_SCALAR>::_posterior( NodeId id ) {
  // posteriors are stored directly as the per-node counters
  auto& counters = __sampling_nbr[id];
  return counters;
}
/// Resets every per-node counting potential to zero before a new run.
template <typename GUM_SCALAR>
INLINE void GibbsInference<GUM_SCALAR>::__initStats() {
  for ( auto& counter : __sampling_nbr )
    counter.second.fill( (GUM_SCALAR)0 );
}
/**
As a stopping criterion we use a sum of entropies: a KL-divergence-like
measure between the frequency estimates before and after drawing one more
sample.  Instead of computing \f$\sum_i p_i \ln \frac{p_i}{q_i}\f$
directly, we exploit the fact that P and Q are frequencies over the same
counters for nb and nbr = nb + 1 samples: only the counter of the drawn
modality changes (n -> n + 1), so after simplification the measure reduces
to \f$\frac{1}{nbr}\sum n \ln\frac{n}{n-1} + K\,\ln\frac{nbr}{nb}\f$
(K = number of nodes).  — NOTE(review): formula reconstructed from the
code below; confirm against the original derivation.
*/
template <typename GUM_SCALAR>
INLINE double GibbsInference<GUM_SCALAR>::__updateStats_with_err( Size nb ) {
Size nbr = nb + 1; // we compute the new iteration
double sum_entropy = 0;
for ( auto& elt : __sampling_nbr ) {
// increment the counter of the modality drawn for this node in the
// current particle
GUM_SCALAR n_v = 1 + elt.second.get( particle() );
elt.second.set( particle(), n_v );
if ( n_v == (GUM_SCALAR)1 )
// first observation of this modality: log(n/(n-1)) is undefined,
// so add a large sentinel to keep the scheme from stopping early
sum_entropy += 100;
else
sum_entropy += n_v * std::log( n_v / ( n_v - 1 ) );
}
return sum_entropy / nbr +
__sampling_nbr.size() * std::log( (double)nbr / nb );
}
/** Same as __updateStats_with_err but without the entropy computation:
* the fast path used between two error-checking periods. */
template <typename GUM_SCALAR>
INLINE void GibbsInference<GUM_SCALAR>::__updateStats_without_err() {
  for ( auto& counter : __sampling_nbr ) {
    // bump the count of the modality drawn for this node in the particle
    const auto previous = counter.second.get( particle() );
    counter.second.set( particle(), previous + 1 );
  }
}
/// Runs the Gibbs sampling loop and turns the counters into posteriors.
template <typename GUM_SCALAR>
void GibbsInference<GUM_SCALAR>::_makeInference() {
// reset counters and draw an initial particle
__initStats();
initParticle();
initApproximationScheme();
// SAMPLING
double error = 0.0;
do {
nextParticle();
updateApproximationScheme();
// the entropy-based error is only recomputed at the start of each
// period; in-between iterations only update the counters (fast path)
if ( startOfPeriod() )
error = __updateStats_with_err( nbrIterations() + burnIn() );
else
__updateStats_without_err();
} while ( continueApproximationScheme( error ) );
// normalize the per-node counters into posterior distributions
for ( auto& elt : __sampling_nbr ) {
elt.second.normalize();
}
}
/// Forwards newly added evidence to the underlying Gibbs sampler.
/** @param id the node receiving the evidence
* @param isHardEvidence true for hard (single-value) evidence */
template <typename GUM_SCALAR>
INLINE void GibbsInference<GUM_SCALAR>::_onEvidenceAdded( const NodeId id,
                                                          bool isHardEvidence ) {
  if ( !isHardEvidence ) {
    addSoftEvidenceSampler( *( this->evidence()[id] ) );
  } else {
    addHardEvidenceSampler( id, this->hardEvidence()[id] );
  }
}
/// Removes erased evidence from the underlying Gibbs sampler.
/** @param id the node whose evidence was erased
* @param isHardEvidence true if the erased evidence was hard
*
* Bug fix: the original version only handled hard evidence, so erasing
* soft evidence left it registered in the sampler.  We now mirror
* _onEvidenceAdded / _onEvidenceChanged and erase soft evidence too. */
template <typename GUM_SCALAR>
INLINE void
GibbsInference<GUM_SCALAR>::_onEvidenceErased( const NodeId id,
                                               bool isHardEvidence ) {
  if ( isHardEvidence ) {
    eraseHardEvidenceSampler( id );
  } else {
    eraseSoftEvidenceSampler( id );
  }
}
/// Drops every piece of evidence (hard and soft) from the sampler.
/** @param contains_hard_evidence unused: the sampler clears everything */
template <typename GUM_SCALAR>
INLINE void
GibbsInference<GUM_SCALAR>::_onAllEvidenceErased( bool contains_hard_evidence ) {
eraseAllEvidenceSampler();
}
/// Keeps the sampler synchronized when evidence switches between soft/hard.
/** @param id the node whose evidence changed
* @param hasChangedSoftHard unused here: the current state of
* hardEvidence() tells us which direction the change went */
template <typename GUM_SCALAR>
INLINE void
GibbsInference<GUM_SCALAR>::_onEvidenceChanged( const NodeId id,
bool hasChangedSoftHard ) {
if ( this->hardEvidence().exists( id ) ) {
// the evidence is now hard, so the previous (soft) evidence must be
// removed from the sampler and replaced by the hard one
eraseSoftEvidenceSampler( id );
addHardEvidenceSampler( id, this->hardEvidence()[id] );
} else {
// the evidence is now soft: remove the previous hard evidence and
// register the soft one
eraseHardEvidenceSampler( id );
addSoftEvidenceSampler( *( this->evidence()[id] ) );
}
}
/// Hook called when the referenced Bayesian network changes.
// Intentionally empty.  NOTE(review): __sampling_nbr is built once in the
// constructor from the original BN and is not rebuilt here — confirm
// whether a BN change should re-create the per-node counters.
template <typename GUM_SCALAR>
INLINE void GibbsInference<GUM_SCALAR>::_onBayesNetChanged(
const IBayesNet<GUM_SCALAR>* bn ) {}
} /* namespace gum */
#endif // DOXYGEN_SHOULD_SKIP_THIS
......@@ -28,7 +28,7 @@
#ifndef GUM_GIBBS_SAMPLING_H
#define GUM_GIBBS_SAMPLING_H
#include <agrum/BN/inference/tools/approximateInference.h>
#include <agrum/BN/inference/tools/samplingInference.h>
#include <agrum/BN/inference/tools/gibbsOperator.h>
......@@ -49,7 +49,7 @@ namespace gum {
*/
template <typename GUM_SCALAR>
class GibbsSampling : public ApproximateInference<GUM_SCALAR>,
class GibbsSampling : public SamplingInference<GUM_SCALAR>,
public GibbsOperator<GUM_SCALAR> {
public:
......
......@@ -26,29 +26,29 @@
#include <agrum/BN/inference/GibbsSampling.h>
#define GIBBS_DEFAULT_EPSILON 1e-4 * std::log(2)
#define GIBBS_DEFAULT_MIN_EPSILON_RATE 1e-6 * std::log(2)
#define GIBBS_DEFAULT_BURNIN 1000
#define GIBBS_SAMPLING_DEFAULT_EPSILON 1e-4 * std::log(2)
#define GIBBS_SAMPLING_DEFAULT_MIN_EPSILON_RATE 1e-6 * std::log(2)
#define GIBBS_SAMPLING_DEFAULT_BURNIN 1000
#define GIBBS_POURCENT_DRAWN_SAMPLE 10 // percent drawn
#define GIBBS_DRAWN_AT_RANDOM false
#define GIBBS_SAMPLING_POURCENT_DRAWN_SAMPLE 50 // percent drawn
#define GIBBS_SAMPLING_DRAWN_AT_RANDOM false
namespace gum {
/// default constructor
template < typename GUM_SCALAR >
GibbsSampling< GUM_SCALAR >::GibbsSampling(const IBayesNet< GUM_SCALAR >* BN)
: ApproximateInference< GUM_SCALAR >(BN)
: SamplingInference< GUM_SCALAR >(BN)
, GibbsOperator< GUM_SCALAR >(
*BN,
&this->hardEvidence(),
1 + (BN->size() * GIBBS_POURCENT_DRAWN_SAMPLE / 100),
GIBBS_DRAWN_AT_RANDOM) {
1 + (BN->size() * GIBBS_SAMPLING_POURCENT_DRAWN_SAMPLE / 100),
GIBBS_SAMPLING_DRAWN_AT_RANDOM) {
GUM_CONSTRUCTOR(GibbsSampling);
this->setEpsilon(GIBBS_DEFAULT_EPSILON);
this->setMinEpsilonRate(GIBBS_DEFAULT_MIN_EPSILON_RATE);
this->setBurnIn(GIBBS_DEFAULT_BURNIN);
this->setEpsilon(GIBBS_SAMPLING_DEFAULT_EPSILON);
this->setMinEpsilonRate(GIBBS_SAMPLING_DEFAULT_MIN_EPSILON_RATE);
this->setBurnIn(GIBBS_SAMPLING_DEFAULT_BURNIN);
}
/// destructor
......
......@@ -28,7 +28,7 @@
#ifndef GUM_MONTE_CARLO_INFERENCE_H
#define GUM_MONTE_CARLO_INFERENCE_H
#include <agrum/BN/inference/tools/approximateInference.h>
#include <agrum/BN/inference/tools/samplingInference.h>
namespace gum {
......@@ -45,7 +45,7 @@ namespace gum {
template<typename GUM_SCALAR>
class MonteCarloSampling : public ApproximateInference<GUM_SCALAR> {
class MonteCarloSampling : public SamplingInference<GUM_SCALAR> {
public:
......
......@@ -35,7 +35,7 @@ namespace gum {
template < typename GUM_SCALAR >
MonteCarloSampling< GUM_SCALAR >::MonteCarloSampling(
const IBayesNet< GUM_SCALAR >* BN)
: ApproximateInference< GUM_SCALAR >(BN) {
: SamplingInference< GUM_SCALAR >(BN) {
this->setBurnIn(0);
GUM_CONSTRUCTOR(MonteCarloSampling);
......
......@@ -28,6 +28,8 @@
#ifndef GUM_HYBRID_INFERENCE_H
#define GUM_HYBRID_INFERENCE_H
#include <agrum/BN/inference/tools/marginalTargetedInference.h>
#include <agrum/BN/inference/tools/approximateInference.h>
#include <agrum/BN/inference/GibbsSampling.h>
#include <agrum/BN/inference/MonteCarloSampling.h>
#include <agrum/BN/inference/importanceSampling.h>
......
......@@ -29,7 +29,7 @@
#include <agrum/BN/inference/hybridApproxInference.h>
#define DEFAULT_VIRTUAL_LBP_SIZE 1000
#define DEFAULT_VIRTUAL_LBP_SIZE 5000
namespace gum {
......@@ -54,10 +54,13 @@ namespace gum {
void HybridApproxInference< GUM_SCALAR, APPROX >::_makeInference() {
LoopyBeliefPropagation< GUM_SCALAR > lbp(&this->BN());
for (const auto x : this->hardEvidence()) {
lbp.addEvidence(x.first, x.second);
}
lbp.makeInference();
if (!this->isSetEstimator) {
this->_setEstimatorFromLBP(&lbp,_virtualLBPSize);
this->_setEstimatorFromLBP(&lbp, _virtualLBPSize);
}
this->_loopApproxInference();
......
......@@ -28,7 +28,7 @@
#ifndef GUM_IMPORTANCE_INFERENCE_H
#define GUM_IMPORTANCE_INFERENCE_H
#include <agrum/BN/inference/tools/approximateInference.h>
#include <agrum/BN/inference/tools/samplingInference.h>
namespace gum {
......@@ -46,7 +46,7 @@ namespace gum {
template <typename GUM_SCALAR>
class ImportanceSampling : public ApproximateInference<GUM_SCALAR> {
class ImportanceSampling : public SamplingInference<GUM_SCALAR> {
public:
/**
......
......@@ -34,7 +34,7 @@ namespace gum {
template < typename GUM_SCALAR >
ImportanceSampling< GUM_SCALAR >::ImportanceSampling(
const IBayesNet< GUM_SCALAR >* BN)
: ApproximateInference< GUM_SCALAR >(BN) {
: SamplingInference< GUM_SCALAR >(BN) {
this->setBurnIn(0);
GUM_CONSTRUCTOR(ImportanceSampling);
......
......@@ -25,8 +25,7 @@
#ifndef GUM_LOOPYBELIEFPROPAGATION_H
#define GUM_LOOPYBELIEFPROPAGATION_H
#include <agrum/BN/inference/tools/marginalTargetedInference.h>
#include <agrum/core/approximations/approximationScheme.h>
#include <agrum/BN/inference/tools/approximateInference.h>
namespace gum {
/**
......@@ -37,8 +36,7 @@ namespace gum {
*
*/
template <typename GUM_SCALAR>
class LoopyBeliefPropagation : public ApproximationScheme,
public MarginalTargetedInference<GUM_SCALAR> {
class LoopyBeliefPropagation : public ApproximateInference<GUM_SCALAR> {
public:
/**
* Default constructor
......
......@@ -44,17 +44,16 @@ namespace gum {
template <typename GUM_SCALAR>
LoopyBeliefPropagation<GUM_SCALAR>::LoopyBeliefPropagation(
const IBayesNet<GUM_SCALAR>* BN )
: ApproximationScheme()
, MarginalTargetedInference<GUM_SCALAR>( BN ) {
: ApproximateInference<GUM_SCALAR>( BN ) {
// for debugging purposes
GUM_CONSTRUCTOR( LoopyBeliefPropagation );
setEpsilon( LBP_DEFAULT_EPSILON );
setMinEpsilonRate( LBP_DEFAULT_MIN_EPSILON_RATE );
setMaxIter( LBP_DEFAULT_MAXITER );
setVerbosity( LBP_DEFAULT_VERBOSITY );
setPeriodSize( LBP_DEFAULT_PERIOD_SIZE );
setBurnIn(0); //no burn in for LBP
this->setEpsilon( LBP_DEFAULT_EPSILON );
this->setMinEpsilonRate( LBP_DEFAULT_MIN_EPSILON_RATE );
this->setMaxIter( LBP_DEFAULT_MAXITER );
this->setVerbosity( LBP_DEFAULT_VERBOSITY );
this->setPeriodSize( LBP_DEFAULT_PERIOD_SIZE );
this->setBurnIn(0); //no burn in for LBP
__init_messages();
}
......@@ -218,7 +217,7 @@ namespace gum {
template <typename GUM_SCALAR>
void LoopyBeliefPropagation<GUM_SCALAR>::_makeInference() {
__initStats();
initApproximationScheme();
this->initApproximationScheme();
std::vector<NodeId> shuffleIds;
for ( const auto& node : this->BN().nodes() )
......@@ -229,12 +228,12 @@ namespace gum {
GUM_SCALAR error = 0.0;
do {
std::shuffle( std::begin( shuffleIds ), std::end( shuffleIds ), engine );
updateApproximationScheme();
this->updateApproximationScheme();
for ( const auto& node : shuffleIds ) {
GUM_SCALAR e = __updateNodeMessage( node );
if ( e > error ) error = e;
}
} while ( continueApproximationScheme( error ) );
} while ( this->continueApproximationScheme( error ) );
}
......
#include <agrum/BN/inference/tools/approximateInference.h>
template class gum::ApproximateInference<float>;
template class gum::ApproximateInference<double>;
......@@ -19,229 +19,40 @@
***************************************************************************/
/**
* @file
* @brief This file contains general methods for simulation-oriented approximate
* inference.
* @brief This file contains general methods for approximate inference.
*
* @author Paul ALAM & Pierre-Henri WUILLEMIN
* @author Pierre-Henri WUILLEMIN
*/
#ifndef GUM_APPROXIMATE_INFERENCE_H
#define GUM_APPROXIMATE_INFERENCE_H
#include <agrum/BN/BayesNetFragment.h>
#include <agrum/BN/IBayesNet.h>
#include <agrum/BN/inference/tools/estimator.h>
#include <agrum/BN/inference/tools/gibbsOperator.h>
#include <agrum/BN/inference/tools/marginalTargetedInference.h>
#include <agrum/core/approximations/approximationScheme.h>
#include <agrum/multidim/instantiation.h>
namespace gum {
/**
* @class ApproximateInference approximateInference.h
* <agrum/BN/inference/approximateInference.h>
* @brief A generic class for making approximate inference in bayesian networks
* adapted
*
* @ingroup bn_approximation
*
* The goal of this class is to define the general scheme used by all
* approximate inference algorithms,
* which are implemented as derived classes of ApproximateInference.
* This class inherits from MarginalTargetedInference for the handling of
* marginal targets
* and from ApproximationScheme.
*/
template < typename GUM_SCALAR >
class ApproximateInference : public MarginalTargetedInference< GUM_SCALAR >,
public ApproximationScheme {
public:
// ############################################################################
/// @name Constructors / Destructors
// ############################################################################
/// @{
/// default constructor
/** @warning By default, all the nodes of the Bayes net are targets.
* @warning note that, by aGrUM's rule, the BN is not copied but only
* referenced by the inference algorithm. */
ApproximateInference(const IBayesNet< GUM_SCALAR >* bn);
/// destructor
virtual ~ApproximateInference();
/// @}
// ############################################################################
/// @name Probability computations
// ############################################################################
/// @{
/// get the BayesNet which is used to really perform the sampling
const IBayesNet< GUM_SCALAR >& samplingBN();
/// Computes and returns the posterior of a node.
/**
* @returns a const ref to the posterior probability of the node.
* @param id the node for which we need a posterior probability
*
* @warning for efficiency reasons, the potential is returned by reference.
* In order to ensure that the potential may still exist even if the Inference
* object is destroyed, the user has to copy it explicitly.
*
* @throw UndefinedElement if node is not in the set of targets.
* @throw NotFound if node is not in the BN.
*/
virtual const Potential< GUM_SCALAR >& _posterior(const NodeId id);
/// Computes and returns the posterior of a node referred by it's name.
/**
* @returns a const ref to the posterior probability of the node referred by
* name.
* @param name the name of the node for which we need a posterior probability
*
* @warning for efficiency reasons, the potential is returned by reference.
* In order to ensure that the potential may still exist even if the Inference
* object is destroyed, the user has to copy it explicitly.
*
* @throw UndefinedElement if node corresponding to name is not in the set of
* targets.
* @throw NotFound if node corresponding to name is not in the BN.
*/
virtual const Potential< GUM_SCALAR >& _posterior(const std::string& name);
/// @}
/// Simplifying the bayesian network with relevance reasonning to lighten the
/// computational charge
/**
* Sets the reference Bayesian Network as a BayesNetFragment after having
* eliminated nodes
* that are idle for simulation and computation, such as barren or d-separated
* nodes.
* Eliminates the arcs from evidence nodes to it's children, after setting new
* CPT's for them.
*
*/
virtual void contextualize();
// ############################################################################
/// @name Estimator objects initializing
// ############################################################################
/// @{
/// Initializes the estimators object linked to the simulation
/**
* Initializes the estimator object by creating a hashtable between non
* evidence nodes and a 0-filled potential which will approximate the node's
* posterior
*
*/
virtual void _setEstimatorFromBN();
/// Initializes the estimators object linked to the simulation
/**
* @param lbp a LoopyBeliefPropagation object
* @param virtualLBPSize the size of the equivalent sampling by LBP
* @class ApproximateInference approximateInference.h
* <agrum/BN/inference/approximateInference.h>
* @brief A generic class for making approximate inference in bayesian networks
* adapted
*
* Initializes the estimator object by creating a hashtable between
* non evidence nodes and the current approximation of the node's posterior
* obtained by running LoopyBeliefPropagation algorithm
* @ingroup bn_approximation
*
* The goal of this class is to define the general scheme used by all
* approximate inference algorithms, which are implemented as derived classes of ApproximateInference.
* This class inherits from MarginalTargetedInference for the handling of
* marginal targets and from ApproximationScheme.
*/
virtual void _setEstimatorFromLBP(LoopyBeliefPropagation< GUM_SCALAR >* lbp,
GUM_SCALAR virtualLBPSize);
///@}
protected:
/// Estimator object designed to approximate target posteriors
Estimator< GUM_SCALAR > __estimator;