aGrUM  0.14.2
gum::SamplingInference< GUM_SCALAR > Class Template Reference [abstract]

#include <samplingInference.h>


Public Attributes

Signaler3< Size, double, double > onProgress
 Progression, error and time. More...
 
Signaler1< std::string > onStop
 Emitted when the scheme stops; carries the stopping-criterion message (see messageApproximationScheme()). More...
 

Public Member Functions

virtual void contextualize ()
 Simplifies the Bayesian network with relevance reasoning to lighten the computational load. More...
 
Potential< GUM_SCALAR > evidenceImpact (NodeId target, const NodeSet &evs)
 Create a gum::Potential for P(target|evs) (for all instantiations of target and evs) More...
 
Potential< GUM_SCALAR > evidenceImpact (const std::string &target, const std::vector< std::string > &evs)
 Create a gum::Potential for P(target|evs) (for all instantiations of target and evs) More...
 
Constructors / Destructors
 SamplingInference (const IBayesNet< GUM_SCALAR > *bn)
 default constructor More...
 
 ~SamplingInference () override
 destructor More...
 
const Potential< GUM_SCALAR > & currentPosterior (NodeId id)
 Computes and returns the current estimate of the posterior of a node. More...
 
const Potential< GUM_SCALAR > & currentPosterior (const std::string &name)
 Computes and returns the current estimate of the posterior of a node, identified by its name. More...
 
Probability computations
const IBayesNet< GUM_SCALAR > & samplingBN ()
 get the Bayes net that is actually used to perform the sampling More...
 
const Potential< GUM_SCALAR > & _posterior (NodeId id) override
 Computes and returns the posterior of a node. More...
 
Estimator objects initializing
virtual void _setEstimatorFromBN ()
 Initializes the estimators object linked to the simulation. More...
 
virtual void _setEstimatorFromLBP (LoopyBeliefPropagation< GUM_SCALAR > *lbp, GUM_SCALAR virtualLBPSize)
 Initializes the estimators object linked to the simulation. More...
 
Probability computations
virtual const Potential< GUM_SCALAR > & posterior (NodeId node)
 Computes and returns the posterior of a node. More...
 
virtual const Potential< GUM_SCALAR > & posterior (const std::string &nodeName)
 Computes and returns the posterior of a node. More...
 
Targets
virtual void eraseAllTargets ()
 Clear all previously defined targets. More...
 
virtual void addAllTargets () final
 adds all nodes as targets More...
 
virtual void addTarget (NodeId target) final
 Add a marginal target to the list of targets. More...
 
virtual void addTarget (const std::string &nodeName) final
 Add a marginal target to the list of targets. More...
 
virtual void eraseTarget (NodeId target) final
 removes an existing (marginal) target More...
 
virtual void eraseTarget (const std::string &nodeName) final
 removes an existing (marginal) target More...
 
virtual bool isTarget (NodeId node) const final
 return true if variable is a (marginal) target More...
 
virtual bool isTarget (const std::string &nodeName) const final
 return true if variable is a (marginal) target More...
 
virtual const Size nbrTargets () const noexcept final
 returns the number of marginal targets More...
 
virtual const NodeSet & targets () const noexcept final
 returns the list of marginal targets More...
 
Information Theory related functions
virtual GUM_SCALAR H (NodeId X) final
 Entropy: computes Shannon's entropy of a node given the observation. More...
 
virtual GUM_SCALAR H (const std::string &nodeName) final
 Entropy: computes Shannon's entropy of a node given the observation. More...
 
Accessors / Modifiers
virtual void setBN (const IBayesNet< GUM_SCALAR > *bn)
 assigns a new BN to the inference engine More...
 
virtual const IBayesNet< GUM_SCALAR > & BN () const final
 Returns a constant reference over the IBayesNet referenced by this class. More...
 
virtual const NodeProperty< Size > & domainSizes () const final
 get the domain sizes of the random variables of the BN More...
 
virtual bool isInferenceReady () const noexcept final
 returns whether the inference object is in a ready state More...
 
virtual bool isInferenceOutdatedBNStructure () const noexcept final
 returns whether the inference object is in a OutdatedBNStructure state More...
 
virtual bool isInferenceOutdatedBNPotentials () const noexcept final
 returns whether the inference object is in a OutdatedBNPotential state More...
 
virtual bool isInferenceDone () const noexcept final
 returns whether the inference object is in a InferenceDone state More...
 
virtual bool isDone () const noexcept final
 returns whether the inference object is in a done state More...
 
virtual void prepareInference () final
 prepare the internal inference structures for the next inference More...
 
virtual void makeInference () final
 perform the heavy computations needed to compute the targets' posteriors More...
 
virtual void clear ()
 clears all the data structures allocated for the last inference More...
 
virtual StateOfInference state () const noexcept final
 returns the state of the inference engine More...
 
Evidence
virtual void addEvidence (NodeId id, const Idx val) final
 adds a new hard evidence on node id More...
 
virtual void addEvidence (const std::string &nodeName, const Idx val) final
 adds a new hard evidence on node named nodeName More...
 
virtual void addEvidence (NodeId id, const std::string &label) final
 adds a new hard evidence on node id More...
 
virtual void addEvidence (const std::string &nodeName, const std::string &label) final
 adds a new hard evidence on node named nodeName More...
 
virtual void addEvidence (NodeId id, const std::vector< GUM_SCALAR > &vals) final
 adds a new evidence on node id (might be soft or hard) More...
 
virtual void addEvidence (const std::string &nodeName, const std::vector< GUM_SCALAR > &vals) final
 adds a new evidence on node named nodeName (might be soft or hard) More...
 
virtual void addEvidence (const Potential< GUM_SCALAR > &pot) final
 adds a new evidence on node id (might be soft or hard) More...
 
virtual void addEvidence (Potential< GUM_SCALAR > &&pot) final
 adds a new evidence on node id (might be soft or hard) More...
 
virtual void addSetOfEvidence (const Set< const Potential< GUM_SCALAR > * > &potset) final
 adds a new set of evidence More...
 
virtual void addListOfEvidence (const List< const Potential< GUM_SCALAR > * > &potlist) final
 adds a new list of evidence More...
 
virtual void chgEvidence (NodeId id, const Idx val) final
 change the value of an already existing hard evidence More...
 
virtual void chgEvidence (const std::string &nodeName, const Idx val) final
 change the value of an already existing hard evidence More...
 
virtual void chgEvidence (NodeId id, const std::string &label) final
 change the value of an already existing hard evidence More...
 
virtual void chgEvidence (const std::string &nodeName, const std::string &label) final
 change the value of an already existing hard evidence More...
 
virtual void chgEvidence (NodeId id, const std::vector< GUM_SCALAR > &vals) final
 change the value of an already existing evidence (might be soft or hard) More...
 
virtual void chgEvidence (const std::string &nodeName, const std::vector< GUM_SCALAR > &vals) final
 change the value of an already existing evidence (might be soft or hard) More...
 
virtual void chgEvidence (const Potential< GUM_SCALAR > &pot) final
 change the value of an already existing evidence (might be soft or hard) More...
 
virtual void eraseAllEvidence () final
 removes all the evidence entered into the network More...
 
virtual void eraseEvidence (NodeId id) final
 removes the evidence, if any, corresponding to node id More...
 
virtual void eraseEvidence (const std::string &nodeName) final
 removes the evidence, if any, corresponding to the node named nodeName More...
 
virtual bool hasEvidence () const final
 indicates whether some node(s) have received evidence More...
 
virtual bool hasEvidence (NodeId id) const final
 indicates whether node id has received an evidence More...
 
virtual bool hasEvidence (const std::string &nodeName) const final
 indicates whether the node named nodeName has received evidence More...
 
virtual bool hasHardEvidence (NodeId id) const final
 indicates whether node id has received a hard evidence More...
 
virtual bool hasHardEvidence (const std::string &nodeName) const final
 indicates whether the node named nodeName has received hard evidence More...
 
virtual bool hasSoftEvidence (NodeId id) const final
 indicates whether node id has received a soft evidence More...
 
virtual bool hasSoftEvidence (const std::string &nodeName) const final
 indicates whether the node named nodeName has received soft evidence More...
 
virtual Size nbrEvidence () const final
 returns the number of evidence entered into the Bayesian network More...
 
virtual Size nbrHardEvidence () const final
 returns the number of hard evidence entered into the Bayesian network More...
 
virtual Size nbrSoftEvidence () const final
 returns the number of soft evidence entered into the Bayesian network More...
 
const NodeProperty< const Potential< GUM_SCALAR > *> & evidence () const
 returns the set of evidence More...
 
const NodeSet & softEvidenceNodes () const
 returns the set of nodes with soft evidence More...
 
const NodeSet & hardEvidenceNodes () const
 returns the set of nodes with hard evidence More...
 
const NodeProperty< Idx > & hardEvidence () const
 indicates, for each node with hard evidence, which value it took More...
 
Getters and setters
void setEpsilon (double eps)
 Given that we approximate f(t), stopping criterion on |f(t+1)-f(t)|. More...
 
double epsilon () const
 Returns the value of epsilon. More...
 
void disableEpsilon ()
 Disable stopping criterion on epsilon. More...
 
void enableEpsilon ()
 Enable stopping criterion on epsilon. More...
 
bool isEnabledEpsilon () const
 Returns true if stopping criterion on epsilon is enabled, false otherwise. More...
 
void setMinEpsilonRate (double rate)
 Given that we approximate f(t), stopping criterion on d/dt(|f(t+1)-f(t)|). More...
 
double minEpsilonRate () const
 Returns the value of the minimal epsilon rate. More...
 
void disableMinEpsilonRate ()
 Disable stopping criterion on epsilon rate. More...
 
void enableMinEpsilonRate ()
 Enable stopping criterion on epsilon rate. More...
 
bool isEnabledMinEpsilonRate () const
 Returns true if stopping criterion on epsilon rate is enabled, false otherwise. More...
 
void setMaxIter (Size max)
 Stopping criterion on number of iterations. More...
 
Size maxIter () const
 Returns the criterion on number of iterations. More...
 
void disableMaxIter ()
 Disable stopping criterion on max iterations. More...
 
void enableMaxIter ()
 Enable stopping criterion on max iterations. More...
 
bool isEnabledMaxIter () const
 Returns true if stopping criterion on max iterations is enabled, false otherwise. More...
 
void setMaxTime (double timeout)
 Stopping criterion on timeout. More...
 
double maxTime () const
 Returns the timeout (in seconds). More...
 
double currentTime () const
 Returns the current running time in seconds. More...
 
void disableMaxTime ()
 Disable stopping criterion on timeout. More...
 
void enableMaxTime ()
 Enable stopping criterion on timeout. More...
 
bool isEnabledMaxTime () const
 Returns true if stopping criterion on timeout is enabled, false otherwise. More...
 
void setPeriodSize (Size p)
 Number of samples drawn between two tests of the stopping criteria. More...
 
Size periodSize () const
 Returns the period size. More...
 
void setVerbosity (bool v)
 Set the verbosity on (true) or off (false). More...
 
bool verbosity () const
 Returns true if verbosity is enabled. More...
 
ApproximationSchemeSTATE stateApproximationScheme () const
 Returns the approximation scheme state. More...
 
Size nbrIterations () const
 Returns the number of iterations. More...
 
const std::vector< double > & history () const
 Returns the scheme history. More...
 
void initApproximationScheme ()
 Initialise the scheme. More...
 
bool startOfPeriod ()
 Returns true if we are at the beginning of a period (computing the error is then mandatory). More...
 
void updateApproximationScheme (unsigned int incr=1)
 Update the scheme w.r.t the new error and increment steps. More...
 
Size remainingBurnIn ()
 Returns the remaining burn in. More...
 
void stopApproximationScheme ()
 Stop the approximation scheme. More...
 
bool continueApproximationScheme (double error)
 Update the scheme w.r.t the new error. More...
 
Getters and setters
std::string messageApproximationScheme () const
 Returns the approximation scheme message. More...
 

Public Types

enum  StateOfInference { StateOfInference::OutdatedBNStructure, StateOfInference::OutdatedBNPotentials, StateOfInference::ReadyForInference, StateOfInference::Done }
 current state of the inference More...
 
enum  ApproximationSchemeSTATE : char {
  ApproximationSchemeSTATE::Undefined, ApproximationSchemeSTATE::Continue, ApproximationSchemeSTATE::Epsilon, ApproximationSchemeSTATE::Rate,
  ApproximationSchemeSTATE::Limit, ApproximationSchemeSTATE::TimeLimit, ApproximationSchemeSTATE::Stopped
}
 The different state of an approximation scheme. More...
 

Protected Attributes

Estimator< GUM_SCALAR > __estimator
 Estimator object designed to approximate target posteriors. More...
 
bool isSetEstimator = false
 whether the Estimator object has been initialized More...
 
bool isContextualized = false
 whether the referenced Bayesian Network has been "contextualized" More...
 
double _current_epsilon
 Current epsilon. More...
 
double _last_epsilon
 Last epsilon value. More...
 
double _current_rate
 Current rate. More...
 
Size _current_step
 The current step. More...
 
Timer _timer
 The timer. More...
 
ApproximationSchemeSTATE _current_state
 The current state. More...
 
std::vector< double > _history
 The scheme history, used only if verbosity == true. More...
 
double _eps
 Threshold for convergence. More...
 
bool _enabled_eps
 If true, the threshold convergence is enabled. More...
 
double _min_rate_eps
 Threshold for the epsilon rate. More...
 
bool _enabled_min_rate_eps
 If true, the minimal threshold for epsilon rate is enabled. More...
 
double _max_time
 The timeout. More...
 
bool _enabled_max_time
 If true, the timeout is enabled. More...
 
Size _max_iter
 The maximum iterations. More...
 
bool _enabled_max_iter
 If true, the maximum iterations stopping criterion is enabled. More...
 
Size _burn_in
 Number of iterations before checking stopping criteria. More...
 
Size _period_size
 Checking criteria frequency. More...
 
bool _verbosity
 If true, verbosity is enabled. More...
 

Protected Member Functions

virtual Instantiation _burnIn ()=0
 draws samples without updating the estimators More...
 
virtual Instantiation _draw (GUM_SCALAR *w, Instantiation prev)=0
 draws a sample in the Bayesian network given a previous one More...
 
void _makeInference () override
 makes the inference by generating samples More...
 
void _loopApproxInference ()
 
virtual void _addVarSample (NodeId nod, Instantiation *I)
 adds a node to current instantiation More...
 
virtual void _onContextualize (BayesNetFragment< GUM_SCALAR > *bn)
 fired when Bayesian network is contextualized More...
 
void _onEvidenceAdded (const NodeId id, bool isHardEvidence) override
 fired after a new evidence is inserted More...
 
void _onEvidenceErased (const NodeId id, bool isHardEvidence) override
 fired before an evidence is removed More...
 
void _onAllEvidenceErased (bool contains_hard_evidence) override
 fired before all the evidence are erased More...
 
void _onEvidenceChanged (const NodeId id, bool hasChangedSoftHard) override
 fired after an evidence is changed, in particular when its status (soft/hard) changes More...
 
void _onBayesNetChanged (const IBayesNet< GUM_SCALAR > *bn) override
 fired after a new Bayes net has been assigned to the engine More...
 
void _updateOutdatedBNStructure () override
 prepares inference when the latter is in OutdatedBNStructure state More...
 
void _updateOutdatedBNPotentials () override
 prepares inference when the latter is in OutdatedBNPotentials state More...
 
void _onMarginalTargetAdded (const NodeId id) override
 fired after a new marginal target is inserted More...
 
void _onMarginalTargetErased (const NodeId id) override
 fired before a marginal target is removed More...
 
void _onAllMarginalTargetsAdded () override
 fired after all the nodes of the BN are added as marginal targets More...
 
void _onAllMarginalTargetsErased () override
 fired before all marginal targets are removed More...
 
void _onStateChanged () override
 fired when the state is changed More...
 
void _setTargetedMode ()
 
bool _isTargetedMode () const
 
void _setOutdatedBNStructureState ()
 put the inference into an outdated BN structure state More...
 
void _setOutdatedBNPotentialsState ()
 puts the inference into an OutdatedBNPotentials state if it is not already in an OutdatedBNStructure state More...
 

Detailed Description

template<typename GUM_SCALAR>
class gum::SamplingInference< GUM_SCALAR >

Definition at line 57 of file samplingInference.h.
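
gum::SamplingInference is an abstract engine: sampling is actually run through one of its concrete subclasses (gum::GibbsSampling, gum::ImportanceSampling, gum::MonteCarloSampling or gum::WeightedSampling, which implement _burnIn() and _draw() below). A minimal usage sketch, assuming an existing gum::BayesNet< double > bn, hypothetical node names "smoking" and "cancer", and the header path shown (an assumption, not documented on this page):

  #include <agrum/BN/inference/GibbsSampling.h>

  gum::GibbsSampling< double > ie(&bn);     // the BN is referenced, not copied
  ie.addEvidence("smoking", "yes");         // hard evidence only (see _onEvidenceAdded())
  ie.setEpsilon(1e-3);                      // stopping criterion on |f(t+1)-f(t)|
  ie.setMaxTime(10);                        // plus a 10 s timeout
  ie.makeInference();                       // draws samples until a criterion fires
  const gum::Potential< double >& p = ie.posterior("cancer");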

Member Enumeration Documentation

◆ ApproximationSchemeSTATE

The different state of an approximation scheme.

Enumerator
Undefined 
Continue 
Epsilon 
Rate 
Limit 
TimeLimit 
Stopped 

Definition at line 63 of file IApproximationSchemeConfiguration.h.

63  : char {
64  Undefined,
65  Continue,
66  Epsilon,
67  Rate,
68  Limit,
69  TimeLimit,
70  Stopped
71  };

◆ StateOfInference

template<typename GUM_SCALAR >
enum gum::BayesNetInference::StateOfInference
strong, inherited

current state of the inference

BayesNetInference can be in one of 4 different states:

  • OutdatedBNStructure: in this state, the inference is fully unprepared to be applied because some events changed the "logical" structure of the BN: for instance a node received a hard evidence, which implies that its outgoing arcs can be removed from the BN, hence involving a structural change in the BN. As a consequence, the (incremental) inference (probably) needs a significant amount of preparation to be ready for the next inference. In a Lazy propagation, for instance, this step amounts to compute a new join tree, hence a new structure in which inference will be applied. Note that classes that inherit from BayesNetInference may be smarter than BayesNetInference and may, in some situations, find out that their data structures are still ok for inference and, therefore, only resort to perform the actions related to the OutdatedBNPotentials state. As an example, consider a LazyPropagation inference in Bayes Net A->B->C->D->E in which C has received hard evidence e_C and E is the only target. In this case, A and B are not needed for inference, the only potentials that matter are P(D|e_C) and P(E|D). So the smallest join tree needed for inference contains only one clique DE. Now, adding new evidence e_A on A has no impact on E given hard evidence e_C. In this case, LazyPropagation can be smart and not update its join tree.
  • OutdatedBNPotentials: in this state, the structure of the BN remains unchanged, only some potentials stored in it have changed. Therefore, the inference probably just needs to invalidate some already computed potentials to be ready. Only a light amount of preparation is needed to be able to perform inference.
  • ReadyForInference: in this state, all the data structures are ready for inference. It only remains to perform the inference computations.
  • Done: the heavy computations of inference have been done. There might still remain a few light computations to perform to get the posterior potentials we need. Typically, in Lazy Propagation, all the messages in the join tree have been computed but, to get the potentials, we still need to perform the combinations of the potentials in the cliques with the messages sent to the cliques. In some inference algorithms, this step may even be empty.
Enumerator
OutdatedBNStructure 
OutdatedBNPotentials 
ReadyForInference 
Done 

Definition at line 180 of file BayesNetInference.h.

180  {
181  OutdatedBNStructure,
182  OutdatedBNPotentials,
183  ReadyForInference,
184  Done
185  };
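
A hedged sketch of how client code typically reacts to these states (ie stands for any engine of this hierarchy and the node id is a placeholder):

  if (ie.isInferenceOutdatedBNStructure() || ie.isInferenceOutdatedBNPotentials())
    ie.prepareInference();                          // back to ReadyForInference
  if (!ie.isInferenceDone())
    ie.makeInference();                             // heavy computations -> Done
  const auto& post = ie.posterior(gum::NodeId(0));  // only light post-processing remains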

Constructor & Destructor Documentation

◆ SamplingInference()

template<typename GUM_SCALAR >
gum::SamplingInference< GUM_SCALAR >::SamplingInference ( const IBayesNet< GUM_SCALAR > *  bn)
explicit

default constructor

Warning
By default, all the nodes of the Bayes net are targets.
Note that, following aGrUM's convention, the BN is not copied but only referenced by the inference algorithm.

Definition at line 45 of file samplingInference_tpl.h.

References DEFAULT_EPSILON, DEFAULT_MAXITER, DEFAULT_MIN_EPSILON_RATE, DEFAULT_PERIOD_SIZE, DEFAULT_TIMEOUT, DEFAULT_VERBOSITY, gum::ApproximationScheme::setEpsilon(), gum::ApproximationScheme::setMaxIter(), gum::ApproximationScheme::setMaxTime(), gum::ApproximationScheme::setMinEpsilonRate(), gum::ApproximationScheme::setPeriodSize(), and gum::ApproximationScheme::setVerbosity().

46  :
47  ApproximateInference< GUM_SCALAR >(bn),
48  __estimator(), __samplingBN(nullptr) {
49  this->setEpsilon(DEFAULT_EPSILON);
50  this->setMinEpsilonRate(DEFAULT_MIN_EPSILON_RATE);
51  this->setMaxIter(DEFAULT_MAXITER);
52  this->setMaxTime(DEFAULT_TIMEOUT);
53  this->setPeriodSize(DEFAULT_PERIOD_SIZE);
54  this->setVerbosity(DEFAULT_VERBOSITY);
55  GUM_CONSTRUCTOR(SamplingInference);
56  }
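Since every node of the Bayes net is a target by default, a common first step after construction is to restrict the targets to the variables that will actually be queried; a short sketch reusing the hypothetical engine and node name from the detailed description:

  gum::GibbsSampling< double > ie(&bn);  // every node of bn is a target here
  ie.eraseAllTargets();                  // drop the default targets
  ie.addTarget("cancer");                // keep only the node(s) of interest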

◆ ~SamplingInference()

template<typename GUM_SCALAR >
gum::SamplingInference< GUM_SCALAR >::~SamplingInference ( )
override

destructor

Definition at line 60 of file samplingInference_tpl.h.

References gum::SamplingInference< GUM_SCALAR >::__samplingBN, and gum::SamplingInference< GUM_SCALAR >::isContextualized.

60  {
61  GUM_DESTRUCTOR(SamplingInference);
62  if (__samplingBN != nullptr) {
63  if (isContextualized) { // otherwise __samplingBN==&BN()
64  delete __samplingBN;
65  }
66  }
67  }

Member Function Documentation

◆ _addVarSample()

template<typename GUM_SCALAR >
void gum::SamplingInference< GUM_SCALAR >::_addVarSample ( NodeId  nod,
Instantiation I 
)
protected, virtual

adds a node to current instantiation

Parameters
nod: the node to add to the sample
I: the current sample

generates a random value based on the BN's CPTs and adds the node to the Instantiation with that value

Definition at line 185 of file samplingInference_tpl.h.

References gum::Instantiation::add(), gum::Instantiation::chgVal(), and gum::SamplingInference< GUM_SCALAR >::samplingBN().

Referenced by gum::WeightedSampling< GUM_SCALAR >::_draw(), gum::MonteCarloSampling< GUM_SCALAR >::_draw(), and gum::ImportanceSampling< GUM_SCALAR >::_draw().

186  {
187  gum::Instantiation Itop = gum::Instantiation(*I);
188 
189  I->add(samplingBN().variable(nod));
190  I->chgVal(samplingBN().variable(nod),
191  samplingBN().cpt(nod).extract(Itop).draw());
192  }
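A hedged sketch of how a prior-sampling style subclass could implement _draw() on top of _addVarSample(): visit the sampling BN in topological order and let _addVarSample() draw each node from its CPT. The subclass name is hypothetical, and topologicalOrder() is assumed to be available through the DAG-model interface of samplingBN():

  template < typename GUM_SCALAR >
  Instantiation MySampling< GUM_SCALAR >::_draw(GUM_SCALAR* w, Instantiation prev) {
    *w = (GUM_SCALAR)1.0;             // unweighted (prior) sampling; prev is ignored
    Instantiation I;
    for (const auto nod : this->samplingBN().topologicalOrder())
      this->_addVarSample(nod, &I);   // draws nod from its CPT given I
    return I;
  }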

◆ _burnIn()

template<typename GUM_SCALAR >
virtual Instantiation gum::SamplingInference< GUM_SCALAR >::_burnIn ( )
protected, pure virtual

draws samples without updating the estimators

Implemented in gum::GibbsSampling< GUM_SCALAR >, gum::ImportanceSampling< GUM_SCALAR >, gum::MonteCarloSampling< GUM_SCALAR >, and gum::WeightedSampling< GUM_SCALAR >.

Referenced by gum::SamplingInference< GUM_SCALAR >::_loopApproxInference().


◆ _draw()

template<typename GUM_SCALAR >
virtual Instantiation gum::SamplingInference< GUM_SCALAR >::_draw ( GUM_SCALAR *  w,
Instantiation  prev 
)
protected, pure virtual

draws a sample in the Bayesian network given a previous one

Parameters
w: the weight of the sample being generated
prev: the previous sample generated

Implemented in gum::GibbsSampling< GUM_SCALAR >, gum::ImportanceSampling< GUM_SCALAR >, gum::MonteCarloSampling< GUM_SCALAR >, and gum::WeightedSampling< GUM_SCALAR >.

Referenced by gum::SamplingInference< GUM_SCALAR >::_loopApproxInference().


◆ _isTargetedMode()

template<typename GUM_SCALAR >
INLINE bool gum::MarginalTargetedInference< GUM_SCALAR >::_isTargetedMode ( ) const
protected, inherited

Definition at line 335 of file marginalTargetedInference_tpl.h.

References gum::MarginalTargetedInference< GUM_SCALAR >::__targeted_mode.

335  {
336  return __targeted_mode;
337  }

◆ _loopApproxInference()

template<typename GUM_SCALAR >
void gum::SamplingInference< GUM_SCALAR >::_loopApproxInference ( )
protected

Definition at line 164 of file samplingInference_tpl.h.

References gum::SamplingInference< GUM_SCALAR >::__estimator, gum::SamplingInference< GUM_SCALAR >::_burnIn(), gum::SamplingInference< GUM_SCALAR >::_draw(), gum::SamplingInference< GUM_SCALAR >::contextualize(), gum::ApproximationScheme::continueApproximationScheme(), gum::ApproximationScheme::initApproximationScheme(), gum::SamplingInference< GUM_SCALAR >::isContextualized, gum::SamplingInference< GUM_SCALAR >::isSetEstimator, and gum::ApproximationScheme::updateApproximationScheme().

Referenced by gum::SamplingInference< GUM_SCALAR >::_makeInference().

164  {
165  //@todo This should be in __prepareInference
166  if (!isContextualized) { this->contextualize(); }
167 
168  this->initApproximationScheme();
169  gum::Instantiation Ip;
170  GUM_SCALAR w = .0; //
171 
172  // Burn in
173  Ip = this->_burnIn();
174  do {
175  Ip = this->_draw(&w, Ip);
176  __estimator.update(Ip, w);
177  this->updateApproximationScheme();
178  } while (this->continueApproximationScheme(__estimator.confidence()));
179 
180  this->isSetEstimator = false;
181  }

◆ _makeInference()

template<typename GUM_SCALAR >
void gum::SamplingInference< GUM_SCALAR >::_makeInference ( )
override, protected, virtual

makes the inference by generating samples

Implements gum::BayesNetInference< GUM_SCALAR >.

Definition at line 158 of file samplingInference_tpl.h.

References gum::SamplingInference< GUM_SCALAR >::_loopApproxInference(), gum::SamplingInference< GUM_SCALAR >::_setEstimatorFromBN(), and gum::SamplingInference< GUM_SCALAR >::isSetEstimator.

158  {
159  if (!isSetEstimator) this->_setEstimatorFromBN();
160  this->_loopApproxInference();
161  }

◆ _onAllEvidenceErased()

template<typename GUM_SCALAR >
void gum::SamplingInference< GUM_SCALAR >::_onAllEvidenceErased ( bool  contains_hard_evidence)
override, protected, virtual

fired before all the evidence are erased

Implements gum::BayesNetInference< GUM_SCALAR >.

Definition at line 212 of file samplingInference_tpl.h.

213  {}

◆ _onAllMarginalTargetsAdded()

template<typename GUM_SCALAR >
void gum::SamplingInference< GUM_SCALAR >::_onAllMarginalTargetsAdded ( )
override, protected, virtual

fired after all the nodes of the BN are added as marginal targets

Implements gum::MarginalTargetedInference< GUM_SCALAR >.

Definition at line 241 of file samplingInference_tpl.h.

241 {}

◆ _onAllMarginalTargetsErased()

template<typename GUM_SCALAR >
void gum::SamplingInference< GUM_SCALAR >::_onAllMarginalTargetsErased ( )
override, protected, virtual

fired before all marginal targets are removed

Implements gum::MarginalTargetedInference< GUM_SCALAR >.

Definition at line 244 of file samplingInference_tpl.h.

244 {}

◆ _onBayesNetChanged()

template<typename GUM_SCALAR >
void gum::SamplingInference< GUM_SCALAR >::_onBayesNetChanged ( const IBayesNet< GUM_SCALAR > *  bn)
override, protected, virtual

fired after a new Bayes net has been assigned to the engine

Reimplemented from gum::MarginalTargetedInference< GUM_SCALAR >.

Definition at line 225 of file samplingInference_tpl.h.

226  {}

◆ _onContextualize()

template<typename GUM_SCALAR >
void gum::SamplingInference< GUM_SCALAR >::_onContextualize ( BayesNetFragment< GUM_SCALAR > *  bn)
protected, virtual

fired when Bayesian network is contextualized

Parameters
bn: the contextualized BayesNetFragment
targets: inference target variables
hardEvNodes: hard evidence nodes
hardEv: hard evidence values

Reimplemented in gum::ImportanceSampling< GUM_SCALAR >.

Definition at line 195 of file samplingInference_tpl.h.

Referenced by gum::SamplingInference< GUM_SCALAR >::contextualize().

196  {}

◆ _onEvidenceAdded()

template<typename GUM_SCALAR >
void gum::SamplingInference< GUM_SCALAR >::_onEvidenceAdded ( const NodeId  id,
bool  isHardEvidence 
)
override, protected, virtual

fired after a new evidence is inserted

Implements gum::BayesNetInference< GUM_SCALAR >.

Definition at line 200 of file samplingInference_tpl.h.

References GUM_ERROR.

201  {
202  if (!isHardEvidence) {
203  GUM_ERROR(FatalError, "Approximated inference only accept hard evidence");
204  }
205  }

◆ _onEvidenceChanged()

template<typename GUM_SCALAR >
void gum::SamplingInference< GUM_SCALAR >::_onEvidenceChanged ( const NodeId  id,
bool  hasChangedSoftHard 
)
override, protected, virtual

fired after an evidence is changed, in particular when its status (soft/hard) changes

Parameters
nodeId: the node of the changed evidence
hasChangedSoftHard: true if the evidence has changed from Soft to Hard or from Hard to Soft

Implements gum::BayesNetInference< GUM_SCALAR >.

Definition at line 217 of file samplingInference_tpl.h.

References GUM_ERROR.

218  {
219  if (hasChangedSoftHard) {
220  GUM_ERROR(FatalError, "Approximated inference only accept hard evidence");
221  }
222  }

◆ _onEvidenceErased()

template<typename GUM_SCALAR >
void gum::SamplingInference< GUM_SCALAR >::_onEvidenceErased ( const NodeId  id,
bool  isHardEvidence 
)
override, protected, virtual

fired before an evidence is removed

Implements gum::BayesNetInference< GUM_SCALAR >.

Definition at line 208 of file samplingInference_tpl.h.

209  {}

◆ _onMarginalTargetAdded()

template<typename GUM_SCALAR >
void gum::SamplingInference< GUM_SCALAR >::_onMarginalTargetAdded ( const NodeId  id)
override, protected, virtual

fired after a new marginal target is inserted

Parameters
id: The target variable's id.

Implements gum::MarginalTargetedInference< GUM_SCALAR >.

Definition at line 235 of file samplingInference_tpl.h.

235 {}

◆ _onMarginalTargetErased()

template<typename GUM_SCALAR >
void gum::SamplingInference< GUM_SCALAR >::_onMarginalTargetErased ( const NodeId  id)
override, protected, virtual

fired before a marginal target is removed

Parameters
id: The target variable's id.

Implements gum::MarginalTargetedInference< GUM_SCALAR >.

Definition at line 238 of file samplingInference_tpl.h.

238 {}

◆ _onStateChanged()

template<typename GUM_SCALAR >
void gum::SamplingInference< GUM_SCALAR >::_onStateChanged ( )
override, protected, virtual

fired when the state is changed

Implements gum::BayesNetInference< GUM_SCALAR >.

Definition at line 247 of file samplingInference_tpl.h.

References gum::SamplingInference< GUM_SCALAR >::__estimator, gum::ApproximationScheme::initApproximationScheme(), and gum::BayesNetInference< GUM_SCALAR >::isInferenceReady().

247  {
248  if (this->isInferenceReady()) {
249  __estimator.clear();
250  this->initApproximationScheme();
251  }
252  }

◆ _posterior()

template<typename GUM_SCALAR >
const Potential< GUM_SCALAR > & gum::SamplingInference< GUM_SCALAR >::_posterior ( NodeId  id)
override, virtual

Computes and returns the posterior of a node.

Returns
a const ref to the posterior probability of the node.
Parameters
id: the node for which we need a posterior probability
Warning
for efficiency reasons, the potential is returned by reference. In order to ensure that the potential may still exist even if the Inference object is destroyed, the user has to copy it explicitly.
Exceptions
UndefinedElement: if node is not in the set of targets.
NotFound: if node is not in the BN.

Implements gum::MarginalTargetedInference< GUM_SCALAR >.

Definition at line 106 of file samplingInference_tpl.h.

References gum::SamplingInference< GUM_SCALAR >::__estimator, and gum::BayesNetInference< GUM_SCALAR >::BN().

106  {
107  return __estimator.posterior(this->BN().variable(id));
108  }
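As the warning above says, the returned reference stays valid only while the inference object is alive; copying the result into a local Potential keeps it usable afterwards. A minimal sketch:

  gum::Potential< double > p = ie.posterior(id);  // deep copy, survives the engine
  // const auto& r = ie.posterior(id);            // reference, tied to the engine's lifetime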

◆ _setEstimatorFromBN()

template<typename GUM_SCALAR >
void gum::SamplingInference< GUM_SCALAR >::_setEstimatorFromBN ( )
virtual

Initializes the estimators object linked to the simulation.

Initializes the estimator object by creating a hashtable between non-evidence nodes and a 0-filled potential that will approximate each node's posterior

Definition at line 79 of file samplingInference_tpl.h.

References gum::SamplingInference< GUM_SCALAR >::__estimator, gum::BayesNetInference< GUM_SCALAR >::hardEvidenceNodes(), gum::SamplingInference< GUM_SCALAR >::isSetEstimator, and gum::SamplingInference< GUM_SCALAR >::samplingBN().

Referenced by gum::SamplingInference< GUM_SCALAR >::_makeInference().

79  {
80  __estimator.setFromBN(&samplingBN(), this->hardEvidenceNodes());
81  this->isSetEstimator = true;
82  }

◆ _setEstimatorFromLBP()

template<typename GUM_SCALAR >
void gum::SamplingInference< GUM_SCALAR >::_setEstimatorFromLBP ( LoopyBeliefPropagation< GUM_SCALAR > *  lbp,
GUM_SCALAR  virtualLBPSize 
)
virtual

Initializes the estimators object linked to the simulation.

Parameters
lbp: a LoopyBeliefPropagation object
virtualLBPSize: the size of the equivalent sampling by LBP

Initializes the estimator object by creating a hashtable between non-evidence nodes and the current approximation of each node's posterior obtained by running the LoopyBeliefPropagation algorithm

Definition at line 85 of file samplingInference_tpl.h.

References gum::SamplingInference< GUM_SCALAR >::__estimator, gum::BayesNetInference< GUM_SCALAR >::hardEvidenceNodes(), and gum::SamplingInference< GUM_SCALAR >::isSetEstimator.

86  {
87  __estimator.setFromLBP(lbp, this->hardEvidenceNodes(), virtualLBPSize);
88  this->isSetEstimator = true;
89  }
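A hedged sketch of seeding the estimator with loopy belief propagation before sampling, the pattern used by hybrid engines such as gum::LoopySamplingInference; the variable names are placeholders and this call is normally made from inside a derived engine rather than by client code:

  gum::LoopyBeliefPropagation< double > lbp(&bn);
  lbp.makeInference();                            // LBP approximation of the posteriors
  double virtualLBPSize = 100;                    // equivalent sample size granted to LBP
  ie._setEstimatorFromLBP(&lbp, virtualLBPSize);  // seed the estimator with it
  ie.makeInference();                             // sampling then refines the LBP estimate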

◆ _setOutdatedBNPotentialsState()

template<typename GUM_SCALAR >
INLINE void gum::BayesNetInference< GUM_SCALAR >::_setOutdatedBNPotentialsState ( )
protected, inherited

puts the inference into an OutdatedBNPotentials state if it is not already in an OutdatedBNStructure state

OutdatedBNPotentials: in this state, the structure of the BN remains unchanged, only some potentials stored in it have changed. Therefore, the inference probably just needs to invalidate some already computed potentials to be ready. Only a light amount of preparation is needed to be able to perform inference.

Definition at line 682 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__setState(), and gum::BayesNetInference< GUM_SCALAR >::OutdatedBNPotentials.

682  {
683  __setState(StateOfInference::OutdatedBNPotentials);
684  }

◆ _setOutdatedBNStructureState()

template<typename GUM_SCALAR >
INLINE void gum::BayesNetInference< GUM_SCALAR >::_setOutdatedBNStructureState ( )
protected, inherited

put the inference into an outdated BN structure state

OutdatedBNStructure: in this state, the inference is fully unprepared to be applied because some events changed the "logical" structure of the BN: for instance a node received a hard evidence, which implies that its outgoing arcs can be removed from the BN, hence involving a structural change in the BN. As a consequence, the (incremental) inference (probably) needs a significant amount of preparation to be ready for the next inference. In a Lazy propagation, for instance, this step amounts to compute a new join tree, hence a new structure in which inference will be applied. Note that classes that inherit from BayesNetInference may be smarter than BayesNetInference and may, in some situations, find out that their data structures are still ok for inference and, therefore, only resort to perform the actions related to the OutdatedBNPotentials state. As an example, consider a LazyPropagation inference in Bayes Net A->B->C->D->E in which C has received hard evidence e_C and E is the only target. In this case, A and B are not needed for inference, the only potentials that matter are P(D|e_C) and P(E|D). So the smallest join tree needed for inference contains only one clique DE. Now, adding new evidence e_A on A has no impact on E given hard evidence e_C. In this case, LazyPropagation can be smart and not update its join tree.

Definition at line 674 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__setState(), and gum::BayesNetInference< GUM_SCALAR >::OutdatedBNStructure.

674  {
675  __setState(StateOfInference::OutdatedBNStructure);
676  }

◆ _setTargetedMode()

template<typename GUM_SCALAR >
INLINE void gum::MarginalTargetedInference< GUM_SCALAR >::_setTargetedMode ( )
protected, inherited

Definition at line 339 of file marginalTargetedInference_tpl.h.

References gum::MarginalTargetedInference< GUM_SCALAR >::__targeted_mode, gum::MarginalTargetedInference< GUM_SCALAR >::__targets, and gum::Set< Key, Alloc >::clear().

Referenced by gum::MarginalTargetedInference< GUM_SCALAR >::addAllTargets(), gum::JointTargetedInference< GUM_SCALAR >::addJointTarget(), gum::MarginalTargetedInference< GUM_SCALAR >::addTarget(), and gum::MarginalTargetedInference< GUM_SCALAR >::eraseAllTargets().

339  {
340  if (!__targeted_mode) {
341  __targets.clear();
342  __targeted_mode = true;
343  }
344  }

◆ _updateOutdatedBNPotentials()

template<typename GUM_SCALAR >
void gum::SamplingInference< GUM_SCALAR >::_updateOutdatedBNPotentials ( )
override, protected, virtual

prepares inference when the latter is in OutdatedBNPotentials state

Note that the values of evidence are not necessarily known and can be changed between _updateOutdatedBNPotentials and _makeInference.

Implements gum::BayesNetInference< GUM_SCALAR >.

Definition at line 232 of file samplingInference_tpl.h.

232 {}

◆ _updateOutdatedBNStructure()

template<typename GUM_SCALAR >
void gum::SamplingInference< GUM_SCALAR >::_updateOutdatedBNStructure ( )
override, protected, virtual

prepares inference when the latter is in OutdatedBNStructure state

Note that the values of evidence are not necessarily known and can be changed between _updateOutdatedBNStructure and _makeInference.

Implements gum::BayesNetInference< GUM_SCALAR >.

Definition at line 229 of file samplingInference_tpl.h.

229 {}

◆ addAllTargets()

template<typename GUM_SCALAR >
void gum::MarginalTargetedInference< GUM_SCALAR >::addAllTargets ( )
final, virtual, inherited

adds all nodes as targets

Definition at line 133 of file marginalTargetedInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__bn, gum::BayesNetInference< GUM_SCALAR >::__setState(), gum::MarginalTargetedInference< GUM_SCALAR >::__targets, gum::MarginalTargetedInference< GUM_SCALAR >::_onMarginalTargetAdded(), gum::MarginalTargetedInference< GUM_SCALAR >::_setTargetedMode(), gum::Set< Key, Alloc >::contains(), GUM_ERROR, and gum::Set< Key, Alloc >::insert().

133  {
134  // check if the node belongs to the Bayesian network
135  if (this->__bn == nullptr)
136  GUM_ERROR(NullElement,
137  "No Bayes net has been assigned to the "
138  "inference algorithm");
139 
140 
141  _setTargetedMode(); // does nothing if already in targeted mode
142  for (const auto target : this->__bn->dag()) {
143  if (!__targets.contains(target)) {
144  __targets.insert(target);
145  _onMarginalTargetAdded(target);
146  this->__setState(
147  BayesNetInference< GUM_SCALAR >::StateOfInference::OutdatedBNStructure);
148  }
149  }
150  }

◆ addEvidence() [1/8]

template<typename GUM_SCALAR >
INLINE void gum::BayesNetInference< GUM_SCALAR >::addEvidence ( NodeId  id,
const Idx  val 
)
final, virtual, inherited

adds a new hard evidence on node id

Exceptions
UndefinedElement: if id does not belong to the Bayesian network
InvalidArgument: if val is not a value for id
InvalidArgument: if id already has an evidence

Definition at line 244 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__createHardEvidence().

Referenced by gum::LoopySamplingInference< GUM_SCALAR, APPROX >::_makeInference(), gum::BayesNetInference< GUM_SCALAR >::addEvidence(), gum::BayesNetInference< GUM_SCALAR >::addListOfEvidence(), gum::BayesNetInference< GUM_SCALAR >::addSetOfEvidence(), gum::MarginalTargetedInference< GUM_SCALAR >::evidenceImpact(), and gum::JointTargetedInference< GUM_SCALAR >::evidenceJointImpact().

245  {
246  addEvidence(id, __createHardEvidence(id, val));
247  }

◆ addEvidence() [2/8]

template<typename GUM_SCALAR >
INLINE void gum::BayesNetInference< GUM_SCALAR >::addEvidence ( const std::string &  nodeName,
const Idx  val 
)
final, virtual, inherited

adds a new hard evidence on node named nodeName

Exceptions
UndefinedElement: if nodeName does not belong to the Bayesian network
InvalidArgument: if val is not a value for id
InvalidArgument: if nodeName already has an evidence

Definition at line 252 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::addEvidence(), and gum::BayesNetInference< GUM_SCALAR >::BN().

253  {
254  addEvidence(this->BN().idFromName(nodeName), val);
255  }

◆ addEvidence() [3/8]

template<typename GUM_SCALAR >
INLINE void gum::BayesNetInference< GUM_SCALAR >::addEvidence ( NodeId  id,
const std::string &  label 
)
final, virtual, inherited

adds a new hard evidence on node id

Exceptions
UndefinedElement: if id does not belong to the Bayesian network
InvalidArgument: if val is not a value for id
InvalidArgument: if id already has an evidence

Definition at line 260 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::addEvidence(), and gum::BayesNetInference< GUM_SCALAR >::BN().

261  {
262  addEvidence(id, this->BN().variable(id)[label]);
263  }

◆ addEvidence() [4/8]

template<typename GUM_SCALAR >
INLINE void gum::BayesNetInference< GUM_SCALAR >::addEvidence ( const std::string &  nodeName,
const std::string &  label 
)
final, virtual, inherited

adds a new hard evidence on node named nodeName

Exceptions
UndefinedElement: if nodeName does not belong to the Bayesian network
InvalidArgument: if val is not a value for id
InvalidArgument: if nodeName already has an evidence

Definition at line 268 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::addEvidence(), and gum::BayesNetInference< GUM_SCALAR >::BN().

269  {
270  NodeId id = this->BN().idFromName(nodeName);
271  addEvidence(id, this->BN().variable(id)[label]);
272  }

◆ addEvidence() [5/8]

template<typename GUM_SCALAR >
void gum::BayesNetInference< GUM_SCALAR >::addEvidence ( NodeId  id,
const std::vector< GUM_SCALAR > &  vals 
)
final, virtual, inherited

adds a new evidence on node id (might be soft or hard)

Exceptions
UndefinedElement: if id does not belong to the Bayesian network
InvalidArgument: if id already has an evidence
FatalError: if vals=[0,0,...,0]
InvalidArgument: if the size of vals is different from the domain size of node id

Definition at line 276 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__bn, gum::MultiDimDecorator< GUM_SCALAR >::add(), gum::BayesNetInference< GUM_SCALAR >::addEvidence(), and GUM_ERROR.

277  {
278  // checks that the evidence is meaningful
279  if (__bn == nullptr)
280  GUM_ERROR(NullElement,
281  "No Bayes net has been assigned to the "
282  "inference algorithm");
283 
284  if (!__bn->dag().exists(id)) {
285  GUM_ERROR(UndefinedElement, id << " is not a NodeId in the bn");
286  }
287 
288  if (__bn->variable(id).domainSize() != vals.size()) {
289  GUM_ERROR(InvalidArgument,
290  "node " << __bn->variable(id)
291  << " and its evidence vector have different sizes.");
292  }
293 
294  Potential< GUM_SCALAR > pot;
295  pot.add(__bn->variable(id));
296  pot.fillWith(vals);
297  addEvidence(std::move(pot));
298  }
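For a sampling engine, which only accepts hard evidence (see _onEvidenceAdded()), a vector evidence must put all of its mass on a single value; a hedged sketch on a hypothetical 3-state node:

  ie.addEvidence("weather", std::vector< double >{0., 1., 0.});  // hard evidence on the second value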

◆ addEvidence() [6/8]

template<typename GUM_SCALAR >
void gum::BayesNetInference< GUM_SCALAR >::addEvidence ( const std::string &  nodeName,
const std::vector< GUM_SCALAR > &  vals 
)
final, virtual, inherited

adds a new evidence on node named nodeName (might be soft or hard)

Exceptions
UndefinedElement: if id does not belong to the Bayesian network
InvalidArgument: if nodeName already has an evidence
FatalError: if vals=[0,0,...,0]
InvalidArgument: if the size of vals is different from the domain size of node nodeName

Definition at line 302 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::addEvidence(), and gum::BayesNetInference< GUM_SCALAR >::BN().

303  {
304  addEvidence(this->BN().idFromName(nodeName), vals);
305  }

◆ addEvidence() [7/8]

template<typename GUM_SCALAR >
INLINE void gum::BayesNetInference< GUM_SCALAR >::addEvidence ( const Potential< GUM_SCALAR > &  pot)
final, virtual, inherited

adds a new evidence on node id (might be soft or hard)

Exceptions
UndefinedElement: if the potential is defined over several nodes
UndefinedElement: if the node on which the potential is defined does not belong to the Bayesian network
InvalidArgument: if the node of the potential already has an evidence
FatalError: if pot=[0,0,...,0]

Definition at line 351 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::addEvidence().

352  {
353  Potential< GUM_SCALAR > new_pot(pot);
354  addEvidence(std::move(new_pot));
355  }

◆ addEvidence() [8/8]

template<typename GUM_SCALAR >
void gum::BayesNetInference< GUM_SCALAR >::addEvidence ( Potential< GUM_SCALAR > &&  pot)
final, virtual, inherited

adds a new evidence on node id (might be soft or hard)

Exceptions
UndefinedElement: if the potential is defined over several nodes
UndefinedElement: if the node on which the potential is defined does not belong to the Bayesian network
InvalidArgument: if the node of the potential already has an evidence
FatalError: if pot=[0,0,...,0]

Definition at line 310 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__bn, gum::BayesNetInference< GUM_SCALAR >::__evidence, gum::BayesNetInference< GUM_SCALAR >::__hard_evidence, gum::BayesNetInference< GUM_SCALAR >::__hard_evidence_nodes, gum::BayesNetInference< GUM_SCALAR >::__isHardEvidence(), gum::BayesNetInference< GUM_SCALAR >::__setState(), gum::BayesNetInference< GUM_SCALAR >::__soft_evidence_nodes, gum::BayesNetInference< GUM_SCALAR >::_onEvidenceAdded(), GUM_ERROR, gum::BayesNetInference< GUM_SCALAR >::hasEvidence(), gum::Set< Key, Alloc >::insert(), and gum::BayesNetInference< GUM_SCALAR >::OutdatedBNStructure.

310  {
311  // check if the potential corresponds to an evidence
312  if (pot.nbrDim() != 1) {
313  GUM_ERROR(InvalidArgument, pot << " is not mono-dimensional.");
314  }
315  if (__bn == nullptr)
316  GUM_ERROR(NullElement,
317  "No Bayes net has been assigned to the "
318  "inference algorithm");
319 
320  NodeId id = __bn->nodeId(pot.variable(0));
321 
322  if (hasEvidence(id)) {
323  GUM_ERROR(InvalidArgument,
324  " node " << id
325  << " already has an evidence. Please use chgEvidence().");
326  }
327 
328  // check whether we have a hard evidence (and also check whether the
329  // potential only contains 0 (in this case, this will automatically raise
330  // an exception) )
331  Idx val;
332  bool is_hard_evidence = __isHardEvidence(pot, val);
333 
334  // insert the evidence
335  __evidence.insert(
336  id,
337  new Potential< GUM_SCALAR >(std::forward< Potential< GUM_SCALAR > >(pot)));
338  if (is_hard_evidence) { // pot is deterministic
339  __hard_evidence.insert(id, val);
340  __hard_evidence_nodes.insert(id);
341  } else {
342  __soft_evidence_nodes.insert(id);
343  }
344  __setState(StateOfInference::OutdatedBNStructure);
345  _onEvidenceAdded(id, is_hard_evidence);
346  }

◆ addListOfEvidence()

template<typename GUM_SCALAR >
INLINE void gum::BayesNetInference< GUM_SCALAR >::addListOfEvidence ( const List< const Potential< GUM_SCALAR > * > &  potlist)
final, virtual, inherited

adds a new list of evidence

Exceptions
UndefinedElement: if some potential is defined over several nodes
UndefinedElement: if the node on which some potential is defined does not belong to the Bayesian network
InvalidArgument: if the node of some potential already has an evidence
FatalError: if pot=[0,0,...,0]

Definition at line 360 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::addEvidence().

361  {
362  for (const auto pot : potlist)
363  addEvidence(*pot);
364  }

◆ addSetOfEvidence()

template<typename GUM_SCALAR >
INLINE void gum::BayesNetInference< GUM_SCALAR >::addSetOfEvidence ( const Set< const Potential< GUM_SCALAR > * > &  potset)
final, virtual, inherited

adds a new set of evidence

Exceptions
UndefinedElement: if some potential is defined over several nodes
UndefinedElement: if the node on which some potential is defined does not belong to the Bayesian network
InvalidArgument: if the node of some potential already has an evidence
FatalError: if pot=[0,0,...,0]

Definition at line 369 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::addEvidence().

370  {
371  for (const auto pot : potset)
372  addEvidence(*pot);
373  }

◆ addTarget() [1/2]

template<typename GUM_SCALAR >
void gum::MarginalTargetedInference< GUM_SCALAR >::addTarget ( NodeId  target)
finalvirtualinherited

Add a marginal target to the list of targets.

Exceptions
UndefinedElement if target is not a NodeId in the Bayes net

Definition at line 109 of file marginalTargetedInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__bn, gum::BayesNetInference< GUM_SCALAR >::__setState(), gum::MarginalTargetedInference< GUM_SCALAR >::__targets, gum::MarginalTargetedInference< GUM_SCALAR >::_onMarginalTargetAdded(), gum::MarginalTargetedInference< GUM_SCALAR >::_setTargetedMode(), gum::Set< Key, Alloc >::contains(), GUM_ERROR, and gum::Set< Key, Alloc >::insert().

Referenced by gum::MarginalTargetedInference< GUM_SCALAR >::addTarget(), and gum::MarginalTargetedInference< GUM_SCALAR >::evidenceImpact().

109  {
110  // check if the node belongs to the Bayesian network
111  if (this->__bn == nullptr)
112  GUM_ERROR(NullElement,
113  "No Bayes net has been assigned to the "
114  "inference algorithm");
115 
116  if (!this->__bn->dag().exists(target)) {
117  GUM_ERROR(UndefinedElement, target << " is not a NodeId in the bn");
118  }
119 
120  _setTargetedMode(); // does nothing if already in targeted mode
121  // add the new target
122  if (!__targets.contains(target)) {
123  __targets.insert(target);
124  _onMarginalTargetAdded(target);
125  this->__setState(
127  }
128  }
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ addTarget() [2/2]

template<typename GUM_SCALAR >
void gum::MarginalTargetedInference< GUM_SCALAR >::addTarget ( const std::string &  nodeName)
finalvirtualinherited

Add a marginal target to the list of targets.

Exceptions
UndefinedElement if target is not a NodeId in the Bayes net

Definition at line 155 of file marginalTargetedInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__bn, gum::MarginalTargetedInference< GUM_SCALAR >::addTarget(), and GUM_ERROR.

156  {
157  // check if the node belongs to the Bayesian network
158  if (this->__bn == nullptr)
159  GUM_ERROR(NullElement,
160  "No Bayes net has been assigned to the "
161  "inference algorithm");
162 
163  addTarget(this->__bn->idFromName(nodeName));
164  }
+ Here is the call graph for this function:
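
A short sketch of declaring marginal targets, by id or by name (same assumptions as the first sketch above; node names are hypothetical):

#include <iostream>
#include <agrum/BN/BayesNet.h>
#include <agrum/BN/inference/weightedSampling.h>   // assumed header path

void queryTwoTargets(gum::WeightedSampling< double >& ie,
                     const gum::BayesNet< double >&   bn) {
  ie.eraseAllTargets();                 // switch to an explicit list of targets
  ie.addTarget(bn.idFromName("a"));     // by NodeId
  ie.addTarget("c");                    // by name, resolved through the BN
  ie.makeInference();
  if (ie.isTarget("c")) { std::cout << ie.posterior("c") << std::endl; }
}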

◆ BN()

template<typename GUM_SCALAR >
INLINE const IBayesNet< GUM_SCALAR > & gum::BayesNetInference< GUM_SCALAR >::BN ( ) const
finalvirtualinherited

Returns a constant reference over the IBayesNet referenced by this class.

Exceptions
UndefinedElement is raised if no Bayes net has been assigned to the inference.

Definition at line 118 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__bn, and GUM_ERROR.

Referenced by gum::WeightedSampling< GUM_SCALAR >::_draw(), gum::MonteCarloSampling< GUM_SCALAR >::_draw(), gum::ImportanceSampling< GUM_SCALAR >::_draw(), gum::SamplingInference< GUM_SCALAR >::_posterior(), gum::BayesNetInference< GUM_SCALAR >::addEvidence(), gum::BayesNetInference< GUM_SCALAR >::chgEvidence(), gum::SamplingInference< GUM_SCALAR >::contextualize(), gum::SamplingInference< GUM_SCALAR >::currentPosterior(), gum::BayesNetInference< GUM_SCALAR >::eraseEvidence(), gum::MarginalTargetedInference< GUM_SCALAR >::evidenceImpact(), gum::JointTargetedInference< GUM_SCALAR >::evidenceJointImpact(), gum::MarginalTargetedInference< GUM_SCALAR >::H(), gum::BayesNetInference< GUM_SCALAR >::hasEvidence(), gum::BayesNetInference< GUM_SCALAR >::hasHardEvidence(), gum::BayesNetInference< GUM_SCALAR >::hasSoftEvidence(), gum::JointTargetedInference< GUM_SCALAR >::I(), gum::JointTargetedInference< GUM_SCALAR >::jointMutualInformation(), gum::MarginalTargetedInference< GUM_SCALAR >::posterior(), gum::JointTargetedInference< GUM_SCALAR >::posterior(), gum::SamplingInference< GUM_SCALAR >::samplingBN(), and gum::Estimator< GUM_SCALAR >::setFromLBP().

118  {
119  if (__bn == nullptr)
120  GUM_ERROR(UndefinedElement,
121  "No Bayes net has been assigned to "
122  "the inference algorithm.");
123  return *__bn;
124  }
+ Here is the caller graph for this function:

◆ chgEvidence() [1/7]

template<typename GUM_SCALAR >
INLINE void gum::BayesNetInference< GUM_SCALAR >::chgEvidence ( NodeId  id,
const Idx  val 
)
finalvirtualinherited

change the value of an already existing hard evidence

Exceptions
UndefinedElement if id does not belong to the Bayesian network
InvalidArgument if val is not a value for id
InvalidArgument if id does not already have an evidence

Definition at line 429 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__createHardEvidence().

Referenced by gum::BayesNetInference< GUM_SCALAR >::chgEvidence(), gum::MarginalTargetedInference< GUM_SCALAR >::evidenceImpact(), and gum::JointTargetedInference< GUM_SCALAR >::evidenceJointImpact().

430  {
432  }
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ chgEvidence() [2/7]

template<typename GUM_SCALAR >
INLINE void gum::BayesNetInference< GUM_SCALAR >::chgEvidence ( const std::string &  nodeName,
const Idx  val 
)
finalvirtualinherited

change the value of an already existing hard evidence

Exceptions
UndefinedElement if nodeName does not belong to the Bayesian network
InvalidArgument if val is not a value for id
InvalidArgument if id does not already have an evidence

Definition at line 437 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::BN(), and gum::BayesNetInference< GUM_SCALAR >::chgEvidence().

438  {
439  chgEvidence(this->BN().idFromName(nodeName), val);
440  }
+ Here is the call graph for this function:

◆ chgEvidence() [3/7]

template<typename GUM_SCALAR >
INLINE void gum::BayesNetInference< GUM_SCALAR >::chgEvidence ( NodeId  id,
const std::string &  label 
)
finalvirtualinherited

change the value of an already existing hard evidence

Exceptions
UndefinedElement if id does not belong to the Bayesian network
InvalidArgument if val is not a value for id
InvalidArgument if id does not already have an evidence

Definition at line 445 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::BN(), and gum::BayesNetInference< GUM_SCALAR >::chgEvidence().

446  {
447  chgEvidence(id, this->BN().variable(id)[label]);
448  }
+ Here is the call graph for this function:

◆ chgEvidence() [4/7]

template<typename GUM_SCALAR >
INLINE void gum::BayesNetInference< GUM_SCALAR >::chgEvidence ( const std::string &  nodeName,
const std::string &  label 
)
finalvirtualinherited

change the value of an already existing hard evidence

Exceptions
UndefinedElement if nodeName does not belong to the Bayesian network
InvalidArgument if val is not a value for id
InvalidArgument if id does not already have an evidence

Definition at line 453 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::BN(), and gum::BayesNetInference< GUM_SCALAR >::chgEvidence().

454  {
455  NodeId id = this->BN().idFromName(nodeName);
456  chgEvidence(id, this->BN().variable(id)[label]);
457  }
+ Here is the call graph for this function:

◆ chgEvidence() [5/7]

template<typename GUM_SCALAR >
INLINE void gum::BayesNetInference< GUM_SCALAR >::chgEvidence ( NodeId  id,
const std::vector< GUM_SCALAR > &  vals 
)
finalvirtualinherited

change the value of an already existing evidence (might be soft or hard)

Exceptions
UndefinedElement if id does not belong to the Bayesian network
InvalidArgument if the node does not already have an evidence
FatalError if vals=[0,0,...,0]
InvalidArgument if the size of vals is different from the domain size of node id

Definition at line 461 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__bn, gum::MultiDimDecorator< GUM_SCALAR >::add(), gum::BayesNetInference< GUM_SCALAR >::chgEvidence(), and GUM_ERROR.

462  {
463  // check whether this corresponds to an evidence
464  if (__bn == nullptr)
465  GUM_ERROR(NullElement,
466  "No Bayes net has been assigned to the "
467  "inference algorithm");
468 
469  if (!__bn->dag().exists(id)) {
470  GUM_ERROR(UndefinedElement, id << " is not a NodeId in the bn");
471  }
472 
473  if (__bn->variable(id).domainSize() != vals.size()) {
474  GUM_ERROR(InvalidArgument,
475  "node " << __bn->variable(id)
476  << " and its evidence have different sizes.");
477  }
478 
479  // create the potential corresponding to vals
480  Potential< GUM_SCALAR > pot;
481  pot.add(__bn->variable(id));
482  pot.fillWith(vals);
483  chgEvidence(pot);
484  }
+ Here is the call graph for this function:

◆ chgEvidence() [6/7]

template<typename GUM_SCALAR >
INLINE void gum::BayesNetInference< GUM_SCALAR >::chgEvidence ( const std::string &  nodeName,
const std::vector< GUM_SCALAR > &  vals 
)
finalvirtualinherited

change the value of an already existing evidence (might be soft or hard)

Exceptions
UndefinedElement if nodeName does not belong to the Bayesian network
InvalidArgument if the node does not already have an evidence
FatalError if vals=[0,0,...,0]
InvalidArgument if the size of vals is different from the domain size of node id

Definition at line 488 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::BN(), and gum::BayesNetInference< GUM_SCALAR >::chgEvidence().

489  {
490  chgEvidence(this->BN().idFromName(nodeName), vals);
491  }
+ Here is the call graph for this function:

◆ chgEvidence() [7/7]

template<typename GUM_SCALAR >
void gum::BayesNetInference< GUM_SCALAR >::chgEvidence ( const Potential< GUM_SCALAR > &  pot)
finalvirtualinherited

change the value of an already existing evidence (might be soft or hard)

Exceptions
UndefinedElement if the potential is defined over several nodes
UndefinedElement if the node on which the potential is defined does not belong to the Bayesian network
InvalidArgument if the node of the potential does not already have an evidence
FatalError if pot=[0,0,...,0]

Definition at line 496 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__bn, gum::BayesNetInference< GUM_SCALAR >::__evidence, gum::BayesNetInference< GUM_SCALAR >::__hard_evidence, gum::BayesNetInference< GUM_SCALAR >::__hard_evidence_nodes, gum::BayesNetInference< GUM_SCALAR >::__isHardEvidence(), gum::BayesNetInference< GUM_SCALAR >::__setState(), gum::BayesNetInference< GUM_SCALAR >::__soft_evidence_nodes, gum::BayesNetInference< GUM_SCALAR >::_onEvidenceChanged(), gum::Instantiation::end(), gum::Set< Key, Alloc >::erase(), GUM_ERROR, gum::BayesNetInference< GUM_SCALAR >::hasEvidence(), gum::BayesNetInference< GUM_SCALAR >::hasHardEvidence(), gum::Instantiation::inc(), gum::Set< Key, Alloc >::insert(), gum::BayesNetInference< GUM_SCALAR >::isInferenceOutdatedBNStructure(), gum::MultiDimDecorator< GUM_SCALAR >::nbrDim(), gum::BayesNetInference< GUM_SCALAR >::OutdatedBNPotentials, gum::BayesNetInference< GUM_SCALAR >::OutdatedBNStructure, gum::MultiDimDecorator< GUM_SCALAR >::set(), gum::Instantiation::setFirst(), and gum::MultiDimDecorator< GUM_SCALAR >::variable().

497  {
498  // check if the potential corresponds to an evidence
499  if (pot.nbrDim() != 1) {
500  GUM_ERROR(InvalidArgument, pot << " is not a mono-dimensional potential.");
501  }
502  if (__bn == nullptr)
503  GUM_ERROR(NullElement,
504  "No Bayes net has been assigned to the "
505  "inference algorithm");
506 
507  NodeId id = __bn->nodeId(pot.variable(0));
508 
509  if (!hasEvidence(id)) {
510  GUM_ERROR(InvalidArgument,
511  id << " has no evidence. Please use addEvidence().");
512  }
513 
514  // check whether we have a hard evidence (and also check whether the
515  // potential only contains 0 (in this case, this will automatically raise
516  // an exception) )
517  Idx val;
518  bool is_hard_evidence = __isHardEvidence(pot, val);
519 
520  // modify the evidence already stored
521  const Potential< GUM_SCALAR >* localPot = __evidence[id];
522  Instantiation I(pot);
523  for (I.setFirst(); !I.end(); I.inc()) {
524  localPot->set(I, pot[I]);
525  }
526 
527  // the inference state will be different
528  // whether evidence change from Hard to Soft or not.
529  bool hasChangedSoftHard = false;
530 
531  if (is_hard_evidence) {
532  if (!hasHardEvidence(id)) {
533  hasChangedSoftHard = true;
534  __hard_evidence.insert(id, val);
537  } else {
538  __hard_evidence[id] = val;
539  }
540  } else {
541  if (hasHardEvidence(id)) { // evidence was hard
542  __hard_evidence.erase(id);
545  hasChangedSoftHard = true;
546  }
547  }
548 
549  if (hasChangedSoftHard) {
551  } else {
554  }
555  }
556 
557  _onEvidenceChanged(id, hasChangedSoftHard);
558  }
+ Here is the call graph for this function:
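
All chgEvidence() overloads require that the node already carries evidence; a hedged sketch of the main forms (same assumptions as above, node names and labels are hypothetical):

#include <vector>
#include <agrum/BN/BayesNet.h>
#include <agrum/BN/inference/weightedSampling.h>   // assumed header path

void reviseEvidence(gum::WeightedSampling< double >& ie,
                    const gum::BayesNet< double >&   bn) {
  ie.chgEvidence("a", 0);                                  // hard evidence, by name and state index
  ie.chgEvidence(bn.idFromName("a"), "no");                // hard evidence, by id and label
  ie.chgEvidence("c", std::vector< double >{0.5, 0.5});    // turns the evidence on "c" into soft evidence

  gum::Potential< double > pot;                            // potential form, as for addEvidence()
  pot.add(bn.variable(bn.idFromName("c")));
  pot.fillWith({0.9, 0.1});
  ie.chgEvidence(pot);
}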

◆ clear()

template<typename GUM_SCALAR >
INLINE void gum::BayesNetInference< GUM_SCALAR >::clear ( )
virtualinherited

clears all the data structures allocated for the last inference

Definition at line 150 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__setState(), gum::BayesNetInference< GUM_SCALAR >::eraseAllEvidence(), and gum::BayesNetInference< GUM_SCALAR >::OutdatedBNStructure.

Referenced by gum::BayesNetInference< GUM_SCALAR >::setBN().

150  {
153  }
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ contextualize()

template<typename GUM_SCALAR >
void gum::SamplingInference< GUM_SCALAR >::contextualize ( )
virtual

Simplifying the Bayesian network with relevance reasoning to lighten the computational load.

Sets the reference Bayesian network as a BayesNetFragment, after eliminating nodes that are idle for simulation and computation, such as barren or d-separated nodes. Removes the arcs from evidence nodes to their children, after setting new CPTs for these children.

Definition at line 111 of file samplingInference_tpl.h.

References gum::SamplingInference< GUM_SCALAR >::__samplingBN, gum::SamplingInference< GUM_SCALAR >::_onContextualize(), gum::Instantiation::add(), gum::BarrenNodesFinder::barrenNodes(), gum::BayesNetInference< GUM_SCALAR >::BN(), gum::Instantiation::chgVal(), gum::BayesNetInference< GUM_SCALAR >::hardEvidence(), gum::BayesNetInference< GUM_SCALAR >::hardEvidenceNodes(), gum::SamplingInference< GUM_SCALAR >::isContextualized, gum::dSeparation::requisiteNodes(), gum::BarrenNodesFinder::setEvidence(), gum::BarrenNodesFinder::setTargets(), gum::BayesNetInference< GUM_SCALAR >::softEvidenceNodes(), and gum::MarginalTargetedInference< GUM_SCALAR >::targets().

Referenced by gum::SamplingInference< GUM_SCALAR >::_loopApproxInference().

111  {
112  // Finding Barren nodes
113 
114  BarrenNodesFinder barr_nodes = BarrenNodesFinder(&this->BN().dag());
115  barr_nodes.setTargets(&this->targets());
116  barr_nodes.setEvidence(&this->hardEvidenceNodes());
117  const NodeSet& barren = barr_nodes.barrenNodes();
118 
119  // creating BN fragment
120  __samplingBN = new BayesNetFragment< GUM_SCALAR >(this->BN());
121  for (const auto elmt : this->BN().dag().asNodeSet() - barren)
122  __samplingBN->installNode(elmt);
123 
124  // D-separated nodes
125 
126  dSeparation dsep = gum::dSeparation();
127  NodeSet requisite;
128  dsep.requisiteNodes(
129  this->BN().dag(),
130  this->BN().nodes().asNodeSet(), // no target for approximateInference
131  this->hardEvidenceNodes(),
132  this->softEvidenceNodes(), // should be empty
133  requisite);
134  requisite += this->hardEvidenceNodes();
135 
136  auto nonRequisite = this->BN().dag().asNodeSet() - requisite;
137 
138  for (const auto elmt : nonRequisite)
139  __samplingBN->uninstallNode(elmt);
140  for (const auto hard : this->hardEvidenceNodes()) {
142  I.add(this->BN().variable(hard));
143  I.chgVal(this->BN().variable(hard), this->hardEvidence()[hard]);
144 
145  for (const auto& child : this->BN().children(hard)) {
146  auto p = new gum::Potential< GUM_SCALAR >();
147  *p = this->BN().cpt(child).extract(I);
148  __samplingBN->installCPT(child, p);
149  }
150  }
151 
152  this->isContextualized = true;
154  }
+ Here is the call graph for this function:
+ Here is the caller graph for this function:
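
contextualize() is normally triggered from the internal approximation loop, but it is public and can be called explicitly; the following sketch (same assumptions as above, node names hypothetical) only illustrates the intended call order:

#include <agrum/BN/BayesNet.h>
#include <agrum/BN/inference/weightedSampling.h>   // assumed header path

void inspectSimplification(gum::WeightedSampling< double >& ie,
                           const gum::BayesNet< double >&   bn) {
  ie.addEvidence(bn.idFromName("a"), 1);   // hard evidence to be absorbed into the children CPTs
  ie.contextualize();                      // drops barren and d-separated nodes, rewires evidence arcs
  // the engine now samples from a BayesNetFragment instead of the full BN
}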

◆ continueApproximationScheme()

INLINE bool gum::ApproximationScheme::continueApproximationScheme ( double  error)
inherited

Update the scheme w.r.t. the new error.

Tests the stopping criteria that are enabled.

Parameters
error The new error value.
Returns
false if the state becomes != ApproximationSchemeSTATE::Continue
Exceptions
OperationNotAllowed Raised if state != ApproximationSchemeSTATE::Continue.

Definition at line 225 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_current_epsilon, gum::ApproximationScheme::_current_rate, gum::ApproximationScheme::_current_state, gum::ApproximationScheme::_current_step, gum::ApproximationScheme::_enabled_eps, gum::ApproximationScheme::_enabled_max_iter, gum::ApproximationScheme::_enabled_max_time, gum::ApproximationScheme::_enabled_min_rate_eps, gum::ApproximationScheme::_eps, gum::ApproximationScheme::_history, gum::ApproximationScheme::_last_epsilon, gum::ApproximationScheme::_max_iter, gum::ApproximationScheme::_max_time, gum::ApproximationScheme::_min_rate_eps, gum::ApproximationScheme::_stopScheme(), gum::ApproximationScheme::_timer, gum::IApproximationSchemeConfiguration::Continue, gum::IApproximationSchemeConfiguration::Epsilon, GUM_EMIT3, GUM_ERROR, gum::IApproximationSchemeConfiguration::Limit, gum::IApproximationSchemeConfiguration::messageApproximationScheme(), gum::IApproximationSchemeConfiguration::onProgress, gum::IApproximationSchemeConfiguration::Rate, gum::ApproximationScheme::startOfPeriod(), gum::ApproximationScheme::stateApproximationScheme(), gum::Timer::step(), gum::IApproximationSchemeConfiguration::TimeLimit, and gum::ApproximationScheme::verbosity().

Referenced by gum::GibbsBNdistance< GUM_SCALAR >::_computeKL(), gum::SamplingInference< GUM_SCALAR >::_loopApproxInference(), gum::learning::DAG2BNLearner< ALLOC >::createBN(), gum::learning::GreedyHillClimbing::learnStructure(), gum::learning::LocalSearchWithTabuList::learnStructure(), and gum::credal::CNMonteCarloSampling< GUM_SCALAR, BNInferenceEngine >::makeInference().

225  {
226  // For coherence, we fix the time used in the method
227 
228  double timer_step = _timer.step();
229 
230  if (_enabled_max_time) {
231  if (timer_step > _max_time) {
233  return false;
234  }
235  }
236 
237  if (!startOfPeriod()) { return true; }
238 
240  GUM_ERROR(OperationNotAllowed,
241  "state of the approximation scheme is not correct : "
243  }
244 
245  if (verbosity()) { _history.push_back(error); }
246 
247  if (_enabled_max_iter) {
248  if (_current_step > _max_iter) {
250  return false;
251  }
252  }
253 
255  _current_epsilon = error; // eps rate isEnabled needs it so affectation was
256  // moved from eps isEnabled below
257 
258  if (_enabled_eps) {
259  if (_current_epsilon <= _eps) {
261  return false;
262  }
263  }
264 
265  if (_last_epsilon >= 0.) {
266  if (_current_epsilon > .0) {
267  // ! _current_epsilon can be 0. AND epsilon
268  // isEnabled can be disabled !
269  _current_rate =
271  }
272  // limit with current eps ---> 0 is | 1 - ( last_eps / 0 ) | --->
273  // infinity the else means a return false if we isEnabled the rate below,
274  // as we would have returned false if epsilon isEnabled was enabled
275  else {
277  }
278 
279  if (_enabled_min_rate_eps) {
280  if (_current_rate <= _min_rate_eps) {
282  return false;
283  }
284  }
285  }
286 
288  if (onProgress.hasListener()) {
290  }
291 
292  return true;
293  } else {
294  return false;
295  }
296  }
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ currentPosterior() [1/2]

template<typename GUM_SCALAR >
const Potential< GUM_SCALAR > & gum::SamplingInference< GUM_SCALAR >::currentPosterior ( NodeId  id)

Computes and returns the current estimate of the posterior of a node.

Returns
a const ref to the posterior probability of the node.
Parameters
id the node for which we need a posterior probability
Warning
for efficiency reasons, the potential is returned by reference. To ensure that the potential still exists after the Inference object is destroyed, the user has to copy it explicitly.
Exceptions
UndefinedElement if node is not in the set of targets.
NotFound if node is not in the BN.

Definition at line 94 of file samplingInference_tpl.h.

References gum::SamplingInference< GUM_SCALAR >::__estimator, and gum::BayesNetInference< GUM_SCALAR >::BN().

Referenced by gum::SamplingInference< GUM_SCALAR >::currentPosterior().

94  {
95  return __estimator.posterior(this->BN().variable(id));
96  }
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ currentPosterior() [2/2]

template<typename GUM_SCALAR >
const Potential< GUM_SCALAR > & gum::SamplingInference< GUM_SCALAR >::currentPosterior ( const std::string &  name)

Computes and returns the current estimate of the posterior of a node given its name.

Returns
a const ref to the posterior probability of the node referred by name.
Parameters
name the name of the node for which we need a posterior probability
Warning
for efficiency reasons, the potential is returned by reference. To ensure that the potential still exists after the Inference object is destroyed, the user has to copy it explicitly.
Exceptions
UndefinedElement if the node corresponding to name is not in the set of targets.
NotFound if the node corresponding to name is not in the BN.

Definition at line 100 of file samplingInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::BN(), and gum::SamplingInference< GUM_SCALAR >::currentPosterior().

100  {
101  return currentPosterior(this->BN().idFromName(name));
102  }
+ Here is the call graph for this function:
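
Because the returned potential refers to the estimator's internal state, callers that need the estimate to outlive the engine should copy it; a small sketch (same assumptions as above):

#include <string>
#include <agrum/BN/BayesNet.h>
#include <agrum/BN/inference/weightedSampling.h>   // assumed header path

// snapshot of the running estimate, typically taken during or after sampling
gum::Potential< double > snapshotPosterior(gum::WeightedSampling< double >& ie,
                                           const std::string&               name) {
  const gum::Potential< double >& ref = ie.currentPosterior(name);
  return gum::Potential< double >(ref);   // explicit copy, as the warning above recommends
}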

◆ currentTime()

INLINE double gum::ApproximationScheme::currentTime ( ) const
virtualinherited

Returns the current running time in seconds.

Returns
the current running time in seconds.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 126 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_timer, and gum::Timer::step().

Referenced by gum::learning::genericBNLearner::currentTime().

126 { return _timer.step(); }
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ disableEpsilon()

INLINE void gum::ApproximationScheme::disableEpsilon ( )
virtualinherited

Disable stopping criterion on epsilon.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 52 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_enabled_eps.

Referenced by gum::learning::genericBNLearner::disableEpsilon().

52 { _enabled_eps = false; }
+ Here is the caller graph for this function:

◆ disableMaxIter()

INLINE void gum::ApproximationScheme::disableMaxIter ( )
virtualinherited

Disable stopping criterion on max iterations.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 103 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_enabled_max_iter.

Referenced by gum::credal::CNMonteCarloSampling< GUM_SCALAR, BNInferenceEngine >::__mcInitApproximationScheme(), gum::learning::genericBNLearner::disableMaxIter(), and gum::learning::GreedyHillClimbing::GreedyHillClimbing().

103 { _enabled_max_iter = false; }
+ Here is the caller graph for this function:

◆ disableMaxTime()

INLINE void gum::ApproximationScheme::disableMaxTime ( )
virtualinherited

Disable stopping criterion on timeout.

Returns
Disable stopping criterion on timeout.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 129 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_enabled_max_time.

Referenced by gum::learning::genericBNLearner::disableMaxTime(), and gum::learning::GreedyHillClimbing::GreedyHillClimbing().

129 { _enabled_max_time = false; }
+ Here is the caller graph for this function:

◆ disableMinEpsilonRate()

INLINE void gum::ApproximationScheme::disableMinEpsilonRate ( )
virtualinherited

Disable stopping criterion on epsilon rate.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 77 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_enabled_min_rate_eps.

Referenced by gum::credal::CNMonteCarloSampling< GUM_SCALAR, BNInferenceEngine >::__mcInitApproximationScheme(), gum::GibbsBNdistance< GUM_SCALAR >::_computeKL(), gum::learning::genericBNLearner::disableMinEpsilonRate(), and gum::learning::GreedyHillClimbing::GreedyHillClimbing().

77  {
78  _enabled_min_rate_eps = false;
79  }
+ Here is the caller graph for this function:

◆ domainSizes()

template<typename GUM_SCALAR >
INLINE const NodeProperty< Size > & gum::BayesNetInference< GUM_SCALAR >::domainSizes ( ) const
finalvirtualinherited

get the domain sizes of the random variables of the BN

Definition at line 171 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__domain_sizes.

171  {
172  return __domain_sizes;
173  }

◆ enableEpsilon()

INLINE void gum::ApproximationScheme::enableEpsilon ( )
virtualinherited

Enable stopping criterion on epsilon.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 55 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_enabled_eps.

Referenced by gum::credal::CNMonteCarloSampling< GUM_SCALAR, BNInferenceEngine >::__mcInitApproximationScheme(), and gum::learning::genericBNLearner::enableEpsilon().

55 { _enabled_eps = true; }
+ Here is the caller graph for this function:

◆ enableMaxIter()

INLINE void gum::ApproximationScheme::enableMaxIter ( )
virtualinherited

Enable stopping criterion on max iterations.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 106 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_enabled_max_iter.

Referenced by gum::learning::genericBNLearner::enableMaxIter().

106 { _enabled_max_iter = true; }
+ Here is the caller graph for this function:

◆ enableMaxTime()

INLINE void gum::ApproximationScheme::enableMaxTime ( )
virtualinherited

Enable stopping criterion on timeout.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 132 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_enabled_max_time.

Referenced by gum::credal::CNMonteCarloSampling< GUM_SCALAR, BNInferenceEngine >::CNMonteCarloSampling(), and gum::learning::genericBNLearner::enableMaxTime().

132 { _enabled_max_time = true; }
+ Here is the caller graph for this function:

◆ enableMinEpsilonRate()

INLINE void gum::ApproximationScheme::enableMinEpsilonRate ( )
virtualinherited

Enable stopping criterion on epsilon rate.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 82 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_enabled_min_rate_eps.

Referenced by gum::GibbsBNdistance< GUM_SCALAR >::_computeKL(), and gum::learning::genericBNLearner::enableMinEpsilonRate().

82  {
83  _enabled_min_rate_eps = true;
84  }
+ Here is the caller graph for this function:

◆ epsilon()

INLINE double gum::ApproximationScheme::epsilon ( ) const
virtualinherited

Returns the value of epsilon.

Returns
Returns the value of epsilon.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 49 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_eps.

Referenced by gum::ImportanceSampling< GUM_SCALAR >::_onContextualize(), and gum::learning::genericBNLearner::epsilon().

49 { return _eps; }
+ Here is the caller graph for this function:
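
The stopping criteria above are usually configured before makeInference(); the sketch below assumes the matching setters (setEpsilon, setMaxTime) offered by the approximation-scheme interface, with hypothetical values:

#include <iostream>
#include <agrum/BN/BayesNet.h>
#include <agrum/BN/inference/weightedSampling.h>   // assumed header path

void configureStopping(gum::WeightedSampling< double >& ie) {
  ie.setEpsilon(1e-3);          // assumed setter matching epsilon()
  ie.enableEpsilon();
  ie.setMaxTime(10);            // assumed setter: timeout in seconds
  ie.enableMaxTime();
  ie.disableMinEpsilonRate();   // rely on epsilon and timeout only

  ie.makeInference();
  std::cout << ie.messageApproximationScheme()   // which criterion stopped the run
            << " after " << ie.currentTime() << "s" << std::endl;
}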

◆ eraseAllEvidence()

template<typename GUM_SCALAR >
INLINE void gum::BayesNetInference< GUM_SCALAR >::eraseAllEvidence ( )
finalvirtualinherited

removes all the evidence entered into the network

Definition at line 592 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__evidence, gum::BayesNetInference< GUM_SCALAR >::__hard_evidence, gum::BayesNetInference< GUM_SCALAR >::__hard_evidence_nodes, gum::BayesNetInference< GUM_SCALAR >::__setState(), gum::BayesNetInference< GUM_SCALAR >::__soft_evidence_nodes, gum::BayesNetInference< GUM_SCALAR >::_onAllEvidenceErased(), gum::Set< Key, Alloc >::clear(), gum::BayesNetInference< GUM_SCALAR >::isInferenceOutdatedBNStructure(), gum::BayesNetInference< GUM_SCALAR >::OutdatedBNPotentials, and gum::BayesNetInference< GUM_SCALAR >::OutdatedBNStructure.

Referenced by gum::BayesNetInference< GUM_SCALAR >::clear(), gum::MarginalTargetedInference< GUM_SCALAR >::evidenceImpact(), gum::JointTargetedInference< GUM_SCALAR >::evidenceJointImpact(), and gum::JointTargetedInference< GUM_SCALAR >::jointMutualInformation().

592  {
593  bool has_hard_evidence = !__hard_evidence.empty();
594  this->_onAllEvidenceErased(has_hard_evidence);
595 
596  for (const auto& pair : __evidence) {
597  if (pair.second != nullptr) { delete (pair.second); }
598  }
599 
600  __evidence.clear();
601  __hard_evidence.clear();
604 
605  if (has_hard_evidence) {
607  } else {
610  }
611  }
612  }
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ eraseAllTargets()

template<typename GUM_SCALAR >
INLINE void gum::MarginalTargetedInference< GUM_SCALAR >::eraseAllTargets ( )
virtualinherited

Clear all previously defined targets.

Reimplemented in gum::JointTargetedInference< GUM_SCALAR >.

Definition at line 96 of file marginalTargetedInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__setState(), gum::MarginalTargetedInference< GUM_SCALAR >::__targets, gum::MarginalTargetedInference< GUM_SCALAR >::_onAllMarginalTargetsErased(), gum::MarginalTargetedInference< GUM_SCALAR >::_setTargetedMode(), and gum::Set< Key, Alloc >::clear().

Referenced by gum::JointTargetedInference< GUM_SCALAR >::eraseAllMarginalTargets(), and gum::MarginalTargetedInference< GUM_SCALAR >::evidenceImpact().

96  {
98 
99  __targets.clear();
100  _setTargetedMode(); // does nothing if already in targeted mode
101 
102  this->__setState(
104  }
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ eraseEvidence() [1/2]

template<typename GUM_SCALAR >
INLINE void gum::BayesNetInference< GUM_SCALAR >::eraseEvidence ( NodeId  id)
finalvirtualinherited

removes the evidence, if any, corresponding to node id

Definition at line 563 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__evidence, gum::BayesNetInference< GUM_SCALAR >::__hard_evidence, gum::BayesNetInference< GUM_SCALAR >::__hard_evidence_nodes, gum::BayesNetInference< GUM_SCALAR >::__setState(), gum::BayesNetInference< GUM_SCALAR >::__soft_evidence_nodes, gum::BayesNetInference< GUM_SCALAR >::_onEvidenceErased(), gum::Set< Key, Alloc >::erase(), gum::BayesNetInference< GUM_SCALAR >::hasEvidence(), gum::BayesNetInference< GUM_SCALAR >::hasHardEvidence(), gum::BayesNetInference< GUM_SCALAR >::isInferenceOutdatedBNStructure(), gum::BayesNetInference< GUM_SCALAR >::OutdatedBNPotentials, and gum::BayesNetInference< GUM_SCALAR >::OutdatedBNStructure.

Referenced by gum::BayesNetInference< GUM_SCALAR >::eraseEvidence().

563  {
564  if (hasEvidence(id)) {
565  if (hasHardEvidence(id)) {
566  _onEvidenceErased(id, true);
567  __hard_evidence.erase(id);
570  } else {
571  _onEvidenceErased(id, false);
575  }
576  }
577 
578  delete (__evidence[id]);
579  __evidence.erase(id);
580  }
581  }
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ eraseEvidence() [2/2]

template<typename GUM_SCALAR >
INLINE void gum::BayesNetInference< GUM_SCALAR >::eraseEvidence ( const std::string &  nodeName)
finalvirtualinherited

removes the evidence, if any, corresponding to the node named nodeName

Definition at line 585 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::BN(), and gum::BayesNetInference< GUM_SCALAR >::eraseEvidence().

585  {
586  eraseEvidence(this->BN().idFromName(nodeName));
587  }
+ Here is the call graph for this function:
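
Evidence can be retracted individually or all at once; a short sketch (same assumptions as above, node names hypothetical):

#include <agrum/BN/BayesNet.h>
#include <agrum/BN/inference/weightedSampling.h>   // assumed header path

void retractEvidence(gum::WeightedSampling< double >& ie,
                     const gum::BayesNet< double >&   bn) {
  ie.eraseEvidence("a");                    // by name
  ie.eraseEvidence(bn.idFromName("c"));     // by id; does nothing if "c" carries no evidence
  ie.eraseAllEvidence();                    // or drop everything in one call
}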

◆ eraseTarget() [1/2]

template<typename GUM_SCALAR >
void gum::MarginalTargetedInference< GUM_SCALAR >::eraseTarget ( NodeId  target)
finalvirtualinherited

removes an existing (marginal) target

Warning
If the target does not already exist, the method does nothing. In particular, it does not raise any exception.

Definition at line 169 of file marginalTargetedInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__bn, gum::BayesNetInference< GUM_SCALAR >::__setState(), gum::MarginalTargetedInference< GUM_SCALAR >::__targeted_mode, gum::MarginalTargetedInference< GUM_SCALAR >::__targets, gum::MarginalTargetedInference< GUM_SCALAR >::_onMarginalTargetErased(), gum::Set< Key, Alloc >::contains(), gum::Set< Key, Alloc >::erase(), and GUM_ERROR.

Referenced by gum::MarginalTargetedInference< GUM_SCALAR >::eraseTarget().

169  {
170  // check if the node belongs to the Bayesian network
171  if (this->__bn == nullptr)
172  GUM_ERROR(NullElement,
173  "No Bayes net has been assigned to the "
174  "inference algorithm");
175 
176  if (!this->__bn->dag().exists(target)) {
177  GUM_ERROR(UndefinedElement, target << " is not a NodeId in the bn");
178  }
179 
180 
181  if (__targets.contains(target)) {
182  __targeted_mode = true; // we do not use _setTargetedMode because we do not
183  // want to clear the targets
184  _onMarginalTargetErased(target);
185  __targets.erase(target);
186  this->__setState(
188  }
189  }
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ eraseTarget() [2/2]

template<typename GUM_SCALAR >
void gum::MarginalTargetedInference< GUM_SCALAR >::eraseTarget ( const std::string &  nodeName)
finalvirtualinherited

removes an existing (marginal) target

Warning
If the target does not already exist, the method does nothing. In particular, it does not raise any exception.

Definition at line 194 of file marginalTargetedInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__bn, gum::MarginalTargetedInference< GUM_SCALAR >::eraseTarget(), and GUM_ERROR.

195  {
196  // check if the node belongs to the Bayesian network
197  if (this->__bn == nullptr)
198  GUM_ERROR(NullElement,
199  "No Bayes net has been assigned to the "
200  "inference algorithm");
201 
202  eraseTarget(this->__bn->idFromName(nodeName));
203  }
+ Here is the call graph for this function:

◆ evidence()

template<typename GUM_SCALAR >
INLINE const NodeProperty< const Potential< GUM_SCALAR > *> & gum::BayesNetInference< GUM_SCALAR >::evidence ( ) const
inherited

returns the set of evidence

Definition at line 647 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__evidence.

Referenced by gum::ImportanceSampling< GUM_SCALAR >::_onContextualize(), and gum::MarginalTargetedInference< GUM_SCALAR >::posterior().

647  {
648  return __evidence;
649  }
+ Here is the caller graph for this function:

◆ evidenceImpact() [1/2]

template<typename GUM_SCALAR >
Potential< GUM_SCALAR > gum::MarginalTargetedInference< GUM_SCALAR >::evidenceImpact ( NodeId  target,
const NodeSet evs 
)
inherited

Create a gum::Potential for P(target|evs) (for all instantiations of target and evs)

Warning
If some evs are d-separated, they are not included in the Potential
Parameters
bn the BayesNet
target the nodeId of the targeted variable
evs the set of nodeIds of the observed variables
Returns
a Potential

Definition at line 282 of file marginalTargetedInference_tpl.h.

References gum::MultiDimDecorator< GUM_SCALAR >::add(), gum::BayesNetInference< GUM_SCALAR >::addEvidence(), gum::MarginalTargetedInference< GUM_SCALAR >::addTarget(), gum::BayesNetInference< GUM_SCALAR >::BN(), gum::BayesNetInference< GUM_SCALAR >::chgEvidence(), gum::Set< Key, Alloc >::contains(), gum::Instantiation::end(), gum::BayesNetInference< GUM_SCALAR >::eraseAllEvidence(), gum::MarginalTargetedInference< GUM_SCALAR >::eraseAllTargets(), GUM_ERROR, gum::Instantiation::incNotVar(), gum::Instantiation::incVar(), gum::BayesNetInference< GUM_SCALAR >::makeInference(), gum::MarginalTargetedInference< GUM_SCALAR >::posterior(), gum::MultiDimDecorator< GUM_SCALAR >::set(), gum::Instantiation::setFirst(), gum::Instantiation::setFirstVar(), and gum::Instantiation::val().

Referenced by gum::MarginalTargetedInference< GUM_SCALAR >::evidenceImpact().

283  {
284  const auto& vtarget = this->BN().variable(target);
285 
286  if (evs.contains(target)) {
287  GUM_ERROR(InvalidArgument,
288  "Target <" << vtarget.name() << "> (" << target
289  << ") can not be in evs (" << evs << ").");
290  }
291  auto condset = this->BN().minimalCondSet(target, evs);
292 
293  Potential< GUM_SCALAR > res;
294  this->eraseAllTargets();
295  this->eraseAllEvidence();
296  res.add(this->BN().variable(target));
297  this->addTarget(target);
298  for (const auto& n : condset) {
299  res.add(this->BN().variable(n));
300  this->addEvidence(n, 0);
301  }
302 
303  Instantiation inst(res);
304  for (inst.setFirst(); !inst.end(); inst.incNotVar(vtarget)) {
305  // inferring
306  for (const auto& n : condset)
307  this->chgEvidence(n, inst.val(this->BN().variable(n)));
308  this->makeInference();
309  // populate res
310  for (inst.setFirstVar(vtarget); !inst.end(); inst.incVar(vtarget)) {
311  res.set(inst, this->posterior(target)[inst]);
312  }
313  inst.setFirstVar(vtarget); // remove inst.end() flag
314  }
315 
316  return res;
317  }
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ evidenceImpact() [2/2]

template<typename GUM_SCALAR >
Potential< GUM_SCALAR > gum::MarginalTargetedInference< GUM_SCALAR >::evidenceImpact ( const std::string &  target,
const std::vector< std::string > &  evs 
)
inherited

Create a gum::Potential for P(target|evs) (for all instantiations of target and evs)

Warning
If some evs are d-separated, they are not included in the Potential
Parameters
target the name of the target variable
evs the names of the observed variables
Returns
a Potential

Definition at line 321 of file marginalTargetedInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::BN(), gum::MarginalTargetedInference< GUM_SCALAR >::evidenceImpact(), and gum::Set< Key, Alloc >::insert().

322  {
323  const auto& bn = this->BN();
324 
325  gum::NodeSet evsId;
326  for (const auto& evname : evs) {
327  evsId.insert(bn.idFromName(evname));
328  }
329 
330  return evidenceImpact(bn.idFromName(target), evsId);
331  }
+ Here is the call graph for this function:
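
Note that evidenceImpact() resets the engine's targets and evidence while it runs (see the NodeId-based listing above). A hedged sketch of the name-based overload (same assumptions as above, node names hypothetical):

#include <iostream>
#include <string>
#include <vector>
#include <agrum/BN/BayesNet.h>
#include <agrum/BN/inference/weightedSampling.h>   // assumed header path

// tabulate P(c | a, b) for every instantiation of the retained evidence nodes
void showImpact(gum::WeightedSampling< double >& ie) {
  gum::Potential< double > impact = ie.evidenceImpact("c", {"a", "b"});
  std::cout << impact << std::endl;   // d-separated evidence nodes are silently dropped
}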

◆ H() [1/2]

template<typename GUM_SCALAR >
INLINE GUM_SCALAR gum::MarginalTargetedInference< GUM_SCALAR >::H ( NodeId  X)
finalvirtualinherited

Entropy: computes Shannon's entropy of a node given the observation.

See also
http://en.wikipedia.org/wiki/Information_entropy

Definition at line 266 of file marginalTargetedInference_tpl.h.

References gum::MarginalTargetedInference< GUM_SCALAR >::posterior().

Referenced by gum::MarginalTargetedInference< GUM_SCALAR >::H(), and gum::JointTargetedInference< GUM_SCALAR >::VI().

266  {
267  return posterior(X).entropy();
268  }
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ H() [2/2]

template<typename GUM_SCALAR >
INLINE GUM_SCALAR gum::MarginalTargetedInference< GUM_SCALAR >::H ( const std::string &  nodeName)
finalvirtualinherited

Entropy: computes Shannon's entropy of a node given the observation.

See also
http://en.wikipedia.org/wiki/Information_entropy

Definition at line 275 of file marginalTargetedInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::BN(), and gum::MarginalTargetedInference< GUM_SCALAR >::H().

275  {
276  return H(this->BN().idFromName(nodeName));
277  }
+ Here is the call graph for this function:
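
A one-line usage sketch (same assumptions as above; the node name is hypothetical):

#include <iostream>
#include <agrum/BN/BayesNet.h>
#include <agrum/BN/inference/weightedSampling.h>   // assumed header path

void showEntropy(gum::WeightedSampling< double >& ie) {
  ie.makeInference();
  std::cout << "H(c) = " << ie.H("c") << std::endl;   // same value as ie.posterior("c").entropy()
}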

◆ hardEvidence()

template<typename GUM_SCALAR >
INLINE const NodeProperty< Idx > & gum::BayesNetInference< GUM_SCALAR >::hardEvidence ( ) const
inherited

indicates for each node with hard evidence which value it took

Definition at line 639 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__hard_evidence.

Referenced by gum::WeightedSampling< GUM_SCALAR >::_draw(), gum::MonteCarloSampling< GUM_SCALAR >::_draw(), gum::ImportanceSampling< GUM_SCALAR >::_draw(), and gum::SamplingInference< GUM_SCALAR >::contextualize().

639  {
640  return __hard_evidence;
641  }
+ Here is the caller graph for this function:

◆ hardEvidenceNodes()

template<typename GUM_SCALAR >
INLINE const NodeSet & gum::BayesNetInference< GUM_SCALAR >::hardEvidenceNodes ( ) const
inherited

returns the set of nodes with hard evidence

the set of nodes that received hard evidence

Definition at line 663 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__hard_evidence_nodes.

Referenced by gum::WeightedSampling< GUM_SCALAR >::_draw(), gum::MonteCarloSampling< GUM_SCALAR >::_draw(), gum::ImportanceSampling< GUM_SCALAR >::_draw(), gum::ImportanceSampling< GUM_SCALAR >::_onContextualize(), gum::SamplingInference< GUM_SCALAR >::_setEstimatorFromBN(), gum::SamplingInference< GUM_SCALAR >::_setEstimatorFromLBP(), gum::SamplingInference< GUM_SCALAR >::contextualize(), and gum::MarginalTargetedInference< GUM_SCALAR >::posterior().

663  {
664  return __hard_evidence_nodes;
665  }
+ Here is the caller graph for this function:

◆ hasEvidence() [1/3]

template<typename GUM_SCALAR >
INLINE bool gum::BayesNetInference< GUM_SCALAR >::hasEvidence ( ) const
finalvirtualinherited

indicates whether some node(s) have received evidence

Definition at line 378 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__evidence.

Referenced by gum::BayesNetInference< GUM_SCALAR >::addEvidence(), gum::BayesNetInference< GUM_SCALAR >::chgEvidence(), gum::BayesNetInference< GUM_SCALAR >::eraseEvidence(), and gum::BayesNetInference< GUM_SCALAR >::hasEvidence().

378  {
379  return !__evidence.empty();
380  }
+ Here is the caller graph for this function:

◆ hasEvidence() [2/3]

template<typename GUM_SCALAR >
INLINE bool gum::BayesNetInference< GUM_SCALAR >::hasEvidence ( NodeId  id) const
finalvirtualinherited

indicates whether node id has received an evidence

Definition at line 385 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__evidence.

385  {
386  return __evidence.exists(id);
387  }

◆ hasEvidence() [3/3]

template<typename GUM_SCALAR >
INLINE bool gum::BayesNetInference< GUM_SCALAR >::hasEvidence ( const std::string &  nodeName) const
finalvirtualinherited

indicates whether node id has received an evidence

Definition at line 406 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::BN(), and gum::BayesNetInference< GUM_SCALAR >::hasEvidence().

407  {
408  return hasEvidence(this->BN().idFromName(nodeName));
409  }

◆ hasHardEvidence() [1/2]

template<typename GUM_SCALAR >
INLINE bool gum::BayesNetInference< GUM_SCALAR >::hasHardEvidence ( NodeId  id) const
finalvirtualinherited

indicates whether node id has received hard evidence

Definition at line 392 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__hard_evidence_nodes, and gum::Set< Key, Alloc >::exists().

Referenced by gum::ImportanceSampling< GUM_SCALAR >::_draw(), gum::BayesNetInference< GUM_SCALAR >::chgEvidence(), gum::BayesNetInference< GUM_SCALAR >::eraseEvidence(), and gum::BayesNetInference< GUM_SCALAR >::hasHardEvidence().

392  {
393  return __hard_evidence_nodes.exists(id);
394  }

◆ hasHardEvidence() [2/2]

template<typename GUM_SCALAR >
INLINE bool gum::BayesNetInference< GUM_SCALAR >::hasHardEvidence ( const std::string &  nodeName) const
finalvirtualinherited

indicates whether the node with the given name has received hard evidence

Definition at line 414 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::BN(), and gum::BayesNetInference< GUM_SCALAR >::hasHardEvidence().

415  {
416  return hasHardEvidence(this->BN().idFromName(nodeName));
417  }

◆ hasSoftEvidence() [1/2]

template<typename GUM_SCALAR >
INLINE bool gum::BayesNetInference< GUM_SCALAR >::hasSoftEvidence ( NodeId  id) const
finalvirtualinherited

indicates whether node id has received soft evidence

Definition at line 399 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__soft_evidence_nodes, and gum::Set< Key, Alloc >::exists().

Referenced by gum::BayesNetInference< GUM_SCALAR >::hasSoftEvidence().

399  {
400  return __soft_evidence_nodes.exists(id);
401  }

◆ hasSoftEvidence() [2/2]

template<typename GUM_SCALAR >
INLINE bool gum::BayesNetInference< GUM_SCALAR >::hasSoftEvidence ( const std::string &  nodeName) const
finalvirtualinherited

indicates whether the node with the given name has received soft evidence

Definition at line 422 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::BN(), and gum::BayesNetInference< GUM_SCALAR >::hasSoftEvidence().

423  {
424  return hasSoftEvidence(this->BN().idFromName(nodeName));
425  }
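For illustration, a minimal usage sketch of these predicates. It assumes a gum::BayesNet< double > bn built elsewhere with discrete variables named "A" and "B", a concrete engine such as gum::GibbsSampling, the usual addEvidence overloads taking a node name, and the header paths below; none of these are defined on this page.

#include <vector>
#include <agrum/BN/BayesNet.h>                 // assumed header path
#include <agrum/BN/inference/GibbsSampling.h>  // assumed header path

gum::GibbsSampling< double > ie(&bn);

ie.addEvidence("A", 1);                                 // hard evidence: A observed at index 1
ie.addEvidence("B", std::vector< double >{0.2, 0.8});   // soft evidence (a likelihood) on B

ie.hasEvidence();          // true: at least one node received evidence
ie.hasHardEvidence("A");   // true  (hasSoftEvidence("A") is false)
ie.hasSoftEvidence("B");   // true  (hasHardEvidence("B") is false)
ie.nbrHardEvidence();      // 1
ie.nbrSoftEvidence();      // 1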

◆ history()

INLINE const std::vector< double > & gum::ApproximationScheme::history ( ) const
virtualinherited

Returns the scheme history.

Returns
Returns the scheme history.
Exceptions
OperationNotAllowed  Raised if the scheme has not been performed or if verbosity is set to false.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 171 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_history, GUM_ERROR, gum::ApproximationScheme::stateApproximationScheme(), gum::IApproximationSchemeConfiguration::Undefined, and gum::ApproximationScheme::verbosity().

Referenced by gum::learning::genericBNLearner::history().

171  {
172    if (stateApproximationScheme() == ApproximationSchemeSTATE::Undefined) {
173      GUM_ERROR(OperationNotAllowed,
174                "state of the approximation scheme is undefined");
175  }
176 
177  if (verbosity() == false) {
178  GUM_ERROR(OperationNotAllowed, "No history when verbosity=false");
179  }
180 
181  return _history;
182  }
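A short usage sketch: verbosity must be enabled before running the scheme, otherwise history() throws OperationNotAllowed. Here ie denotes any engine deriving from ApproximationScheme (for instance the sampling engine of the previous sketch):

#include <iostream>

ie.setVerbosity(true);    // record one error value per checking period
ie.makeInference();

std::cout << ie.nbrIterations() << " iterations, "
          << ie.messageApproximationScheme() << std::endl;

for (double err : ie.history())   // one entry per period, only when verbosity is on
  std::cout << err << std::endl;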

◆ initApproximationScheme()

INLINE void gum::ApproximationScheme::initApproximationScheme ( )
inherited

Initialise the scheme.

Definition at line 185 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_current_epsilon, gum::ApproximationScheme::_current_rate, gum::ApproximationScheme::_current_state, gum::ApproximationScheme::_current_step, gum::ApproximationScheme::_history, gum::ApproximationScheme::_timer, gum::IApproximationSchemeConfiguration::Continue, and gum::Timer::reset().

Referenced by gum::credal::CNMonteCarloSampling< GUM_SCALAR, BNInferenceEngine >::__mcInitApproximationScheme(), gum::GibbsBNdistance< GUM_SCALAR >::_computeKL(), gum::SamplingInference< GUM_SCALAR >::_loopApproxInference(), gum::SamplingInference< GUM_SCALAR >::_onStateChanged(), gum::learning::DAG2BNLearner< ALLOC >::createBN(), gum::learning::GreedyHillClimbing::learnStructure(), and gum::learning::LocalSearchWithTabuList::learnStructure().

185  {
186    _current_state = ApproximationSchemeSTATE::Continue;
187    _current_step = 0;
188    _current_epsilon = _current_rate = -1.0;
189    _history.clear();
190  _timer.reset();
191  }
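These scheme primitives are meant to be driven by derived classes (see the "Referenced by" list above). A simplified, hypothetical sketch of such a driving loop; drawOneSample(), computeError() and the continueApproximationScheme() test are assumptions and not documented on this page:

initApproximationScheme();            // step = 0, history cleared, timer reset
double error = 0.0;

do {
  drawOneSample();                    // hypothetical: one round of sampling
  updateApproximationScheme();        // increments the current step

  if (startOfPeriod()) {
    // the error is only (re)computed after the burn-in, once per period
    error = computeError();           // hypothetical helper
  }
} while (continueApproximationScheme(error));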

◆ isDone()

template<typename GUM_SCALAR >
INLINE bool gum::BayesNetInference< GUM_SCALAR >::isDone ( ) const
finalvirtualnoexceptinherited

returns whether the inference object is in a done state

The inference object is in a done state when the posteriors can be retrieved without performing a new inference, i.e., all the heavy computations have already been performed. Typically, in a junction tree algorithm, this corresponds to a situation in which all the messages needed in the JT have been computed and sent.

Definition at line 93 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__state, and gum::BayesNetInference< GUM_SCALAR >::Done.

Referenced by gum::JointTargetedInference< GUM_SCALAR >::jointPosterior(), gum::BayesNetInference< GUM_SCALAR >::makeInference(), gum::MarginalTargetedInference< GUM_SCALAR >::posterior(), and gum::BayesNetInference< GUM_SCALAR >::prepareInference().

93  {
94  return (__state == StateOfInference::Done);
95  }

◆ isEnabledEpsilon()

INLINE bool gum::ApproximationScheme::isEnabledEpsilon ( ) const
virtualinherited

Returns true if stopping criterion on epsilon is enabled, false otherwise.

Returns
Returns true if stopping criterion on epsilon is enabled, false otherwise.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 59 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_enabled_eps.

Referenced by gum::learning::genericBNLearner::isEnabledEpsilon().

59  {
60  return _enabled_eps;
61  }

◆ isEnabledMaxIter()

INLINE bool gum::ApproximationScheme::isEnabledMaxIter ( ) const
virtualinherited

Returns true if stopping criterion on max iterations is enabled, false otherwise.

Returns
Returns true if stopping criterion on max iterations is enabled, false otherwise.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 110 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_enabled_max_iter.

Referenced by gum::learning::genericBNLearner::isEnabledMaxIter().

110  {
111  return _enabled_max_iter;
112  }

◆ isEnabledMaxTime()

INLINE bool gum::ApproximationScheme::isEnabledMaxTime ( ) const
virtualinherited

Returns true if stopping criterion on timeout is enabled, false otherwise.

Returns
Returns true if stopping criterion on timeout is enabled, false otherwise.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 136 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_enabled_max_time.

Referenced by gum::learning::genericBNLearner::isEnabledMaxTime().

136  {
137  return _enabled_max_time;
138  }

◆ isEnabledMinEpsilonRate()

INLINE bool gum::ApproximationScheme::isEnabledMinEpsilonRate ( ) const
virtualinherited

Returns true if stopping criterion on epsilon rate is enabled, false otherwise.

Returns
Returns true if stopping criterion on epsilon rate is enabled, false otherwise.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 88 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_enabled_min_rate_eps.

Referenced by gum::GibbsBNdistance< GUM_SCALAR >::_computeKL(), and gum::learning::genericBNLearner::isEnabledMinEpsilonRate().

88  {
89  return _enabled_min_rate_eps;
90  }

◆ isInferenceDone()

template<typename GUM_SCALAR >
INLINE bool gum::BayesNetInference< GUM_SCALAR >::isInferenceDone ( ) const
finalvirtualnoexceptinherited

returns whether the inference object is in a Done state

Definition at line 86 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__state, and gum::BayesNetInference< GUM_SCALAR >::Done.

86  {
87  return (__state == StateOfInference::Done);
88  }

◆ isInferenceOutdatedBNPotentials()

template<typename GUM_SCALAR >
INLINE bool gum::BayesNetInference< GUM_SCALAR >::isInferenceOutdatedBNPotentials ( ) const
finalvirtualnoexceptinherited

returns whether the inference object is in an OutdatedBNPotentials state

Definition at line 80 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__state, and gum::BayesNetInference< GUM_SCALAR >::OutdatedBNPotentials.

81  {
82    return (__state == StateOfInference::OutdatedBNPotentials);
83  }

◆ isInferenceOutdatedBNStructure()

template<typename GUM_SCALAR >
INLINE bool gum::BayesNetInference< GUM_SCALAR >::isInferenceOutdatedBNStructure ( ) const
finalvirtualnoexceptinherited

returns whether the inference object is in an OutdatedBNStructure state

Definition at line 73 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__state, and gum::BayesNetInference< GUM_SCALAR >::OutdatedBNStructure.

Referenced by gum::BayesNetInference< GUM_SCALAR >::chgEvidence(), gum::BayesNetInference< GUM_SCALAR >::eraseAllEvidence(), and gum::BayesNetInference< GUM_SCALAR >::eraseEvidence().

74  {
75    return (__state == StateOfInference::OutdatedBNStructure);
76  }

◆ isInferenceReady()

template<typename GUM_SCALAR >
INLINE bool gum::BayesNetInference< GUM_SCALAR >::isInferenceReady ( ) const
finalvirtualnoexceptinherited

returns whether the inference object is in a ready state

Definition at line 67 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__state, and gum::BayesNetInference< GUM_SCALAR >::ReadyForInference.

Referenced by gum::SamplingInference< GUM_SCALAR >::_onStateChanged(), gum::BayesNetInference< GUM_SCALAR >::makeInference(), and gum::BayesNetInference< GUM_SCALAR >::prepareInference().

67  {
68    return (__state == StateOfInference::ReadyForInference);
69  }

◆ isTarget() [1/2]

template<typename GUM_SCALAR >
INLINE bool gum::MarginalTargetedInference< GUM_SCALAR >::isTarget ( NodeId  node) const
finalvirtualinherited

return true if variable is a (marginal) target

Definition at line 73 of file marginalTargetedInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__bn, gum::MarginalTargetedInference< GUM_SCALAR >::__targets, gum::Set< Key, Alloc >::contains(), and GUM_ERROR.

Referenced by gum::MarginalTargetedInference< GUM_SCALAR >::isTarget(), gum::MarginalTargetedInference< GUM_SCALAR >::posterior(), and gum::JointTargetedInference< GUM_SCALAR >::posterior().

73  {
74  // check that the variable belongs to the bn
75  if (this->__bn == nullptr)
76  GUM_ERROR(NullElement,
77  "No Bayes net has been assigned to the "
78  "inference algorithm");
79  if (!this->__bn->dag().exists(node)) {
80  GUM_ERROR(UndefinedElement, node << " is not a NodeId in the bn");
81  }
82 
83  return __targets.contains(node);
84  }

◆ isTarget() [2/2]

template<typename GUM_SCALAR >
INLINE bool gum::MarginalTargetedInference< GUM_SCALAR >::isTarget ( const std::string &  nodeName) const
finalvirtualinherited

return true if variable is a (marginal) target

Definition at line 88 of file marginalTargetedInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__bn, and gum::MarginalTargetedInference< GUM_SCALAR >::isTarget().

89  {
90  return isTarget(this->__bn->idFromName(nodeName));
91  }
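A short sketch of restricting the marginal targets; eraseAllTargets() and addTarget() belong to MarginalTargetedInference and are assumed here with their name-based overloads:

ie.eraseAllTargets();       // by default, every node of the BN is a target
ie.addTarget("A");          // only A will get a posterior

ie.isTarget("A");           // true
ie.nbrTargets();            // 1
for (gum::NodeId node : ie.targets()) {
  // iterate over the set of marginal targets
}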

◆ makeInference()

template<typename GUM_SCALAR >
INLINE void gum::BayesNetInference< GUM_SCALAR >::makeInference ( )
finalvirtualinherited

perform the heavy computations needed to compute the targets' posteriors

In a Junction tree propagation scheme, for instance, the heavy computations are those of the messages sent in the JT. This is precisely what makeInference should compute. Later, the computations of the posteriors can be done "lightly" by multiplying and projecting those messages.

Definition at line 708 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__setState(), gum::BayesNetInference< GUM_SCALAR >::_makeInference(), gum::BayesNetInference< GUM_SCALAR >::Done, gum::BayesNetInference< GUM_SCALAR >::isDone(), gum::BayesNetInference< GUM_SCALAR >::isInferenceReady(), and gum::BayesNetInference< GUM_SCALAR >::prepareInference().

Referenced by gum::LoopySamplingInference< GUM_SCALAR, APPROX >::_makeInference(), gum::MCBayesNetGenerator< GUM_SCALAR, ICPTGenerator, ICPTDisturber >::disturbBN(), gum::MarginalTargetedInference< GUM_SCALAR >::evidenceImpact(), gum::JointTargetedInference< GUM_SCALAR >::evidenceJointImpact(), gum::JointTargetedInference< GUM_SCALAR >::jointMutualInformation(), gum::JointTargetedInference< GUM_SCALAR >::jointPosterior(), and gum::MarginalTargetedInference< GUM_SCALAR >::posterior().

708  {
709  if (isDone()) { return; }
710 
711  if (!isInferenceReady()) { prepareInference(); }
712 
713  _makeInference();
714 
715    __setState(StateOfInference::Done);
716  }
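A sketch of the resulting state machine, assuming an engine ie whose BN and evidence are already set:

if (!ie.isInferenceReady()) ie.prepareInference();   // optional: makeInference() calls it anyway
ie.makeInference();                                  // heavy computations; the state becomes Done

bool done = ie.isDone();   // true: posterior() calls are now light
                           // until the evidence or the BN changes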

◆ maxIter()

INLINE Size gum::ApproximationScheme::maxIter ( ) const
virtualinherited

Returns the criterion on number of iterations.

Returns
Returns the criterion on number of iterations.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 100 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_max_iter.

Referenced by gum::learning::genericBNLearner::maxIter().

100 { return _max_iter; }

◆ maxTime()

INLINE double gum::ApproximationScheme::maxTime ( ) const
virtualinherited

Returns the timeout (in seconds).

Returns
Returns the timeout (in seconds).

Implements gum::IApproximationSchemeConfiguration.

Definition at line 123 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_max_time.

Referenced by gum::learning::genericBNLearner::maxTime().

123 { return _max_time; }

◆ messageApproximationScheme()

INLINE std::string gum::IApproximationSchemeConfiguration::messageApproximationScheme ( ) const
inherited

Returns the approximation scheme message.

Returns
Returns the approximation scheme message.

Definition at line 38 of file IApproximationSchemeConfiguration_inl.h.

References gum::IApproximationSchemeConfiguration::Continue, gum::IApproximationSchemeConfiguration::Epsilon, gum::IApproximationSchemeConfiguration::epsilon(), gum::IApproximationSchemeConfiguration::Limit, gum::IApproximationSchemeConfiguration::maxIter(), gum::IApproximationSchemeConfiguration::maxTime(), gum::IApproximationSchemeConfiguration::minEpsilonRate(), gum::IApproximationSchemeConfiguration::Rate, gum::IApproximationSchemeConfiguration::stateApproximationScheme(), gum::IApproximationSchemeConfiguration::Stopped, gum::IApproximationSchemeConfiguration::TimeLimit, and gum::IApproximationSchemeConfiguration::Undefined.

Referenced by gum::ApproximationScheme::_stopScheme(), gum::ApproximationScheme::continueApproximationScheme(), and gum::credal::InferenceEngine< GUM_SCALAR >::getApproximationSchemeMsg().

38  {
39  std::stringstream s;
40 
41  switch (stateApproximationScheme()) {
42  case ApproximationSchemeSTATE::Continue: s << "in progress"; break;
43 
44    case ApproximationSchemeSTATE::Epsilon:
45      s << "stopped with epsilon=" << epsilon();
46      break;
47 
48    case ApproximationSchemeSTATE::Rate:
49      s << "stopped with rate=" << minEpsilonRate();
50      break;
51 
52    case ApproximationSchemeSTATE::Limit:
53      s << "stopped with max iteration=" << maxIter();
54      break;
55 
56    case ApproximationSchemeSTATE::TimeLimit:
57      s << "stopped with timeout=" << maxTime();
58      break;
59 
60  case ApproximationSchemeSTATE::Stopped: s << "stopped on request"; break;
61 
62  case ApproximationSchemeSTATE::Undefined: s << "undefined state"; break;
63  };
64 
65  return s.str();
66  }
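When the textual message is not enough, the state itself can be tested programmatically; a sketch, assuming an engine ie that has already run:

using STATE = gum::IApproximationSchemeConfiguration::ApproximationSchemeSTATE;

switch (ie.stateApproximationScheme()) {
  case STATE::Epsilon:   /* |f(t+1)-f(t)| went below epsilon() */ break;
  case STATE::Rate:      /* the error decreased slower than minEpsilonRate() */ break;
  case STATE::Limit:     /* maxIter() iterations reached */ break;
  case STATE::TimeLimit: /* maxTime() seconds elapsed */ break;
  default: std::cout << ie.messageApproximationScheme() << std::endl;
}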

◆ minEpsilonRate()

INLINE double gum::ApproximationScheme::minEpsilonRate ( ) const
virtualinherited

Returns the value of the minimal epsilon rate.

Returns
Returns the value of the minimal epsilon rate.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 72 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_min_rate_eps.

Referenced by gum::learning::genericBNLearner::minEpsilonRate().

72  {
73  return _min_rate_eps;
74  }

◆ nbrEvidence()

template<typename GUM_SCALAR >
INLINE Size gum::BayesNetInference< GUM_SCALAR >::nbrEvidence ( ) const
finalvirtualinherited

returns the number of evidence entered into the Bayesian network

Definition at line 617 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__evidence.

617  {
618  return __evidence.size();
619  }

◆ nbrHardEvidence()

template<typename GUM_SCALAR >
INLINE Size gum::BayesNetInference< GUM_SCALAR >::nbrHardEvidence ( ) const
finalvirtualinherited

returns the number of hard evidence entered into the Bayesian network

Definition at line 624 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__hard_evidence_nodes, and gum::Set< Key, Alloc >::size().

624  {
625  return __hard_evidence_nodes.size();
626  }

◆ nbrIterations()

INLINE Size gum::ApproximationScheme::nbrIterations ( ) const
virtualinherited

Returns the number of iterations.

Returns
Returns the number of iterations.
Exceptions
OperationNotAllowed  Raised if the scheme has not been performed.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 161 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_current_step, GUM_ERROR, gum::ApproximationScheme::stateApproximationScheme(), and gum::IApproximationSchemeConfiguration::Undefined.

Referenced by gum::GibbsBNdistance< GUM_SCALAR >::_computeKL(), and gum::learning::genericBNLearner::nbrIterations().

161  {
162    if (stateApproximationScheme() == ApproximationSchemeSTATE::Undefined) {
163      GUM_ERROR(OperationNotAllowed,
164  "state of the approximation scheme is undefined");
165  }
166 
167  return _current_step;
168  }

◆ nbrSoftEvidence()

template<typename GUM_SCALAR >
INLINE Size gum::BayesNetInference< GUM_SCALAR >::nbrSoftEvidence ( ) const
finalvirtualinherited

returns the number of soft evidence entered into the Bayesian network

Definition at line 631 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__soft_evidence_nodes, and gum::Set< Key, Alloc >::size().

631  {
632  return __soft_evidence_nodes.size();
633  }

◆ nbrTargets()

template<typename GUM_SCALAR >
INLINE const Size gum::MarginalTargetedInference< GUM_SCALAR >::nbrTargets ( ) const
finalvirtualnoexceptinherited

returns the number of marginal targets

Definition at line 215 of file marginalTargetedInference_tpl.h.

References gum::MarginalTargetedInference< GUM_SCALAR >::__targets, and gum::Set< Key, Alloc >::size().

216  {
217  return __targets.size();
218  }

◆ periodSize()

INLINE Size gum::ApproximationScheme::periodSize ( ) const
virtualinherited

Returns the period size.

Returns
Returns the period size.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 147 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_period_size.

Referenced by gum::credal::CNMonteCarloSampling< GUM_SCALAR, BNInferenceEngine >::makeInference(), and gum::learning::genericBNLearner::periodSize().

147 { return _period_size; }

◆ posterior() [1/2]

template<typename GUM_SCALAR >
const Potential< GUM_SCALAR > & gum::MarginalTargetedInference< GUM_SCALAR >::posterior ( NodeId  node)
virtualinherited

Computes and returns the posterior of a node.

Returns
a const ref to the posterior probability of the node.
Parameters
node  the node for which we need a posterior probability
Warning
for efficiency reasons, the potential is stored into the inference engine and is returned by reference. In order to ensure that the potential may still exist even if the Inference object is destroyed, the user has to copy it explicitly.
prepareInference and makeInference may be applied if needed by the posterior method.
Exceptions
UndefinedElement  if node is not in the set of targets

Reimplemented in gum::JointTargetedInference< GUM_SCALAR >.

Definition at line 239 of file marginalTargetedInference_tpl.h.

References gum::MarginalTargetedInference< GUM_SCALAR >::_posterior(), gum::BayesNetInference< GUM_SCALAR >::evidence(), GUM_ERROR, gum::BayesNetInference< GUM_SCALAR >::hardEvidenceNodes(), gum::BayesNetInference< GUM_SCALAR >::isDone(), gum::MarginalTargetedInference< GUM_SCALAR >::isTarget(), and gum::BayesNetInference< GUM_SCALAR >::makeInference().

Referenced by gum::MarginalTargetedInference< GUM_SCALAR >::evidenceImpact(), gum::MarginalTargetedInference< GUM_SCALAR >::H(), gum::JointTargetedInference< GUM_SCALAR >::posterior(), gum::MarginalTargetedInference< GUM_SCALAR >::posterior(), and gum::Estimator< GUM_SCALAR >::setFromLBP().

239  {
240  if (this->hardEvidenceNodes().contains(node)) {
241  return *(this->evidence()[node]);
242  }
243 
244  if (!isTarget(node)) {
245  // throws UndefinedElement if var is not a target
246  GUM_ERROR(UndefinedElement, node << " is not a target node");
247  }
248 
249  if (!this->isDone()) { this->makeInference(); }
250 
251  return _posterior(node);
252  }

◆ posterior() [2/2]

template<typename GUM_SCALAR >
const Potential< GUM_SCALAR > & gum::MarginalTargetedInference< GUM_SCALAR >::posterior ( const std::string &  nodeName)
virtualinherited

Computes and returns the posterior of a node.

Returns
a const ref to the posterior probability of the node.
Parameters
nodeName  the name of the node for which we need a posterior probability
Warning
for efficiency reasons, the potential is stored into the inference engine and is returned by reference. In order to ensure that the potential may still exist even if the Inference object is destroyed, the user has to copy it explicitly.
prepareInference and makeInference may be applied if needed by the posterior method.
Exceptions
UndefinedElement  if node is not in the set of targets

Reimplemented in gum::JointTargetedInference< GUM_SCALAR >.

Definition at line 257 of file marginalTargetedInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::BN(), and gum::MarginalTargetedInference< GUM_SCALAR >::posterior().

258  {
259  return posterior(this->BN().idFromName(nodeName));
260  }
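Because the returned Potential is stored inside the engine (see the warning above), copy it whenever it must outlive the engine or survive the next inference; a sketch assuming a node named "A":

// posterior() triggers prepareInference()/makeInference() if needed
const gum::Potential< double >& ref = ie.posterior("A");   // reference into the engine

gum::Potential< double > copy(ie.posterior("A"));          // deep copy, safe to keep around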

◆ prepareInference()

template<typename GUM_SCALAR >
INLINE void gum::BayesNetInference< GUM_SCALAR >::prepareInference ( )
finalvirtualinherited

prepare the internal inference structures for the next inference

Definition at line 689 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__bn, gum::BayesNetInference< GUM_SCALAR >::__setState(), gum::BayesNetInference< GUM_SCALAR >::__state, gum::BayesNetInference< GUM_SCALAR >::_updateOutdatedBNPotentials(), gum::BayesNetInference< GUM_SCALAR >::_updateOutdatedBNStructure(), GUM_ERROR, gum::BayesNetInference< GUM_SCALAR >::isDone(), gum::BayesNetInference< GUM_SCALAR >::isInferenceReady(), gum::BayesNetInference< GUM_SCALAR >::OutdatedBNStructure, and gum::BayesNetInference< GUM_SCALAR >::ReadyForInference.

Referenced by gum::BayesNetInference< GUM_SCALAR >::makeInference(), and gum::SamplingInference< GUM_SCALAR >::samplingBN().

689  {
690  if (isInferenceReady() || isDone()) { return; }
691 
692  if (__bn == nullptr)
693  GUM_ERROR(NullElement,
694  "No Bayes net has been assigned to the "
695  "inference algorithm");
696 
697    if (__state == StateOfInference::OutdatedBNStructure)
698      _updateOutdatedBNStructure();
699    else
700      _updateOutdatedBNPotentials();
701 
702    __setState(StateOfInference::ReadyForInference);
703  }

◆ remainingBurnIn()

INLINE Size gum::ApproximationScheme::remainingBurnIn ( )
inherited

Returns the remaining burn in.

Returns
Returns the remaining burn in.

Definition at line 208 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_burn_in, and gum::ApproximationScheme::_current_step.

208  {
209  if (_burn_in > _current_step) {
210  return _burn_in - _current_step;
211  } else {
212  return 0;
213  }
214  }
Size _burn_in
Number of iterations before checking stopping criteria.
Size _current_step
The current step.

◆ samplingBN()

template<typename GUM_SCALAR >
INLINE const IBayesNet< GUM_SCALAR > & gum::SamplingInference< GUM_SCALAR >::samplingBN ( )

get the BayesNet which is used to really perform the sampling

Definition at line 71 of file samplingInference_tpl.h.

References gum::SamplingInference< GUM_SCALAR >::__samplingBN, gum::BayesNetInference< GUM_SCALAR >::BN(), and gum::BayesNetInference< GUM_SCALAR >::prepareInference().

Referenced by gum::SamplingInference< GUM_SCALAR >::_addVarSample(), gum::ImportanceSampling< GUM_SCALAR >::_draw(), and gum::SamplingInference< GUM_SCALAR >::_setEstimatorFromBN().

71  {
72  this->prepareInference();
73  if (__samplingBN == nullptr)
74  return this->BN();
75  else
76  return *__samplingBN;
77  }

◆ setBN()

template<typename GUM_SCALAR >
void gum::BayesNetInference< GUM_SCALAR >::setBN ( const IBayesNet< GUM_SCALAR > *  bn)
virtualinherited

assigns a new BN to the inference engine

Assigns a new BN to the BayesNetInference engine and sends messages to the descendants of BayesNetInference to inform them that the BN has changed.

Warning
By default, all the nodes of the Bayes net are targets.
Note that, following aGrUM's convention, the BN is not copied into the inference engine but only referenced.

Definition at line 129 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__bn, gum::BayesNetInference< GUM_SCALAR >::__computeDomainSizes(), gum::BayesNetInference< GUM_SCALAR >::__setState(), gum::BayesNetInference< GUM_SCALAR >::_onBayesNetChanged(), gum::BayesNetInference< GUM_SCALAR >::clear(), and gum::BayesNetInference< GUM_SCALAR >::OutdatedBNStructure.

129  {
130  clear();
131  __bn = bn;
132  __computeDomainSizes();
133  _onBayesNetChanged(bn);
134  __setState(StateOfInference::OutdatedBNStructure);
135  }
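A sketch of reusing one engine with another network; since the BN is only referenced, bn2 must outlive the engine (LazyPropagation is just one possible concrete engine, assumed here):

gum::LazyPropagation< double > ie(&bn1);
ie.makeInference();

ie.setBN(&bn2);                          // clear() + new reference; all nodes become targets again
ie.isInferenceOutdatedBNStructure();     // true: the next inference starts from scratch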

◆ setEpsilon()

INLINE void gum::ApproximationScheme::setEpsilon ( double  eps)
virtualinherited

Given that we approximate f(t), stopping criterion on |f(t+1)-f(t)|.

If the criterion was disabled it will be enabled.

Parameters
eps  The new epsilon value.
Exceptions
OutOfLowerBound  Raised if eps < 0.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 41 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_enabled_eps, gum::ApproximationScheme::_eps, and GUM_ERROR.

Referenced by gum::credal::CNMonteCarloSampling< GUM_SCALAR, BNInferenceEngine >::__mcInitApproximationScheme(), gum::GibbsBNdistance< GUM_SCALAR >::GibbsBNdistance(), gum::GibbsSampling< GUM_SCALAR >::GibbsSampling(), gum::learning::GreedyHillClimbing::GreedyHillClimbing(), gum::SamplingInference< GUM_SCALAR >::SamplingInference(), and gum::learning::genericBNLearner::setEpsilon().

41  {
42  if (eps < 0.) { GUM_ERROR(OutOfLowerBound, "eps should be >=0"); }
43 
44  _eps = eps;
45  _enabled_eps = true;
46  }

◆ setMaxIter()

INLINE void gum::ApproximationScheme::setMaxIter ( Size  max)
virtualinherited

Stopping criterion on number of iterations.

If the criterion was disabled it will be enabled.

Parameters
max  The maximum number of iterations.
Exceptions
OutOfLowerBound  Raised if max < 1.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 93 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_enabled_max_iter, gum::ApproximationScheme::_max_iter, and GUM_ERROR.

Referenced by gum::GibbsBNdistance< GUM_SCALAR >::GibbsBNdistance(), gum::SamplingInference< GUM_SCALAR >::SamplingInference(), and gum::learning::genericBNLearner::setMaxIter().

93  {
94  if (max < 1) { GUM_ERROR(OutOfLowerBound, "max should be >=1"); }
95  _max_iter = max;
96  _enabled_max_iter = true;
97  }

◆ setMaxTime()

INLINE void gum::ApproximationScheme::setMaxTime ( double  timeout)
virtualinherited

Stopping criterion on timeout.

If the criterion was disabled it will be enabled.

Parameters
timeout  The timeout value in seconds.
Exceptions
OutOfLowerBound  Raised if timeout <= 0.0.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 116 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_enabled_max_time, gum::ApproximationScheme::_max_time, and GUM_ERROR.

Referenced by gum::credal::CNMonteCarloSampling< GUM_SCALAR, BNInferenceEngine >::CNMonteCarloSampling(), gum::GibbsBNdistance< GUM_SCALAR >::GibbsBNdistance(), gum::SamplingInference< GUM_SCALAR >::SamplingInference(), and gum::learning::genericBNLearner::setMaxTime().

116  {
117  if (timeout <= 0.) { GUM_ERROR(OutOfLowerBound, "timeout should be >0."); }
118  _max_time = timeout;
119  _enabled_max_time = true;
120  }

◆ setMinEpsilonRate()

INLINE void gum::ApproximationScheme::setMinEpsilonRate ( double  rate)
virtualinherited

Given that we approximate f(t), stopping criterion on d/dt(|f(t+1)-f(t)|).

If the criterion was disabled it will be enabled

Parameters
rate  The minimal epsilon rate.
Exceptions
OutOfLowerBound  Raised if rate < 0.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 64 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_enabled_min_rate_eps, gum::ApproximationScheme::_min_rate_eps, and GUM_ERROR.

Referenced by gum::GibbsBNdistance< GUM_SCALAR >::GibbsBNdistance(), gum::GibbsSampling< GUM_SCALAR >::GibbsSampling(), gum::SamplingInference< GUM_SCALAR >::SamplingInference(), and gum::learning::genericBNLearner::setMinEpsilonRate().

64  {
65  if (rate < 0) { GUM_ERROR(OutOfLowerBound, "rate should be >=0"); }
66 
67  _min_rate_eps = rate;
68  _enabled_min_rate_eps = true;
69  }

◆ setPeriodSize()

INLINE void gum::ApproximationScheme::setPeriodSize ( Size  p)
virtualinherited

Sets the number of samples between two checks of the stopping criteria.

Parameters
p  The new period value.
Exceptions
OutOfLowerBound  Raised if p < 1.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 141 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_period_size, and GUM_ERROR.

Referenced by gum::credal::CNMonteCarloSampling< GUM_SCALAR, BNInferenceEngine >::CNMonteCarloSampling(), gum::GibbsBNdistance< GUM_SCALAR >::GibbsBNdistance(), gum::SamplingInference< GUM_SCALAR >::SamplingInference(), and gum::learning::genericBNLearner::setPeriodSize().

141  {
142  if (p < 1) { GUM_ERROR(OutOfLowerBound, "p should be >=1"); }
143 
144  _period_size = p;
145  }
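A configuration sketch combining the setters above (each call also enables the corresponding criterion), assuming a sampling engine ie:

ie.setEpsilon(1e-3);          // stop when |f(t+1)-f(t)| < 1e-3 ...
ie.setMinEpsilonRate(1e-4);   // ... or when the error decreases too slowly ...
ie.setMaxIter(100000);        // ... or after 100000 iterations ...
ie.setMaxTime(10);            // ... or after 10 seconds
ie.setPeriodSize(500);        // check these criteria every 500 samples

ie.makeInference();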

◆ setVerbosity()

INLINE void gum::ApproximationScheme::setVerbosity ( bool  v)
virtualinherited

Set the verbosity on (true) or off (false).

Parameters
v  If true, then verbosity is turned on.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 150 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_verbosity.

Referenced by gum::GibbsBNdistance< GUM_SCALAR >::GibbsBNdistance(), gum::SamplingInference< GUM_SCALAR >::SamplingInference(), and gum::learning::genericBNLearner::setVerbosity().

150 { _verbosity = v; }

◆ softEvidenceNodes()

template<typename GUM_SCALAR >
INLINE const NodeSet & gum::BayesNetInference< GUM_SCALAR >::softEvidenceNodes ( ) const
inherited

returns the set of nodes with soft evidence

the set of nodes that received soft evidence

Definition at line 655 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__soft_evidence_nodes.

Referenced by gum::SamplingInference< GUM_SCALAR >::contextualize().

655  {
656  return __soft_evidence_nodes;
657  }

◆ startOfPeriod()

INLINE bool gum::ApproximationScheme::startOfPeriod ( )
inherited

Returns true if we are at the beginning of a period (computing the error is then mandatory).

Returns
Returns true if we are at the beginning of a period (computing the error is then mandatory).

Definition at line 195 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_burn_in, gum::ApproximationScheme::_current_step, and gum::ApproximationScheme::_period_size.

Referenced by gum::ApproximationScheme::continueApproximationScheme().

195  {
196  if (_current_step < _burn_in) { return false; }
197 
198  if (_period_size == 1) { return true; }
199 
200  return ((_current_step - _burn_in) % _period_size == 0);
201  }

◆ state()

template<typename GUM_SCALAR >
INLINE BayesNetInference< GUM_SCALAR >::StateOfInference gum::BayesNetInference< GUM_SCALAR >::state ( ) const
finalvirtualnoexceptinherited

returns the state of the inference engine

Definition at line 101 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__state.

Referenced by gum::BayesNetInference< GUM_SCALAR >::__setState().

101  {
102  return __state;
103  }

◆ stateApproximationScheme()

INLINE IApproximationSchemeConfiguration::ApproximationSchemeSTATE gum::ApproximationScheme::stateApproximationScheme ( ) const
virtualinherited

Returns the approximation scheme state.

Returns
Returns the approximation scheme state.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 156 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_current_state.

Referenced by gum::ApproximationScheme::continueApproximationScheme(), gum::ApproximationScheme::history(), gum::ApproximationScheme::nbrIterations(), and gum::learning::genericBNLearner::stateApproximationScheme().

156  {
157  return _current_state;
158  }

◆ stopApproximationScheme()

INLINE void gum::ApproximationScheme::stopApproximationScheme ( )
inherited

Stop the approximation scheme.

Definition at line 217 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_current_state, gum::ApproximationScheme::_stopScheme(), gum::IApproximationSchemeConfiguration::Continue, and gum::IApproximationSchemeConfiguration::Stopped.

Referenced by gum::learning::DAG2BNLearner< ALLOC >::createBN(), gum::learning::GreedyHillClimbing::learnStructure(), and gum::learning::LocalSearchWithTabuList::learnStructure().


◆ targets()

template<typename GUM_SCALAR >
INLINE const NodeSet & gum::MarginalTargetedInference< GUM_SCALAR >::targets ( ) const
finalvirtualnoexceptinherited

returns the list of marginal targets

Definition at line 208 of file marginalTargetedInference_tpl.h.

References gum::MarginalTargetedInference< GUM_SCALAR >::__targets.

Referenced by gum::SamplingInference< GUM_SCALAR >::contextualize().

209  {
210  return __targets;
211  }

◆ updateApproximationScheme()

INLINE void gum::ApproximationScheme::updateApproximationScheme ( unsigned int  incr = 1)
inherited

Update the scheme w.r.t the new error and increment steps.

Parameters
incr  The increment to add to the current number of steps.

Definition at line 204 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_current_step.

Referenced by gum::GibbsBNdistance< GUM_SCALAR >::_computeKL(), gum::SamplingInference< GUM_SCALAR >::_loopApproxInference(), gum::learning::DAG2BNLearner< ALLOC >::createBN(), gum::learning::GreedyHillClimbing::learnStructure(), gum::learning::LocalSearchWithTabuList::learnStructure(), and gum::credal::CNMonteCarloSampling< GUM_SCALAR, BNInferenceEngine >::makeInference().

204  {
205  _current_step += incr;
206  }

◆ verbosity()

INLINE bool gum::ApproximationScheme::verbosity ( ) const
virtualinherited

Returns true if verbosity is enabled.

Returns
Returns true if verbosity is enabled.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 152 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_verbosity.

Referenced by gum::ApproximationScheme::continueApproximationScheme(), gum::ApproximationScheme::history(), and gum::learning::genericBNLearner::verbosity().

152 { return _verbosity; }

Member Data Documentation

◆ __estimator

◆ __samplingBN

◆ _burn_in

◆ _current_epsilon

double gum::ApproximationScheme::_current_epsilon
protectedinherited

◆ _current_rate

double gum::ApproximationScheme::_current_rate
protectedinherited

◆ _current_state

◆ _current_step

◆ _enabled_eps

◆ _enabled_max_iter

bool gum::ApproximationScheme::_enabled_max_iter
protectedinherited

◆ _enabled_max_time

◆ _enabled_min_rate_eps

bool gum::ApproximationScheme::_enabled_min_rate_eps
protectedinherited

◆ _eps

double gum::ApproximationScheme::_eps
protectedinherited

◆ _history

std::vector< double > gum::ApproximationScheme::_history
protectedinherited

◆ _last_epsilon

double gum::ApproximationScheme::_last_epsilon
protectedinherited

Last epsilon value.

Definition at line 370 of file approximationScheme.h.

Referenced by gum::ApproximationScheme::continueApproximationScheme().

◆ _max_iter

Size gum::ApproximationScheme::_max_iter
protectedinherited

◆ _max_time

double gum::ApproximationScheme::_max_time
protectedinherited

◆ _min_rate_eps

double gum::ApproximationScheme::_min_rate_eps
protectedinherited

◆ _period_size

Size gum::ApproximationScheme::_period_size
protectedinherited

◆ _timer

◆ _verbosity

bool gum::ApproximationScheme::_verbosity
protectedinherited

If true, verbosity is enabled.

Definition at line 418 of file approximationScheme.h.

Referenced by gum::ApproximationScheme::setVerbosity(), and gum::ApproximationScheme::verbosity().

◆ isContextualized

template<typename GUM_SCALAR >
bool gum::SamplingInference< GUM_SCALAR >::isContextualized = false
protected

◆ isSetEstimator

◆ onProgress

◆ onStop

Signaler1< std::string > gum::IApproximationSchemeConfiguration::onStop
inherited

Criteria messageApproximationScheme.

Definition at line 60 of file IApproximationSchemeConfiguration.h.

Referenced by gum::ApproximationScheme::_stopScheme(), and gum::learning::genericBNLearner::distributeStop().


The documentation for this class was generated from the following files: