aGrUM  0.16.0
gum::WeightedSampling< GUM_SCALAR > Class Template Reference

#include <weightedSampling.h>

+ Inheritance diagram for gum::WeightedSampling< GUM_SCALAR >:
+ Collaboration diagram for gum::WeightedSampling< GUM_SCALAR >:

Public Attributes

Signaler3< Size, double, double > onProgress
 Progression, error and time. More...
 
Signaler1< std::string > onStop
 Criteria messageApproximationScheme. More...
 

Public Member Functions

 WeightedSampling (const IBayesNet< GUM_SCALAR > *bn)
 Default constructor. More...
 
 ~WeightedSampling () override
 Destructor. More...
 
virtual void contextualize ()
 Simplifying the Bayesian network with relevance reasoning to lighten the computational load. More...
 
Potential< GUM_SCALAR > evidenceImpact (NodeId target, const NodeSet &evs)
 Create a gum::Potential for P(target|evs) (for all instantiations of target and evs) More...
 
Potential< GUM_SCALAR > evidenceImpact (const std::string &target, const std::vector< std::string > &evs)
 Create a gum::Potential for P(target|evs) (for all instantiations of target and evs) More...
 
Constructors / Destructors
const Potential< GUM_SCALAR > & currentPosterior (NodeId id)
 Computes and returns the actual estimation of the posterior of a node. More...
 
const Potential< GUM_SCALAR > & currentPosterior (const std::string &name)
 Computes and returns the actual estimation of the posterior of a node by its name. More...
 
Probability computations
const IBayesNet< GUM_SCALAR > & samplingBN ()
 get the BayesNet which is actually used to perform the sampling More...
 
const Potential< GUM_SCALAR > & _posterior (NodeId id) override
 Computes and returns the posterior of a node. More...
 
Estimator objects initializing
virtual void _setEstimatorFromBN ()
 Initializes the estimators object linked to the simulation. More...
 
virtual void _setEstimatorFromLBP (LoopyBeliefPropagation< GUM_SCALAR > *lbp, GUM_SCALAR virtualLBPSize)
 Initializes the estimators object linked to the simulation. More...
 
Probability computations
virtual const Potential< GUM_SCALAR > & posterior (NodeId node)
 Computes and returns the posterior of a node. More...
 
virtual const Potential< GUM_SCALAR > & posterior (const std::string &nodeName)
 Computes and returns the posterior of a node. More...
 
Targets
virtual void eraseAllTargets ()
 Clear all previously defined targets. More...
 
virtual void addAllTargets () final
 adds all nodes as targets More...
 
virtual void addTarget (NodeId target) final
 Add a marginal target to the list of targets. More...
 
virtual void addTarget (const std::string &nodeName) final
 Add a marginal target to the list of targets. More...
 
virtual void eraseTarget (NodeId target) final
 removes an existing (marginal) target More...
 
virtual void eraseTarget (const std::string &nodeName) final
 removes an existing (marginal) target More...
 
virtual bool isTarget (NodeId node) const final
 return true if variable is a (marginal) target More...
 
virtual bool isTarget (const std::string &nodeName) const final
 return true if variable is a (marginal) target More...
 
virtual const Size nbrTargets () const noexcept final
 returns the number of marginal targets More...
 
virtual const NodeSet & targets () const noexcept final
 returns the list of marginal targets More...
 
Information Theory related functions
virtual GUM_SCALAR H (NodeId X) final
 Compute Shannon's entropy of a node given the observation. More...
 
virtual GUM_SCALAR H (const std::string &nodeName) final
 Compute Shannon's entropy of a node given the observation. More...
 
Accessors / Modifiers
virtual void setBN (const IBayesNet< GUM_SCALAR > *bn)
 assigns a new BN to the inference engine More...
 
virtual const IBayesNet< GUM_SCALAR > & BN () const final
 Returns a constant reference over the IBayesNet referenced by this class. More...
 
virtual const NodeProperty< Size > & domainSizes () const final
 get the domain sizes of the random variables of the BN More...
 
virtual bool isInferenceReady () const noexcept final
 returns whether the inference object is in a ready state More...
 
virtual bool isInferenceOutdatedBNStructure () const noexcept final
 returns whether the inference object is in an OutdatedBNStructure state More...
 
virtual bool isInferenceOutdatedBNPotentials () const noexcept final
 returns whether the inference object is in an OutdatedBNPotentials state More...
 
virtual bool isInferenceDone () const noexcept final
 returns whether the inference object is in an InferenceDone state More...
 
virtual bool isDone () const noexcept final
 returns whether the inference object is in a done state More...
 
virtual void prepareInference () final
 prepare the internal inference structures for the next inference More...
 
virtual void makeInference () final
 perform the heavy computations needed to compute the targets' posteriors More...
 
virtual void clear ()
 clears all the data structures allocated for the last inference More...
 
virtual StateOfInference state () const noexcept final
 returns the state of the inference engine More...
 
Evidence
virtual void addEvidence (NodeId id, const Idx val) final
 adds a new hard evidence on node id More...
 
virtual void addEvidence (const std::string &nodeName, const Idx val) final
 adds a new hard evidence on node named nodeName More...
 
virtual void addEvidence (NodeId id, const std::string &label) final
 adds a new hard evidence on node id More...
 
virtual void addEvidence (const std::string &nodeName, const std::string &label) final
 adds a new hard evidence on node named nodeName More...
 
virtual void addEvidence (NodeId id, const std::vector< GUM_SCALAR > &vals) final
 adds a new evidence on node id (might be soft or hard) More...
 
virtual void addEvidence (const std::string &nodeName, const std::vector< GUM_SCALAR > &vals) final
 adds a new evidence on node named nodeName (might be soft or hard) More...
 
virtual void addEvidence (const Potential< GUM_SCALAR > &pot) final
 adds a new evidence on node id (might be soft or hard) More...
 
virtual void addEvidence (Potential< GUM_SCALAR > &&pot) final
 adds a new evidence on node id (might be soft or hard) More...
 
virtual void addSetOfEvidence (const Set< const Potential< GUM_SCALAR > * > &potset) final
 adds a new set of evidence More...
 
virtual void addListOfEvidence (const List< const Potential< GUM_SCALAR > * > &potlist) final
 adds a new list of evidence More...
 
virtual void chgEvidence (NodeId id, const Idx val) final
 change the value of an already existing hard evidence More...
 
virtual void chgEvidence (const std::string &nodeName, const Idx val) final
 change the value of an already existing hard evidence More...
 
virtual void chgEvidence (NodeId id, const std::string &label) final
 change the value of an already existing hard evidence More...
 
virtual void chgEvidence (const std::string &nodeName, const std::string &label) final
 change the value of an already existing hard evidence More...
 
virtual void chgEvidence (NodeId id, const std::vector< GUM_SCALAR > &vals) final
 change the value of an already existing evidence (might be soft or hard) More...
 
virtual void chgEvidence (const std::string &nodeName, const std::vector< GUM_SCALAR > &vals) final
 change the value of an already existing evidence (might be soft or hard) More...
 
virtual void chgEvidence (const Potential< GUM_SCALAR > &pot) final
 change the value of an already existing evidence (might be soft or hard) More...
 
virtual void eraseAllEvidence () final
 removes all the evidence entered into the network More...
 
virtual void eraseEvidence (NodeId id) final
 removes the evidence, if any, corresponding to node id More...
 
virtual void eraseEvidence (const std::string &nodeName) final
 removes the evidence, if any, corresponding to the node named nodeName More...
 
virtual bool hasEvidence () const final
 indicates whether some node(s) have received evidence More...
 
virtual bool hasEvidence (NodeId id) const final
 indicates whether node id has received an evidence More...
 
virtual bool hasEvidence (const std::string &nodeName) const final
 indicates whether node nodeName has received an evidence More...
 
virtual bool hasHardEvidence (NodeId id) const final
 indicates whether node id has received a hard evidence More...
 
virtual bool hasHardEvidence (const std::string &nodeName) const final
 indicates whether node nodeName has received a hard evidence More...
 
virtual bool hasSoftEvidence (NodeId id) const final
 indicates whether node id has received a soft evidence More...
 
virtual bool hasSoftEvidence (const std::string &nodeName) const final
 indicates whether node nodeName has received a soft evidence More...
 
virtual Size nbrEvidence () const final
 returns the number of evidence entered into the Bayesian network More...
 
virtual Size nbrHardEvidence () const final
 returns the number of hard evidence entered into the Bayesian network More...
 
virtual Size nbrSoftEvidence () const final
 returns the number of soft evidence entered into the Bayesian network More...
 
const NodeProperty< const Potential< GUM_SCALAR > *> & evidence () const
 returns the set of evidence More...
 
const NodeSet & softEvidenceNodes () const
 returns the set of nodes with soft evidence More...
 
const NodeSet & hardEvidenceNodes () const
 returns the set of nodes with hard evidence More...
 
const NodeProperty< Idx > & hardEvidence () const
 indicates for each node with hard evidence which value it took More...
 
Getters and setters
void setEpsilon (double eps)
 Given that we approximate f(t), stopping criterion on |f(t+1)-f(t)|. More...
 
double epsilon () const
 Returns the value of epsilon. More...
 
void disableEpsilon ()
 Disable stopping criterion on epsilon. More...
 
void enableEpsilon ()
 Enable stopping criterion on epsilon. More...
 
bool isEnabledEpsilon () const
 Returns true if stopping criterion on epsilon is enabled, false otherwise. More...
 
void setMinEpsilonRate (double rate)
 Given that we approximate f(t), stopping criterion on d/dt(|f(t+1)-f(t)|). More...
 
double minEpsilonRate () const
 Returns the value of the minimal epsilon rate. More...
 
void disableMinEpsilonRate ()
 Disable stopping criterion on epsilon rate. More...
 
void enableMinEpsilonRate ()
 Enable stopping criterion on epsilon rate. More...
 
bool isEnabledMinEpsilonRate () const
 Returns true if stopping criterion on epsilon rate is enabled, false otherwise. More...
 
void setMaxIter (Size max)
 Stopping criterion on number of iterations. More...
 
Size maxIter () const
 Returns the criterion on number of iterations. More...
 
void disableMaxIter ()
 Disable stopping criterion on max iterations. More...
 
void enableMaxIter ()
 Enable stopping criterion on max iterations. More...
 
bool isEnabledMaxIter () const
 Returns true if stopping criterion on max iterations is enabled, false otherwise. More...
 
void setMaxTime (double timeout)
 Stopping criterion on timeout. More...
 
double maxTime () const
 Returns the timeout (in seconds). More...
 
double currentTime () const
 Returns the current running time in seconds. More...
 
void disableMaxTime ()
 Disable stopping criterion on timeout. More...
 
void enableMaxTime ()
 Enable stopping criterion on timeout. More...
 
bool isEnabledMaxTime () const
 Returns true if stopping criterion on timeout is enabled, false otherwise. More...
 
void setPeriodSize (Size p)
 Sets how many samples are drawn between two checks of the stopping criteria. More...
 
Size periodSize () const
 Returns the period size. More...
 
void setVerbosity (bool v)
 Set the verbosity on (true) or off (false). More...
 
bool verbosity () const
 Returns true if verbosity is enabled. More...
 
ApproximationSchemeSTATE stateApproximationScheme () const
 Returns the approximation scheme state. More...
 
Size nbrIterations () const
 Returns the number of iterations. More...
 
const std::vector< double > & history () const
 Returns the scheme history. More...
 
void initApproximationScheme ()
 Initialise the scheme. More...
 
bool startOfPeriod ()
 Returns true if we are at the beginning of a period (compute error is mandatory). More...
 
void updateApproximationScheme (unsigned int incr=1)
 Update the scheme w.r.t the new error and increment steps. More...
 
Size remainingBurnIn ()
 Returns the remaining burn in. More...
 
void stopApproximationScheme ()
 Stop the approximation scheme. More...
 
bool continueApproximationScheme (double error)
 Update the scheme w.r.t the new error. More...
 
Getters and setters
std::string messageApproximationScheme () const
 Returns the approximation scheme message. More...
 

Public Types

enum  StateOfInference { StateOfInference::OutdatedBNStructure, StateOfInference::OutdatedBNPotentials, StateOfInference::ReadyForInference, StateOfInference::Done }
 current state of the inference More...
 
enum  ApproximationSchemeSTATE : char {
  ApproximationSchemeSTATE::Undefined, ApproximationSchemeSTATE::Continue, ApproximationSchemeSTATE::Epsilon, ApproximationSchemeSTATE::Rate,
  ApproximationSchemeSTATE::Limit, ApproximationSchemeSTATE::TimeLimit, ApproximationSchemeSTATE::Stopped
}
 The different states of an approximation scheme. More...
 

Protected Attributes

Estimator< GUM_SCALAR > __estimator
 Estimator object designed to approximate target posteriors. More...
 
bool isSetEstimator = false
 whether the Estimator object has been initialized More...
 
bool isContextualized = false
 whether the referenced Bayesian Network has been "contextualized" More...
 
double _current_epsilon
 Current epsilon. More...
 
double _last_epsilon
 Last epsilon value. More...
 
double _current_rate
 Current rate. More...
 
Size _current_step
 The current step. More...
 
Timer _timer
 The timer. More...
 
ApproximationSchemeSTATE _current_state
 The current state. More...
 
std::vector< double > _history
 The scheme history, used only if verbosity == true. More...
 
double _eps
 Threshold for convergence. More...
 
bool _enabled_eps
 If true, the threshold convergence is enabled. More...
 
double _min_rate_eps
 Threshold for the epsilon rate. More...
 
bool _enabled_min_rate_eps
 If true, the minimal threshold for epsilon rate is enabled. More...
 
double _max_time
 The timeout. More...
 
bool _enabled_max_time
 If true, the timeout is enabled. More...
 
Size _max_iter
 The maximum iterations. More...
 
bool _enabled_max_iter
 If true, the maximum iterations stopping criterion is enabled. More...
 
Size _burn_in
 Number of iterations before checking stopping criteria. More...
 
Size _period_size
 Checking criteria frequency. More...
 
bool _verbosity
 If true, verbosity is enabled. More...
 

Protected Member Functions

Instantiation _burnIn () override
 draws a defined number of samples without updating the estimators More...
 
Instantiation _draw (GUM_SCALAR *w, Instantiation prev) override
 draws a sample according to Weighted sampling More...
 
void _makeInference () override
 makes the inference by generating samples More...
 
void _loopApproxInference ()
 
virtual void _addVarSample (NodeId nod, Instantiation *I)
 adds a node to current instantiation More...
 
virtual void _onContextualize (BayesNetFragment< GUM_SCALAR > *bn)
 fired when the Bayesian network is contextualized More...
 
void _onEvidenceAdded (const NodeId id, bool isHardEvidence) override
 fired after a new evidence is inserted More...
 
void _onEvidenceErased (const NodeId id, bool isHardEvidence) override
 fired before an evidence is removed More...
 
void _onAllEvidenceErased (bool contains_hard_evidence) override
 fired before all the evidence are erased More...
 
void _onEvidenceChanged (const NodeId id, bool hasChangedSoftHard) override
 fired after an evidence is changed, in particular when its status (soft/hard) changes More...
 
void _onBayesNetChanged (const IBayesNet< GUM_SCALAR > *bn) override
 fired after a new Bayes net has been assigned to the engine More...
 
void _updateOutdatedBNStructure () override
 prepares inference when the latter is in OutdatedBNStructure state More...
 
void _updateOutdatedBNPotentials () override
 prepares inference when the latter is in OutdatedBNPotentials state More...
 
void _onMarginalTargetAdded (const NodeId id) override
 fired after a new marginal target is inserted More...
 
void _onMarginalTargetErased (const NodeId id) override
 fired before a marginal target is removed More...
 
void _onAllMarginalTargetsAdded () override
 fired after all the nodes of the BN are added as marginal targets More...
 
void _onAllMarginalTargetsErased () override
 fired before all marginal targets are removed More...
 
void _onStateChanged () override
 fired when the state is changed More...
 
void _setTargetedMode ()
 
bool _isTargetedMode () const
 
void _setOutdatedBNStructureState ()
 put the inference into an outdated BN structure state More...
 
void _setOutdatedBNPotentialsState ()
 puts the inference into an OutdatedBNPotentials state if it is not already in an OutdatedBNStructure state More...
 

Detailed Description

template<typename GUM_SCALAR>
class gum::WeightedSampling< GUM_SCALAR >

Definition at line 51 of file weightedSampling.h.
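
The class implements weighted sampling (likelihood weighting) inference: evidence nodes are kept clamped to their observed values, and every generated sample is weighted by the product of the local probabilities of the evidence (see _draw() below). A minimal usage sketch, assuming a gum::BayesNet< double > named bn built elsewhere; the include path and the node names "grass" and "rain" are illustrative, not taken from this page:

  #include <agrum/BN/inference/weightedSampling.h>   // adjust the path to your aGrUM installation

  // gum::BayesNet< double > bn;   built and filled elsewhere
  gum::WeightedSampling< double > ie(&bn);

  // hard evidence only: this approximated inference rejects soft evidence (see _onEvidenceAdded())
  ie.addEvidence("grass", 1);

  // optional stopping criteria of the approximation scheme
  ie.setEpsilon(1e-2);   // stop once the error estimate falls below 1e-2
  ie.setMaxTime(10);     // or after 10 seconds
  ie.makeInference();    // draws the weighted samples

  // posterior() returns a reference owned by the engine; copy it if it must outlive ie
  gum::Potential< double > p = ie.posterior("rain");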

Member Enumeration Documentation

◆ ApproximationSchemeSTATE

The different states of an approximation scheme.

Enumerator
Undefined 
Continue 
Epsilon 
Rate 
Limit 
TimeLimit 
Stopped 

Definition at line 65 of file IApproximationSchemeConfiguration.h.

65  : char {
66  Undefined,
67  Continue,
68  Epsilon,
69  Rate,
70  Limit,
71  TimeLimit,
72  Stopped
73  };
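
After makeInference() has run, stateApproximationScheme() and messageApproximationScheme() report which stopping criterion fired. A short fragment continuing the sketch above (the engine ie is hypothetical; assumes <iostream> is included):

  ie.makeInference();
  if (ie.stateApproximationScheme()
      == gum::WeightedSampling< double >::ApproximationSchemeSTATE::Epsilon) {
    // stopped because the error estimate fell below the epsilon threshold
  }
  std::cout << ie.messageApproximationScheme()        // human-readable stopping reason
            << " after " << ie.nbrIterations() << " iterations" << std::endl;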

◆ StateOfInference

template<typename GUM_SCALAR >
enum gum::BayesNetInference::StateOfInference
strong, inherited

current state of the inference

BayesNetInference can be in one of 4 different states:

  • OutdatedBNStructure: in this state, the inference is fully unprepared to be applied because some events changed the "logical" structure of the BN: for instance a node received a hard evidence, which implies that its outgoing arcs can be removed from the BN, hence involving a structural change in the BN. As a consequence, the (incremental) inference (probably) needs a significant amount of preparation to be ready for the next inference. In a Lazy propagation, for instance, this step amounts to compute a new join tree, hence a new structure in which inference will be applied. Note that classes that inherit from BayesNetInference may be smarter than BayesNetInference and may, in some situations, find out that their data structures are still ok for inference and, therefore, only resort to perform the actions related to the OutdatedBNPotentials state. As an example, consider a LazyPropagation inference in Bayes Net A->B->C->D->E in which C has received hard evidence e_C and E is the only target. In this case, A and B are not needed for inference, the only potentials that matter are P(D|e_C) and P(E|D). So the smallest join tree needed for inference contains only one clique DE. Now, adding new evidence e_A on A has no impact on E given hard evidence e_C. In this case, LazyPropagation can be smart and not update its join tree.
  • OutdatedBNPotentials: in this state, the structure of the BN remains unchanged, only some potentials stored in it have changed. Therefore, the inference probably just needs to invalidate some already computed potentials to be ready. Only a light amount of preparation is needed to be able to perform inference.
  • ReadyForInference: in this state, all the data structures are ready for inference. All that remains is to perform the inference computations.
  • Done: the heavy computations of inference have been done. There might still remain a few light computations to perform to get the posterior potentials we need. Typically, in Lazy Propagation, all the messages in the join tree have been computed but, to get the potentials, we still need to perform the combinations of the potentials in the cliques with the messages sent to the cliques. In some inference algorithms, this step may even be empty.
Enumerator
OutdatedBNStructure 
OutdatedBNPotentials 
ReadyForInference 
Done 

Definition at line 183 of file BayesNetInference.h.

183  {
184  OutdatedBNStructure,
185  OutdatedBNPotentials,
186  ReadyForInference,
187  Done
188  };
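
A sketch of how these states are typically traversed with the methods listed above (same hypothetical engine ie as in the earlier example):

  // evidence or targets changed: the engine is outdated
  if (!ie.isInferenceReady()) ie.prepareInference();   // OutdatedBN* -> ReadyForInference
  ie.makeInference();                                  // ReadyForInference -> Done
  if (ie.isInferenceDone()) {
    // posteriors of the targets can now be asked for
  }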

Constructor & Destructor Documentation

◆ WeightedSampling()

template<typename GUM_SCALAR >
gum::WeightedSampling< GUM_SCALAR >::WeightedSampling ( const IBayesNet< GUM_SCALAR > *  bn)
explicit

Default constructor.

Definition at line 39 of file weightedSampling_tpl.h.

40  :
41  SamplingInference< GUM_SCALAR >(bn) {
42  GUM_CONSTRUCTOR(WeightedSampling);
43  }

◆ ~WeightedSampling()

template<typename GUM_SCALAR >
gum::WeightedSampling< GUM_SCALAR >::~WeightedSampling ( )
override

Destructor.

Definition at line 48 of file weightedSampling_tpl.h.

48  {
49  GUM_DESTRUCTOR(WeightedSampling);
50  }

Member Function Documentation

◆ _addVarSample()

template<typename GUM_SCALAR >
void gum::SamplingInference< GUM_SCALAR >::_addVarSample ( NodeId  nod,
Instantiation *  I 
)
protected, virtual, inherited

adds a node to current instantiation

Parameters
nod: the node to add to the sample
I: the current sample

Generates a random value based on the BN's CPTs and adds the node to the Instantiation with that value.

Definition at line 188 of file samplingInference_tpl.h.

References gum::Instantiation::add(), gum::Instantiation::chgVal(), and gum::SamplingInference< GUM_SCALAR >::samplingBN().

Referenced by gum::WeightedSampling< GUM_SCALAR >::_draw(), gum::MonteCarloSampling< GUM_SCALAR >::_draw(), and gum::ImportanceSampling< GUM_SCALAR >::_draw().

189  {
191 
192  I->add(samplingBN().variable(nod));
193  I->chgVal(samplingBN().variable(nod),
194  samplingBN().cpt(nod).extract(Itop).draw());
195  }
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ _burnIn()

template<typename GUM_SCALAR >
Instantiation gum::WeightedSampling< GUM_SCALAR >::_burnIn ( )
override, protected, virtual

draws a defined number of samples without updating the estimators

No burn-in is needed for weighted sampling.

Implements gum::SamplingInference< GUM_SCALAR >.

Definition at line 55 of file weightedSampling_tpl.h.

55  {
57  return I;
58  }

◆ _draw()

template<typename GUM_SCALAR >
Instantiation gum::WeightedSampling< GUM_SCALAR >::_draw ( GUM_SCALAR *  w,
Instantiation  prev 
)
override, protected, virtual

draws a sample according to Weighted sampling

Parameters
w: the weight of the sample being generated
prev: the previous sample generated
bn: the Bayesian network containing the evidence
hardEvNodes: hard evidence nodes
hardEv: hard evidence values

Generates a new sample in topological order. Each sample has a weight bias. The sample weight is the product of each node's weight.

Implements gum::SamplingInference< GUM_SCALAR >.

Definition at line 62 of file weightedSampling_tpl.h.

References gum::SamplingInference< GUM_SCALAR >::_addVarSample(), gum::Instantiation::add(), gum::BayesNetInference< GUM_SCALAR >::BN(), gum::Instantiation::chgVal(), gum::Instantiation::clear(), gum::BayesNetInference< GUM_SCALAR >::hardEvidence(), and gum::BayesNetInference< GUM_SCALAR >::hardEvidenceNodes().

63  {
64  *w = 1.0f;
65  bool wrongValue = false;
66  do {
67  prev.clear();
68  wrongValue = false;
69  *w = 1.0f;
70 
71  for (const auto nod : this->BN().topologicalOrder()) {
72  if (this->hardEvidenceNodes().contains(nod)) {
73  prev.add(this->BN().variable(nod));
74  prev.chgVal(this->BN().variable(nod), this->hardEvidence()[nod]);
75  auto localp = this->BN().cpt(nod).get(prev);
76 
77  if (localp == 0) {
78  wrongValue = true;
79  break;
80  }
81 
82  *w *= localp;
83  } else {
84  this->_addVarSample(nod, &prev);
85  }
86  }
87  } while (wrongValue);
88  return prev;
89  }
+ Here is the call graph for this function:
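
In other words, hard-evidence nodes are clamped to their observed values and contribute their local probability to the weight, while every other node is sampled from its CPT given the values already drawn for its parents. A sketch of the resulting estimator in standard likelihood-weighting notation (notation mine, not from the aGrUM sources):

  w^{(k)} = \prod_{E_j \in \mathbf{E}} P\big(e_j \mid \mathrm{pa}(E_j)^{(k)}\big),
  \qquad
  \hat{P}(X = x \mid \mathbf{e}) = \frac{\sum_k w^{(k)} \, \mathbf{1}\big[x^{(k)} = x\big]}{\sum_k w^{(k)}}

where x^{(k)} is the k-th sample and w^{(k)} its weight, i.e. the quantity accumulated in *w by the loop above.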

◆ _isTargetedMode()

template<typename GUM_SCALAR >
INLINE bool gum::MarginalTargetedInference< GUM_SCALAR >::_isTargetedMode ( ) const
protected, inherited

Definition at line 338 of file marginalTargetedInference_tpl.h.

References gum::MarginalTargetedInference< GUM_SCALAR >::__targeted_mode.

338  {
339  return __targeted_mode;
340  }

◆ _loopApproxInference()

template<typename GUM_SCALAR >
void gum::SamplingInference< GUM_SCALAR >::_loopApproxInference ( )
protected, inherited

Definition at line 167 of file samplingInference_tpl.h.

References gum::SamplingInference< GUM_SCALAR >::__estimator, gum::SamplingInference< GUM_SCALAR >::_burnIn(), gum::SamplingInference< GUM_SCALAR >::_draw(), gum::SamplingInference< GUM_SCALAR >::contextualize(), gum::ApproximationScheme::continueApproximationScheme(), gum::ApproximationScheme::initApproximationScheme(), gum::SamplingInference< GUM_SCALAR >::isContextualized, gum::SamplingInference< GUM_SCALAR >::isSetEstimator, and gum::ApproximationScheme::updateApproximationScheme().

Referenced by gum::SamplingInference< GUM_SCALAR >::_makeInference().

167  {
168  //@todo This should be in __prepareInference
169  if (!isContextualized) { this->contextualize(); }
170 
171  this->initApproximationScheme();
173  GUM_SCALAR w = .0; //
174 
175  // Burn in
176  Ip = this->_burnIn();
177  do {
178  Ip = this->_draw(&w, Ip);
179  __estimator.update(Ip, w);
181  } while (this->continueApproximationScheme(__estimator.confidence()));
182 
183  this->isSetEstimator = false;
184  }
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ _makeInference()

template<typename GUM_SCALAR >
void gum::SamplingInference< GUM_SCALAR >::_makeInference ( )
override, protected, virtual, inherited

makes the inference by generating samples

Implements gum::BayesNetInference< GUM_SCALAR >.

Definition at line 161 of file samplingInference_tpl.h.

References gum::SamplingInference< GUM_SCALAR >::_loopApproxInference(), gum::SamplingInference< GUM_SCALAR >::_setEstimatorFromBN(), and gum::SamplingInference< GUM_SCALAR >::isSetEstimator.

161  {
162  if (!isSetEstimator) this->_setEstimatorFromBN();
164  }
+ Here is the call graph for this function:

◆ _onAllEvidenceErased()

template<typename GUM_SCALAR >
void gum::SamplingInference< GUM_SCALAR >::_onAllEvidenceErased ( bool  contains_hard_evidence)
override, protected, virtual, inherited

fired before all the evidence are erased

Implements gum::BayesNetInference< GUM_SCALAR >.

Definition at line 215 of file samplingInference_tpl.h.

216  {}

◆ _onAllMarginalTargetsAdded()

template<typename GUM_SCALAR >
void gum::SamplingInference< GUM_SCALAR >::_onAllMarginalTargetsAdded ( )
override, protected, virtual, inherited

fired after all the nodes of the BN are added as marginal targets

Implements gum::MarginalTargetedInference< GUM_SCALAR >.

Definition at line 244 of file samplingInference_tpl.h.

244 {}

◆ _onAllMarginalTargetsErased()

template<typename GUM_SCALAR >
void gum::SamplingInference< GUM_SCALAR >::_onAllMarginalTargetsErased ( )
override, protected, virtual, inherited

fired before all marginal targets are removed

Implements gum::MarginalTargetedInference< GUM_SCALAR >.

Definition at line 247 of file samplingInference_tpl.h.

247 {}

◆ _onBayesNetChanged()

template<typename GUM_SCALAR >
void gum::SamplingInference< GUM_SCALAR >::_onBayesNetChanged ( const IBayesNet< GUM_SCALAR > *  bn)
override, protected, virtual, inherited

fired after a new Bayes net has been assigned to the engine

Reimplemented from gum::MarginalTargetedInference< GUM_SCALAR >.

Definition at line 228 of file samplingInference_tpl.h.

229  {}

◆ _onContextualize()

template<typename GUM_SCALAR >
void gum::SamplingInference< GUM_SCALAR >::_onContextualize ( BayesNetFragment< GUM_SCALAR > *  bn)
protected, virtual, inherited

fired when the Bayesian network is contextualized

Parameters
bn: the contextualized BayesNetFragment
targets: inference target variables
hardEvNodes: hard evidence nodes
hardEv: hard evidence values

Reimplemented in gum::ImportanceSampling< GUM_SCALAR >.

Definition at line 198 of file samplingInference_tpl.h.

Referenced by gum::SamplingInference< GUM_SCALAR >::contextualize().

199  {}
+ Here is the caller graph for this function:

◆ _onEvidenceAdded()

template<typename GUM_SCALAR >
void gum::SamplingInference< GUM_SCALAR >::_onEvidenceAdded ( const NodeId  id,
bool  isHardEvidence 
)
override, protected, virtual, inherited

fired after a new evidence is inserted

Implements gum::BayesNetInference< GUM_SCALAR >.

Definition at line 203 of file samplingInference_tpl.h.

References GUM_ERROR.

204  {
205  if (!isHardEvidence) {
206  GUM_ERROR(FatalError, "Approximated inference only accept hard evidence");
207  }
208  }

◆ _onEvidenceChanged()

template<typename GUM_SCALAR >
void gum::SamplingInference< GUM_SCALAR >::_onEvidenceChanged ( const NodeId  id,
bool  hasChangedSoftHard 
)
override, protected, virtual, inherited

fired after an evidence is changed, in particular when its status (soft/hard) changes

Parameters
nodeId: the node of the changed evidence
hasChangedSoftHard: true if the evidence has changed from Soft to Hard or from Hard to Soft

Implements gum::BayesNetInference< GUM_SCALAR >.

Definition at line 220 of file samplingInference_tpl.h.

References GUM_ERROR.

221  {
222  if (hasChangedSoftHard) {
223  GUM_ERROR(FatalError, "Approximated inference only accept hard evidence");
224  }
225  }

◆ _onEvidenceErased()

template<typename GUM_SCALAR >
void gum::SamplingInference< GUM_SCALAR >::_onEvidenceErased ( const NodeId  id,
bool  isHardEvidence 
)
override, protected, virtual, inherited

fired before an evidence is removed

Implements gum::BayesNetInference< GUM_SCALAR >.

Definition at line 211 of file samplingInference_tpl.h.

212  {}

◆ _onMarginalTargetAdded()

template<typename GUM_SCALAR >
void gum::SamplingInference< GUM_SCALAR >::_onMarginalTargetAdded ( const NodeId  id)
override, protected, virtual, inherited

fired after a new marginal target is inserted

Parameters
id: The target variable's id.

Implements gum::MarginalTargetedInference< GUM_SCALAR >.

Definition at line 238 of file samplingInference_tpl.h.

238 {}

◆ _onMarginalTargetErased()

template<typename GUM_SCALAR >
void gum::SamplingInference< GUM_SCALAR >::_onMarginalTargetErased ( const NodeId  id)
override, protected, virtual, inherited

fired before a marginal target is removed

Parameters
id: The target variable's id.

Implements gum::MarginalTargetedInference< GUM_SCALAR >.

Definition at line 241 of file samplingInference_tpl.h.

241 {}

◆ _onStateChanged()

template<typename GUM_SCALAR >
void gum::SamplingInference< GUM_SCALAR >::_onStateChanged ( )
override, protected, virtual, inherited

fired when the state is changed

Implements gum::BayesNetInference< GUM_SCALAR >.

Definition at line 250 of file samplingInference_tpl.h.

References gum::SamplingInference< GUM_SCALAR >::__estimator, gum::ApproximationScheme::initApproximationScheme(), and gum::BayesNetInference< GUM_SCALAR >::isInferenceReady().

250  {
251  if (this->isInferenceReady()) {
252  __estimator.clear();
253  this->initApproximationScheme();
254  }
255  }
+ Here is the call graph for this function:

◆ _posterior()

template<typename GUM_SCALAR >
const Potential< GUM_SCALAR > & gum::SamplingInference< GUM_SCALAR >::_posterior ( NodeId  id)
override, virtual, inherited

Computes and returns the posterior of a node.

Returns
a const ref to the posterior probability of the node.
Parameters
id: the node for which we need a posterior probability
Warning
for efficiency reasons, the potential is returned by reference. In order to ensure that the potential may still exist even if the Inference object is destroyed, the user has to copy it explicitly.
Exceptions
UndefinedElement: if node is not in the set of targets.
NotFound: if node is not in the BN.

Implements gum::MarginalTargetedInference< GUM_SCALAR >.

Definition at line 109 of file samplingInference_tpl.h.

References gum::SamplingInference< GUM_SCALAR >::__estimator, and gum::BayesNetInference< GUM_SCALAR >::BN().

109  {
110  return __estimator.posterior(this->BN().variable(id));
111  }
+ Here is the call graph for this function:
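
A minimal illustration of the warning above, shown with the public posterior(), which returns a reference with the same convention (engine and node id hypothetical):

  const gum::Potential< double >& ref = ie.posterior(id);   // reference, owned by the engine
  gum::Potential< double >        own = ie.posterior(id);   // explicit deep copy, safe to keep around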

◆ _setEstimatorFromBN()

template<typename GUM_SCALAR >
void gum::SamplingInference< GUM_SCALAR >::_setEstimatorFromBN ( )
virtual, inherited

Initializes the estimators object linked to the simulation.

Initializes the estimator object by creating a hash table between non-evidence nodes and a 0-filled potential which will approximate each node's posterior.

Definition at line 82 of file samplingInference_tpl.h.

References gum::SamplingInference< GUM_SCALAR >::__estimator, gum::BayesNetInference< GUM_SCALAR >::hardEvidenceNodes(), gum::SamplingInference< GUM_SCALAR >::isSetEstimator, and gum::SamplingInference< GUM_SCALAR >::samplingBN().

Referenced by gum::SamplingInference< GUM_SCALAR >::_makeInference().

82  {
83  __estimator.setFromBN(&samplingBN(), this->hardEvidenceNodes());
84  this->isSetEstimator = true;
85  }
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ _setEstimatorFromLBP()

template<typename GUM_SCALAR >
void gum::SamplingInference< GUM_SCALAR >::_setEstimatorFromLBP ( LoopyBeliefPropagation< GUM_SCALAR > *  lbp,
GUM_SCALAR  virtualLBPSize 
)
virtual, inherited

Initializes the estimators object linked to the simulation.

Parameters
lbp: a LoopyBeliefPropagation object
virtualLBPSize: the size of the equivalent sampling by LBP

Initializes the estimator object by creating a hash table between non-evidence nodes and the current approximation of each node's posterior obtained by running the LoopyBeliefPropagation algorithm.

Definition at line 88 of file samplingInference_tpl.h.

References gum::SamplingInference< GUM_SCALAR >::__estimator, gum::BayesNetInference< GUM_SCALAR >::hardEvidenceNodes(), and gum::SamplingInference< GUM_SCALAR >::isSetEstimator.

89  {
90  __estimator.setFromLBP(lbp, this->hardEvidenceNodes(), virtualLBPSize);
91  this->isSetEstimator = true;
92  }
+ Here is the call graph for this function:

◆ _setOutdatedBNPotentialsState()

template<typename GUM_SCALAR >
INLINE void gum::BayesNetInference< GUM_SCALAR >::_setOutdatedBNPotentialsState ( )
protected, inherited

puts the inference into an OutdatedBNPotentials state if it is not already in an OutdatedBNStructure state

OutdatedBNPotentials: in this state, the structure of the BN remains unchanged, only some potentials stored in it have changed. Therefore, the inference probably just needs to invalidate some already computed potentials to be ready. Only a light amount of preparation is needed to be able to perform inference.

Definition at line 685 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__setState(), and gum::BayesNetInference< GUM_SCALAR >::OutdatedBNPotentials.

685  {
687  }
+ Here is the call graph for this function:

◆ _setOutdatedBNStructureState()

template<typename GUM_SCALAR >
INLINE void gum::BayesNetInference< GUM_SCALAR >::_setOutdatedBNStructureState ( )
protected, inherited

put the inference into an outdated BN structure state

OutdatedBNStructure: in this state, the inference is fully unprepared to be applied because some events changed the "logical" structure of the BN: for instance a node received a hard evidence, which implies that its outgoing arcs can be removed from the BN, hence involving a structural change in the BN. As a consequence, the (incremental) inference (probably) needs a significant amount of preparation to be ready for the next inference. In a Lazy propagation, for instance, this step amounts to compute a new join tree, hence a new structure in which inference will be applied. Note that classes that inherit from BayesNetInference may be smarter than BayesNetInference and may, in some situations, find out that their data structures are still ok for inference and, therefore, only resort to perform the actions related to the OutdatedBNPotentials state. As an example, consider a LazyPropagation inference in Bayes Net A->B->C->D->E in which C has received hard evidence e_C and E is the only target. In this case, A and B are not needed for inference, the only potentials that matter are P(D|e_C) and P(E|D). So the smallest join tree needed for inference contains only one clique DE. Now, adding new evidence e_A on A has no impact on E given hard evidence e_C. In this case, LazyPropagation can be smart and not update its join tree.

Definition at line 677 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__setState(), and gum::BayesNetInference< GUM_SCALAR >::OutdatedBNStructure.

677  {
679  }
+ Here is the call graph for this function:

◆ _setTargetedMode()

template<typename GUM_SCALAR >
INLINE void gum::MarginalTargetedInference< GUM_SCALAR >::_setTargetedMode ( )
protected, inherited

Definition at line 342 of file marginalTargetedInference_tpl.h.

References gum::MarginalTargetedInference< GUM_SCALAR >::__targeted_mode, gum::MarginalTargetedInference< GUM_SCALAR >::__targets, and gum::Set< Key, Alloc >::clear().

Referenced by gum::MarginalTargetedInference< GUM_SCALAR >::addAllTargets(), gum::JointTargetedInference< GUM_SCALAR >::addJointTarget(), gum::MarginalTargetedInference< GUM_SCALAR >::addTarget(), and gum::MarginalTargetedInference< GUM_SCALAR >::eraseAllTargets().

342  {
343  if (!__targeted_mode) {
344  __targets.clear();
345  __targeted_mode = true;
346  }
347  }
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ _updateOutdatedBNPotentials()

template<typename GUM_SCALAR >
void gum::SamplingInference< GUM_SCALAR >::_updateOutdatedBNPotentials ( )
override, protected, virtual, inherited

prepares inference when the latter is in OutdatedBNPotentials state

Note that the values of evidence are not necessarily known and can be changed between _updateOutdatedBNPotentials and _makeInference.

Implements gum::BayesNetInference< GUM_SCALAR >.

Definition at line 235 of file samplingInference_tpl.h.

235 {}

◆ _updateOutdatedBNStructure()

template<typename GUM_SCALAR >
void gum::SamplingInference< GUM_SCALAR >::_updateOutdatedBNStructure ( )
override, protected, virtual, inherited

prepares inference when the latter is in OutdatedBNStructure state

Note that the values of evidence are not necessarily known and can be changed between _updateOutdatedBNStructure and _makeInference.

Implements gum::BayesNetInference< GUM_SCALAR >.

Definition at line 232 of file samplingInference_tpl.h.

232 {}

◆ addAllTargets()

template<typename GUM_SCALAR >
void gum::MarginalTargetedInference< GUM_SCALAR >::addAllTargets ( )
final, virtual, inherited

adds all nodes as targets

Definition at line 136 of file marginalTargetedInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__bn, gum::BayesNetInference< GUM_SCALAR >::__setState(), gum::MarginalTargetedInference< GUM_SCALAR >::__targets, gum::MarginalTargetedInference< GUM_SCALAR >::_onMarginalTargetAdded(), gum::MarginalTargetedInference< GUM_SCALAR >::_setTargetedMode(), gum::Set< Key, Alloc >::contains(), GUM_ERROR, and gum::Set< Key, Alloc >::insert().

136  {
137  // check if the node belongs to the Bayesian network
138  if (this->__bn == nullptr)
139  GUM_ERROR(NullElement,
140  "No Bayes net has been assigned to the "
141  "inference algorithm");
142 
143 
144  _setTargetedMode(); // does nothing if already in targeted mode
145  for (const auto target : this->__bn->dag()) {
146  if (!__targets.contains(target)) {
147  __targets.insert(target);
148  _onMarginalTargetAdded(target);
149  this->__setState(
151  }
152  }
153  }
+ Here is the call graph for this function:

◆ addEvidence() [1/8]

template<typename GUM_SCALAR >
INLINE void gum::BayesNetInference< GUM_SCALAR >::addEvidence ( NodeId  id,
const Idx  val 
)
final, virtual, inherited

adds a new hard evidence on node id

Exceptions
UndefinedElement: if id does not belong to the Bayesian network
InvalidArgument: if val is not a value for id
InvalidArgument: if id already has an evidence

Definition at line 247 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__createHardEvidence().

Referenced by gum::LoopySamplingInference< GUM_SCALAR, APPROX >::_makeInference(), gum::BayesNetInference< GUM_SCALAR >::addEvidence(), gum::BayesNetInference< GUM_SCALAR >::addListOfEvidence(), gum::BayesNetInference< GUM_SCALAR >::addSetOfEvidence(), gum::MarginalTargetedInference< GUM_SCALAR >::evidenceImpact(), and gum::JointTargetedInference< GUM_SCALAR >::evidenceJointImpact().

248  {
250  }
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ addEvidence() [2/8]

template<typename GUM_SCALAR >
INLINE void gum::BayesNetInference< GUM_SCALAR >::addEvidence ( const std::string &  nodeName,
const Idx  val 
)
final, virtual, inherited

adds a new hard evidence on node named nodeName

Exceptions
UndefinedElement: if nodeName does not belong to the Bayesian network
InvalidArgument: if val is not a value for id
InvalidArgument: if nodeName already has an evidence

Definition at line 255 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::addEvidence(), and gum::BayesNetInference< GUM_SCALAR >::BN().

256  {
257  addEvidence(this->BN().idFromName(nodeName), val);
258  }
+ Here is the call graph for this function:

◆ addEvidence() [3/8]

template<typename GUM_SCALAR >
INLINE void gum::BayesNetInference< GUM_SCALAR >::addEvidence ( NodeId  id,
const std::string &  label 
)
final, virtual, inherited

adds a new hard evidence on node id

Exceptions
UndefinedElement: if id does not belong to the Bayesian network
InvalidArgument: if val is not a value for id
InvalidArgument: if id already has an evidence

Definition at line 263 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::addEvidence(), and gum::BayesNetInference< GUM_SCALAR >::BN().

264  {
265  addEvidence(id, this->BN().variable(id)[label]);
266  }
+ Here is the call graph for this function:

◆ addEvidence() [4/8]

template<typename GUM_SCALAR >
INLINE void gum::BayesNetInference< GUM_SCALAR >::addEvidence ( const std::string &  nodeName,
const std::string &  label 
)
final, virtual, inherited

adds a new hard evidence on node named nodeName

Exceptions
UndefinedElement: if nodeName does not belong to the Bayesian network
InvalidArgument: if val is not a value for id
InvalidArgument: if nodeName already has an evidence

Definition at line 271 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::addEvidence(), and gum::BayesNetInference< GUM_SCALAR >::BN().

272  {
273  NodeId id = this->BN().idFromName(nodeName);
274  addEvidence(id, this->BN().variable(id)[label]);
275  }
+ Here is the call graph for this function:

◆ addEvidence() [5/8]

template<typename GUM_SCALAR >
void gum::BayesNetInference< GUM_SCALAR >::addEvidence ( NodeId  id,
const std::vector< GUM_SCALAR > &  vals 
)
final, virtual, inherited

adds a new evidence on node id (might be soft or hard)

Exceptions
UndefinedElement: if id does not belong to the Bayesian network
InvalidArgument: if id already has an evidence
FatalError: if vals=[0,0,...,0]
InvalidArgument: if the size of vals is different from the domain size of node id

Definition at line 279 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__bn, gum::MultiDimDecorator< GUM_SCALAR >::add(), gum::BayesNetInference< GUM_SCALAR >::addEvidence(), and GUM_ERROR.

280  {
281  // checks that the evidence is meaningful
282  if (__bn == nullptr)
283  GUM_ERROR(NullElement,
284  "No Bayes net has been assigned to the "
285  "inference algorithm");
286 
287  if (!__bn->dag().exists(id)) {
288  GUM_ERROR(UndefinedElement, id << " is not a NodeId in the bn");
289  }
290 
291  if (__bn->variable(id).domainSize() != vals.size()) {
292  GUM_ERROR(InvalidArgument,
293  "node " << __bn->variable(id)
294  << " and its evidence vector have different sizes.");
295  }
296 
297  Potential< GUM_SCALAR > pot;
298  pot.add(__bn->variable(id));
299  pot.fillWith(vals);
300  addEvidence(std::move(pot));
301  }
+ Here is the call graph for this function:
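
For illustration, a vals vector with a single 1 encodes hard evidence, while any other non-zero distribution encodes soft evidence; remember that this sampling engine only accepts hard evidence (its _onEvidenceAdded() raises FatalError otherwise). A sketch with a hypothetical 3-label node "sprinkler":

  ie.addEvidence("sprinkler", std::vector< double >{0.0, 1.0, 0.0});   // hard evidence: label 1 observed
  // ie.addEvidence("sprinkler", std::vector< double >{0.2, 0.5, 0.3});  // soft evidence: would throw
  // FatalError with this engine, since only hard evidence is accepted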

◆ addEvidence() [6/8]

template<typename GUM_SCALAR >
void gum::BayesNetInference< GUM_SCALAR >::addEvidence ( const std::string &  nodeName,
const std::vector< GUM_SCALAR > &  vals 
)
final, virtual, inherited

adds a new evidence on node named nodeName (might be soft or hard)

Exceptions
UndefinedElement: if id does not belong to the Bayesian network
InvalidArgument: if nodeName already has an evidence
FatalError: if vals=[0,0,...,0]
InvalidArgument: if the size of vals is different from the domain size of node nodeName

Definition at line 305 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::addEvidence(), and gum::BayesNetInference< GUM_SCALAR >::BN().

306  {
307  addEvidence(this->BN().idFromName(nodeName), vals);
308  }
+ Here is the call graph for this function:

◆ addEvidence() [7/8]

template<typename GUM_SCALAR >
INLINE void gum::BayesNetInference< GUM_SCALAR >::addEvidence ( const Potential< GUM_SCALAR > &  pot)
final, virtual, inherited

adds a new evidence on node id (might be soft or hard)

Exceptions
UndefinedElement: if the potential is defined over several nodes
UndefinedElement: if the node on which the potential is defined does not belong to the Bayesian network
InvalidArgument: if the node of the potential already has an evidence
FatalError: if pot=[0,0,...,0]

Definition at line 354 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::addEvidence().

355  {
356  Potential< GUM_SCALAR > new_pot(pot);
357  addEvidence(std::move(new_pot));
358  }
+ Here is the call graph for this function:

◆ addEvidence() [8/8]

template<typename GUM_SCALAR >
void gum::BayesNetInference< GUM_SCALAR >::addEvidence ( Potential< GUM_SCALAR > &&  pot)
final, virtual, inherited

adds a new evidence on node id (might be soft or hard)

Exceptions
UndefinedElement: if the potential is defined over several nodes
UndefinedElement: if the node on which the potential is defined does not belong to the Bayesian network
InvalidArgument: if the node of the potential already has an evidence
FatalError: if pot=[0,0,...,0]

Definition at line 313 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__bn, gum::BayesNetInference< GUM_SCALAR >::__evidence, gum::BayesNetInference< GUM_SCALAR >::__hard_evidence, gum::BayesNetInference< GUM_SCALAR >::__hard_evidence_nodes, gum::BayesNetInference< GUM_SCALAR >::__isHardEvidence(), gum::BayesNetInference< GUM_SCALAR >::__setState(), gum::BayesNetInference< GUM_SCALAR >::__soft_evidence_nodes, gum::BayesNetInference< GUM_SCALAR >::_onEvidenceAdded(), GUM_ERROR, gum::BayesNetInference< GUM_SCALAR >::hasEvidence(), gum::Set< Key, Alloc >::insert(), and gum::BayesNetInference< GUM_SCALAR >::OutdatedBNStructure.

313  {
314  // check if the potential corresponds to an evidence
315  if (pot.nbrDim() != 1) {
316  GUM_ERROR(InvalidArgument, pot << " is not mono-dimensional.");
317  }
318  if (__bn == nullptr)
319  GUM_ERROR(NullElement,
320  "No Bayes net has been assigned to the "
321  "inference algorithm");
322 
323  NodeId id = __bn->nodeId(pot.variable(0));
324 
325  if (hasEvidence(id)) {
326  GUM_ERROR(InvalidArgument,
327  " node " << id
328  << " already has an evidence. Please use chgEvidence().");
329  }
330 
331  // check whether we have a hard evidence (and also check whether the
332  // potential only contains 0 (in this case, this will automatically raise
333  // an exception) )
334  Idx val;
335  bool is_hard_evidence = __isHardEvidence(pot, val);
336 
337  // insert the evidence
338  __evidence.insert(
339  id,
340  new Potential< GUM_SCALAR >(std::forward< Potential< GUM_SCALAR > >(pot)));
341  if (is_hard_evidence) { // pot is deterministic
342  __hard_evidence.insert(id, val);
344  } else {
346  }
348  _onEvidenceAdded(id, is_hard_evidence);
349  }
+ Here is the call graph for this function:

◆ addListOfEvidence()

template<typename GUM_SCALAR >
INLINE void gum::BayesNetInference< GUM_SCALAR >::addListOfEvidence ( const List< const Potential< GUM_SCALAR > * > &  potlist)
final, virtual, inherited

adds a new list of evidence

Exceptions
UndefinedElement: if some potential is defined over several nodes
UndefinedElement: if the node on which some potential is defined does not belong to the Bayesian network
InvalidArgument: if the node of some potential already has an evidence
FatalError: if pot=[0,0,...,0]

Definition at line 363 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::addEvidence().

364  {
365  for (const auto pot : potlist)
366  addEvidence(*pot);
367  }
+ Here is the call graph for this function:

◆ addSetOfEvidence()

template<typename GUM_SCALAR >
INLINE void gum::BayesNetInference< GUM_SCALAR >::addSetOfEvidence ( const Set< const Potential< GUM_SCALAR > * > &  potset)
final, virtual, inherited

adds a new set of evidence

Exceptions
UndefinedElement: if some potential is defined over several nodes
UndefinedElement: if the node on which some potential is defined does not belong to the Bayesian network
InvalidArgument: if the node of some potential already has an evidence
FatalError: if pot=[0,0,...,0]

Definition at line 372 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::addEvidence().

373  {
374  for (const auto pot : potset)
375  addEvidence(*pot);
376  }
+ Here is the call graph for this function:

◆ addTarget() [1/2]

template<typename GUM_SCALAR >
void gum::MarginalTargetedInference< GUM_SCALAR >::addTarget ( NodeId  target)
final, virtual, inherited

Add a marginal target to the list of targets.

Exceptions
UndefinedElement: if target is not a NodeId in the Bayes net

Definition at line 112 of file marginalTargetedInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__bn, gum::BayesNetInference< GUM_SCALAR >::__setState(), gum::MarginalTargetedInference< GUM_SCALAR >::__targets, gum::MarginalTargetedInference< GUM_SCALAR >::_onMarginalTargetAdded(), gum::MarginalTargetedInference< GUM_SCALAR >::_setTargetedMode(), gum::Set< Key, Alloc >::contains(), GUM_ERROR, and gum::Set< Key, Alloc >::insert().

Referenced by gum::MarginalTargetedInference< GUM_SCALAR >::addTarget(), and gum::MarginalTargetedInference< GUM_SCALAR >::evidenceImpact().

112  {
113  // check if the node belongs to the Bayesian network
114  if (this->__bn == nullptr)
115  GUM_ERROR(NullElement,
116  "No Bayes net has been assigned to the "
117  "inference algorithm");
118 
119  if (!this->__bn->dag().exists(target)) {
120  GUM_ERROR(UndefinedElement, target << " is not a NodeId in the bn");
121  }
122 
123  _setTargetedMode(); // does nothing if already in targeted mode
124  // add the new target
125  if (!__targets.contains(target)) {
126  __targets.insert(target);
127  _onMarginalTargetAdded(target);
128  this->__setState(
129  BayesNetInference< GUM_SCALAR >::StateOfInference::OutdatedBNStructure);
130  }
131  }
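A hedged sketch (node name assumed): restricting the targets avoids estimating posteriors that will never be queried.

  gum::WeightedSampling< double > inference(&bn);   // bn assumed as in the earlier sketches
  inference.eraseAllTargets();                      // by default, every node is a target
  inference.addTarget(bn.idFromName("Disease"));    // or: inference.addTarget("Disease")
  inference.makeInference();
  const gum::Potential< double >& p = inference.posterior("Disease");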

◆ addTarget() [2/2]

template<typename GUM_SCALAR >
void gum::MarginalTargetedInference< GUM_SCALAR >::addTarget ( const std::string &  nodeName)
finalvirtualinherited

Add a marginal target to the list of targets.

Exceptions
UndefinedElement: if target is not a NodeId in the Bayes net

Definition at line 158 of file marginalTargetedInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__bn, gum::MarginalTargetedInference< GUM_SCALAR >::addTarget(), and GUM_ERROR.

159  {
160  // check if the node belongs to the Bayesian network
161  if (this->__bn == nullptr)
162  GUM_ERROR(NullElement,
163  "No Bayes net has been assigned to the "
164  "inference algorithm");
165 
166  addTarget(this->__bn->idFromName(nodeName));
167  }

◆ BN()

template<typename GUM_SCALAR >
INLINE const IBayesNet< GUM_SCALAR > & gum::BayesNetInference< GUM_SCALAR >::BN ( ) const
finalvirtualinherited

Returns a constant reference over the IBayesNet referenced by this class.

Exceptions
UndefinedElement: raised if no Bayes net has been assigned to the inference.

Definition at line 121 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__bn, and GUM_ERROR.

Referenced by gum::WeightedSampling< GUM_SCALAR >::_draw(), gum::MonteCarloSampling< GUM_SCALAR >::_draw(), gum::ImportanceSampling< GUM_SCALAR >::_draw(), gum::SamplingInference< GUM_SCALAR >::_posterior(), gum::BayesNetInference< GUM_SCALAR >::addEvidence(), gum::BayesNetInference< GUM_SCALAR >::chgEvidence(), gum::SamplingInference< GUM_SCALAR >::contextualize(), gum::SamplingInference< GUM_SCALAR >::currentPosterior(), gum::BayesNetInference< GUM_SCALAR >::eraseEvidence(), gum::MarginalTargetedInference< GUM_SCALAR >::evidenceImpact(), gum::JointTargetedInference< GUM_SCALAR >::evidenceJointImpact(), gum::MarginalTargetedInference< GUM_SCALAR >::H(), gum::BayesNetInference< GUM_SCALAR >::hasEvidence(), gum::BayesNetInference< GUM_SCALAR >::hasHardEvidence(), gum::BayesNetInference< GUM_SCALAR >::hasSoftEvidence(), gum::JointTargetedInference< GUM_SCALAR >::I(), gum::JointTargetedInference< GUM_SCALAR >::jointMutualInformation(), gum::MarginalTargetedInference< GUM_SCALAR >::posterior(), gum::JointTargetedInference< GUM_SCALAR >::posterior(), gum::SamplingInference< GUM_SCALAR >::samplingBN(), and gum::Estimator< GUM_SCALAR >::setFromLBP().

121  {
122  if (__bn == nullptr)
123  GUM_ERROR(UndefinedElement,
124  "No Bayes net has been assigned to "
125  "the inference algorithm.");
126  return *__bn;
127  }

◆ chgEvidence() [1/7]

template<typename GUM_SCALAR >
INLINE void gum::BayesNetInference< GUM_SCALAR >::chgEvidence ( NodeId  id,
const Idx  val 
)
finalvirtualinherited

change the value of an already existing hard evidence

Exceptions
UndefinedElement: if id does not belong to the Bayesian network
InvalidArgument: if val is not a value for id
InvalidArgument: if id does not already have an evidence

Definition at line 432 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__createHardEvidence().

Referenced by gum::BayesNetInference< GUM_SCALAR >::chgEvidence(), gum::MarginalTargetedInference< GUM_SCALAR >::evidenceImpact(), and gum::JointTargetedInference< GUM_SCALAR >::evidenceJointImpact().

433  {
434  chgEvidence(__createHardEvidence(id, val));
435  }

◆ chgEvidence() [2/7]

template<typename GUM_SCALAR >
INLINE void gum::BayesNetInference< GUM_SCALAR >::chgEvidence ( const std::string &  nodeName,
const Idx  val 
)
finalvirtualinherited

change the value of an already existing hard evidence

Exceptions
UndefinedElement: if nodeName does not belong to the Bayesian network
InvalidArgument: if val is not a value for id
InvalidArgument: if id does not already have an evidence

Definition at line 440 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::BN(), and gum::BayesNetInference< GUM_SCALAR >::chgEvidence().

441  {
442  chgEvidence(this->BN().idFromName(nodeName), val);
443  }

◆ chgEvidence() [3/7]

template<typename GUM_SCALAR >
INLINE void gum::BayesNetInference< GUM_SCALAR >::chgEvidence ( NodeId  id,
const std::string &  label 
)
finalvirtualinherited

change the value of an already existing hard evidence

Exceptions
UndefinedElement: if id does not belong to the Bayesian network
InvalidArgument: if val is not a value for id
InvalidArgument: if id does not already have an evidence

Definition at line 448 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::BN(), and gum::BayesNetInference< GUM_SCALAR >::chgEvidence().

449  {
450  chgEvidence(id, this->BN().variable(id)[label]);
451  }

◆ chgEvidence() [4/7]

template<typename GUM_SCALAR >
INLINE void gum::BayesNetInference< GUM_SCALAR >::chgEvidence ( const std::string &  nodeName,
const std::string &  label 
)
finalvirtualinherited

change the value of an already existing hard evidence

Exceptions
UndefinedElement: if nodeName does not belong to the Bayesian network
InvalidArgument: if val is not a value for id
InvalidArgument: if id does not already have an evidence

Definition at line 456 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::BN(), and gum::BayesNetInference< GUM_SCALAR >::chgEvidence().

457  {
458  NodeId id = this->BN().idFromName(nodeName);
459  chgEvidence(id, this->BN().variable(id)[label]);
460  }

◆ chgEvidence() [5/7]

template<typename GUM_SCALAR >
INLINE void gum::BayesNetInference< GUM_SCALAR >::chgEvidence ( NodeId  id,
const std::vector< GUM_SCALAR > &  vals 
)
finalvirtualinherited

change the value of an already existing evidence (might be soft or hard)

Exceptions
UndefinedElement: if id does not belong to the Bayesian network
InvalidArgument: if the node does not already have an evidence
FatalError: if vals=[0,0,...,0]
InvalidArgument: if the size of vals is different from the domain size of node id

Definition at line 464 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__bn, gum::MultiDimDecorator< GUM_SCALAR >::add(), gum::BayesNetInference< GUM_SCALAR >::chgEvidence(), and GUM_ERROR.

465  {
466  // check whether this corresponds to an evidence
467  if (__bn == nullptr)
468  GUM_ERROR(NullElement,
469  "No Bayes net has been assigned to the "
470  "inference algorithm");
471 
472  if (!__bn->dag().exists(id)) {
473  GUM_ERROR(UndefinedElement, id << " is not a NodeId in the bn");
474  }
475 
476  if (__bn->variable(id).domainSize() != vals.size()) {
477  GUM_ERROR(InvalidArgument,
478  "node " << __bn->variable(id)
479  << " and its evidence have different sizes.");
480  }
481 
482  // create the potential corresponding to vals
483  Potential< GUM_SCALAR > pot;
484  pot.add(__bn->variable(id));
485  pot.fillWith(vals);
486  chgEvidence(pot);
487  }

◆ chgEvidence() [6/7]

template<typename GUM_SCALAR >
INLINE void gum::BayesNetInference< GUM_SCALAR >::chgEvidence ( const std::string &  nodeName,
const std::vector< GUM_SCALAR > &  vals 
)
finalvirtualinherited

change the value of an already existing evidence (might be soft or hard)

Exceptions
UndefinedElement: if nodeName does not belong to the Bayesian network
InvalidArgument: if the node does not already have an evidence
FatalError: if vals=[0,0,...,0]
InvalidArgument: if the size of vals is different from the domain size of node id

Definition at line 491 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::BN(), and gum::BayesNetInference< GUM_SCALAR >::chgEvidence().

492  {
493  chgEvidence(this->BN().idFromName(nodeName), vals);
494  }

◆ chgEvidence() [7/7]

template<typename GUM_SCALAR >
void gum::BayesNetInference< GUM_SCALAR >::chgEvidence ( const Potential< GUM_SCALAR > &  pot)
finalvirtualinherited

change the value of an already existing evidence (might be soft or hard)

Exceptions
UndefinedElement: if the potential is defined over several nodes
UndefinedElement: if the node on which the potential is defined does not belong to the Bayesian network
InvalidArgument: if the node of the potential does not already have an evidence
FatalError: if pot=[0,0,...,0]

Definition at line 499 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__bn, gum::BayesNetInference< GUM_SCALAR >::__evidence, gum::BayesNetInference< GUM_SCALAR >::__hard_evidence, gum::BayesNetInference< GUM_SCALAR >::__hard_evidence_nodes, gum::BayesNetInference< GUM_SCALAR >::__isHardEvidence(), gum::BayesNetInference< GUM_SCALAR >::__setState(), gum::BayesNetInference< GUM_SCALAR >::__soft_evidence_nodes, gum::BayesNetInference< GUM_SCALAR >::_onEvidenceChanged(), gum::Instantiation::end(), gum::Set< Key, Alloc >::erase(), GUM_ERROR, gum::BayesNetInference< GUM_SCALAR >::hasEvidence(), gum::BayesNetInference< GUM_SCALAR >::hasHardEvidence(), gum::Instantiation::inc(), gum::Set< Key, Alloc >::insert(), gum::BayesNetInference< GUM_SCALAR >::isInferenceOutdatedBNStructure(), gum::MultiDimDecorator< GUM_SCALAR >::nbrDim(), gum::BayesNetInference< GUM_SCALAR >::OutdatedBNPotentials, gum::BayesNetInference< GUM_SCALAR >::OutdatedBNStructure, gum::MultiDimDecorator< GUM_SCALAR >::set(), gum::Instantiation::setFirst(), and gum::MultiDimDecorator< GUM_SCALAR >::variable().

500  {
501  // check if the potential corresponds to an evidence
502  if (pot.nbrDim() != 1) {
503  GUM_ERROR(InvalidArgument, pot << " is not a mono-dimensional potential.");
504  }
505  if (__bn == nullptr)
506  GUM_ERROR(NullElement,
507  "No Bayes net has been assigned to the "
508  "inference algorithm");
509 
510  NodeId id = __bn->nodeId(pot.variable(0));
511 
512  if (!hasEvidence(id)) {
513  GUM_ERROR(InvalidArgument,
514  id << " has no evidence. Please use addEvidence().");
515  }
516 
517  // check whether we have a hard evidence (and also check whether the
518  // potential only contains 0 (in this case, this will automatically raise
519  // an exception) )
520  Idx val;
521  bool is_hard_evidence = __isHardEvidence(pot, val);
522 
523  // modify the evidence already stored
524  const Potential< GUM_SCALAR >* localPot = __evidence[id];
525  Instantiation I(pot);
526  for (I.setFirst(); !I.end(); I.inc()) {
527  localPot->set(I, pot[I]);
528  }
529 
530  // the inference state will be different
531  // whether evidence change from Hard to Soft or not.
532  bool hasChangedSoftHard = false;
533 
534  if (is_hard_evidence) {
535  if (!hasHardEvidence(id)) {
536  hasChangedSoftHard = true;
537  __hard_evidence.insert(id, val);
538  __hard_evidence_nodes.insert(id);
539  __soft_evidence_nodes.erase(id);
540  } else {
541  __hard_evidence[id] = val;
542  }
543  } else {
544  if (hasHardEvidence(id)) { // evidence was hard
545  __hard_evidence.erase(id);
546  __hard_evidence_nodes.erase(id);
547  __soft_evidence_nodes.insert(id);
548  hasChangedSoftHard = true;
549  }
550  }
551 
552  if (hasChangedSoftHard) {
553  __setState(StateOfInference::OutdatedBNStructure);
554  } else {
555  if (!isInferenceOutdatedBNStructure()) {
556  __setState(StateOfInference::OutdatedBNPotentials);
557  }
558  }
559 
560  _onEvidenceChanged(id, hasChangedSoftHard);
561  }
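A hedged sketch of how the chgEvidence overloads are typically used between successive inferences (node name and values are assumptions):

  gum::WeightedSampling< double > inference(&bn);                 // bn assumed as above
  inference.addEvidence("A", 0);                                  // hard evidence on state 0
  inference.makeInference();
  inference.chgEvidence("A", 1);                                  // switch the observed state
  inference.makeInference();
  inference.chgEvidence("A", std::vector< double >{0.5, 0.5});    // relax to soft evidence
  inference.makeInference();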

◆ clear()

template<typename GUM_SCALAR >
INLINE void gum::BayesNetInference< GUM_SCALAR >::clear ( )
virtualinherited

clears all the data structures allocated for the last inference

Definition at line 153 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__setState(), gum::BayesNetInference< GUM_SCALAR >::eraseAllEvidence(), and gum::BayesNetInference< GUM_SCALAR >::OutdatedBNStructure.

Referenced by gum::BayesNetInference< GUM_SCALAR >::setBN().

153  {
154  eraseAllEvidence();
155  __setState(StateOfInference::OutdatedBNStructure);
156  }

◆ contextualize()

template<typename GUM_SCALAR >
void gum::SamplingInference< GUM_SCALAR >::contextualize ( )
virtualinherited

Simplifying the Bayesian network with relevance reasoning to lighten the computational load.

Sets the reference Bayesian network as a BayesNetFragment after eliminating nodes that are idle for simulation and computation, such as barren or d-separated nodes. Eliminates the arcs from evidence nodes to their children, after setting new CPTs for them.

Definition at line 114 of file samplingInference_tpl.h.

References gum::SamplingInference< GUM_SCALAR >::__samplingBN, gum::SamplingInference< GUM_SCALAR >::_onContextualize(), gum::Instantiation::add(), gum::BarrenNodesFinder::barrenNodes(), gum::BayesNetInference< GUM_SCALAR >::BN(), gum::Instantiation::chgVal(), gum::BayesNetInference< GUM_SCALAR >::hardEvidence(), gum::BayesNetInference< GUM_SCALAR >::hardEvidenceNodes(), gum::SamplingInference< GUM_SCALAR >::isContextualized, gum::dSeparation::requisiteNodes(), gum::BarrenNodesFinder::setEvidence(), gum::BarrenNodesFinder::setTargets(), gum::BayesNetInference< GUM_SCALAR >::softEvidenceNodes(), and gum::MarginalTargetedInference< GUM_SCALAR >::targets().

Referenced by gum::SamplingInference< GUM_SCALAR >::_loopApproxInference().

114  {
115  // Finding Barren nodes
116 
117  BarrenNodesFinder barr_nodes = BarrenNodesFinder(&this->BN().dag());
118  barr_nodes.setTargets(&this->targets());
119  barr_nodes.setEvidence(&this->hardEvidenceNodes());
120  const NodeSet& barren = barr_nodes.barrenNodes();
121 
122  // creating BN fragment
123  __samplingBN = new BayesNetFragment< GUM_SCALAR >(this->BN());
124  for (const auto elmt : this->BN().dag().asNodeSet() - barren)
125  __samplingBN->installNode(elmt);
126 
127  // D-separated nodes
128 
129  dSeparation dsep = gum::dSeparation();
130  NodeSet requisite;
131  dsep.requisiteNodes(
132  this->BN().dag(),
133  this->BN().nodes().asNodeSet(), // no target for approximateInference
134  this->hardEvidenceNodes(),
135  this->softEvidenceNodes(), // should be empty
136  requisite);
137  requisite += this->hardEvidenceNodes();
138 
139  auto nonRequisite = this->BN().dag().asNodeSet() - requisite;
140 
141  for (const auto elmt : nonRequisite)
142  __samplingBN->uninstallNode(elmt);
143  for (const auto hard : this->hardEvidenceNodes()) {
144  gum::Instantiation I;
145  I.add(this->BN().variable(hard));
146  I.chgVal(this->BN().variable(hard), this->hardEvidence()[hard]);
147 
148  for (const auto& child : this->BN().children(hard)) {
149  auto p = new gum::Potential< GUM_SCALAR >();
150  *p = this->BN().cpt(child).extract(I);
151  __samplingBN->installCPT(child, p);
152  }
153  }
154 
155  this->isContextualized = true;
156  this->_onContextualize(__samplingBN);
157  }
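As a hedged illustration (node name assumed), contextualize() may also be called explicitly before inference; per the "Referenced by" note above, it is otherwise triggered from the internal inference loop.

  gum::WeightedSampling< double > inference(&bn);   // bn assumed as above
  inference.addEvidence("Fever", 1);
  inference.contextualize();                        // build the simplified BayesNetFragment
  inference.makeInference();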

◆ continueApproximationScheme()

INLINE bool gum::ApproximationScheme::continueApproximationScheme ( double  error)
inherited

Update the scheme w.r.t. the new error.

Tests the stopping criteria that are enabled.

Parameters
error: the new error value.
Returns
false if the state becomes != ApproximationSchemeSTATE::Continue
Exceptions
OperationNotAllowed: raised if state != ApproximationSchemeSTATE::Continue.

Definition at line 227 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_current_epsilon, gum::ApproximationScheme::_current_rate, gum::ApproximationScheme::_current_state, gum::ApproximationScheme::_current_step, gum::ApproximationScheme::_enabled_eps, gum::ApproximationScheme::_enabled_max_iter, gum::ApproximationScheme::_enabled_max_time, gum::ApproximationScheme::_enabled_min_rate_eps, gum::ApproximationScheme::_eps, gum::ApproximationScheme::_history, gum::ApproximationScheme::_last_epsilon, gum::ApproximationScheme::_max_iter, gum::ApproximationScheme::_max_time, gum::ApproximationScheme::_min_rate_eps, gum::ApproximationScheme::_stopScheme(), gum::ApproximationScheme::_timer, gum::IApproximationSchemeConfiguration::Continue, gum::IApproximationSchemeConfiguration::Epsilon, GUM_EMIT3, GUM_ERROR, gum::IApproximationSchemeConfiguration::Limit, gum::IApproximationSchemeConfiguration::messageApproximationScheme(), gum::IApproximationSchemeConfiguration::onProgress, gum::IApproximationSchemeConfiguration::Rate, gum::ApproximationScheme::startOfPeriod(), gum::ApproximationScheme::stateApproximationScheme(), gum::Timer::step(), gum::IApproximationSchemeConfiguration::TimeLimit, and gum::ApproximationScheme::verbosity().

Referenced by gum::GibbsBNdistance< GUM_SCALAR >::_computeKL(), gum::SamplingInference< GUM_SCALAR >::_loopApproxInference(), gum::learning::DAG2BNLearner< ALLOC >::createBN(), gum::learning::GreedyHillClimbing::learnStructure(), gum::learning::LocalSearchWithTabuList::learnStructure(), and gum::credal::CNMonteCarloSampling< GUM_SCALAR, BNInferenceEngine >::makeInference().

227  {
228  // For coherence, we fix the time used in the method
229 
230  double timer_step = _timer.step();
231 
232  if (_enabled_max_time) {
233  if (timer_step > _max_time) {
234  _stopScheme(ApproximationSchemeSTATE::TimeLimit);
235  return false;
236  }
237  }
238 
239  if (!startOfPeriod()) { return true; }
240 
241  if (_current_state != ApproximationSchemeSTATE::Continue) {
242  GUM_ERROR(OperationNotAllowed,
243  "state of the approximation scheme is not correct : "
244  + messageApproximationScheme());
245  }
246 
247  if (verbosity()) { _history.push_back(error); }
248 
249  if (_enabled_max_iter) {
250  if (_current_step > _max_iter) {
251  _stopScheme(ApproximationSchemeSTATE::Limit);
252  return false;
253  }
254  }
255 
256  _last_epsilon = _current_epsilon;
257  _current_epsilon = error; // eps rate isEnabled needs it so affectation was
258  // moved from eps isEnabled below
259 
260  if (_enabled_eps) {
261  if (_current_epsilon <= _eps) {
262  _stopScheme(ApproximationSchemeSTATE::Epsilon);
263  return false;
264  }
265  }
266 
267  if (_last_epsilon >= 0.) {
268  if (_current_epsilon > .0) {
269  // ! _current_epsilon can be 0. AND epsilon
270  // isEnabled can be disabled !
271  _current_rate =
272  std::fabs((_current_epsilon - _last_epsilon) / _current_epsilon);
273  }
274  // limit with current eps ---> 0 is | 1 - ( last_eps / 0 ) | --->
275  // infinity the else means a return false if we isEnabled the rate below,
276  // as we would have returned false if epsilon isEnabled was enabled
277  else {
278  _current_rate = _min_rate_eps;
279  }
280 
281  if (_enabled_min_rate_eps) {
282  if (_current_rate <= _min_rate_eps) {
283  _stopScheme(ApproximationSchemeSTATE::Rate);
284  return false;
285  }
286  }
287  }
288 
289  if (stateApproximationScheme() == ApproximationSchemeSTATE::Continue) {
290  if (onProgress.hasListener()) {
291  GUM_EMIT3(onProgress, _current_step, _current_epsilon, _timer.step());
292  }
293 
294  return true;
295  } else {
296  return false;
297  }
298  }
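An illustrative sketch of the loop such a scheme runs, written as if inside a class deriving from gum::ApproximationScheme (the real loop lives in the inference classes; performOneSamplingStep() is a hypothetical placeholder for drawing samples and measuring the error):

  initApproximationScheme();                    // reset step counter, timer and history
  double error = 0.0;
  do {
    error = performOneSamplingStep();           // hypothetical helper, returns the new error
    updateApproximationScheme();                // advance the current step
  } while (continueApproximationScheme(error));
  // stateApproximationScheme() now tells which stopping criterion fired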

◆ currentPosterior() [1/2]

template<typename GUM_SCALAR >
const Potential< GUM_SCALAR > & gum::SamplingInference< GUM_SCALAR >::currentPosterior ( NodeId  id)
inherited

Computes and returns the actual estimation of the posterior of a node.

Returns
a const ref to the posterior probability of the node.
Parameters
id: the node for which we need a posterior probability
Warning
for efficiency reasons, the potential is returned by reference. In order to ensure that the potential may still exist even if the Inference object is destroyed, the user has to copy it explicitly.
Exceptions
UndefinedElement: if node is not in the set of targets.
NotFound: if node is not in the BN.

Definition at line 97 of file samplingInference_tpl.h.

References gum::SamplingInference< GUM_SCALAR >::__estimator, and gum::BayesNetInference< GUM_SCALAR >::BN().

Referenced by gum::SamplingInference< GUM_SCALAR >::currentPosterior().

97  {
98  return __estimator.posterior(this->BN().variable(id));
99  }

◆ currentPosterior() [2/2]

template<typename GUM_SCALAR >
const Potential< GUM_SCALAR > & gum::SamplingInference< GUM_SCALAR >::currentPosterior ( const std::string &  name)
inherited

Computes and returns the actual estimation of the posterior of a node by its name.

Returns
a const ref to the posterior probability of the node referred by name.
Parameters
name: the name of the node for which we need a posterior probability
Warning
for efficiency reasons, the potential is returned by reference. In order to ensure that the potential may still exist even if the Inference object is destroyed, the user has to copy it explicitly.
Exceptions
UndefinedElement: if the node corresponding to name is not in the set of targets.
NotFound: if the node corresponding to name is not in the BN.

Definition at line 103 of file samplingInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::BN(), and gum::SamplingInference< GUM_SCALAR >::currentPosterior().

103  {
104  return currentPosterior(this->BN().idFromName(name));
105  }
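A hedged sketch (node name assumed): currentPosterior returns the anytime estimate maintained by the internal estimator, for instance after an inference bounded by a time limit.

  gum::WeightedSampling< double > inference(&bn);   // bn assumed as above
  inference.setMaxTime(1.0);
  inference.enableMaxTime();                        // stop sampling after about one second
  inference.makeInference();
  // copy the estimate: the returned reference stays owned by the inference object
  gum::Potential< double > estimate = inference.currentPosterior("Disease");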

◆ currentTime()

INLINE double gum::ApproximationScheme::currentTime ( ) const
virtualinherited

Returns the current running time in second.

Returns
Returns the current running time in second.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 128 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_timer, and gum::Timer::step().

Referenced by gum::learning::genericBNLearner::currentTime().

128 { return _timer.step(); }

◆ disableEpsilon()

INLINE void gum::ApproximationScheme::disableEpsilon ( )
virtualinherited

Disable stopping criterion on epsilon.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 54 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_enabled_eps.

Referenced by gum::learning::genericBNLearner::disableEpsilon().

54 { _enabled_eps = false; }

◆ disableMaxIter()

INLINE void gum::ApproximationScheme::disableMaxIter ( )
virtualinherited

Disable stopping criterion on max iterations.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 105 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_enabled_max_iter.

Referenced by gum::credal::CNMonteCarloSampling< GUM_SCALAR, BNInferenceEngine >::__mcInitApproximationScheme(), gum::learning::genericBNLearner::disableMaxIter(), and gum::learning::GreedyHillClimbing::GreedyHillClimbing().

105 { _enabled_max_iter = false; }

◆ disableMaxTime()

INLINE void gum::ApproximationScheme::disableMaxTime ( )
virtualinherited

Disable stopping criterion on timeout.

Returns
Disable stopping criterion on timeout.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 131 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_enabled_max_time.

Referenced by gum::learning::genericBNLearner::disableMaxTime(), and gum::learning::GreedyHillClimbing::GreedyHillClimbing().

131 { _enabled_max_time = false; }

◆ disableMinEpsilonRate()

INLINE void gum::ApproximationScheme::disableMinEpsilonRate ( )
virtualinherited

Disable stopping criterion on epsilon rate.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 79 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_enabled_min_rate_eps.

Referenced by gum::credal::CNMonteCarloSampling< GUM_SCALAR, BNInferenceEngine >::__mcInitApproximationScheme(), gum::GibbsBNdistance< GUM_SCALAR >::_computeKL(), gum::learning::genericBNLearner::disableMinEpsilonRate(), and gum::learning::GreedyHillClimbing::GreedyHillClimbing().

79  {
80  _enabled_min_rate_eps = false;
81  }

◆ domainSizes()

template<typename GUM_SCALAR >
INLINE const NodeProperty< Size > & gum::BayesNetInference< GUM_SCALAR >::domainSizes ( ) const
finalvirtualinherited

get the domain sizes of the random variables of the BN

Definition at line 174 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__domain_sizes.

174  {
175  return __domain_sizes;
176  }

◆ enableEpsilon()

INLINE void gum::ApproximationScheme::enableEpsilon ( )
virtualinherited

Enable stopping criterion on epsilon.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 57 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_enabled_eps.

Referenced by gum::credal::CNMonteCarloSampling< GUM_SCALAR, BNInferenceEngine >::__mcInitApproximationScheme(), and gum::learning::genericBNLearner::enableEpsilon().

57 { _enabled_eps = true; }

◆ enableMaxIter()

INLINE void gum::ApproximationScheme::enableMaxIter ( )
virtualinherited

Enable stopping criterion on max iterations.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 108 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_enabled_max_iter.

Referenced by gum::learning::genericBNLearner::enableMaxIter().

108 { _enabled_max_iter = true; }

◆ enableMaxTime()

INLINE void gum::ApproximationScheme::enableMaxTime ( )
virtualinherited

Enable stopping criterion on timeout.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 134 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_enabled_max_time.

Referenced by gum::credal::CNMonteCarloSampling< GUM_SCALAR, BNInferenceEngine >::CNMonteCarloSampling(), and gum::learning::genericBNLearner::enableMaxTime().

134 { _enabled_max_time = true; }

◆ enableMinEpsilonRate()

INLINE void gum::ApproximationScheme::enableMinEpsilonRate ( )
virtualinherited

Enable stopping criterion on epsilon rate.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 84 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_enabled_min_rate_eps.

Referenced by gum::GibbsBNdistance< GUM_SCALAR >::_computeKL(), and gum::learning::genericBNLearner::enableMinEpsilonRate().

84  {
85  _enabled_min_rate_eps = true;
86  }

◆ epsilon()

INLINE double gum::ApproximationScheme::epsilon ( ) const
virtualinherited

Returns the value of epsilon.

Returns
Returns the value of epsilon.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 51 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_eps.

Referenced by gum::ImportanceSampling< GUM_SCALAR >::_onContextualize(), and gum::learning::genericBNLearner::epsilon().

51 { return _eps; }
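Taken together, these switches and the associated setters control when sampling stops; a hedged configuration sketch (thresholds are arbitrary):

  gum::WeightedSampling< double > inference(&bn);   // bn assumed as above
  inference.setEpsilon(1e-3);                       // stop once the error estimate is small enough
  inference.enableEpsilon();
  inference.setMaxIter(100000);                     // hard cap on the number of iterations
  inference.enableMaxIter();
  inference.disableMaxTime();                       // no timeout
  inference.setVerbosity(true);                     // keep the error history
  inference.makeInference();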

◆ eraseAllEvidence()

template<typename GUM_SCALAR >
INLINE void gum::BayesNetInference< GUM_SCALAR >::eraseAllEvidence ( )
finalvirtualinherited

removes all the evidence entered into the network

Definition at line 595 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__evidence, gum::BayesNetInference< GUM_SCALAR >::__hard_evidence, gum::BayesNetInference< GUM_SCALAR >::__hard_evidence_nodes, gum::BayesNetInference< GUM_SCALAR >::__setState(), gum::BayesNetInference< GUM_SCALAR >::__soft_evidence_nodes, gum::BayesNetInference< GUM_SCALAR >::_onAllEvidenceErased(), gum::Set< Key, Alloc >::clear(), gum::BayesNetInference< GUM_SCALAR >::isInferenceOutdatedBNStructure(), gum::BayesNetInference< GUM_SCALAR >::OutdatedBNPotentials, and gum::BayesNetInference< GUM_SCALAR >::OutdatedBNStructure.

Referenced by gum::BayesNetInference< GUM_SCALAR >::clear(), gum::MarginalTargetedInference< GUM_SCALAR >::evidenceImpact(), gum::JointTargetedInference< GUM_SCALAR >::evidenceJointImpact(), and gum::JointTargetedInference< GUM_SCALAR >::jointMutualInformation().

595  {
596  bool has_hard_evidence = !__hard_evidence.empty();
597  this->_onAllEvidenceErased(has_hard_evidence);
598 
599  for (const auto& pair : __evidence) {
600  if (pair.second != nullptr) { delete (pair.second); }
601  }
602 
603  __evidence.clear();
604  __hard_evidence.clear();
605  __hard_evidence_nodes.clear();
606  __soft_evidence_nodes.clear();
607 
608  if (has_hard_evidence) {
609  __setState(StateOfInference::OutdatedBNStructure);
610  } else {
611  if (!isInferenceOutdatedBNStructure()) {
612  __setState(StateOfInference::OutdatedBNPotentials);
613  }
614  }
615  }

◆ eraseAllTargets()

template<typename GUM_SCALAR >
INLINE void gum::MarginalTargetedInference< GUM_SCALAR >::eraseAllTargets ( )
virtualinherited

Clear all previously defined targets.

Reimplemented in gum::JointTargetedInference< GUM_SCALAR >.

Definition at line 99 of file marginalTargetedInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__setState(), gum::MarginalTargetedInference< GUM_SCALAR >::__targets, gum::MarginalTargetedInference< GUM_SCALAR >::_onAllMarginalTargetsErased(), gum::MarginalTargetedInference< GUM_SCALAR >::_setTargetedMode(), and gum::Set< Key, Alloc >::clear().

Referenced by gum::JointTargetedInference< GUM_SCALAR >::eraseAllMarginalTargets(), and gum::MarginalTargetedInference< GUM_SCALAR >::evidenceImpact().

99  {
100  _onAllMarginalTargetsErased();
101 
102  __targets.clear();
103  _setTargetedMode(); // does nothing if already in targeted mode
104 
105  this->__setState(
106  BayesNetInference< GUM_SCALAR >::StateOfInference::OutdatedBNStructure);
107  }

◆ eraseEvidence() [1/2]

template<typename GUM_SCALAR >
INLINE void gum::BayesNetInference< GUM_SCALAR >::eraseEvidence ( NodeId  id)
finalvirtualinherited

removes the evidence, if any, corresponding to node id

Definition at line 566 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__evidence, gum::BayesNetInference< GUM_SCALAR >::__hard_evidence, gum::BayesNetInference< GUM_SCALAR >::__hard_evidence_nodes, gum::BayesNetInference< GUM_SCALAR >::__setState(), gum::BayesNetInference< GUM_SCALAR >::__soft_evidence_nodes, gum::BayesNetInference< GUM_SCALAR >::_onEvidenceErased(), gum::Set< Key, Alloc >::erase(), gum::BayesNetInference< GUM_SCALAR >::hasEvidence(), gum::BayesNetInference< GUM_SCALAR >::hasHardEvidence(), gum::BayesNetInference< GUM_SCALAR >::isInferenceOutdatedBNStructure(), gum::BayesNetInference< GUM_SCALAR >::OutdatedBNPotentials, and gum::BayesNetInference< GUM_SCALAR >::OutdatedBNStructure.

Referenced by gum::BayesNetInference< GUM_SCALAR >::eraseEvidence().

566  {
567  if (hasEvidence(id)) {
568  if (hasHardEvidence(id)) {
569  _onEvidenceErased(id, true);
570  __hard_evidence.erase(id);
571  __hard_evidence_nodes.erase(id);
572  __setState(StateOfInference::OutdatedBNStructure);
573  } else {
574  _onEvidenceErased(id, false);
575  __soft_evidence_nodes.erase(id);
576  if (!isInferenceOutdatedBNStructure()) {
577  __setState(StateOfInference::OutdatedBNPotentials);
578  }
579  }
580 
581  delete (__evidence[id]);
582  __evidence.erase(id);
583  }
584  }

◆ eraseEvidence() [2/2]

template<typename GUM_SCALAR >
INLINE void gum::BayesNetInference< GUM_SCALAR >::eraseEvidence ( const std::string &  nodeName)
finalvirtualinherited

removes the evidence, if any, corresponding to the node named nodeName

Definition at line 588 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::BN(), and gum::BayesNetInference< GUM_SCALAR >::eraseEvidence().

588  {
589  eraseEvidence(this->BN().idFromName(nodeName));
590  }
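A hedged sketch of evidence management across several queries (node names assumed):

  gum::WeightedSampling< double > inference(&bn);   // bn assumed as above
  inference.addEvidence("A", 0);
  inference.addEvidence("B", 1);
  inference.makeInference();
  inference.eraseEvidence("B");                     // keep only the evidence on "A"
  inference.makeInference();
  inference.eraseAllEvidence();                     // back to the prior: no evidence at all
  inference.makeInference();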

◆ eraseTarget() [1/2]

template<typename GUM_SCALAR >
void gum::MarginalTargetedInference< GUM_SCALAR >::eraseTarget ( NodeId  target)
finalvirtualinherited

removes an existing (marginal) target

Warning
If the target does not already exist, the method does nothing. In particular, it does not raise any exception.

Definition at line 172 of file marginalTargetedInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__bn, gum::BayesNetInference< GUM_SCALAR >::__setState(), gum::MarginalTargetedInference< GUM_SCALAR >::__targeted_mode, gum::MarginalTargetedInference< GUM_SCALAR >::__targets, gum::MarginalTargetedInference< GUM_SCALAR >::_onMarginalTargetErased(), gum::Set< Key, Alloc >::contains(), gum::Set< Key, Alloc >::erase(), and GUM_ERROR.

Referenced by gum::MarginalTargetedInference< GUM_SCALAR >::eraseTarget().

172  {
173  // check if the node belongs to the Bayesian network
174  if (this->__bn == nullptr)
175  GUM_ERROR(NullElement,
176  "No Bayes net has been assigned to the "
177  "inference algorithm");
178 
179  if (!this->__bn->dag().exists(target)) {
180  GUM_ERROR(UndefinedElement, target << " is not a NodeId in the bn");
181  }
182 
183 
184  if (__targets.contains(target)) {
185  __targeted_mode = true; // we do not use _setTargetedMode because we do not
186  // want to clear the targets
187  _onMarginalTargetErased(target);
188  __targets.erase(target);
189  this->__setState(
190  BayesNetInference< GUM_SCALAR >::StateOfInference::OutdatedBNStructure);
191  }
192  }

◆ eraseTarget() [2/2]

template<typename GUM_SCALAR >
void gum::MarginalTargetedInference< GUM_SCALAR >::eraseTarget ( const std::string &  nodeName)
finalvirtualinherited

removes an existing (marginal) target

Warning
If the target does not already exist, the method does nothing. In particular, it does not raise any exception.

Definition at line 197 of file marginalTargetedInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__bn, gum::MarginalTargetedInference< GUM_SCALAR >::eraseTarget(), and GUM_ERROR.

198  {
199  // check if the node belongs to the Bayesian network
200  if (this->__bn == nullptr)
201  GUM_ERROR(NullElement,
202  "No Bayes net has been assigned to the "
203  "inference algorithm");
204 
205  eraseTarget(this->__bn->idFromName(nodeName));
206  }

◆ evidence()

template<typename GUM_SCALAR >
INLINE const NodeProperty< const Potential< GUM_SCALAR > *> & gum::BayesNetInference< GUM_SCALAR >::evidence ( ) const
inherited

returns the set of evidence

Definition at line 650 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__evidence.

Referenced by gum::ImportanceSampling< GUM_SCALAR >::_onContextualize(), and gum::MarginalTargetedInference< GUM_SCALAR >::posterior().

650  {
651  return __evidence;
652  }

◆ evidenceImpact() [1/2]

template<typename GUM_SCALAR >
Potential< GUM_SCALAR > gum::MarginalTargetedInference< GUM_SCALAR >::evidenceImpact ( NodeId  target,
const NodeSet evs 
)
inherited

Create a gum::Potential for P(target|evs) (for all instantiations of target and evs)

Warning
If some evs are d-separated, they are not included in the Potential
Parameters
bn: the BayesNet
target: the nodeId of the targeted variable
evs: the set of nodeIds of the observed variables
Returns
a Potential

Definition at line 285 of file marginalTargetedInference_tpl.h.

References gum::MultiDimDecorator< GUM_SCALAR >::add(), gum::BayesNetInference< GUM_SCALAR >::addEvidence(), gum::MarginalTargetedInference< GUM_SCALAR >::addTarget(), gum::BayesNetInference< GUM_SCALAR >::BN(), gum::BayesNetInference< GUM_SCALAR >::chgEvidence(), gum::Set< Key, Alloc >::contains(), gum::Instantiation::end(), gum::BayesNetInference< GUM_SCALAR >::eraseAllEvidence(), gum::MarginalTargetedInference< GUM_SCALAR >::eraseAllTargets(), GUM_ERROR, gum::Instantiation::incNotVar(), gum::Instantiation::incVar(), gum::BayesNetInference< GUM_SCALAR >::makeInference(), gum::MarginalTargetedInference< GUM_SCALAR >::posterior(), gum::MultiDimDecorator< GUM_SCALAR >::set(), gum::Instantiation::setFirst(), gum::Instantiation::setFirstVar(), and gum::Instantiation::val().

Referenced by gum::MarginalTargetedInference< GUM_SCALAR >::evidenceImpact().

286  {
287  const auto& vtarget = this->BN().variable(target);
288 
289  if (evs.contains(target)) {
290  GUM_ERROR(InvalidArgument,
291  "Target <" << vtarget.name() << "> (" << target
292  << ") can not be in evs (" << evs << ").");
293  }
294  auto condset = this->BN().minimalCondSet(target, evs);
295 
296  Potential< GUM_SCALAR > res;
297  this->eraseAllTargets();
298  this->eraseAllEvidence();
299  res.add(this->BN().variable(target));
300  this->addTarget(target);
301  for (const auto& n : condset) {
302  res.add(this->BN().variable(n));
303  this->addEvidence(n, 0);
304  }
305 
306  Instantiation inst(res);
307  for (inst.setFirst(); !inst.end(); inst.incNotVar(vtarget)) {
308  // inferring
309  for (const auto& n : condset)
310  this->chgEvidence(n, inst.val(this->BN().variable(n)));
311  this->makeInference();
312  // populate res
313  for (inst.setFirstVar(vtarget); !inst.end(); inst.incVar(vtarget)) {
314  res.set(inst, this->posterior(target)[inst]);
315  }
316  inst.setFirstVar(vtarget); // remove inst.end() flag
317  }
318 
319  return res;
320  }

◆ evidenceImpact() [2/2]

template<typename GUM_SCALAR >
Potential< GUM_SCALAR > gum::MarginalTargetedInference< GUM_SCALAR >::evidenceImpact ( const std::string &  target,
const std::vector< std::string > &  evs 
)
inherited

Create a gum::Potential for P(target|evs) (for all instantiations of target and evs)

Warning
If some evs are d-separated, they are not included in the Potential
Parameters
target: the name of the target variable
evs: the names of the observed variables
Returns
a Potential

Definition at line 324 of file marginalTargetedInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::BN(), gum::MarginalTargetedInference< GUM_SCALAR >::evidenceImpact(), and gum::Set< Key, Alloc >::insert().

325  {
326  const auto& bn = this->BN();
327 
328  gum::NodeSet evsId;
329  for (const auto& evname : evs) {
330  evsId.insert(bn.idFromName(evname));
331  }
332 
333  return evidenceImpact(bn.idFromName(target), evsId);
334  }
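A hedged sketch (node names assumed): evidenceImpact tabulates P(target | evs) for every configuration of the conditioning variables in a single call.

  gum::WeightedSampling< double > inference(&bn);   // bn assumed as above
  gum::Potential< double > impact =
      inference.evidenceImpact("Disease", {"Fever", "Cough"});
  // impact holds P(Disease | Fever, Cough), restricted to the conditioning
  // nodes that are not d-separated from "Disease"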

◆ H() [1/2]

template<typename GUM_SCALAR >
INLINE GUM_SCALAR gum::MarginalTargetedInference< GUM_SCALAR >::H ( NodeId  X)
finalvirtualinherited

Entropy: computes Shannon's entropy of a node given the observation.

See also
http://en.wikipedia.org/wiki/Information_entropy

Definition at line 269 of file marginalTargetedInference_tpl.h.

References gum::MarginalTargetedInference< GUM_SCALAR >::posterior().

Referenced by gum::MarginalTargetedInference< GUM_SCALAR >::H(), and gum::JointTargetedInference< GUM_SCALAR >::VI().

269  {
270  return posterior(X).entropy();
271  }

◆ H() [2/2]

template<typename GUM_SCALAR >
INLINE GUM_SCALAR gum::MarginalTargetedInference< GUM_SCALAR >::H ( const std::string &  nodeName)
finalvirtualinherited

Entropy: computes Shannon's entropy of a node given the observation.

See also
http://en.wikipedia.org/wiki/Information_entropy

Definition at line 278 of file marginalTargetedInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::BN(), and gum::MarginalTargetedInference< GUM_SCALAR >::H().

278  {
279  return H(this->BN().idFromName(nodeName));
280  }
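A hedged sketch (node names assumed): H can be used, for instance, to measure how uncertain a variable remains after observing some evidence.

  gum::WeightedSampling< double > inference(&bn);   // bn assumed as above
  inference.addEvidence("Fever", 1);
  inference.makeInference();
  double h = inference.H("Disease");                // Shannon entropy of P(Disease | Fever = 1)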

◆ hardEvidence()

template<typename GUM_SCALAR >
INLINE const NodeProperty< Idx > & gum::BayesNetInference< GUM_SCALAR >::hardEvidence ( ) const
inherited

indicates for each node with hard evidence which value it took

Definition at line 642 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__hard_evidence.

Referenced by gum::WeightedSampling< GUM_SCALAR >::_draw(), gum::MonteCarloSampling< GUM_SCALAR >::_draw(), gum::ImportanceSampling< GUM_SCALAR >::_draw(), and gum::SamplingInference< GUM_SCALAR >::contextualize().

642  {
643  return __hard_evidence;
644  }

◆ hardEvidenceNodes()

template<typename GUM_SCALAR >
INLINE const NodeSet & gum::BayesNetInference< GUM_SCALAR >::hardEvidenceNodes ( ) const
inherited

returns the set of nodes with hard evidence

the set of nodes that received hard evidence

Definition at line 666 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__hard_evidence_nodes.

Referenced by gum::WeightedSampling< GUM_SCALAR >::_draw(), gum::MonteCarloSampling< GUM_SCALAR >::_draw(), gum::ImportanceSampling< GUM_SCALAR >::_draw(), gum::ImportanceSampling< GUM_SCALAR >::_onContextualize(), gum::SamplingInference< GUM_SCALAR >::_setEstimatorFromBN(), gum::SamplingInference< GUM_SCALAR >::_setEstimatorFromLBP(), gum::SamplingInference< GUM_SCALAR >::contextualize(), and gum::MarginalTargetedInference< GUM_SCALAR >::posterior().

666  {
667  return __hard_evidence_nodes;
668  }

◆ hasEvidence() [1/3]

template<typename GUM_SCALAR >
INLINE bool gum::BayesNetInference< GUM_SCALAR >::hasEvidence ( ) const
finalvirtualinherited

indicates whether some node(s) have received evidence

Definition at line 381 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__evidence.

Referenced by gum::BayesNetInference< GUM_SCALAR >::addEvidence(), gum::BayesNetInference< GUM_SCALAR >::chgEvidence(), gum::BayesNetInference< GUM_SCALAR >::eraseEvidence(), and gum::BayesNetInference< GUM_SCALAR >::hasEvidence().

381  {
382  return !__evidence.empty();
383  }

◆ hasEvidence() [2/3]

template<typename GUM_SCALAR >
INLINE bool gum::BayesNetInference< GUM_SCALAR >::hasEvidence ( NodeId  id) const
finalvirtualinherited

indicates whether node id has received evidence

Definition at line 388 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__evidence.

388  {
389  return __evidence.exists(id);
390  }

◆ hasEvidence() [3/3]

template<typename GUM_SCALAR >
INLINE bool gum::BayesNetInference< GUM_SCALAR >::hasEvidence ( const std::string &  nodeName) const
finalvirtualinherited

indicates whether the node named nodeName has received evidence

Definition at line 409 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::BN(), and gum::BayesNetInference< GUM_SCALAR >::hasEvidence().

410  {
411  return hasEvidence(this->BN().idFromName(nodeName));
412  }

◆ hasHardEvidence() [1/2]

template<typename GUM_SCALAR >
INLINE bool gum::BayesNetInference< GUM_SCALAR >::hasHardEvidence ( NodeId  id) const
finalvirtualinherited

indicates whether node id has received hard evidence

Definition at line 395 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__hard_evidence_nodes, and gum::Set< Key, Alloc >::exists().

Referenced by gum::ImportanceSampling< GUM_SCALAR >::_draw(), gum::BayesNetInference< GUM_SCALAR >::chgEvidence(), gum::BayesNetInference< GUM_SCALAR >::eraseEvidence(), and gum::BayesNetInference< GUM_SCALAR >::hasHardEvidence().

{
  return __hard_evidence_nodes.exists(id);
}

◆ hasHardEvidence() [2/2]

template<typename GUM_SCALAR >
INLINE bool gum::BayesNetInference< GUM_SCALAR >::hasHardEvidence ( const std::string &  nodeName) const
finalvirtualinherited

indicates whether the node named nodeName has received hard evidence

Definition at line 417 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::BN(), and gum::BayesNetInference< GUM_SCALAR >::hasHardEvidence().

{
  return hasHardEvidence(this->BN().idFromName(nodeName));
}

◆ hasSoftEvidence() [1/2]

template<typename GUM_SCALAR >
INLINE bool gum::BayesNetInference< GUM_SCALAR >::hasSoftEvidence ( NodeId  id) const
finalvirtualinherited

indicates whether node id has received soft evidence

Definition at line 402 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__soft_evidence_nodes, and gum::Set< Key, Alloc >::exists().

Referenced by gum::BayesNetInference< GUM_SCALAR >::hasSoftEvidence().

{
  return __soft_evidence_nodes.exists(id);
}

◆ hasSoftEvidence() [2/2]

template<typename GUM_SCALAR >
INLINE bool gum::BayesNetInference< GUM_SCALAR >::hasSoftEvidence ( const std::string &  nodeName) const
finalvirtualinherited

indicates whether the node named nodeName has received soft evidence

Definition at line 425 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::BN(), and gum::BayesNetInference< GUM_SCALAR >::hasSoftEvidence().

{
  return hasSoftEvidence(this->BN().idFromName(nodeName));
}
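The evidence-related accessors above are easiest to see on a tiny example. The following is a minimal, hypothetical sketch (not taken from the aGrUM documentation): the header paths, the fastPrototype helper and the variable names bn and ws are assumptions about aGrUM 0.16. It builds a three-node network, enters one hard and one soft evidence, then queries the accessors documented in this section.

#include <iostream>
#include <vector>
#include <agrum/BN/BayesNet.h>
#include <agrum/BN/inference/weightedSampling.h>

int main() {
  // three binary variables A -> B -> C (assumed helper)
  auto bn = gum::BayesNet< double >::fastPrototype("A->B->C");
  gum::WeightedSampling< double > ws(&bn);

  ws.addEvidence("A", 0);                                 // hard evidence: A fixed to its first label
  ws.addEvidence("B", std::vector< double >{0.2, 0.8});   // soft evidence (likelihood) on B

  std::cout << ws.hasHardEvidence("A") << std::endl;      // 1
  std::cout << ws.hasSoftEvidence("B") << std::endl;      // 1
  std::cout << ws.nbrEvidence() << std::endl;             // 2
  return 0;
}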

◆ history()

INLINE const std::vector< double > & gum::ApproximationScheme::history ( ) const
virtualinherited

Returns the scheme history.

Returns
Returns the scheme history.
Exceptions
OperationNotAllowed Raised if the scheme has not been run or if verbosity is set to false.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 173 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_history, GUM_ERROR, gum::ApproximationScheme::stateApproximationScheme(), gum::IApproximationSchemeConfiguration::Undefined, and gum::ApproximationScheme::verbosity().

Referenced by gum::learning::genericBNLearner::history().

{
  if (stateApproximationScheme() == ApproximationSchemeSTATE::Undefined) {
    GUM_ERROR(OperationNotAllowed,
              "state of the approximation scheme is undefined");
  }

  if (verbosity() == false) {
    GUM_ERROR(OperationNotAllowed, "No history when verbosity=false");
  }

  return _history;
}
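Since history() raises OperationNotAllowed unless verbosity was enabled before the inference ran, a typical (hypothetical) usage, continuing with the ws object from the earlier sketch, would be:

ws.setVerbosity(true);        // keep the error history
ws.makeInference();
for (double err : ws.history())
  std::cout << err << " ";    // one error value per checking period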

◆ initApproximationScheme()

INLINE void gum::ApproximationScheme::initApproximationScheme ( )
inherited

Initialise the scheme.

Definition at line 187 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_current_epsilon, gum::ApproximationScheme::_current_rate, gum::ApproximationScheme::_current_state, gum::ApproximationScheme::_current_step, gum::ApproximationScheme::_history, gum::ApproximationScheme::_timer, gum::IApproximationSchemeConfiguration::Continue, and gum::Timer::reset().

Referenced by gum::credal::CNMonteCarloSampling< GUM_SCALAR, BNInferenceEngine >::__mcInitApproximationScheme(), gum::GibbsBNdistance< GUM_SCALAR >::_computeKL(), gum::SamplingInference< GUM_SCALAR >::_loopApproxInference(), gum::SamplingInference< GUM_SCALAR >::_onStateChanged(), gum::learning::DAG2BNLearner< ALLOC >::createBN(), gum::learning::GreedyHillClimbing::learnStructure(), and gum::learning::LocalSearchWithTabuList::learnStructure().

{
  _current_state = ApproximationSchemeSTATE::Continue;
  _current_step = 0;
  _current_epsilon = _current_rate = -1.0;
  _history.clear();
  _timer.reset();
}

◆ isDone()

template<typename GUM_SCALAR >
INLINE bool gum::BayesNetInference< GUM_SCALAR >::isDone ( ) const
finalvirtualnoexceptinherited

returns whether the inference object is in a done state

The inference object is in a done state when the posteriors can be retrieved without performing a new inference, i.e., all the heavy computations have already been performed. Typically, in a junction tree algorithm, this corresponds to a situation in which all the messages needed in the JT have been computed and sent.

Definition at line 96 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__state, and gum::BayesNetInference< GUM_SCALAR >::Done.

Referenced by gum::JointTargetedInference< GUM_SCALAR >::jointPosterior(), gum::BayesNetInference< GUM_SCALAR >::makeInference(), gum::MarginalTargetedInference< GUM_SCALAR >::posterior(), and gum::BayesNetInference< GUM_SCALAR >::prepareInference().

{
  return (__state == StateOfInference::Done);
}

◆ isEnabledEpsilon()

INLINE bool gum::ApproximationScheme::isEnabledEpsilon ( ) const
virtualinherited

Returns true if stopping criterion on epsilon is enabled, false otherwise.

Returns
Returns true if stopping criterion on epsilon is enabled, false otherwise.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 61 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_enabled_eps.

Referenced by gum::learning::genericBNLearner::isEnabledEpsilon().

{
  return _enabled_eps;
}

◆ isEnabledMaxIter()

INLINE bool gum::ApproximationScheme::isEnabledMaxIter ( ) const
virtualinherited

Returns true if stopping criterion on max iterations is enabled, false otherwise.

Returns
Returns true if stopping criterion on max iterations is enabled, false otherwise.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 112 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_enabled_max_iter.

Referenced by gum::learning::genericBNLearner::isEnabledMaxIter().

{
  return _enabled_max_iter;
}

◆ isEnabledMaxTime()

INLINE bool gum::ApproximationScheme::isEnabledMaxTime ( ) const
virtualinherited

Returns true if stopping criterion on timeout is enabled, false otherwise.

Returns
Returns true if stopping criterion on timeout is enabled, false otherwise.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 138 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_enabled_max_time.

Referenced by gum::learning::genericBNLearner::isEnabledMaxTime().

{
  return _enabled_max_time;
}

◆ isEnabledMinEpsilonRate()

INLINE bool gum::ApproximationScheme::isEnabledMinEpsilonRate ( ) const
virtualinherited

Returns true if stopping criterion on epsilon rate is enabled, false otherwise.

Returns
Returns true if stopping criterion on epsilon rate is enabled, false otherwise.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 90 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_enabled_min_rate_eps.

Referenced by gum::GibbsBNdistance< GUM_SCALAR >::_computeKL(), and gum::learning::genericBNLearner::isEnabledMinEpsilonRate().

{
  return _enabled_min_rate_eps;
}

◆ isInferenceDone()

template<typename GUM_SCALAR >
INLINE bool gum::BayesNetInference< GUM_SCALAR >::isInferenceDone ( ) const
finalvirtualnoexceptinherited

returns whether the inference object is in a InferenceDone state

Definition at line 89 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__state, and gum::BayesNetInference< GUM_SCALAR >::Done.

{
  return (__state == StateOfInference::Done);
}

◆ isInferenceOutdatedBNPotentials()

template<typename GUM_SCALAR >
INLINE bool gum::BayesNetInference< GUM_SCALAR >::isInferenceOutdatedBNPotentials ( ) const
finalvirtualnoexceptinherited

returns whether the inference object is in a OutdatedBNPotential state

Definition at line 83 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__state, and gum::BayesNetInference< GUM_SCALAR >::OutdatedBNPotentials.

{
  return (__state == StateOfInference::OutdatedBNPotentials);
}

◆ isInferenceOutdatedBNStructure()

template<typename GUM_SCALAR >
INLINE bool gum::BayesNetInference< GUM_SCALAR >::isInferenceOutdatedBNStructure ( ) const
finalvirtualnoexceptinherited

returns whether the inference object is in a OutdatedBNStructure state

Definition at line 76 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__state, and gum::BayesNetInference< GUM_SCALAR >::OutdatedBNStructure.

Referenced by gum::BayesNetInference< GUM_SCALAR >::chgEvidence(), gum::BayesNetInference< GUM_SCALAR >::eraseAllEvidence(), and gum::BayesNetInference< GUM_SCALAR >::eraseEvidence().

{
  return (__state == StateOfInference::OutdatedBNStructure);
}

◆ isInferenceReady()

template<typename GUM_SCALAR >
INLINE bool gum::BayesNetInference< GUM_SCALAR >::isInferenceReady ( ) const
finalvirtualnoexceptinherited

returns whether the inference object is in a ready state

Definition at line 70 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__state, and gum::BayesNetInference< GUM_SCALAR >::ReadyForInference.

Referenced by gum::SamplingInference< GUM_SCALAR >::_onStateChanged(), gum::BayesNetInference< GUM_SCALAR >::makeInference(), and gum::BayesNetInference< GUM_SCALAR >::prepareInference().

{
  return (__state == StateOfInference::ReadyForInference);
}

◆ isTarget() [1/2]

template<typename GUM_SCALAR >
INLINE bool gum::MarginalTargetedInference< GUM_SCALAR >::isTarget ( NodeId  node) const
finalvirtualinherited

return true if variable is a (marginal) target

Definition at line 76 of file marginalTargetedInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__bn, gum::MarginalTargetedInference< GUM_SCALAR >::__targets, gum::Set< Key, Alloc >::contains(), and GUM_ERROR.

Referenced by gum::MarginalTargetedInference< GUM_SCALAR >::isTarget(), gum::MarginalTargetedInference< GUM_SCALAR >::posterior(), and gum::JointTargetedInference< GUM_SCALAR >::posterior().

{
  // check that the variable belongs to the bn
  if (this->__bn == nullptr)
    GUM_ERROR(NullElement,
              "No Bayes net has been assigned to the "
              "inference algorithm");
  if (!this->__bn->dag().exists(node)) {
    GUM_ERROR(UndefinedElement, node << " is not a NodeId in the bn");
  }

  return __targets.contains(node);
}

◆ isTarget() [2/2]

template<typename GUM_SCALAR >
INLINE bool gum::MarginalTargetedInference< GUM_SCALAR >::isTarget ( const std::string &  nodeName) const
finalvirtualinherited

return true if variable is a (marginal) target

Definition at line 91 of file marginalTargetedInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__bn, and gum::MarginalTargetedInference< GUM_SCALAR >::isTarget().

{
  return isTarget(this->__bn->idFromName(nodeName));
}
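As a short, hypothetical illustration of the target API, continuing with ws from the earlier sketch (addTarget and eraseAllTargets are the marginal-target members of this class):

ws.eraseAllTargets();                        // drop the default where every node is a target
ws.addTarget("C");                           // only the posterior of C will be estimated
std::cout << ws.isTarget("C") << std::endl;  // 1
std::cout << ws.nbrTargets() << std::endl;   // 1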

◆ makeInference()

template<typename GUM_SCALAR >
INLINE void gum::BayesNetInference< GUM_SCALAR >::makeInference ( )
finalvirtualinherited

perform the heavy computations needed to compute the targets' posteriors

In a Junction tree propagation scheme, for instance, the heavy computations are those of the messages sent in the JT. This is precisely what makeInference should compute. Later, the computations of the posteriors can be done "lightly" by multiplying and projecting those messages.

Definition at line 711 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__setState(), gum::BayesNetInference< GUM_SCALAR >::_makeInference(), gum::BayesNetInference< GUM_SCALAR >::Done, gum::BayesNetInference< GUM_SCALAR >::isDone(), gum::BayesNetInference< GUM_SCALAR >::isInferenceReady(), and gum::BayesNetInference< GUM_SCALAR >::prepareInference().

Referenced by gum::LoopySamplingInference< GUM_SCALAR, APPROX >::_makeInference(), gum::MCBayesNetGenerator< GUM_SCALAR, ICPTGenerator, ICPTDisturber >::disturbBN(), gum::MarginalTargetedInference< GUM_SCALAR >::evidenceImpact(), gum::JointTargetedInference< GUM_SCALAR >::evidenceJointImpact(), gum::JointTargetedInference< GUM_SCALAR >::jointMutualInformation(), gum::JointTargetedInference< GUM_SCALAR >::jointPosterior(), and gum::MarginalTargetedInference< GUM_SCALAR >::posterior().

{
  if (isDone()) { return; }

  if (!isInferenceReady()) { prepareInference(); }

  _makeInference();

  __setState(StateOfInference::Done);
}
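The state machine described above can be exercised directly. The following hypothetical snippet, continuing with ws, shows the usual outdated -> ReadyForInference -> Done cycle; chgEvidence is assumed to replace the evidence already entered on a node:

ws.chgEvidence("A", 1);                            // invalidates the previous results
std::cout << ws.isDone() << std::endl;             // 0: posteriors are outdated
ws.prepareInference();                             // builds the internal structures
std::cout << ws.isInferenceReady() << std::endl;   // 1
ws.makeInference();                                // heavy computations (sampling)
std::cout << ws.isDone() << std::endl;             // 1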

◆ maxIter()

INLINE Size gum::ApproximationScheme::maxIter ( ) const
virtualinherited

Returns the criterion on number of iterations.

Returns
Returns the criterion on number of iterations.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 102 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_max_iter.

Referenced by gum::learning::genericBNLearner::maxIter().

{ return _max_iter; }

◆ maxTime()

INLINE double gum::ApproximationScheme::maxTime ( ) const
virtualinherited

Returns the timeout (in seconds).

Returns
Returns the timeout (in seconds).

Implements gum::IApproximationSchemeConfiguration.

Definition at line 125 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_max_time.

Referenced by gum::learning::genericBNLearner::maxTime().

{ return _max_time; }

◆ messageApproximationScheme()

INLINE std::string gum::IApproximationSchemeConfiguration::messageApproximationScheme ( ) const
inherited

Returns the approximation scheme message.

Returns
Returns the approximation scheme message.

Definition at line 40 of file IApproximationSchemeConfiguration_inl.h.

References gum::IApproximationSchemeConfiguration::Continue, gum::IApproximationSchemeConfiguration::Epsilon, gum::IApproximationSchemeConfiguration::epsilon(), gum::IApproximationSchemeConfiguration::Limit, gum::IApproximationSchemeConfiguration::maxIter(), gum::IApproximationSchemeConfiguration::maxTime(), gum::IApproximationSchemeConfiguration::minEpsilonRate(), gum::IApproximationSchemeConfiguration::Rate, gum::IApproximationSchemeConfiguration::stateApproximationScheme(), gum::IApproximationSchemeConfiguration::Stopped, gum::IApproximationSchemeConfiguration::TimeLimit, and gum::IApproximationSchemeConfiguration::Undefined.

Referenced by gum::ApproximationScheme::_stopScheme(), gum::ApproximationScheme::continueApproximationScheme(), and gum::credal::InferenceEngine< GUM_SCALAR >::getApproximationSchemeMsg().

{
  std::stringstream s;

  switch (stateApproximationScheme()) {
    case ApproximationSchemeSTATE::Continue: s << "in progress"; break;

    case ApproximationSchemeSTATE::Epsilon:
      s << "stopped with epsilon=" << epsilon();
      break;

    case ApproximationSchemeSTATE::Rate:
      s << "stopped with rate=" << minEpsilonRate();
      break;

    case ApproximationSchemeSTATE::Limit:
      s << "stopped with max iteration=" << maxIter();
      break;

    case ApproximationSchemeSTATE::TimeLimit:
      s << "stopped with timeout=" << maxTime();
      break;

    case ApproximationSchemeSTATE::Stopped: s << "stopped on request"; break;

    case ApproximationSchemeSTATE::Undefined: s << "undefined state"; break;
  };

  return s.str();
}
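In practice this message is handy for reporting why a sampling run ended. A hypothetical usage, continuing with ws:

ws.makeInference();
std::cout << ws.messageApproximationScheme() << std::endl;
// prints, for example, "stopped with epsilon=..." or "stopped with max iteration=..."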

◆ minEpsilonRate()

INLINE double gum::ApproximationScheme::minEpsilonRate ( ) const
virtualinherited

Returns the value of the minimal epsilon rate.

Returns
Returns the value of the minimal epsilon rate.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 74 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_min_rate_eps.

Referenced by gum::learning::genericBNLearner::minEpsilonRate().

{
  return _min_rate_eps;
}

◆ nbrEvidence()

template<typename GUM_SCALAR >
INLINE Size gum::BayesNetInference< GUM_SCALAR >::nbrEvidence ( ) const
finalvirtualinherited

returns the number of evidence entered into the Bayesian network

Definition at line 620 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__evidence.

{
  return __evidence.size();
}

◆ nbrHardEvidence()

template<typename GUM_SCALAR >
INLINE Size gum::BayesNetInference< GUM_SCALAR >::nbrHardEvidence ( ) const
finalvirtualinherited

returns the number of hard evidence entered into the Bayesian network

Definition at line 627 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__hard_evidence_nodes, and gum::Set< Key, Alloc >::size().

{
  return __hard_evidence_nodes.size();
}

◆ nbrIterations()

INLINE Size gum::ApproximationScheme::nbrIterations ( ) const
virtualinherited

Returns the number of iterations.

Returns
Returns the number of iterations.
Exceptions
OperationNotAllowed Raised if the scheme has not been run.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 163 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_current_step, GUM_ERROR, gum::ApproximationScheme::stateApproximationScheme(), and gum::IApproximationSchemeConfiguration::Undefined.

Referenced by gum::GibbsBNdistance< GUM_SCALAR >::_computeKL(), and gum::learning::genericBNLearner::nbrIterations().

{
  if (stateApproximationScheme() == ApproximationSchemeSTATE::Undefined) {
    GUM_ERROR(OperationNotAllowed,
              "state of the approximation scheme is undefined");
  }

  return _current_step;
}

◆ nbrSoftEvidence()

template<typename GUM_SCALAR >
INLINE Size gum::BayesNetInference< GUM_SCALAR >::nbrSoftEvidence ( ) const
finalvirtualinherited

returns the number of soft evidence entered into the Bayesian network

Definition at line 634 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__soft_evidence_nodes, and gum::Set< Key, Alloc >::size().

{
  return __soft_evidence_nodes.size();
}

◆ nbrTargets()

template<typename GUM_SCALAR >
INLINE const Size gum::MarginalTargetedInference< GUM_SCALAR >::nbrTargets ( ) const
finalvirtualnoexceptinherited

returns the number of marginal targets

Definition at line 218 of file marginalTargetedInference_tpl.h.

References gum::MarginalTargetedInference< GUM_SCALAR >::__targets, and gum::Set< Key, Alloc >::size().

{
  return __targets.size();
}

◆ periodSize()

INLINE Size gum::ApproximationScheme::periodSize ( ) const
virtualinherited

Returns the period size.

Returns
Returns the period size.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 149 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_period_size.

Referenced by gum::credal::CNMonteCarloSampling< GUM_SCALAR, BNInferenceEngine >::makeInference(), and gum::learning::genericBNLearner::periodSize().

{ return _period_size; }

◆ posterior() [1/2]

template<typename GUM_SCALAR >
const Potential< GUM_SCALAR > & gum::MarginalTargetedInference< GUM_SCALAR >::posterior ( NodeId  node)
virtualinherited

Computes and returns the posterior of a node.

Returns
a const ref to the posterior probability of the node.
Parameters
node the node for which we need a posterior probability
Warning
for efficiency reasons, the potential is stored in the inference engine and returned by reference. To ensure that the potential still exists after the Inference object is destroyed, the user has to copy it explicitly.
prepareInference and makeInference may be applied if needed by the posterior method.
Exceptions
UndefinedElement if node is not in the set of targets

Reimplemented in gum::JointTargetedInference< GUM_SCALAR >.

Definition at line 242 of file marginalTargetedInference_tpl.h.

References gum::MarginalTargetedInference< GUM_SCALAR >::_posterior(), gum::BayesNetInference< GUM_SCALAR >::evidence(), GUM_ERROR, gum::BayesNetInference< GUM_SCALAR >::hardEvidenceNodes(), gum::BayesNetInference< GUM_SCALAR >::isDone(), gum::MarginalTargetedInference< GUM_SCALAR >::isTarget(), and gum::BayesNetInference< GUM_SCALAR >::makeInference().

Referenced by gum::MarginalTargetedInference< GUM_SCALAR >::evidenceImpact(), gum::MarginalTargetedInference< GUM_SCALAR >::H(), gum::JointTargetedInference< GUM_SCALAR >::posterior(), gum::MarginalTargetedInference< GUM_SCALAR >::posterior(), and gum::Estimator< GUM_SCALAR >::setFromLBP().

{
  if (this->hardEvidenceNodes().contains(node)) {
    return *(this->evidence()[node]);
  }

  if (!isTarget(node)) {
    // throws UndefinedElement if var is not a target
    GUM_ERROR(UndefinedElement, node << " is not a target node");
  }

  if (!this->isDone()) { this->makeInference(); }

  return _posterior(node);
}
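Because the returned potential is owned by the engine, copy it whenever it has to outlive the inference object, as the warning above states. A hypothetical usage, continuing with ws:

// posterior() triggers prepareInference()/makeInference() if needed
gum::Potential< double > pC = ws.posterior("C");   // explicit copy, survives ws
std::cout << pC << std::endl;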

◆ posterior() [2/2]

template<typename GUM_SCALAR >
const Potential< GUM_SCALAR > & gum::MarginalTargetedInference< GUM_SCALAR >::posterior ( const std::string &  nodeName)
virtualinherited

Computes and returns the posterior of a node.

Returns
a const ref to the posterior probability of the node.
Parameters
nodeName the name of the node for which we need a posterior probability
Warning
for efficiency reasons, the potential is stored in the inference engine and returned by reference. To ensure that the potential still exists after the Inference object is destroyed, the user has to copy it explicitly.
prepareInference and makeInference may be applied if needed by the posterior method.
Exceptions
UndefinedElement if node is not in the set of targets

Reimplemented in gum::JointTargetedInference< GUM_SCALAR >.

Definition at line 260 of file marginalTargetedInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::BN(), and gum::MarginalTargetedInference< GUM_SCALAR >::posterior().

{
  return posterior(this->BN().idFromName(nodeName));
}

◆ prepareInference()

template<typename GUM_SCALAR >
INLINE void gum::BayesNetInference< GUM_SCALAR >::prepareInference ( )
finalvirtualinherited

prepare the internal inference structures for the next inference

Definition at line 692 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__bn, gum::BayesNetInference< GUM_SCALAR >::__setState(), gum::BayesNetInference< GUM_SCALAR >::__state, gum::BayesNetInference< GUM_SCALAR >::_updateOutdatedBNPotentials(), gum::BayesNetInference< GUM_SCALAR >::_updateOutdatedBNStructure(), GUM_ERROR, gum::BayesNetInference< GUM_SCALAR >::isDone(), gum::BayesNetInference< GUM_SCALAR >::isInferenceReady(), gum::BayesNetInference< GUM_SCALAR >::OutdatedBNStructure, and gum::BayesNetInference< GUM_SCALAR >::ReadyForInference.

Referenced by gum::BayesNetInference< GUM_SCALAR >::makeInference(), and gum::SamplingInference< GUM_SCALAR >::samplingBN().

{
  if (isInferenceReady() || isDone()) { return; }

  if (__bn == nullptr)
    GUM_ERROR(NullElement,
              "No Bayes net has been assigned to the "
              "inference algorithm");

  if (__state == StateOfInference::OutdatedBNStructure)
    _updateOutdatedBNStructure();
  else
    _updateOutdatedBNPotentials();

  __setState(StateOfInference::ReadyForInference);
}

◆ remainingBurnIn()

INLINE Size gum::ApproximationScheme::remainingBurnIn ( )
inherited

Returns the remaining burn in.

Returns
Returns the remaining burn in.

Definition at line 210 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_burn_in, and gum::ApproximationScheme::_current_step.

{
  if (_burn_in > _current_step) {
    return _burn_in - _current_step;
  } else {
    return 0;
  }
}

◆ samplingBN()

template<typename GUM_SCALAR >
INLINE const IBayesNet< GUM_SCALAR > & gum::SamplingInference< GUM_SCALAR >::samplingBN ( )
inherited

get the BayesNet which is used to really perform the sampling

Definition at line 74 of file samplingInference_tpl.h.

References gum::SamplingInference< GUM_SCALAR >::__samplingBN, gum::BayesNetInference< GUM_SCALAR >::BN(), and gum::BayesNetInference< GUM_SCALAR >::prepareInference().

Referenced by gum::SamplingInference< GUM_SCALAR >::_addVarSample(), gum::ImportanceSampling< GUM_SCALAR >::_draw(), and gum::SamplingInference< GUM_SCALAR >::_setEstimatorFromBN().

{
  this->prepareInference();
  if (__samplingBN == nullptr)
    return this->BN();
  else
    return *__samplingBN;
}

◆ setBN()

template<typename GUM_SCALAR >
void gum::BayesNetInference< GUM_SCALAR >::setBN ( const IBayesNet< GUM_SCALAR > *  bn)
virtualinherited

assigns a new BN to the inference engine

Assigns a new BN to the BayesNetInference engine and sends messages to the descendants of BayesNetInference to inform them that the BN has changed.

Warning
By default, all the nodes of the Bayes net are targets.
Note that, by aGrUM's rule, the BN is not copied into the inference engine but only referenced.

Definition at line 132 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__bn, gum::BayesNetInference< GUM_SCALAR >::__computeDomainSizes(), gum::BayesNetInference< GUM_SCALAR >::__setState(), gum::BayesNetInference< GUM_SCALAR >::_onBayesNetChanged(), gum::BayesNetInference< GUM_SCALAR >::clear(), and gum::BayesNetInference< GUM_SCALAR >::OutdatedBNStructure.

{
  clear();
  __bn = bn;
  __computeDomainSizes();
  _onBayesNetChanged(bn);
  __setState(StateOfInference::OutdatedBNStructure);
}

◆ setEpsilon()

INLINE void gum::ApproximationScheme::setEpsilon ( double  eps)
virtualinherited

Given that we approximate f(t), stopping criterion on |f(t+1)-f(t)|.

If the criterion was disabled it will be enabled.

Parameters
eps The new epsilon value.
Exceptions
OutOfLowerBound Raised if eps < 0.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 43 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_enabled_eps, gum::ApproximationScheme::_eps, and GUM_ERROR.

Referenced by gum::credal::CNMonteCarloSampling< GUM_SCALAR, BNInferenceEngine >::__mcInitApproximationScheme(), gum::GibbsBNdistance< GUM_SCALAR >::GibbsBNdistance(), gum::GibbsSampling< GUM_SCALAR >::GibbsSampling(), gum::learning::GreedyHillClimbing::GreedyHillClimbing(), gum::SamplingInference< GUM_SCALAR >::SamplingInference(), and gum::learning::genericBNLearner::setEpsilon().

{
  if (eps < 0.) { GUM_ERROR(OutOfLowerBound, "eps should be >=0"); }

  _eps = eps;
  _enabled_eps = true;
}

◆ setMaxIter()

INLINE void gum::ApproximationScheme::setMaxIter ( Size  max)
virtualinherited

Stopping criterion on number of iterations.

If the criterion was disabled it will be enabled.

Parameters
max The maximum number of iterations.
Exceptions
OutOfLowerBound Raised if max < 1.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 95 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_enabled_max_iter, gum::ApproximationScheme::_max_iter, and GUM_ERROR.

Referenced by gum::GibbsBNdistance< GUM_SCALAR >::GibbsBNdistance(), gum::SamplingInference< GUM_SCALAR >::SamplingInference(), and gum::learning::genericBNLearner::setMaxIter().

{
  if (max < 1) { GUM_ERROR(OutOfLowerBound, "max should be >=1"); }
  _max_iter = max;
  _enabled_max_iter = true;
}

◆ setMaxTime()

INLINE void gum::ApproximationScheme::setMaxTime ( double  timeout)
virtualinherited

Stopping criterion on timeout.

If the criterion was disabled it will be enabled.

Parameters
timeout The timeout value in seconds.
Exceptions
OutOfLowerBound Raised if timeout <= 0.0.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 118 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_enabled_max_time, gum::ApproximationScheme::_max_time, and GUM_ERROR.

Referenced by gum::credal::CNMonteCarloSampling< GUM_SCALAR, BNInferenceEngine >::CNMonteCarloSampling(), gum::GibbsBNdistance< GUM_SCALAR >::GibbsBNdistance(), gum::SamplingInference< GUM_SCALAR >::SamplingInference(), and gum::learning::genericBNLearner::setMaxTime().

{
  if (timeout <= 0.) { GUM_ERROR(OutOfLowerBound, "timeout should be >0."); }
  _max_time = timeout;
  _enabled_max_time = true;
}

◆ setMinEpsilonRate()

INLINE void gum::ApproximationScheme::setMinEpsilonRate ( double  rate)
virtualinherited

Given that we approximate f(t), stopping criterion on d/dt(|f(t+1)-f(t)|).

If the criterion was disabled it will be enabled.

Parameters
rate The minimal epsilon rate.
Exceptions
OutOfLowerBound Raised if rate < 0.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 66 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_enabled_min_rate_eps, gum::ApproximationScheme::_min_rate_eps, and GUM_ERROR.

Referenced by gum::GibbsBNdistance< GUM_SCALAR >::GibbsBNdistance(), gum::GibbsSampling< GUM_SCALAR >::GibbsSampling(), gum::SamplingInference< GUM_SCALAR >::SamplingInference(), and gum::learning::genericBNLearner::setMinEpsilonRate().

{
  if (rate < 0) { GUM_ERROR(OutOfLowerBound, "rate should be >=0"); }

  _min_rate_eps = rate;
  _enabled_min_rate_eps = true;
}

◆ setPeriodSize()

INLINE void gum::ApproximationScheme::setPeriodSize ( Size  p)
virtualinherited

Sets the number of samples drawn between two checks of the stopping criteria.

Parameters
p The new period value.
Exceptions
OutOfLowerBound Raised if p < 1.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 143 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_period_size, and GUM_ERROR.

Referenced by gum::credal::CNMonteCarloSampling< GUM_SCALAR, BNInferenceEngine >::CNMonteCarloSampling(), gum::GibbsBNdistance< GUM_SCALAR >::GibbsBNdistance(), gum::SamplingInference< GUM_SCALAR >::SamplingInference(), and gum::learning::genericBNLearner::setPeriodSize().

{
  if (p < 1) { GUM_ERROR(OutOfLowerBound, "p should be >=1"); }

  _period_size = p;
}

◆ setVerbosity()

INLINE void gum::ApproximationScheme::setVerbosity ( bool  v)
virtualinherited

Set the verbosity on (true) or off (false).

Parameters
v If true, then verbosity is turned on.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 152 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_verbosity.

Referenced by gum::GibbsBNdistance< GUM_SCALAR >::GibbsBNdistance(), gum::SamplingInference< GUM_SCALAR >::SamplingInference(), and gum::learning::genericBNLearner::setVerbosity().

{ _verbosity = v; }
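Putting the setters documented above together, here is a hypothetical configuration of the stopping criteria before running the sampler, continuing with ws (the values are illustrative only):

ws.setEpsilon(1e-3);          // stop when |f(t+1)-f(t)| drops below 1e-3
ws.setMinEpsilonRate(1e-4);   // ...or when its rate of change does
ws.setMaxIter(100000);        // hard cap on the number of iterations
ws.setMaxTime(10);            // timeout, in seconds
ws.setPeriodSize(500);        // check the criteria every 500 samples
ws.setVerbosity(true);        // record the error history
ws.makeInference();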

◆ softEvidenceNodes()

template<typename GUM_SCALAR >
INLINE const NodeSet & gum::BayesNetInference< GUM_SCALAR >::softEvidenceNodes ( ) const
inherited

returns the set of nodes with soft evidence

the set of nodes that received soft evidence

Definition at line 658 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__soft_evidence_nodes.

Referenced by gum::SamplingInference< GUM_SCALAR >::contextualize().

{
  return __soft_evidence_nodes;
}

◆ startOfPeriod()

INLINE bool gum::ApproximationScheme::startOfPeriod ( )
inherited

Returns true if we are at the beginning of a period (computing the error is then mandatory).

Returns
Returns true if we are at the beginning of a period (computing the error is then mandatory).

Definition at line 197 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_burn_in, gum::ApproximationScheme::_current_step, and gum::ApproximationScheme::_period_size.

Referenced by gum::ApproximationScheme::continueApproximationScheme().

{
  if (_current_step < _burn_in) { return false; }

  if (_period_size == 1) { return true; }

  return ((_current_step - _burn_in) % _period_size == 0);
}

◆ state()

template<typename GUM_SCALAR >
INLINE BayesNetInference< GUM_SCALAR >::StateOfInference gum::BayesNetInference< GUM_SCALAR >::state ( ) const
finalvirtualnoexceptinherited

returns the state of the inference engine

Definition at line 104 of file BayesNetInference_tpl.h.

References gum::BayesNetInference< GUM_SCALAR >::__state.

Referenced by gum::BayesNetInference< GUM_SCALAR >::__setState().

{
  return __state;
}

◆ stateApproximationScheme()

INLINE IApproximationSchemeConfiguration::ApproximationSchemeSTATE gum::ApproximationScheme::stateApproximationScheme ( ) const
virtualinherited

Returns the approximation scheme state.

Returns
Returns the approximation scheme state.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 158 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_current_state.

Referenced by gum::ApproximationScheme::continueApproximationScheme(), gum::ApproximationScheme::history(), gum::ApproximationScheme::nbrIterations(), and gum::learning::genericBNLearner::stateApproximationScheme().

{
  return _current_state;
}

◆ stopApproximationScheme()

INLINE void gum::ApproximationScheme::stopApproximationScheme ( )
inherited

Stop the approximation scheme.

Definition at line 219 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_current_state, gum::ApproximationScheme::_stopScheme(), gum::IApproximationSchemeConfiguration::Continue, and gum::IApproximationSchemeConfiguration::Stopped.

Referenced by gum::learning::DAG2BNLearner< ALLOC >::createBN(), gum::learning::GreedyHillClimbing::learnStructure(), and gum::learning::LocalSearchWithTabuList::learnStructure().


◆ targets()

template<typename GUM_SCALAR >
INLINE const NodeSet & gum::MarginalTargetedInference< GUM_SCALAR >::targets ( ) const
finalvirtualnoexceptinherited

returns the list of marginal targets

Definition at line 211 of file marginalTargetedInference_tpl.h.

References gum::MarginalTargetedInference< GUM_SCALAR >::__targets.

Referenced by gum::SamplingInference< GUM_SCALAR >::contextualize().

{
  return __targets;
}

◆ updateApproximationScheme()

INLINE void gum::ApproximationScheme::updateApproximationScheme ( unsigned int  incr = 1)
inherited

Update the scheme w.r.t. the new error and increment the step count.

Parameters
incr The increment added to the current step count.

Definition at line 206 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_current_step.

Referenced by gum::GibbsBNdistance< GUM_SCALAR >::_computeKL(), gum::SamplingInference< GUM_SCALAR >::_loopApproxInference(), gum::learning::DAG2BNLearner< ALLOC >::createBN(), gum::learning::GreedyHillClimbing::learnStructure(), gum::learning::LocalSearchWithTabuList::learnStructure(), and gum::credal::CNMonteCarloSampling< GUM_SCALAR, BNInferenceEngine >::makeInference().

{
  _current_step += incr;
}

◆ verbosity()

INLINE bool gum::ApproximationScheme::verbosity ( ) const
virtualinherited

Returns true if verbosity is enabled.

Returns
Returns true if verbosity is enabled.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 154 of file approximationScheme_inl.h.

References gum::ApproximationScheme::_verbosity.

Referenced by gum::ApproximationScheme::continueApproximationScheme(), gum::ApproximationScheme::history(), and gum::learning::genericBNLearner::verbosity().

{ return _verbosity; }

Member Data Documentation

◆ __estimator

◆ _burn_in

◆ _current_epsilon

double gum::ApproximationScheme::_current_epsilon
protectedinherited

◆ _current_rate

double gum::ApproximationScheme::_current_rate
protectedinherited

◆ _current_state

◆ _current_step

◆ _enabled_eps

◆ _enabled_max_iter

bool gum::ApproximationScheme::_enabled_max_iter
protectedinherited

◆ _enabled_max_time

◆ _enabled_min_rate_eps

bool gum::ApproximationScheme::_enabled_min_rate_eps
protectedinherited

◆ _eps

double gum::ApproximationScheme::_eps
protectedinherited

◆ _history

std::vector< double > gum::ApproximationScheme::_history
protectedinherited

◆ _last_epsilon

double gum::ApproximationScheme::_last_epsilon
protectedinherited

Last epsilon value.

Definition at line 372 of file approximationScheme.h.

Referenced by gum::ApproximationScheme::continueApproximationScheme().

◆ _max_iter

Size gum::ApproximationScheme::_max_iter
protectedinherited

◆ _max_time

double gum::ApproximationScheme::_max_time
protectedinherited

◆ _min_rate_eps

double gum::ApproximationScheme::_min_rate_eps
protectedinherited

◆ _period_size

Size gum::ApproximationScheme::_period_size
protectedinherited

◆ _timer

◆ _verbosity

bool gum::ApproximationScheme::_verbosity
protectedinherited

If true, verbosity is enabled.

Definition at line 420 of file approximationScheme.h.

Referenced by gum::ApproximationScheme::setVerbosity(), and gum::ApproximationScheme::verbosity().

◆ isContextualized

template<typename GUM_SCALAR >
bool gum::SamplingInference< GUM_SCALAR >::isContextualized = false
protectedinherited

◆ isSetEstimator

◆ onProgress

◆ onStop

Signaler1< std::string > gum::IApproximationSchemeConfiguration::onStop
inherited

Criteria messageApproximationScheme.

Definition at line 62 of file IApproximationSchemeConfiguration.h.

Referenced by gum::ApproximationScheme::_stopScheme(), and gum::learning::genericBNLearner::distributeStop().


The documentation for this class was generated from the following files: