aGrUM  0.17.2
a C++ library for (probabilistic) graphical models
samplingInference_tpl.h
/**
 * Copyright 2005-2020 Pierre-Henri WUILLEMIN et Christophe GONZALES
 * (info_at_agrum_dot_org)
 *
 * Implementation of SamplingInference, the abstract base class of the
 * sampling-based (approximate) inference engines for Bayesian networks.
 */

#include <agrum/BN/inference/loopyBeliefPropagation.h>
#include <agrum/BN/inference/samplingInference.h>

#define DEFAULT_MAXITER 10000000
#define DEFAULT_PERIOD_SIZE 100
#define DEFAULT_VERBOSITY false
#define DEFAULT_TIMEOUT 6000
#define DEFAULT_EPSILON 1e-2
#define DEFAULT_MIN_EPSILON_RATE 1e-5


namespace gum {

  template < typename GUM_SCALAR >
  SamplingInference< GUM_SCALAR >::SamplingInference(
     const IBayesNet< GUM_SCALAR >* bn) :
      ApproximateInference< GUM_SCALAR >(bn),
      __estimator(), __samplingBN(nullptr) {
    // default parameters of the approximation scheme
    this->setEpsilon(DEFAULT_EPSILON);
    this->setMinEpsilonRate(DEFAULT_MIN_EPSILON_RATE);
    this->setMaxIter(DEFAULT_MAXITER);
    this->setVerbosity(DEFAULT_VERBOSITY);
    this->setPeriodSize(DEFAULT_PERIOD_SIZE);
    this->setMaxTime(DEFAULT_TIMEOUT);
    GUM_CONSTRUCTOR(SamplingInference);
  }


  template < typename GUM_SCALAR >
  SamplingInference< GUM_SCALAR >::~SamplingInference() {
    GUM_DESTRUCTOR(SamplingInference);
    if (__samplingBN != nullptr) {
      if (isContextualized) {   // otherwise __samplingBN == &BN()
        delete __samplingBN;
      }
    }
  }

  template < typename GUM_SCALAR >
  INLINE const IBayesNet< GUM_SCALAR >&
               SamplingInference< GUM_SCALAR >::samplingBN() {
    this->prepareInference();
    if (__samplingBN == nullptr)
      return this->BN();
    else
      return *__samplingBN;
  }

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_setEstimatorFromBN() {
    __estimator.setFromBN(&samplingBN(), this->hardEvidenceNodes());
    this->isSetEstimator = true;
  }

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_setEstimatorFromLBP(
     LoopyBeliefPropagation< GUM_SCALAR >* lbp, GUM_SCALAR virtualLBPSize) {
    __estimator.setFromLBP(lbp, this->hardEvidenceNodes(), virtualLBPSize);
    this->isSetEstimator = true;
  }


  template < typename GUM_SCALAR >
  const Potential< GUM_SCALAR >&
     SamplingInference< GUM_SCALAR >::currentPosterior(NodeId id) {
    return __estimator.posterior(this->BN().variable(id));
  }

  template < typename GUM_SCALAR >
  const Potential< GUM_SCALAR >&
     SamplingInference< GUM_SCALAR >::currentPosterior(const std::string& name) {
    return currentPosterior(this->BN().idFromName(name));
  }

  template < typename GUM_SCALAR >
  const Potential< GUM_SCALAR >&
     SamplingInference< GUM_SCALAR >::_posterior(NodeId id) {
    return __estimator.posterior(this->BN().variable(id));
  }

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::contextualize() {
    // Finding Barren nodes

    BarrenNodesFinder barr_nodes = BarrenNodesFinder(&this->BN().dag());
    barr_nodes.setTargets(&this->targets());
    barr_nodes.setEvidence(&this->hardEvidenceNodes());
    const NodeSet& barren = barr_nodes.barrenNodes();

    // creating BN fragment
    __samplingBN = new BayesNetFragment< GUM_SCALAR >(this->BN());
    for (const auto elmt: this->BN().dag().asNodeSet() - barren)
      __samplingBN->installNode(elmt);

    // D-separated nodes

    dSeparation dsep = gum::dSeparation();
    NodeSet requisite;
    dsep.requisiteNodes(
       this->BN().dag(),
       this->BN().nodes().asNodeSet(),   // no target for approximateInference
       this->hardEvidenceNodes(),
       this->softEvidenceNodes(),   // should be empty
       requisite);
    requisite += this->hardEvidenceNodes();

    auto nonRequisite = this->BN().dag().asNodeSet() - requisite;

    for (const auto elmt: nonRequisite)
      __samplingBN->uninstallNode(elmt);

    // absorb hard evidence into the CPTs of the evidence nodes' children
    for (const auto hard: this->hardEvidenceNodes()) {
      gum::Instantiation I;
      I.add(this->BN().variable(hard));
      I.chgVal(this->BN().variable(hard), this->hardEvidence()[hard]);

      for (const auto& child: this->BN().children(hard)) {
        __samplingBN->installCPT(child, this->BN().cpt(child).extract(I));
      }
    }

    this->isContextualized = true;
    this->_onContextualize(__samplingBN);
  }


  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_makeInference() {
    if (!isSetEstimator) this->_setEstimatorFromBN();
    _loopApproxInference();
  }

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_loopApproxInference() {
    //@todo This should be in __prepareInference
    if (!isContextualized) { this->contextualize(); }

    this->initApproximationScheme();
    gum::Instantiation Ip;
    GUM_SCALAR         w = .0;

    // Burn in
    Ip = this->_burnIn();
    do {
      Ip = this->_draw(&w, Ip);
      __estimator.update(Ip, w);
      this->updateApproximationScheme();
    } while (this->continueApproximationScheme(__estimator.confidence()));

    this->isSetEstimator = false;
  }


  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_addVarSample(NodeId nod,
                                                      Instantiation* I) {
    // Itop keeps the values already drawn for the other variables
    gum::Instantiation Itop = gum::Instantiation(*I);

    I->add(samplingBN().variable(nod));
    I->chgVal(samplingBN().variable(nod),
              samplingBN().cpt(nod).extract(Itop).draw());
  }

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_onContextualize(
     BayesNetFragment< GUM_SCALAR >* bn) {}


  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_onEvidenceAdded(const NodeId id,
                                                          bool isHardEvidence) {
    if (!isHardEvidence) {
      GUM_ERROR(FatalError, "Approximated inference only accept hard evidence");
    }
  }

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_onEvidenceErased(const NodeId id,
                                                           bool isHardEvidence) {}

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_onAllEvidenceErased(
     bool contains_hard_evidence) {}

  template < typename GUM_SCALAR >
  void
     SamplingInference< GUM_SCALAR >::_onEvidenceChanged(const NodeId id,
                                                         bool hasChangedSoftHard) {
    if (hasChangedSoftHard) {
      GUM_ERROR(FatalError, "Approximated inference only accept hard evidence");
    }
  }

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_onBayesNetChanged(
     const IBayesNet< GUM_SCALAR >* bn) {}


  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_updateOutdatedBNStructure() {}

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_updateOutdatedBNPotentials() {}

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_onMarginalTargetAdded(const NodeId id) {}

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_onMarginalTargetErased(const NodeId id) {}

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_onAllMarginalTargetsAdded() {}

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_onAllMarginalTargetsErased() {}


  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_onStateChanged() {
    if (this->isInferenceReady()) {
      __estimator.clear();
      this->initApproximationScheme();
    }
  }

}   // namespace gum
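
Below is a minimal usage sketch, for illustration only: it shows how a concrete sampling engine built on SamplingInference is typically driven. The GibbsSampling engine, the BayesNet/LabelizedVariable construction calls and the include paths are assumed from the rest of the aGrUM 0.17 library; they are not defined in the listing above.

// Usage sketch (assumption: GibbsSampling, BayesNet and LabelizedVariable
// come from the wider aGrUM library, not from this file).
#include <iostream>

#include <agrum/BN/BayesNet.h>
#include <agrum/BN/inference/GibbsSampling.h>
#include <agrum/variables/labelizedVariable.h>

int main() {
  // a tiny network a -> b
  gum::BayesNet< double > bn;
  const auto a = bn.add(gum::LabelizedVariable("a", "node a", 2));
  const auto b = bn.add(gum::LabelizedVariable("b", "node b", 2));
  bn.addArc(a, b);
  bn.cpt(a).fillWith({0.2, 0.8});
  bn.cpt(b).fillWith({0.1, 0.9,    // P(b | a=0)
                      0.7, 0.3});  // P(b | a=1)

  // sampling-based inference: only hard evidence is accepted
  gum::GibbsSampling< double > inf(&bn);
  inf.addEvidence(a, 1);      // hard evidence on node a
  inf.setEpsilon(1e-2);       // stopping criterion (cf. DEFAULT_EPSILON)
  inf.makeInference();        // burn-in, then draw/update until convergence
  std::cout << inf.posterior(b) << std::endl;

  return 0;
}

Note that only hard evidence is supported (see _onEvidenceAdded above), and makeInference() ends up in the _burnIn / _draw / estimator-update loop of _loopApproxInference.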