aGrUM  0.13.2
samplingInference_tpl.h
/***************************************************************************
 *   Copyright (C) 2005 by Pierre-Henri WUILLEMIN et Christophe GONZALES   *
 *   {prenom.nom}_at_lip6.fr                                               *
 *                                                                         *
 *   This program is free software; you can redistribute it and/or modify  *
 *   it under the terms of the GNU General Public License as published by  *
 *   the Free Software Foundation; either version 2 of the License, or     *
 *   (at your option) any later version.                                   *
 *                                                                         *
 *   This program is distributed in the hope that it will be useful,       *
 *   but WITHOUT ANY WARRANTY; without even the implied warranty of        *
 *   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the         *
 *   GNU General Public License for more details.                          *
 *                                                                         *
 *   You should have received a copy of the GNU General Public License     *
 *   along with this program; if not, write to the                         *
 *   Free Software Foundation, Inc.,                                       *
 *   59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.             *
 ***************************************************************************/

/**
 * @file
 * @brief This file contains general methods for simulation-oriented
 * approximate inference.
 */

#define DEFAULT_MAXITER 10000000
#define DEFAULT_PERIOD_SIZE 100
#define DEFAULT_VERBOSITY false
#define DEFAULT_TIMEOUT 6000
#define DEFAULT_EPSILON 1e-2
#define DEFAULT_MIN_EPSILON_RATE 1e-5

namespace gum {

  template < typename GUM_SCALAR >
  SamplingInference< GUM_SCALAR >::SamplingInference(
     const IBayesNet< GUM_SCALAR >* bn) :
      ApproximateInference< GUM_SCALAR >(bn),
      __estimator(), __samplingBN(nullptr) {
    // the approximation scheme is initialized with the default parameters
    this->setEpsilon(DEFAULT_EPSILON);
    this->setMinEpsilonRate(DEFAULT_MIN_EPSILON_RATE);
    this->setMaxIter(DEFAULT_MAXITER);
    this->setMaxTime(DEFAULT_TIMEOUT);
    this->setPeriodSize(DEFAULT_PERIOD_SIZE);
    this->setVerbosity(DEFAULT_VERBOSITY);
    GUM_CONSTRUCTOR(SamplingInference);
  }


  template < typename GUM_SCALAR >
  SamplingInference< GUM_SCALAR >::~SamplingInference() {
    GUM_DESTRUCTOR(SamplingInference);
    if (__samplingBN != nullptr) {
      if (isContextualized) {   // otherwise __samplingBN==&BN()
        delete __samplingBN;
      }
    }
  }

  template < typename GUM_SCALAR >
  INLINE const IBayesNet< GUM_SCALAR >&
               SamplingInference< GUM_SCALAR >::samplingBN() {
    this->prepareInference();
    if (__samplingBN == nullptr)
      return this->BN();
    else
      return *__samplingBN;
  }
  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_setEstimatorFromBN() {
    __estimator.setFromBN(&samplingBN(), this->hardEvidenceNodes());
    this->isSetEstimator = true;
  }

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_setEstimatorFromLBP(
     LoopyBeliefPropagation< GUM_SCALAR >* lbp, GUM_SCALAR virtualLBPSize) {
    __estimator.setFromLBP(lbp, this->hardEvidenceNodes(), virtualLBPSize);
    this->isSetEstimator = true;
  }


  template < typename GUM_SCALAR >
  const Potential< GUM_SCALAR >&
     SamplingInference< GUM_SCALAR >::currentPosterior(NodeId id) {
    return __estimator.posterior(this->BN().variable(id));
  }

  template < typename GUM_SCALAR >
  const Potential< GUM_SCALAR >&
     SamplingInference< GUM_SCALAR >::currentPosterior(const std::string& name) {
    return currentPosterior(this->BN().idFromName(name));
  }

  template < typename GUM_SCALAR >
  const Potential< GUM_SCALAR >&
     SamplingInference< GUM_SCALAR >::_posterior(NodeId id) {
    return __estimator.posterior(this->BN().variable(id));
  }

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::contextualize() {
    // Finding Barren nodes

    BarrenNodesFinder barr_nodes = BarrenNodesFinder(&this->BN().dag());
    barr_nodes.setTargets(&this->targets());
    barr_nodes.setEvidence(&this->hardEvidenceNodes());
    const NodeSet& barren = barr_nodes.barrenNodes();

    // creating BN fragment
    __samplingBN = new BayesNetFragment< GUM_SCALAR >(this->BN());
    for (const auto elmt : this->BN().dag().asNodeSet() - barren)
      __samplingBN->installNode(elmt);

    // D-separated nodes

    dSeparation dsep = gum::dSeparation();
    NodeSet     requisite;
    dsep.requisiteNodes(
       this->BN().dag(),
       this->BN().nodes().asNodeSet(),   // no target for approximateInference
       this->hardEvidenceNodes(),
       this->softEvidenceNodes(),   // should be empty
       requisite);
    requisite += this->hardEvidenceNodes();

    auto nonRequisite = this->BN().dag().asNodeSet() - requisite;

    for (const auto elmt : nonRequisite)
      __samplingBN->uninstallNode(elmt);

    // absorb hard evidence: the CPT of every child of an observed node is
    // replaced by its restriction to the observed value
    for (const auto hard : this->hardEvidenceNodes()) {
      gum::Instantiation I;
      I.add(this->BN().variable(hard));
      I.chgVal(this->BN().variable(hard), this->hardEvidence()[hard]);

      for (const auto& child : this->BN().children(hard)) {
        auto p = new gum::Potential< GUM_SCALAR >();
        *p = this->BN().cpt(child).extract(I);
        __samplingBN->installCPT(child, p);
      }
    }

    this->isContextualized = true;
    this->_onContextualize(__samplingBN);
  }


  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_updateOutdatedBNStructure() {
    if (!isSetEstimator) this->_setEstimatorFromBN();
  }

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_makeInference() {
    //@todo This should be in __prepareInference
    if (!isContextualized) { this->contextualize(); }

    this->initApproximationScheme();

    gum::Instantiation Ip;
    float              w = .0;   // weight of the current sample

    // Burn in
    Ip = this->_burnIn();
    do {
      Ip = this->_draw(&w, Ip);
      __estimator.update(Ip, w);
      this->updateApproximationScheme();
    } while (this->continueApproximationScheme(__estimator.confidence()));

    this->isSetEstimator = false;
  }
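
  // -------------------------------------------------------------------------
  // Editorial sketch (not part of samplingInference_tpl.h): _burnIn() and
  // _draw() are pure virtual and must be provided by a concrete sampler.
  // Assuming a hypothetical subclass "MySampler", a plain forward-sampling
  // _draw() could re-sample every node of the contextualized BN in
  // topological order through _addVarSample(), leaving the weight at 1:
  //
  //   template < typename GUM_SCALAR >
  //   Instantiation MySampler< GUM_SCALAR >::_draw(float* w, Instantiation prev) {
  //     *w = 1.0f;   // unweighted forward sampling
  //     Instantiation I;
  //     for (const auto nod : this->samplingBN().topologicalOrder())
  //       this->_addVarSample(nod, &I);   // sample nod given its sampled parents
  //     return I;
  //   }
  // -------------------------------------------------------------------------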

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_addVarSample(NodeId nod,
                                                      Instantiation* I) {
    gum::Instantiation Itop = gum::Instantiation(*I);

    I->add(samplingBN().variable(nod));
    I->chgVal(samplingBN().variable(nod),
              samplingBN().cpt(nod).extract(Itop).draw());
  }

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_onContextualize(
     BayesNetFragment< GUM_SCALAR >* bn) {}

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_onEvidenceAdded(NodeId id,
                                                         bool isHardEvidence) {
    if (!isHardEvidence) {
      GUM_ERROR(FatalError, "Approximated inference only accept hard evidence");
    }
  }

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_onEvidenceErased(NodeId id,
                                                          bool isHardEvidence) {}

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_onAllEvidenceErased(
     bool contains_hard_evidence) {}

  template < typename GUM_SCALAR >
  void
     SamplingInference< GUM_SCALAR >::_onEvidenceChanged(NodeId id,
                                                         bool hasChangedSoftHard) {
    if (hasChangedSoftHard) {
      GUM_ERROR(FatalError, "Approximated inference only accept hard evidence");
    }
  }

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_onBayesNetChanged(
     const IBayesNet< GUM_SCALAR >* bn) {}

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_updateOutdatedBNPotentials() {}

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_onMarginalTargetAdded(NodeId id) {}

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_onMarginalTargetErased(NodeId id) {}

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_onAllMarginalTargetsAdded() {}

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_onAllMarginalTargetsErased() {}

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_onStateChanged() {
    if (this->isInferenceReady()) {
      __estimator.clear();
      this->initApproximationScheme();
    }
  }
}   // namespace gum
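
A minimal usage sketch (editorial addition, not part of samplingInference_tpl.h): it shows how the machinery above is typically driven through a concrete sampler. It assumes gum::GibbsSampling (a subclass of SamplingInference) and the header paths below are available in this aGrUM version; the toy network is purely illustrative.

#include <iostream>

#include <agrum/BN/BayesNet.h>
#include <agrum/BN/inference/GibbsSampling.h>
#include <agrum/variables/labelizedVariable.h>

int main() {
  // a tiny A -> B network built by hand
  gum::BayesNet< double > bn;
  auto a = bn.add(gum::LabelizedVariable("A", "first node", 2));
  auto b = bn.add(gum::LabelizedVariable("B", "second node", 2));
  bn.addArc(a, b);
  bn.cpt(a).fillWith({0.2, 0.8});
  bn.cpt(b).fillWith({0.9, 0.1, 0.3, 0.7});   // order follows the CPT's variable sequence

  // GibbsSampling inherits the SamplingInference interface defined above
  gum::GibbsSampling< double > inf(&bn);
  inf.addEvidence(b, 1);    // hard evidence only (cf. _onEvidenceAdded)
  inf.setEpsilon(1e-2);     // stopping criteria of the approximation scheme
  inf.setMaxTime(10);
  inf.makeInference();      // contextualizes the BN, then samples until convergence
  std::cout << inf.posterior(a) << std::endl;

  return 0;
}

Posteriors will differ slightly from run to run, since the estimation is sampled; tightening setEpsilon() or setMinEpsilonRate() trades computation time for accuracy.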