aGrUM  0.16.0
samplingInference_tpl.h
/*
 * Copyright 2005-2019 Pierre-Henri WUILLEMIN and Christophe GONZALES (LIP6)
 * {prenom.nom}_at_lip6.fr
 *
 * Implementation (templates) of the SamplingInference class.
 */

#define DEFAULT_MAXITER 10000000
#define DEFAULT_PERIOD_SIZE 100
#define DEFAULT_VERBOSITY false
#define DEFAULT_TIMEOUT 6000
#define DEFAULT_EPSILON 1e-2
#define DEFAULT_MIN_EPSILON_RATE 1e-5


namespace gum {

  template < typename GUM_SCALAR >
  SamplingInference< GUM_SCALAR >::SamplingInference(
     const IBayesNet< GUM_SCALAR >* bn) :
      ApproximateInference< GUM_SCALAR >(bn),
      __estimator(), __samplingBN(nullptr) {
    this->setEpsilon(DEFAULT_EPSILON);
    this->setMinEpsilonRate(DEFAULT_MIN_EPSILON_RATE);
    this->setMaxIter(DEFAULT_MAXITER);
    this->setVerbosity(DEFAULT_VERBOSITY);
    this->setPeriodSize(DEFAULT_PERIOD_SIZE);
    this->setMaxTime(DEFAULT_TIMEOUT);
    GUM_CONSTRUCTOR(SamplingInference);
  }


  template < typename GUM_SCALAR >
  SamplingInference< GUM_SCALAR >::~SamplingInference() {
    GUM_DESTRUCTOR(SamplingInference);
    if (__samplingBN != nullptr) {
      if (isContextualized) {   // otherwise __samplingBN == &BN()
        delete __samplingBN;
      }
    }
  }

  template < typename GUM_SCALAR >
  INLINE const IBayesNet< GUM_SCALAR >&
     SamplingInference< GUM_SCALAR >::samplingBN() {
    this->prepareInference();
    if (__samplingBN == nullptr)
      return this->BN();
    else
      return *__samplingBN;
  }

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_setEstimatorFromBN() {
    __estimator.setFromBN(&samplingBN(), this->hardEvidenceNodes());
    this->isSetEstimator = true;
  }

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_setEstimatorFromLBP(
     LoopyBeliefPropagation< GUM_SCALAR >* lbp, GUM_SCALAR virtualLBPSize) {
    __estimator.setFromLBP(lbp, this->hardEvidenceNodes(), virtualLBPSize);
    this->isSetEstimator = true;
  }

  template < typename GUM_SCALAR >
  const Potential< GUM_SCALAR >&
     SamplingInference< GUM_SCALAR >::currentPosterior(NodeId id) {
    return __estimator.posterior(this->BN().variable(id));
  }

  template < typename GUM_SCALAR >
  const Potential< GUM_SCALAR >&
     SamplingInference< GUM_SCALAR >::currentPosterior(const std::string& name) {
    return currentPosterior(this->BN().idFromName(name));
  }

  template < typename GUM_SCALAR >
  const Potential< GUM_SCALAR >&
     SamplingInference< GUM_SCALAR >::_posterior(NodeId id) {
    return __estimator.posterior(this->BN().variable(id));
  }

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::contextualize() {
    // find the barren nodes
    BarrenNodesFinder barr_nodes = BarrenNodesFinder(&this->BN().dag());
    barr_nodes.setTargets(&this->targets());
    barr_nodes.setEvidence(&this->hardEvidenceNodes());
    const NodeSet& barren = barr_nodes.barrenNodes();

    // create the BN fragment, without the barren nodes
    __samplingBN = new BayesNetFragment< GUM_SCALAR >(this->BN());
    for (const auto elmt : this->BN().dag().asNodeSet() - barren)
      __samplingBN->installNode(elmt);

    // remove the d-separated (non-requisite) nodes
    dSeparation dsep = gum::dSeparation();
    NodeSet     requisite;
    dsep.requisiteNodes(
       this->BN().dag(),
       this->BN().nodes().asNodeSet(),   // no target for approximate inference
       this->hardEvidenceNodes(),
       this->softEvidenceNodes(),   // should be empty
       requisite);
    requisite += this->hardEvidenceNodes();

    auto nonRequisite = this->BN().dag().asNodeSet() - requisite;

    for (const auto elmt : nonRequisite)
      __samplingBN->uninstallNode(elmt);

    // absorb the hard evidence into the CPTs of the evidence nodes' children
    for (const auto hard : this->hardEvidenceNodes()) {
      gum::Instantiation I;
      I.add(this->BN().variable(hard));
      I.chgVal(this->BN().variable(hard), this->hardEvidence()[hard]);

      for (const auto& child : this->BN().children(hard)) {
        auto p = new gum::Potential< GUM_SCALAR >();
        *p = this->BN().cpt(child).extract(I);
        __samplingBN->installCPT(child, p);
      }
    }

    this->isContextualized = true;
    this->_onContextualize(__samplingBN);
  }
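
  // Illustrative sketch (assumption, not part of the original file): the hard
  // evidence absorption above relies on Potential::extract(), which restricts
  // a CPT to the values fixed in an Instantiation. For a hypothetical net bn
  // in which node "A", a parent of "B", is observed at index 1:
  //
  //   gum::Instantiation I;
  //   I.add(bn.variable(bn.idFromName("A")));
  //   I.chgVal(bn.variable(bn.idFromName("A")), 1);
  //   auto reduced = bn.cpt(bn.idFromName("B")).extract(I);   // P(B | A=1, other parents)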


  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_makeInference() {
    if (!isSetEstimator) this->_setEstimatorFromBN();
    this->_loopApproxInference();
  }

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_loopApproxInference() {
    // @todo This should be in __prepareInference
    if (!isContextualized) { this->contextualize(); }

    this->initApproximationScheme();
    gum::Instantiation Ip;
    GUM_SCALAR         w = .0;

    // burn-in: draw samples without updating the estimator
    Ip = this->_burnIn();
    do {
      Ip = this->_draw(&w, Ip);
      __estimator.update(Ip, w);
      this->updateApproximationScheme();
    } while (this->continueApproximationScheme(__estimator.confidence()));

    this->isSetEstimator = false;
  }
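
  // Illustrative sketch (assumption, not from aGrUM): a concrete sampler
  // overrides _burnIn() and _draw(). A minimal forward-sampling _draw() for a
  // hypothetical subclass MySampler, using unit weights, could look like:
  //
  //   template < typename GUM_SCALAR >
  //   Instantiation MySampler< GUM_SCALAR >::_draw(GUM_SCALAR* w,
  //                                                Instantiation prev) {
  //     *w = 1.;   // every sample has the same weight
  //     gum::Instantiation I;
  //     for (const auto nod : this->samplingBN().topologicalOrder())
  //       this->_addVarSample(nod, &I);   // sample nod given its parents in I
  //     return I;
  //   }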


  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_addVarSample(NodeId nod,
                                                      Instantiation* I) {
    gum::Instantiation Itop = gum::Instantiation(*I);

    I->add(samplingBN().variable(nod));
    I->chgVal(samplingBN().variable(nod),
              samplingBN().cpt(nod).extract(Itop).draw());
  }

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_onContextualize(
     BayesNetFragment< GUM_SCALAR >* bn) {}

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_onEvidenceAdded(const NodeId id,
                                                         bool isHardEvidence) {
    if (!isHardEvidence) {
      GUM_ERROR(FatalError, "Approximated inference only accept hard evidence");
    }
  }

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_onEvidenceErased(const NodeId id,
                                                          bool isHardEvidence) {}

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_onAllEvidenceErased(
     bool contains_hard_evidence) {}

  template < typename GUM_SCALAR >
  void
     SamplingInference< GUM_SCALAR >::_onEvidenceChanged(const NodeId id,
                                                          bool hasChangedSoftHard) {
    if (hasChangedSoftHard) {
      GUM_ERROR(FatalError, "Approximated inference only accept hard evidence");
    }
  }

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_onBayesNetChanged(
     const IBayesNet< GUM_SCALAR >* bn) {}

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_onMarginalTargetAdded(const NodeId id) {}

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_onMarginalTargetErased(const NodeId id) {}

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_onAllMarginalTargetsAdded() {}

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_onAllMarginalTargetsErased() {}

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_updateOutdatedBNStructure() {}

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_updateOutdatedBNPotentials() {}

  template < typename GUM_SCALAR >
  void SamplingInference< GUM_SCALAR >::_onStateChanged() {
    if (this->isInferenceReady()) {
      __estimator.clear();
      this->initApproximationScheme();
    }
  }
}   // namespace gum
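
// Usage sketch (assumption, not part of this file): concrete samplers such as
// gum::GibbsSampling< double > derive from SamplingInference and are driven
// like any other inference engine. The network and the node names "A" and "B"
// below are hypothetical.
//
//   gum::BayesNet< double > bn;                 // ... build or load the BN ...
//   gum::GibbsSampling< double > inf(&bn);
//   inf.addEvidence(bn.idFromName("A"), 0);     // hard evidence only
//   inf.setEpsilon(1e-2);                       // stopping criterion
//   inf.makeInference();
//   const auto& posterior = inf.posterior(bn.idFromName("B"));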