aGrUM 0.20.3
a C++ library for (probabilistic) graphical models
BayesNetInference.h
/**
 *
 * Copyright (c) 2005-2021 by Pierre-Henri WUILLEMIN(@LIP6) & Christophe GONZALES(@AMU)
 * info_at_agrum_dot_org
 *
 * This library is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with this library. If not, see <http://www.gnu.org/licenses/>.
 *
 */


/**
 * @file
 * @brief This file contains abstract class definitions for Bayesian networks
 * inference classes.
 *
 * @author Christophe GONZALES(@AMU) and Pierre-Henri WUILLEMIN(@LIP6)
 */

#ifndef GUM_BAYES_NET_INFERENCE_H
#define GUM_BAYES_NET_INFERENCE_H


#include <agrum/BN/IBayesNet.h>
#include <agrum/tools/graphicalModels/inference/graphicalModelInference.h>
#include <agrum/agrum.h>


namespace gum {

  /** @brief type of algorithm to determine barren nodes
   *
   * When constructing messages from one clique to its neighbor, we can
   * determine that some nodes are barren, i.e., they are the only variable
   * on the left-hand side of a conditioning bar and they appear in only one
   * potential. In such a case, in a classical BN inference, there is no need
   * to take them into account since their removal will necessarily create
   * a constant potential. So, we can discard their potential from the
   * computation. However, when computing p(evidence), we should not do that
   * because the constant is important and needs to be computed.
   */
  enum class FindBarrenNodesType
  {
    FIND_NO_BARREN_NODES,   // do not try to find barren nodes
    FIND_BARREN_NODES       // use a bottom-up algorithm to detect barren nodes
  };
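
  // A hedged sketch (not part of the original header): concrete engines such
  // as LazyPropagation are expected to expose a setter for this enum
  // (setFindBarrenNodesType is assumed here). When the goal is to compute
  // p(evidence), FIND_NO_BARREN_NODES keeps the constant potentials of the
  // barren nodes in the computation:
  //
  //   gum::LazyPropagation< double > ie(&bn);
  //   ie.setFindBarrenNodesType(gum::FindBarrenNodesType::FIND_BARREN_NODES);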


  // JointTargetedInference, the class for computing joint posteriors, should
  // have access to the states of Inference and change them when needed: this
  // will be a friend of Inference
  template < typename GUM_SCALAR >
  class JointTargetedInference;

  // MarginalTargetedInference, the class for computing marginal posteriors,
  // should have access to the states of Inference and change them when needed:
  // this will be a friend of Inference
  template < typename GUM_SCALAR >
  class MarginalTargetedInference;

  // EvidenceInference, the class for computing the probability of evidence,
  // should have access to the states of Inference and change them when needed:
  // this will be a friend of Inference
  template < typename GUM_SCALAR >
  class EvidenceInference;


  /**
   * @class BayesNetInference inference.h
   * <agrum/BN/inference/BayesNetInference.h>
   * @brief A generic class for Bayes net inference: handles evidence and the
   * current state of the (incremental) inference
   * @ingroup bn_group
   *
   * The goal of the BayesNetInference class is twofold:
   * i) to handle the common resources of BN inference (bn, soft/hard
   * evidence);
   * ii) to propose a general high-level scheme for all the inference methods.
   *
   * A specialized inference just has to specify how to prepare inference, how
   * to make inference and how to get the posteriors for nodes and sets of
   * nodes. The scheme for every inference derived from BayesNetInference will
   * be the same:
   *
   * 1- ie=SpecificInference(bn); // state <- OutdatedStructure
   * 2- set targets and evidence in ie
   * 3- ie.prepareInference(); // state <- Ready4Inference
   * 4.a- change values of evidence in ie // state <- OutdatedPotentials
   * 4.b- change some hard evidence or targets // state <- OutdatedStructure
   * 5- ie.makeInference(); // state <- Done
   * 6- get posteriors
   * 7- goto 2 or 4
   *
   * This lifecycle is illustrated by the sketch below.
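   *
   * A hedged usage sketch (assuming the LazyPropagation engine and a
   * BayesNet< double > bn containing discrete nodes "A" and "B"; it also
   * assumes that makeInference() triggers the preparation step itself when
   * needed):
   * @code
   * gum::LazyPropagation< double > ie(&bn);    // state <- OutdatedStructure
   * ie.addTarget(bn.idFromName("A"));          // the posterior we want
   * ie.addEvidence(bn.idFromName("B"), 0);     // hard evidence: B = 0
   * ie.makeInference();                        // state <- Done
   * const auto& pA = ie.posterior(bn.idFromName("A"));   // P(A | B=0)
   * @endcode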
   *
   * BayesNetInference can be in one of 4 different states:
   * - OutdatedStructure: in this state, the inference is fully unprepared
   * to be applied because some events changed the "logical" structure of the
   * BN: for instance a node received hard evidence, which implies that
   * its outgoing arcs can be removed from the BN, hence involving a
   * structural change in the BN. As a consequence, the (incremental)
   * inference (probably) needs a significant amount of preparation to be
   * ready for the next inference. In Lazy Propagation, for instance, this
   * step amounts to computing a new join tree, hence a new structure in
   * which inference will be applied. Note that classes that inherit from
   * BayesNetInference may be smarter than BayesNetInference and may, in
   * some situations, find out that their data structures are still ok for
   * inference and, therefore, only perform the actions related to the
   * OutdatedPotentials state. As an example, consider a LazyPropagation
   * inference in Bayes Net A->B->C->D->E
   * in which C has received hard evidence e_C and E is the only target. In
   * this case, A and B are not needed for inference; the only potentials that
   * matter are P(D|e_C) and P(E|D). So the smallest join tree needed for
   * inference contains only one clique DE. Now, adding new evidence e_A on A
   * has no impact on E given hard evidence e_C. In this case, LazyPropagation
   * can be smart and not update its join tree.
   * - OutdatedPotentials: in this state, the structure of the BN remains
   * unchanged; only some potentials stored in it have changed. Therefore,
   * the inference probably just needs to invalidate some already computed
   * potentials to be ready. Only a light amount of preparation is needed to
   * be able to perform inference.
   * - Ready4Inference: in this state, all the data structures are ready for
   * inference; there only remains to perform the inference computations.
   * - Done: the heavy computations of inference have been done. There might
   * still remain a few light computations to perform to get the posterior
   * potentials we need. Typically, in Lazy Propagation, all the messages in
   * the join tree have been computed but, to get the potentials, we still
   * need to perform the combinations of the potentials in the cliques with
   * the messages sent to the cliques. In some inference algorithms, this
   * step may even be empty.
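   *
   * A hedged illustration of these state transitions (assuming the
   * LazyPropagation engine and a BayesNet< double > bn with a two-valued
   * node "B"):
   * @code
   * gum::LazyPropagation< double > ie(&bn);           // OutdatedStructure
   * ie.addEvidence(bn.idFromName("B"), {0.8, 0.2});   // soft evidence on B
   * ie.makeInference();                               // Done
   * ie.chgEvidence(bn.idFromName("B"), {0.5, 0.5});   // OutdatedPotentials
   * @endcode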
   */

  template < typename GUM_SCALAR >
  class BayesNetInference: public GraphicalModelInference< GUM_SCALAR > {
    public:
    /**
     * current state of the inference
     *
     * BayesNetInference can be in one of 4 different states:
     * - OutdatedStructure: in this state, the inference is fully unprepared
     * to be applied because some events changed the "logical" structure of
     * the BN: for instance a node received hard evidence, which implies
     * that its outgoing arcs can be removed from the BN, hence involving a
     * structural change in the BN. As a consequence, the (incremental)
     * inference (probably) needs a significant amount of preparation to be
     * ready for the next inference. In Lazy Propagation, for instance,
     * this step amounts to computing a new join tree, hence a new structure
     * in which inference will be applied. Note that classes that inherit
     * from BayesNetInference may be smarter than BayesNetInference and may,
     * in some situations, find out that their data structures are still ok
     * for inference and, therefore, only perform the actions related
     * to the OutdatedPotentials state. As an example, consider a
     * LazyPropagation inference in Bayes Net A->B->C->D->E
     * in which C has received hard evidence e_C and E is the only target. In
     * this case, A and B are not needed for inference; the only potentials
     * that matter are P(D|e_C) and P(E|D). So the smallest join tree needed
     * for inference contains only one clique DE. Now, adding new evidence
     * e_A on A has no impact on E given hard evidence e_C. In this case,
     * LazyPropagation can be smart and not update its join tree.
     * - OutdatedPotentials: in this state, the structure of the BN remains
     * unchanged; only some potentials stored in it have changed. Therefore,
     * the inference probably just needs to invalidate some already computed
     * potentials to be ready. Only a light amount of preparation is needed to
     * be able to perform inference.
     * - Ready4Inference: in this state, all the data structures are ready for
     * inference; there only remains to perform the inference computations.
     * - Done: the heavy computations of inference have been done. There might
     * still remain a few light computations to perform to get the posterior
     * potentials we need. Typically, in Lazy Propagation, all the messages in
     * the join tree have been computed but, to get the potentials, we still
     * need to perform the combinations of the potentials in the cliques with
     * the messages sent to the cliques. In some inference algorithms, this
     * step may even be empty.
     */


    // ############################################################################
    /// @name Constructors / Destructors
    // ############################################################################
    /// @{

    /// default constructor
    /** @warning note that, by aGrUM's rule, the BN is not copied but only
     * referenced by the inference algorithm. */
    explicit BayesNetInference(const IBayesNet< GUM_SCALAR >* bn);

    /// default constructor with a null BN (useful for virtual inheritance)
    /** @warning BayesNetInference is virtually inherited by
     * MarginalTargetedInference. As a result, the lowest descendant of
     * BayesNetInference will create the latter. To avoid requiring developers
     * to add in the constructors of their inference algorithms a call to
     * BayesNetInference( bn ), we added constructor BayesNetInference(),
     * which will be called automatically by the lowest descendant.
     * Then, MarginalTargetedInference and JointTargetedInference will take care
     * of setting the appropriate bn into BayesNetInference. */
    BayesNetInference();

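    // A hedged sketch (hypothetical class MyInference, not part of this
    // header) of how a concrete engine typically relies on this mechanism:
    // it only calls the constructor of its direct base, which forwards the
    // BN down to BayesNetInference.
    //
    //   template < typename SCALAR >
    //   class MyInference: public MarginalTargetedInference< SCALAR > {
    //     public:
    //     explicit MyInference(const IBayesNet< SCALAR >* bn) :
    //         MarginalTargetedInference< SCALAR >(bn) {}
    //   };
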
    /// destructor
    virtual ~BayesNetInference();

    /// @}


    // ############################################################################
    /// @name Accessors / Modifiers
    // ############################################################################
    /// @{

    /// assigns a new BN to the inference engine
    /** Assigns a new BN to the BayesNetInference engine and sends messages to
     * the descendants of BayesNetInference to inform them that the BN has
     * changed.
     * @warning By default, all the nodes of the Bayes net are targets.
     * @warning note that, by aGrUM's rule, the bn is not copied into the
     * inference engine but only referenced. */
    virtual void setBN(const IBayesNet< GUM_SCALAR >* bn);

    /// Returns a constant reference to the IBayesNet referenced by this class
    /** @throws UndefinedElement is raised if no Bayes net has been assigned to
     * the inference. */
    virtual const IBayesNet< GUM_SCALAR >& BN() const final;
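
    // A hedged usage sketch: swapping the model under an existing engine.
    //
    //   gum::LazyPropagation< double > ie(&bn1);
    //   ie.setBN(&bn2);               // all the nodes of bn2 become targets
    //   const auto& model = ie.BN();  // would throw UndefinedElement if no
    //                                 // BN had ever been assigned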

    private:
    /// assigns a BN during the inference engine construction
    void _setBayesNetDuringConstruction_(const IBayesNet< GUM_SCALAR >* bn);


    /// allow the targeted and evidence inference classes to access the
    /// single targets and inference states
    friend MarginalTargetedInference< GUM_SCALAR >;
    friend JointTargetedInference< GUM_SCALAR >;
    friend EvidenceInference< GUM_SCALAR >;
  };


} // namespace gum


#include <agrum/BN/inference/tools/BayesNetInference_tpl.h>


#endif // GUM_BAYES_NET_INFERENCE_H