// aGrUM 0.20.3
// a C++ library for (probabilistic) graphical models
// MarkovNetInference.h
// (Doxygen page header: "Go to the documentation of this file.")
/**
 *
 * Copyright (c) 2005-2021 by Pierre-Henri WUILLEMIN(@LIP6) & Christophe GONZALES(@AMU)
 * info_at_agrum_dot_org
 *
 * This library is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with this library. If not, see <http://www.gnu.org/licenses/>.
 *
 */
20 
21 
/**
 * @file
 * @brief This file contains abstract class definitions for Markov networks
 * inference classes.
 *
 * @author Christophe GONZALES(@AMU) and Pierre-Henri WUILLEMIN(@LIP6)
 */
29 
#ifndef GUM_MARKOV_NET_INFERENCE_H
#define GUM_MARKOV_NET_INFERENCE_H


#include <agrum/agrum.h>
#include <agrum/tools/graphicalModels/inference/graphicalModelInference.h>
#include <agrum/MN/IMarkovNet.h>

38 namespace gum {
39 
40  // JointTargetedMNInference, the class for computing joint posteriors, should
41  // have access to the states of Inference and change them when needed: this
42  // will be a friend of Inference
43  template < typename GUM_SCALAR >
45 
46  // MarginalTargetedMNInference, the class for computing marginal posteriors,
47  // should have access to the states of Inference and change them when needed:
48  // this should be a friend of Inference
49  template < typename GUM_SCALAR >
51 
52  // EvidenceMNInference, the class for computing the probability of evidence,
53  // should have access to the states of Inference and change them when needed:
54  // this will be a friend of Inference
55  template < typename GUM_SCALAR >
56  class EvidenceMNInference;
57 
58 
59  /**
60  * @class MarkovNetInference inference.h
61  * <agrum/MN/inference/MarkovNetInference.h>
62  * @brief A generic class for Markov net inference: handles evidence and the
63  * current state of the (incremental) inference
64  * @ingroup mn_group
65  *
66  * The goal of the MarkovNetInference class is twofold:
67  * i) handling the common resources of MN inference (mn, soft/hard evidence);
68  * ii) propose a general high-level scheme for all the inference methods.
69  *
70  * A specialized inference just has to specify how to prepare inference, how
71  * to make inference and how to get the posteriors for nodes and set of nodes.
72  * The scheme for every inference derived from MarkovNetInference will be
73  * the same:
74  *
75  * 1- ie=SpecificInference(mn); // state <- OutdatedStructure
76  * 2- set targets and evidence in ie
77  * 3- ie.prepareInference(); // state <- ReadyForMNInference
78  * 4.a- change values of evidence in ie // state <- OutdatedPotentials
79  * 4.b- change some hard evidence or targets // state <- OutdatedStructure
80  * 5- ie.makeInference(); // state <- Done
81  * 6- get posteriors
82  * 7- goto 2 or 4
83  *
84  * MarkovNetInference can be in one of 4 different states:
85  * - OutdatedStructure: in this state, the inference is fully unprepared
86  * to be applied because some events changed the "logical" structure of the
87  * MN: for instance a node received a hard evidence, which implies that
88  * its outgoing arcs can be removed from the MN, hence involving a
89  * structural change in the MN. As a consequence, the (incremental)
90  * inference (probably) needs a significant amount of preparation to be ready
91  * for the next inference. In a Lazy propagation, for instance, this step
92  * amounts to compute a new join tree, hence a new structure in which inference
93  * will be applied. Note that classes that inherit from MarkovNetInference
94  * may be smarter than MarkovNetInference and may, in some situations,
95  * find out that their data structures are still ok for inference and,
96  * therefore, only resort to perform the actions related to the
97  * OutdatedPotentials state.
98  * - OutdatedPotentials: in this state, the structure of the MN remains
99  * unchanged, only some potentials stored in it have changed. Therefore,
100  * the inference probably just needs to invalidate some already computed
101  * potentials to be ready. Only a light amount of preparation is needed to
102  * be able to perform inference.
103  * - ReadyForMNInference: in this state, all the data structures are ready for
104  * inference. There just remains to perform the inference computations.
105  * - Done: the heavy computations of inference have been done. There might
106  * still remain a few light computations to perform to get the posterior
107  * potentials we need. Typically, in Lazy Propagation, all the messages in
108  * the join tree have been computed but, to get the potentials, we still
109  * need to perform the combinations of the potentials in the cliques with
110  * the messages sent to the cliques. In some inference algorithms, this
111  * step may even be empty.
112  */
113 
114  template < typename GUM_SCALAR >
116  public:
117  /**
118  * current state of the inference
119  *
120  * MarkovNetInference can be in one of 4 different states:
121  * - OutdatedStructure: in this state, the inference is fully unprepared
122  * to be applied because some events changed the "logical" structure of
123  * the MN: for instance a node received a hard evidence, which implies
124  * that its outgoing arcs can be removed from the MN, hence involving a
125  * structural change in the MN. As a consequence, the (incremental)
126  * inference (probably) needs a significant amount of preparation to be
127  * ready for the next inference. In a Lazy propagation, for instance,
128  * this step amounts to compute a new join tree, hence a new structure
129  * in which inference will be applied. Note that classes that inherit
130  * from MarkovNetInference may be smarter than MarkovNetInference and may,
131  * in some situations, find out that their data structures are still ok for
132  * inference and, therefore, only resort to perform the actions related
133  * to the OutdatedPotentials state.
134  * - OutdatedPotentials: in this state, the structure of the MN remains
135  * unchanged, only some potentials stored in it have changed. Therefore,
136  * the inference probably just needs to invalidate some already computed
137  * potentials to be ready. Only a light amount of preparation is needed to
138  * be able to perform inference.
139  * - ReadyForMNInference: in this state, all the data structures are ready for
140  * inference. There just remains to perform the inference computations.
141  * - Done: the heavy computations of inference have been done. There might
142  * still remain a few light computations to perform to get the posterior
143  * potentials we need. Typically, in Lazy Propagation, all the messages in
144  * the join tree have been computed but, to get the potentials, we still
145  * need to perform the combinations of the potentials in the cliques with
146  * the messages sent to the cliques. In some inference algorithms, this
147  * step may even be empty.
148  */
149 
150  // ############################################################################
151  /// @name Constructors / Destructors
152  // ############################################################################
153  /// @{
154 
155  /// default constructor
156  /** @warning note that, by aGrUM's rule, the MN is not copied but only
157  * referenced by the inference algorithm. */
158  explicit MarkovNetInference(const IMarkovNet< GUM_SCALAR >* mn);
159 
160  /// default constructor with a null MN (useful for virtual inheritance)
161  /** @warning MarkovNetInference is virtually inherited by
162  * MarginalTargetedMNInference. As a result, the lowest descendant of
163  * MarkovNetInference will create the latter. To avoid requiring developpers
164  * to add in the constructors of their inference algorithms a call to
165  * MarkovNetInference( mn ), we added constructor MarkovNetInference(),
166  * which will be called automatically by the lowest descendant.
167  * Then, MarginalTargetedMNInference and JointTargetedMNInference will take
168  * care of setting the appropriate mn into MarkovNetInference. */
170 
171  /// destructor
172  virtual ~MarkovNetInference();
173 
174  /// @}
175 
176 
177  // ############################################################################
178  /// @name Accessors / Modifiers
179  // ############################################################################
180  /// @{
181 
182  /// assigns a new MN to the inference engine
183  /** Assigns a new MN to the MarkovNetInference engine and sends messages to the
184  * descendants of MarkovNetInference to inform them that the MN has changed.
185  * @warning By default, all the nodes of the Markov net are targets.
186  * @warning note that, by aGrUM's rule, the mn is not copied into the
187  * inference engine but only referenced. */
188  virtual void setMN(const IMarkovNet< GUM_SCALAR >* mn);
189 
190  /// Returns a constant reference over the IMarkovNet referenced by this class
191  /** @throws UndefinedElement is raised if no Markov net has been assigned to
192  * the inference. */
193  virtual const IMarkovNet< GUM_SCALAR >& MN() const final;
194 
195  private:
196  /// assigns a MN during the inference engine construction
197  void _setMarkovNetDuringConstruction_(const IMarkovNet< GUM_SCALAR >* mn);
198 
199 
200  /// allow JointInference to access the single targets and inference states
201  friend MarginalTargetedMNInference< GUM_SCALAR >;
202  friend JointTargetedMNInference< GUM_SCALAR >;
203  friend EvidenceMNInference< GUM_SCALAR >;
204  };
205 
206 
207 } // namespace gum
208 
209 
210 #include <agrum/MN/inference/tools/MarkovNetInference_tpl.h>
211 
212 
213 #endif // GUM_MARKOV_NET_INFERENCE_H
// ---------------------------------------------------------------------------
// Doxygen cross-reference residue from the scraped documentation page,
// preserved verbatim as comments (not part of the original header):
//
// void _setMarkovNetDuringConstruction_(const IMarkovNet< GUM_SCALAR > *mn)
// assigns a MN during the inference engine construction
// virtual const IMarkovNet< GUM_SCALAR > & MN() const final
// Returns a constant reference over the IMarkovNet referenced by this class.
// MarkovNetInference(const IMarkovNet< GUM_SCALAR > *mn)
// default constructor
// virtual ~MarkovNetInference()
// destructor
// INLINE void emplace(Args &&... args)
// Definition: set_tpl.h:643
// <agrum/MN/inference/evidenceMNInference.h>
// virtual void setMN(const IMarkovNet< GUM_SCALAR > *mn)
// assigns a new MN to the inference engine
// <agrum/MN/inference/marginalTargetedMNInference.h>
// MarkovNetInference()
// default constructor with a null MN (useful for virtual inheritance)
// <agrum/MN/inference/jointTargetedMNInference.h>
// <agrum/MN/inference/MarkovNetInference.h>