aGrUM  0.20.3
a C++ library for (probabilistic) graphical models
multipleInferenceEngine.h
/**
 *
 * Copyright (c) 2005-2021 by Pierre-Henri WUILLEMIN(@LIP6) & Christophe GONZALES(@AMU)
 * info_at_agrum_dot_org
 *
 * This library is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with this library. If not, see <http://www.gnu.org/licenses/>.
 *
 */


#ifndef __MULTIPLE_INFERENCE_ENGINES__H__
#define __MULTIPLE_INFERENCE_ENGINES__H__

/**
 * @file
 * @brief Abstract class representing CredalNet inference engines
 * @author Matthieu HOURBRACQ and Pierre-Henri WUILLEMIN(@LIP6)
 */

/// @todo virtual for all functions that MAY one day be redefined in any
/// derived class

#include <agrum/BN/inference/lazyPropagation.h>
#include <agrum/CN/inference/inferenceEngine.h>

namespace gum {
  namespace credal {

    /**
     * @class MultipleInferenceEngine
     * @headerfile <agrum/CN/multipleInferenceEngine.h>
     * @brief Class template representing a CredalNet inference engine that relies
     * on one or more IBayesNet inference engines such as LazyPropagation. Extends
     * InferenceEngine< GUM_SCALAR >. Used for outer multi-threading, e.g. by
     * CNMonteCarloSampling.
     * @ingroup cn_group
     * @tparam GUM_SCALAR A floating-point type (float, double, long double, ...).
     * @tparam BNInferenceEngine An IBayesNet inference engine such as
     * LazyPropagation.
     * @author Matthieu HOURBRACQ and Pierre-Henri WUILLEMIN(@LIP6)
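     *
     * @par Usage sketch
     * A minimal, illustrative example with one concrete derived engine
     * (CNMonteCarloSampling). The file names and the node id used below are
     * assumptions, not part of this header:
     * @code
     * gum::credal::CredalNet< double > cn( "bn_min.bif", "bn_max.bif" );
     * cn.intervalToCredal();
     *
     * gum::credal::CNMonteCarloSampling< double, gum::LazyPropagation< double > >
     *    engine( cn );
     * engine.makeInference();
     *
     * // lower and upper posterior marginals of node 0
     * const auto& low  = engine.marginalMin( 0 );
     * const auto& high = engine.marginalMax( 0 );
     * @endcode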
     */
    template < typename GUM_SCALAR, class BNInferenceEngine >
    class MultipleInferenceEngine: public InferenceEngine< GUM_SCALAR > {
      private:
      /** To easily access InferenceEngine< GUM_SCALAR > methods. */
      typedef InferenceEngine< GUM_SCALAR > _infE_;

      typedef NodeProperty< std::vector< NodeId > >                    _cluster_;
      typedef NodeProperty< std::vector< std::vector< GUM_SCALAR > > > _credalSet_;
      typedef NodeProperty< std::vector< GUM_SCALAR > >                _margi_;
      typedef NodeProperty< GUM_SCALAR >                               _expe_;

      typedef IBayesNet< GUM_SCALAR >                 _bnet_;
      typedef std::vector< _margi_ >                  _margis_;
      typedef std::vector< _expe_ >                   _expes_;
      typedef std::vector< _credalSet_ >              _credalSets_;
      typedef std::vector< std::vector< _cluster_ > > _clusters_;

      typedef typename std::vector< HashTable< std::string, std::vector< GUM_SCALAR > > > _modals_;

      /**
       * @brief Ask for redundancy elimination of a node credal set of a calling
       * thread.
       *
       * Called by updateThread_ if vertices are stored.
       *
       * @param id A constant reference to the node id whose credal set is to be
       * checked for redundancy.
       * @param vertex The vertex to add to the credal set.
       * @param elimRedund \c true if redundancy elimination is to be performed,
       * \c false otherwise and by default.
       */
      inline void _updateThreadCredalSets_(const NodeId&                    id,
                                           const std::vector< GUM_SCALAR >& vertex,
                                           const bool&                      elimRedund);

      protected:
      /** Threads lower marginals, one per thread. */
      _margis_ l_marginalMin_;
      /** Threads upper marginals, one per thread. */
      _margis_ l_marginalMax_;
      /** Threads lower expectations, one per thread. */
      _expes_ l_expectationMin_;
      /** Threads upper expectations, one per thread. */
      _expes_ l_expectationMax_;
      /** Threads modalities. */
      _modals_ l_modal_;
      /** Threads vertices. */
      _credalSets_ l_marginalSets_;
      /** Threads evidence. */
      _margis_ l_evidence_;
      /** Threads clusters. */
      _clusters_ l_clusters_;

      /** Threads IBayesNet. */
      typename std::vector< _bnet_* > workingSet_;
      /** Threads evidence. */
      typename std::vector< List< const Potential< GUM_SCALAR >* >* > workingSetE_;

      /** Threads BNInferenceEngine. */
      typename std::vector< BNInferenceEngine* > l_inferenceEngine_;
      /** Threads optimal IBayesNet. */
      std::vector< VarMod2BNsMap< GUM_SCALAR >* > l_optimalNet_;
      /** Fusion of threads optimal IBayesNet. */
      // OptBN< GUM_SCALAR > threadFusion_; // we should use this OptBN if omp is
      // disabled (avoid creating 2 objects when only one is necessary)
      // it should also avoid calling thread fusion operations

      /// @name Protected initialization methods
      /// @{

      /**
       * @brief Initialize threads data.
       *
       * @param num_threads The number of threads.
       * @param _storeVertices_ \c True if vertices should be stored, \c False
       * otherwise.
       * @param _storeBNOpt_ \c True if optimal IBayesNet should be stored, \c False
       * otherwise.
       */
      void initThreadsData_(const Size& num_threads,
                            const bool  _storeVertices_,
                            const bool  _storeBNOpt_);

      /// @}

      /// @name Protected algorithms methods
      /// @{

      /**
       * @brief Update thread information (marginals, expectations, IBayesNet,
       * vertices) for a given node id.
       *
       * @param id The id of the node to be updated.
       * @param vertex The vertex.
       * @param elimRedund \c true if redundancy elimination is to be performed,
       * \c false otherwise and by default.
       * @return \c True if the IBayesNet is kept (for now), \c False otherwise.
       */
      inline bool updateThread_(const NodeId&                    id,
                                const std::vector< GUM_SCALAR >& vertex,
                                const bool&                      elimRedund = false);

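      // Typical use inside a derived engine (a sketch, not part of this header):
      // each sampling thread runs its BNInferenceEngine on one IBayesNet drawn
      // from the credal net, then feeds the resulting posteriors back, e.g.
      //
      //   for (const auto node: workingSet_[tId]->nodes()) {
      //      const auto&               posterior = l_inferenceEngine_[tId]->posterior(node);
      //      std::vector< GUM_SCALAR > vertex(posterior.domainSize());
      //      gum::Instantiation        inst(posterior);
      //      for (Idx i = 0; !inst.end(); inst.inc(), ++i)
      //         vertex[i] = posterior.get(inst);
      //      updateThread_(node, vertex);   // updates this thread's bounds
      //   }
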
      /**
       * @brief Fusion of threads marginals.
       */
      inline void updateMarginals_();

      /**
       * @brief Compute epsilon and update old marginals.
       *
       * @return Epsilon.
       */
      inline const GUM_SCALAR computeEpsilon_();

      /**
       * Update old marginals (from current marginals). Call this once to
       * initialize old marginals (e.g. after burn-in) and then use
       * computeEpsilon_, which does the same job but also computes epsilon.
       */
      void updateOldMarginals_();

      /// @}
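
      // How these fit together in a derived makeInference() (a sketch, assuming
      // an iterative engine with an epsilon stopping criterion; eps and threshold
      // below are illustrative local names):
      //
      //   updateOldMarginals_();          // once, e.g. after burn-in
      //   GUM_SCALAR eps;
      //   do {
      //      // ... each thread samples a net and calls updateThread_ ...
      //      updateMarginals_();          // fuse the per-thread bounds
      //      eps = computeEpsilon_();     // also refreshes old marginals
      //   } while (eps > threshold);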

      /// @name Protected post-inference methods
      /// @{

      /** Fusion of threads optimal IBayesNet. */
      void optFusion_();
      /** Fusion of threads expectations. */
      void expFusion_();
      /** @deprecated Fusion of threads vertices. */
      void verticesFusion_();   // called ?? not done yet

      /// @}

      public:
      /// @name Constructors / Destructors
      /// @{

      /**
       * Constructor.
       * @param credalNet The CredalNet to be used.
       */
      explicit MultipleInferenceEngine(const CredalNet< GUM_SCALAR >& credalNet);

      /** Destructor. */
      virtual ~MultipleInferenceEngine();

      /// @}

      /// @name Post-inference methods
      /// @{
      /**
       * Erase all inference related data to perform another inference. Evidence
       * has to be inserted again if needed, but modalities are kept. New
       * modalities can be inserted with the appropriate method, which will
       * delete the old ones.
       */
      virtual void eraseAllEvidence();
      /// @}
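
      // Re-running an inference on the same engine (a sketch; the evidence file
      // name is an assumption and insertEvidenceFile comes from the
      // InferenceEngine base class):
      //
      //   engine.eraseAllEvidence();
      //   engine.insertEvidenceFile( "my_evidence" );
      //   engine.makeInference();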

      /// @name Pure virtual methods
      /// @{
      /** To be redefined by each credal net algorithm. Starts the inference. */
      virtual void makeInference() = 0;
      /// @}
    };


#ifndef GUM_NO_EXTERN_TEMPLATE_CLASS
    extern template class MultipleInferenceEngine< double, gum::LazyPropagation< double > >;

    extern template class MultipleInferenceEngine< float, gum::LazyPropagation< float > >;
#endif

  }   // namespace credal
}   // namespace gum

#include <agrum/CN/inference/multipleInferenceEngine_tpl.h>

#endif