aGrUM  0.16.0
loopyBeliefPropagation_tpl.h
Go to the documentation of this file.
1 
27 #ifndef DOXYGEN_SHOULD_SKIP_THIS
28 
29 # include <algorithm>
30 # include <sstream>
31 # include <string>
32 
33 # define LBP_DEFAULT_MAXITER 100
34 # define LBP_DEFAULT_EPSILON 1e-8
35 # define LBP_DEFAULT_MIN_EPSILON_RATE 1e-10
36 # define LBP_DEFAULT_PERIOD_SIZE 1
37 # define LBP_DEFAULT_VERBOSITY false
38 
39 
40 // to ease parsing for IDE
42 
43 
44 namespace gum {
45 
// Constructor: forwards the network to ApproximateInference and installs
// the LBP_* default scheme parameters (epsilon, min epsilon rate, max
// iterations, verbosity, period size) defined at the top of this file.
// NOTE(review): the extraction dropped Doxygen line 48, which carried
// "LoopyBeliefPropagation< GUM_SCALAR >::LoopyBeliefPropagation(" —
// restore it from the original header before compiling.
47  template < typename GUM_SCALAR >
49  const IBayesNet< GUM_SCALAR >* bn) :
50  ApproximateInference< GUM_SCALAR >(bn) {
51  // for debugging purposes
52  GUM_CONSTRUCTOR(LoopyBeliefPropagation);
53 
54  this->setEpsilon(LBP_DEFAULT_EPSILON);
55  this->setMinEpsilonRate(LBP_DEFAULT_MIN_EPSILON_RATE);
56  this->setMaxIter(LBP_DEFAULT_MAXITER);
57  this->setVerbosity(LBP_DEFAULT_VERBOSITY);
58  this->setPeriodSize(LBP_DEFAULT_PERIOD_SIZE);
59 
61  }
62 
// Destructor: only emits the debug-tracking macro; no resources to free
// (messages/posteriors are value members destroyed automatically).
// NOTE(review): the extraction dropped Doxygen line 65, the signature
// "LoopyBeliefPropagation< GUM_SCALAR >::~LoopyBeliefPropagation() {".
64  template < typename GUM_SCALAR >
66  GUM_DESTRUCTOR(LoopyBeliefPropagation);
67  }
68 
69 
// __init_messages(): (re)builds the message table from scratch.
// For every arc tail->head of the BN, both directed messages
// Arc(head, tail) and Arc(tail, head) are initialised to the same
// potential over the *tail* variable, uniformly filled with 1
// (i.e. a non-informative message).
// NOTE(review): the extraction dropped Doxygen line 71, the signature
// "void LoopyBeliefPropagation< GUM_SCALAR >::__init_messages() {".
70  template < typename GUM_SCALAR >
72  __messages.clear();
73  for (const auto& tail : this->BN().nodes()) {
74  Potential< GUM_SCALAR > p;
75  p.add(this->BN().variable(tail));
76  p.fill(static_cast< GUM_SCALAR >(1));
77 
78  for (const auto& head : this->BN().children(tail)) {
79  __messages.insert(Arc(head, tail), p);
80  __messages.insert(Arc(tail, head), p);
81  }
82  }
83  }
84 
// NOTE(review): the extraction dropped Doxygen lines 86-87 (this
// function's signature and body); only the closing brace survives.
// Which member this is cannot be told from here — possibly the
// (empty) _updateOutdatedBNStructure() listed in the member index.
// Verify against the original loopyBeliefPropagation_tpl.h.
85  template < typename GUM_SCALAR >
88  }
89 
90 
// __computeProdPi(X): pi(X) — multiplies the CPT of X by every incoming
// parent message Arc(U, X), then sums everything out except X itself
// (margSumIn({&varX})). Returns a potential over the single variable X.
// NOTE(review): the extraction dropped Doxygen line 93, the signature
// "LoopyBeliefPropagation< GUM_SCALAR >::__computeProdPi(NodeId X) {".
91  template < typename GUM_SCALAR >
92  Potential< GUM_SCALAR >
94  const auto& varX = this->BN().variable(X);
95 
96  auto piX = this->BN().cpt(X);
97  for (const auto& U : this->BN().parents(X)) {
98  piX *= __messages[Arc(U, X)];
99  }
100  piX = piX.margSumIn({&varX});
101 
102  return piX;
103  }
104 
// __computeProdPi(X, except): like __computeProdPi(X) but skips the
// message from parent `except`, and keeps BOTH X and `except` in the
// result (margSumIn({&varX, &varExcept})) — the caller later
// marginalises onto `except` to build the lambda message X -> except.
// NOTE(review): the extraction dropped Doxygen line 107, the signature
// "LoopyBeliefPropagation< GUM_SCALAR >::__computeProdPi(NodeId X,".
105  template < typename GUM_SCALAR >
106  Potential< GUM_SCALAR >
108  NodeId except) {
109  const auto& varX = this->BN().variable(X);
110  const auto& varExcept = this->BN().variable(except);
111  auto piXexcept = this->BN().cpt(X);
112  for (const auto& U : this->BN().parents(X)) {
113  if (U != except) { piXexcept *= __messages[Arc(U, X)]; }
114  }
115  piXexcept = piXexcept.margSumIn({&varX, &varExcept});
116  return piXexcept;
117  }
118 
119 
// __computeProdLambda(X): lambda(X) — starts from the evidence
// potential on X if any (copied from this->evidence()[X]), otherwise
// from a uniform potential of 1s over X, then multiplies in every
// child message Arc(Y, X).
// NOTE(review): the extraction dropped Doxygen line 122, the signature
// "LoopyBeliefPropagation< GUM_SCALAR >::__computeProdLambda(NodeId X) {".
120  template < typename GUM_SCALAR >
121  Potential< GUM_SCALAR >
123  Potential< GUM_SCALAR > lamX;
124  if (this->hasEvidence(X)) {
125  lamX = *(this->evidence()[X]);
126  } else {
127  lamX.add(this->BN().variable(X));
128  lamX.fill(1);
129  }
130  for (const auto& Y : this->BN().children(X)) {
131  lamX *= __messages[Arc(Y, X)];
132  }
133 
134  return lamX;
135  }
136 
// __computeProdLambda(X, except): same as __computeProdLambda(X) but
// skips the message from child `except` — used when building the pi
// message X -> except, which must not include except's own feedback.
// NOTE(review): the extraction dropped Doxygen line 139, the signature
// "LoopyBeliefPropagation< GUM_SCALAR >::__computeProdLambda(NodeId X,".
137  template < typename GUM_SCALAR >
138  Potential< GUM_SCALAR >
140  NodeId except) {
141  Potential< GUM_SCALAR > lamXexcept;
142  if (this->hasEvidence(X)) { //
143  lamXexcept = *this->evidence()[X];
144  } else {
145  lamXexcept.add(this->BN().variable(X));
146  lamXexcept.fill(1);
147  }
148  for (const auto& Y : this->BN().children(X)) {
149  if (Y != except) { lamXexcept *= __messages[Arc(Y, X)]; }
150  }
151 
152  return lamXexcept;
153  }
154 
155 
// __updateNodeMessage(X): recomputes every message LEAVING node X —
// one lambda message per parent U (arc X->U) and one pi message per
// child Y (arc X->Y) — normalising each, and returns the largest KL
// divergence between an old message and its replacement (the local
// convergence error for X).
//  - KL throwing FatalError is treated as a misplaced zero and mapped
//    to +infinity (maximal change); InvalidArgument is re-raised via
//    GUM_ERROR as an incompatibility.
//  - NOTE(review): argKL records which arc achieved the max but is
//    never read afterwards — presumably a debugging leftover; confirm.
//  - NOTE(review): in the pi loop `ekl` is initialised to KL (line 188)
//    while the lambda loop initialises it to 0 (line 169); harmless
//    since only `ekl > KL` is tested, but inconsistent — verify intent.
// NOTE(review): the extraction dropped Doxygen line 157, the signature
// "GUM_SCALAR LoopyBeliefPropagation< GUM_SCALAR >::__updateNodeMessage(NodeId X) {".
156  template < typename GUM_SCALAR >
158  auto piX = __computeProdPi(X);
159  auto lamX = __computeProdLambda(X);
160 
161  GUM_SCALAR KL = 0;
162  Arc argKL(0, 0);
163 
164  // update lambda_par (for arc U->x)
165  for (const auto& U : this->BN().parents(X)) {
166  auto newLambda =
167  (__computeProdPi(X, U) * lamX).margSumIn({&this->BN().variable(U)});
168  newLambda.normalize();
169  auto ekl = static_cast< GUM_SCALAR >(0);
170  try {
171  ekl = __messages[Arc(X, U)].KL(newLambda);
172  } catch (InvalidArgument&) {
173  GUM_ERROR(InvalidArgument, "Not compatible pi during computation");
174  } catch (FatalError&) { // 0 misplaced
175  ekl = std::numeric_limits< GUM_SCALAR >::infinity();
176  }
177  if (ekl > KL) {
178  KL = ekl;
179  argKL = Arc(X, U);
180  }
181  __messages.set(Arc(X, U), newLambda);
182  }
183 
184  // update pi_child (for arc x->child)
185  for (const auto& Y : this->BN().children(X)) {
186  auto newPi = (piX * __computeProdLambda(X, Y));
187  newPi.normalize();
188  GUM_SCALAR ekl = KL;
189  try {
190  ekl = __messages[Arc(X, Y)].KL(newPi);
191  } catch (InvalidArgument&) {
192  GUM_ERROR(InvalidArgument, "Not compatible pi during computation");
193  } catch (FatalError&) { // 0 misplaced
194  ekl = std::numeric_limits< GUM_SCALAR >::infinity();
195  }
196  if (ekl > KL) {
197  KL = ekl;
198  argKL = Arc(X, Y);
199  }
200  __messages.set(Arc(X, Y), newPi);
201  }
202 
203  return KL;
204  }
205 
// Warm-up pass: resets all messages to uniform, then performs one
// message update per node following a topological order of the BN.
// NOTE(review): the extraction dropped Doxygen line 207, this
// function's signature — from the member index it is presumably
// "void LoopyBeliefPropagation< GUM_SCALAR >::__initStats() {"
// (it is the __initStats() called at the top of _makeInference);
// confirm against the original header.
206  template < typename GUM_SCALAR >
208  __init_messages();
209  for (const auto& node : this->BN().topologicalOrder()) {
210  __updateNodeMessage(node);
211  }
212  }
213 
214 
// _makeInference(): the LBP main loop. After the warm-up pass and
// scheme initialisation, each sweep shuffles the node order (fixed,
// default-seeded std::default_random_engine => deterministic run)
// and updates every node's outgoing messages, tracking the largest
// per-node error; iteration stops when
// continueApproximationScheme(error) says so.
//  - NOTE(review): `error` is never reset between sweeps, so it is the
//    max over the WHOLE run, not per sweep — monotone non-decreasing.
//  - NOTE(review): Doxygen line 230 is missing between 229 and 231;
//    given the updateApproximationScheme() tooltip on this page it
//    presumably carried "updateApproximationScheme();" — without it
//    the scheme's step counter never advances. Restore from the
//    original header.
// NOTE(review): the extraction also dropped line 217, the signature
// "void LoopyBeliefPropagation< GUM_SCALAR >::_makeInference() {".
216  template < typename GUM_SCALAR >
218  __initStats();
219  this->initApproximationScheme();
220 
221  std::vector< NodeId > shuffleIds;
222  for (const auto& node : this->BN().nodes())
223  shuffleIds.push_back(node);
224 
225  auto engine = std::default_random_engine{};
226 
227  GUM_SCALAR error = 0.0;
228  do {
229  std::shuffle(std::begin(shuffleIds), std::end(shuffleIds), engine);
231  for (const auto& node : shuffleIds) {
232  GUM_SCALAR e = __updateNodeMessage(node);
233  if (e > error) error = e;
234  }
235  } while (this->continueApproximationScheme(error));
236  }
237 
238 
// _posterior(id): posterior of a single variable as the normalised
// product pi(id) * lambda(id), cached into __posteriors and returned
// by reference (reference stays valid as long as the entry is not
// overwritten by a later call for the same id).
// NOTE(review): the extraction dropped Doxygen line 242, the signature
// "LoopyBeliefPropagation< GUM_SCALAR >::_posterior(NodeId id) {".
240  template < typename GUM_SCALAR >
241  INLINE const Potential< GUM_SCALAR >&
243  auto p = __computeProdPi(id) * __computeProdLambda(id);
244  p.normalize();
245  __posteriors.set(id, p);
246 
247  return __posteriors[id];
248  }
249 } /* namespace gum */
250 
251 #endif // DOXYGEN_SHOULD_SKIP_THIS
const NodeProperty< const Potential< GUM_SCALAR > *> & evidence() const
returns the set of evidence
Copyright 2005-2019 Pierre-Henri WUILLEMIN et Christophe GONZALES (LIP6) {prenom.nom}_at_lip6.fr.
void setPeriodSize(Size p)
How many samples between two stopping tests.
void initApproximationScheme()
Initialise the scheme.
Copyright 2005-2019 Pierre-Henri WUILLEMIN et Christophe GONZALES (LIP6) {prenom.nom}_at_lip6.fr.
Definition: agrum.h:25
void setMinEpsilonRate(double rate)
Given that we approximate f(t), stopping criterion on d/dt(|f(t+1)-f(t)|).
Potential< GUM_SCALAR > __computeProdLambda(NodeId X)
void setVerbosity(bool v)
Set the verbosity on (true) or off (false).
virtual ~LoopyBeliefPropagation()
Destructor.
GUM_SCALAR __updateNodeMessage(NodeId X)
bool continueApproximationScheme(double error)
Update the scheme w.r.t the new error.
virtual bool hasEvidence() const final
indicates whether some node(s) have received evidence
LoopyBeliefPropagation(const IBayesNet< GUM_SCALAR > *bn)
Default constructor.
KL is the base class for KL computation between 2 BNs.
ArcProperty< Potential< GUM_SCALAR > > __messages
virtual const Potential< GUM_SCALAR > & _posterior(NodeId id)
asks derived classes for the posterior of a given variable
void setMaxIter(Size max)
Stopping criterion on number of iterations.
void setEpsilon(double eps)
Given that we approximate f(t), stopping criterion on |f(t+1)-f(t)|.
NodeProperty< Potential< GUM_SCALAR > > __posteriors
virtual void _makeInference()
called when the inference has to be performed effectively
Potential< GUM_SCALAR > __computeProdPi(NodeId X)
virtual const IBayesNet< GUM_SCALAR > & BN() const final
Returns a constant reference over the IBayesNet referenced by this class.
Size NodeId
Type for node ids.
Definition: graphElements.h:98
#define GUM_ERROR(type, msg)
Definition: exceptions.h:55
virtual void _updateOutdatedBNStructure()
prepares inference when the latter is in OutdatedBNStructure state
void updateApproximationScheme(unsigned int incr=1)
Update the scheme w.r.t the new error and increment steps.