/***************************************************************************
 *   Copyright (C) 2017 by Pierre-Henri WUILLEMIN and Christophe GONZALES  *
 *   {prenom.nom}_at_lip6.fr                                               *
 *                                                                         *
 *   This program is free software; you can redistribute it and/or modify  *
 *   it under the terms of the GNU General Public License as published by  *
 *   the Free Software Foundation; either version 2 of the License, or     *
 *   (at your option) any later version.                                   *
 *                                                                         *
 *   This program is distributed in the hope that it will be useful,       *
 *   but WITHOUT ANY WARRANTY; without even the implied warranty of        *
 *   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the         *
 *   GNU General Public License for more details.                          *
 *                                                                         *
 *   You should have received a copy of the GNU General Public License     *
 *   along with this program; if not, write to the                         *
 *   Free Software Foundation, Inc.,                                       *
 *   59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.             *
 ***************************************************************************/
#include <agrum/CN/CNLoopyPropagation.h>
25 namespace gum {
26  namespace credal {
27 
28  template < typename GUM_SCALAR >
29  void CNLoopyPropagation< GUM_SCALAR >::saveInference(const std::string& path) {
30  std::string path_name = path.substr(0, path.size() - 4);
31  path_name = path_name + ".res";
32 
33  std::ofstream res(path_name.c_str(), std::ios::out | std::ios::trunc);
34 
35  if (!res.good()) {
37  "CNLoopyPropagation<GUM_SCALAR>::saveInference(std::"
38  "string & path) : could not open file : "
39  + path_name);
40  }
41 
42  std::string ext = path.substr(path.size() - 3, path.size());
43 
44  if (std::strcmp(ext.c_str(), "evi") == 0) {
45  std::ifstream evi(path.c_str(), std::ios::in);
46  std::string ligne;
47 
48  if (!evi.good()) {
50  "CNLoopyPropagation<GUM_SCALAR>::saveInference(std::"
51  "string & path) : could not open file : "
52  + ext);
53  }
54 
55  while (evi.good()) {
56  getline(evi, ligne);
57  res << ligne << "\n";
58  }
59 
60  evi.close();
61  }
62 
63  res << "[RESULTATS]"
64  << "\n";
65 
66  for (auto node : __bnet->nodes()) {
67  // calcul distri posteriori
68  GUM_SCALAR msg_p_min = 1.0;
69  GUM_SCALAR msg_p_max = 0.0;
70 
71  // cas evidence, calcul immediat
72  if (__infE::_evidence.exists(node)) {
73  if (__infE::_evidence[node][1] == 0.) {
74  msg_p_min = 0.;
75  } else if (__infE::_evidence[node][1] == 1.) {
76  msg_p_min = 1.;
77  }
78 
79  msg_p_max = msg_p_min;
80  }
81  // sinon depuis node P et node L
82  else {
83  GUM_SCALAR min = _NodesP_min[node];
84  GUM_SCALAR max;
85 
86  if (_NodesP_max.exists(node)) {
87  max = _NodesP_max[node];
88  } else {
89  max = min;
90  }
91 
92  GUM_SCALAR lmin = _NodesL_min[node];
93  GUM_SCALAR lmax;
94 
95  if (_NodesL_max.exists(node)) {
96  lmax = _NodesL_max[node];
97  } else {
98  lmax = lmin;
99  }
100 
101  // cas limites sur min
102  if (min == _INF && lmin == 0.) {
103  std::cout << "proba ERR (negatif) : pi = inf, l = 0" << std::endl;
104  }
105 
106  if (lmin == _INF) { // cas infini
107  msg_p_min = GUM_SCALAR(1.);
108  } else if (min == 0. || lmin == 0.) {
109  msg_p_min = GUM_SCALAR(0.);
110  } else {
111  msg_p_min = GUM_SCALAR(1. / (1. + ((1. / min - 1.) * 1. / lmin)));
112  }
113 
114  // cas limites sur max
115  if (max == _INF && lmax == 0.) {
116  std::cout << "proba ERR (negatif) : pi = inf, l = 0" << std::endl;
117  }
118 
119  if (lmax == _INF) { // cas infini
120  msg_p_max = GUM_SCALAR(1.);
121  } else if (max == 0. || lmax == 0.) {
122  msg_p_max = GUM_SCALAR(0.);
123  } else {
124  msg_p_max = GUM_SCALAR(1. / (1. + ((1. / max - 1.) * 1. / lmax)));
125  }
126  }
127 
128  if (msg_p_min != msg_p_min && msg_p_max == msg_p_max) {
129  msg_p_min = msg_p_max;
130  }
131 
132  if (msg_p_max != msg_p_max && msg_p_min == msg_p_min) {
133  msg_p_max = msg_p_min;
134  }
135 
136  if (msg_p_max != msg_p_max && msg_p_min != msg_p_min) {
137  std::cout << std::endl;
138  std::cout << "pas de proba calculable (verifier observations)"
139  << std::endl;
140  }
141 
142  res << "P(" << __bnet->variable(node).name() << " | e) = ";
143 
144  if (__infE::_evidence.exists(node)) {
145  res << "(observe)" << std::endl;
146  } else {
147  res << std::endl;
148  }
149 
150  res << "\t\t" << __bnet->variable(node).label(0) << " [ "
151  << (GUM_SCALAR)1. - msg_p_max;
152 
153  if (msg_p_min != msg_p_max) {
154  res << ", " << (GUM_SCALAR)1. - msg_p_min << " ] | ";
155  } else {
156  res << " ] | ";
157  }
158 
159  res << __bnet->variable(node).label(1) << " [ " << msg_p_min;
160 
161  if (msg_p_min != msg_p_max) {
162  res << ", " << msg_p_max << " ]" << std::endl;
163  } else {
164  res << " ]" << std::endl;
165  }
166  } // end of : for each node
167 
168  res.close();
169  }
170 
180  template < typename GUM_SCALAR >
181  void
183  GUM_SCALAR& msg_l_max,
184  std::vector< GUM_SCALAR >& lx,
185  GUM_SCALAR& num_min,
186  GUM_SCALAR& num_max,
187  GUM_SCALAR& den_min,
188  GUM_SCALAR& den_max) {
189  GUM_SCALAR old_msg_min = msg_l_min;
190  GUM_SCALAR old_msg_max = msg_l_max;
191 
192  GUM_SCALAR num_min_tmp = 1.;
193  GUM_SCALAR den_min_tmp = 1.;
194  GUM_SCALAR num_max_tmp = 1.;
195  GUM_SCALAR den_max_tmp = 1.;
196 
197  GUM_SCALAR res_min = 1.0, res_max = 0.0;
198 
199  auto lsize = lx.size();
200 
201  for (decltype(lsize) i = 0; i < lsize; i++) {
202  bool non_defini_min = false;
203  bool non_defini_max = false;
204 
205  if (lx[i] == _INF) {
206  num_min_tmp = num_min;
207  den_min_tmp = den_max;
208  num_max_tmp = num_max;
209  den_max_tmp = den_min;
210  } else if (lx[i] == (GUM_SCALAR)1.) {
211  num_min_tmp = GUM_SCALAR(1.);
212  den_min_tmp = GUM_SCALAR(1.);
213  num_max_tmp = GUM_SCALAR(1.);
214  den_max_tmp = GUM_SCALAR(1.);
215  } else if (lx[i] > (GUM_SCALAR)1.) {
216  GUM_SCALAR li = GUM_SCALAR(1.) / (lx[i] - GUM_SCALAR(1.));
217  num_min_tmp = num_min + li;
218  den_min_tmp = den_max + li;
219  num_max_tmp = num_max + li;
220  den_max_tmp = den_min + li;
221  } else if (lx[i] < (GUM_SCALAR)1.) {
222  GUM_SCALAR li = GUM_SCALAR(1.) / (lx[i] - GUM_SCALAR(1.));
223  num_min_tmp = num_max + li;
224  den_min_tmp = den_min + li;
225  num_max_tmp = num_min + li;
226  den_max_tmp = den_max + li;
227  }
228 
229  if (den_min_tmp == 0. && num_min_tmp == 0.) {
230  non_defini_min = true;
231  } else if (den_min_tmp == 0. && num_min_tmp != 0.) {
232  res_min = _INF;
233  } else if (den_min_tmp != _INF || num_min_tmp != _INF) {
234  res_min = num_min_tmp / den_min_tmp;
235  }
236 
237  if (den_max_tmp == 0. && num_max_tmp == 0.) {
238  non_defini_max = true;
239  } else if (den_max_tmp == 0. && num_max_tmp != 0.) {
240  res_max = _INF;
241  } else if (den_max_tmp != _INF || num_max_tmp != _INF) {
242  res_max = num_max_tmp / den_max_tmp;
243  }
244 
245  if (non_defini_max && non_defini_min) {
246  std::cout << "undefined msg" << std::endl;
247  continue;
248  } else if (non_defini_min && !non_defini_max) {
249  res_min = res_max;
250  } else if (non_defini_max && !non_defini_min) {
251  res_max = res_min;
252  }
253 
254  if (res_min < 0.) { res_min = 0.; }
255 
256  if (res_max < 0.) { res_max = 0.; }
257 
258  if (msg_l_min == msg_l_max && msg_l_min == -2.) {
259  msg_l_min = res_min;
260  msg_l_max = res_max;
261  }
262 
263  if (res_max > msg_l_max) { msg_l_max = res_max; }
264 
265  if (res_min < msg_l_min) { msg_l_min = res_min; }
266 
267  } // end of : for each lx
268  }
269 
273  template < typename GUM_SCALAR >
275  std::vector< std::vector< GUM_SCALAR > >& combi_msg_p,
276  const NodeId& id,
277  GUM_SCALAR& msg_l_min,
278  GUM_SCALAR& msg_l_max,
279  std::vector< GUM_SCALAR >& lx,
280  const Idx& pos) {
281  GUM_SCALAR num_min = 0.;
282  GUM_SCALAR num_max = 0.;
283  GUM_SCALAR den_min = 0.;
284  GUM_SCALAR den_max = 0.;
285 
286  auto taille = combi_msg_p.size();
287 
288  std::vector< typename std::vector< GUM_SCALAR >::iterator > it(taille);
289 
290  for (decltype(taille) i = 0; i < taille; i++) {
291  it[i] = combi_msg_p[i].begin();
292  }
293 
294  Size pp = pos;
295  Size pas = Size(intPow(2, pp));
296 
297  Size combi_den = 0;
298  Size combi_num = pp;
299 
300  // marginalisation
301  while (it[taille - 1] != combi_msg_p[taille - 1].end()) {
302  GUM_SCALAR prod = 1.;
303 
304  for (decltype(taille) k = 0; k < taille; k++) {
305  prod *= *it[k];
306  }
307 
308  den_min += (__cn->get_CPT_min()[id][combi_den] * prod);
309  den_max += (__cn->get_CPT_max()[id][combi_den] * prod);
310 
311  num_min += (__cn->get_CPT_min()[id][combi_num] * prod);
312  num_max += (__cn->get_CPT_max()[id][combi_num] * prod);
313 
314  combi_den++;
315  combi_num++;
316 
317  if (combi_den % pp == 0) {
318  combi_den += pp;
319  combi_num += pp;
320  }
321 
322  // incrementation
323  ++it[0];
324 
325  for (decltype(taille) i = 0;
326  (i < taille - 1) && (it[i] == combi_msg_p[i].end());
327  ++i) {
328  it[i] = combi_msg_p[i].begin();
329  ++it[i + 1];
330  }
331  } // end of : marginalisation
332 
333  _compute_ext(msg_l_min, msg_l_max, lx, num_min, num_max, den_min, den_max);
334  }
335 
340  template < typename GUM_SCALAR >
342  std::vector< std::vector< GUM_SCALAR > >& combi_msg_p,
343  const NodeId& id,
344  GUM_SCALAR& msg_p_min,
345  GUM_SCALAR& msg_p_max) {
346  GUM_SCALAR min = 0.;
347  GUM_SCALAR max = 0.;
348 
349  auto taille = combi_msg_p.size();
350 
351  std::vector< typename std::vector< GUM_SCALAR >::iterator > it(taille);
352 
353  for (decltype(taille) i = 0; i < taille; i++) {
354  it[i] = combi_msg_p[i].begin();
355  }
356 
357  int combi = 0;
358  auto theEnd = combi_msg_p[taille - 1].end();
359 
360  while (it[taille - 1] != theEnd) {
361  GUM_SCALAR prod = 1.;
362 
363  for (decltype(taille) k = 0; k < taille; k++) {
364  prod *= *it[k];
365  }
366 
367  min += (__cn->get_CPT_min()[id][combi] * prod);
368  max += (__cn->get_CPT_max()[id][combi] * prod);
369 
370  combi++;
371 
372  // incrementation
373  ++it[0];
374 
375  for (decltype(taille) i = 0;
376  (i < taille - 1) && (it[i] == combi_msg_p[i].end());
377  ++i) {
378  it[i] = combi_msg_p[i].begin();
379  ++it[i + 1];
380  }
381  }
382 
383  if (min < msg_p_min) { msg_p_min = min; }
384 
385  if (max > msg_p_max) { msg_p_max = max; }
386  }
387 
391  template < typename GUM_SCALAR >
393  std::vector< std::vector< std::vector< GUM_SCALAR > > >& msgs_p,
394  const NodeId& id,
395  GUM_SCALAR& msg_p_min,
396  GUM_SCALAR& msg_p_max) {
397  auto taille = msgs_p.size();
398 
399  // source node
400  if (taille == 0) {
401  msg_p_min = __cn->get_CPT_min()[id][0];
402  msg_p_max = __cn->get_CPT_max()[id][0];
403  return;
404  }
405 
406  decltype(taille) msgPerm = 1;
407 #pragma omp parallel
408  {
409  GUM_SCALAR msg_pmin = msg_p_min;
410  GUM_SCALAR msg_pmax = msg_p_max;
411 
412  std::vector< std::vector< GUM_SCALAR > > combi_msg_p(taille);
413 
414  decltype(taille) confs = 1;
415 
416 #pragma omp for
417 
418  for (long i = 0; i < long(taille); i++) {
419  confs *= msgs_p[i].size();
420  }
421 
422 #pragma omp atomic
423  msgPerm *= confs;
424 #pragma omp barrier
425 #pragma omp \
426  flush // ( msgPerm ) let the compiler choose what to flush (due to mvsc)
427 
428 #pragma omp for
429 
430  for (int j = 0; j < int(msgPerm); j++) {
431  // get jth msg :
432  auto jvalue = j;
433 
434  for (decltype(taille) i = 0; i < taille; i++) {
435  if (msgs_p[i].size() == 2) {
436  combi_msg_p[i] = (jvalue & 1) ? msgs_p[i][1] : msgs_p[i][0];
437  jvalue /= 2;
438  } else {
439  combi_msg_p[i] = msgs_p[i][0];
440  }
441  }
442 
443  _compute_ext(combi_msg_p, id, msg_pmin, msg_pmax);
444  }
445 
446 // since min is _INF and max is 0 at init, there is no issue having more threads
447 // here
448 // than during for loop
449 #pragma omp critical(msgpminmax)
450  {
451 #pragma omp flush //( msg_p_min )
452  //#pragma omp flush ( msg_p_max ) let the compiler choose what to
453  // flush (due to mvsc)
454 
455  if (msg_p_min > msg_pmin) { msg_p_min = msg_pmin; }
456 
457  if (msg_p_max < msg_pmax) { msg_p_max = msg_pmax; }
458  }
459  }
460  return;
461  }
462 
467  template < typename GUM_SCALAR >
469  std::vector< std::vector< std::vector< GUM_SCALAR > > >& msgs_p,
470  const NodeId& id,
471  GUM_SCALAR& real_msg_l_min,
472  GUM_SCALAR& real_msg_l_max,
473  std::vector< GUM_SCALAR >& lx,
474  const Idx& pos) {
475  GUM_SCALAR msg_l_min = real_msg_l_min;
476  GUM_SCALAR msg_l_max = real_msg_l_max;
477 
478  auto taille = msgs_p.size();
479 
480  // one parent node, the one receiving the message
481  if (taille == 0) {
482  GUM_SCALAR num_min = __cn->get_CPT_min()[id][1];
483  GUM_SCALAR num_max = __cn->get_CPT_max()[id][1];
484  GUM_SCALAR den_min = __cn->get_CPT_min()[id][0];
485  GUM_SCALAR den_max = __cn->get_CPT_max()[id][0];
486 
487  _compute_ext(msg_l_min, msg_l_max, lx, num_min, num_max, den_min, den_max);
488 
489  real_msg_l_min = msg_l_min;
490  real_msg_l_max = msg_l_max;
491  return;
492  }
493 
494  decltype(taille) msgPerm = 1;
495 #pragma omp parallel
496  {
497  GUM_SCALAR msg_lmin = msg_l_min;
498  GUM_SCALAR msg_lmax = msg_l_max;
499  std::vector< std::vector< GUM_SCALAR > > combi_msg_p(taille);
500 
501  decltype(taille) confs = 1;
502 #pragma omp for
503 
504  for (int i = 0; i < int(taille); i++) {
505  confs *= msgs_p[i].size();
506  }
507 
508 #pragma omp atomic
509  msgPerm *= confs;
510 #pragma omp barrier
511 #pragma omp flush(msgPerm)
512 
513 // direct binary representation of config, no need for iterators
514 #pragma omp for
515 
516  for (long j = 0; j < long(msgPerm); j++) {
517  // get jth msg :
518  auto jvalue = j;
519 
520  for (decltype(taille) i = 0; i < taille; i++) {
521  if (msgs_p[i].size() == 2) {
522  combi_msg_p[i] = (jvalue & 1) ? msgs_p[i][1] : msgs_p[i][0];
523  jvalue /= 2;
524  } else {
525  combi_msg_p[i] = msgs_p[i][0];
526  }
527  }
528 
529  _compute_ext(combi_msg_p, id, msg_lmin, msg_lmax, lx, pos);
530  }
531 
532 // there may be more threads here than in the for loop, therefor positive test
533 // is NECESSARY (init is -2)
534 #pragma omp critical(msglminmax)
535  {
536 #pragma omp flush(msg_l_min)
537 #pragma omp flush(msg_l_max)
538 
539  if ((msg_l_min > msg_lmin || msg_l_min == -2) && msg_lmin > 0) {
540  msg_l_min = msg_lmin;
541  }
542 
543  if ((msg_l_max < msg_lmax || msg_l_max == -2) && msg_lmax > 0) {
544  msg_l_max = msg_lmax;
545  }
546  }
547  }
548 
549  real_msg_l_min = msg_l_min;
550  real_msg_l_max = msg_l_max;
551  }
552 
553  template < typename GUM_SCALAR >
555  if (_InferenceUpToDate) { return; }
556 
557  _initialize();
558 
559  __infE::initApproximationScheme();
560 
561  switch (__inferenceType) {
562  case InferenceType::nodeToNeighbours:
563  _makeInferenceNodeToNeighbours();
564  break;
565 
566  case InferenceType::ordered: _makeInferenceByOrderedArcs(); break;
567 
568  case InferenceType::randomOrder: _makeInferenceByRandomOrder(); break;
569  }
570 
571  //_updateMarginals();
572  _updateIndicatrices(); // will call _updateMarginals()
573 
574  _computeExpectations();
575 
576  _InferenceUpToDate = true;
577  }
578 
579  template < typename GUM_SCALAR >
581  __infE::eraseAllEvidence();
582 
583  _ArcsL_min.clear();
584  _ArcsL_max.clear();
585  _ArcsP_min.clear();
586  _ArcsP_max.clear();
587  _NodesL_min.clear();
588  _NodesL_max.clear();
589  _NodesP_min.clear();
590  _NodesP_max.clear();
591 
592  _InferenceUpToDate = false;
593 
594  if (_msg_l_sent.size() > 0) {
595  for (auto node : __bnet->nodes()) {
596  delete _msg_l_sent[node];
597  }
598  }
599 
600  _msg_l_sent.clear();
601  _update_l.clear();
602  _update_p.clear();
603 
604  active_nodes_set.clear();
605  next_active_nodes_set.clear();
606  }
607 
608  template < typename GUM_SCALAR >
610  const DAG& graphe = __bnet->dag();
611 
612  // use const iterators with cbegin when available
613  for (auto node : __bnet->topologicalOrder()) {
614  _update_p.set(node, false);
615  _update_l.set(node, false);
616  NodeSet* _parents = new NodeSet();
617  _msg_l_sent.set(node, _parents);
618 
619  // accelerer init pour evidences
620  if (__infE::_evidence.exists(node)) {
621  if (__infE::_evidence[node][1] != 0.
622  && __infE::_evidence[node][1] != 1.) {
624  "CNLoopyPropagation can only handle HARD evidences");
625  }
626 
627  active_nodes_set.insert(node);
628  _update_l.set(node, true);
629  _update_p.set(node, true);
630 
631  if (__infE::_evidence[node][1] == (GUM_SCALAR)1.) {
632  _NodesL_min.set(node, _INF);
633  _NodesP_min.set(node, (GUM_SCALAR)1.);
634  } else if (__infE::_evidence[node][1] == (GUM_SCALAR)0.) {
635  _NodesL_min.set(node, (GUM_SCALAR)0.);
636  _NodesP_min.set(node, (GUM_SCALAR)0.);
637  }
638 
639  std::vector< GUM_SCALAR > marg(2);
640  marg[1] = _NodesP_min[node];
641  marg[0] = 1 - marg[1];
642 
643  __infE::_oldMarginalMin.set(node, marg);
644  __infE::_oldMarginalMax.set(node, marg);
645 
646  continue;
647  }
648 
649  NodeSet _par = graphe.parents(node);
650  NodeSet _enf = graphe.children(node);
651 
652  if (_par.size() == 0) {
653  active_nodes_set.insert(node);
654  _update_p.set(node, true);
655  _update_l.set(node, true);
656  }
657 
658  if (_enf.size() == 0) {
659  active_nodes_set.insert(node);
660  _update_p.set(node, true);
661  _update_l.set(node, true);
662  }
663 
668  const auto parents = &__bnet->cpt(node).variablesSequence();
669 
670  std::vector< std::vector< std::vector< GUM_SCALAR > > > msgs_p;
671  std::vector< std::vector< GUM_SCALAR > > msg_p;
672  std::vector< GUM_SCALAR > distri(2);
673 
674  // +1 from start to avoid _counting itself
675  // use const iterators when available with cbegin
676  for (auto jt = ++parents->begin(), theEnd = parents->end(); jt != theEnd;
677  ++jt) {
678  // compute probability distribution to avoid doing it multiple times
679  // (at
680  // each combination of messages)
681  distri[1] = _NodesP_min[__bnet->nodeId(**jt)];
682  distri[0] = (GUM_SCALAR)1. - distri[1];
683  msg_p.push_back(distri);
684 
685  if (_NodesP_max.exists(__bnet->nodeId(**jt))) {
686  distri[1] = _NodesP_max[__bnet->nodeId(**jt)];
687  distri[0] = (GUM_SCALAR)1. - distri[1];
688  msg_p.push_back(distri);
689  }
690 
691  msgs_p.push_back(msg_p);
692  msg_p.clear();
693  }
694 
695  GUM_SCALAR msg_p_min = 1.;
696  GUM_SCALAR msg_p_max = 0.;
697 
698  if (__cn->currentNodeType(node)
700  _enum_combi(msgs_p, node, msg_p_min, msg_p_max);
701  }
702 
703  if (msg_p_min <= (GUM_SCALAR)0.) { msg_p_min = (GUM_SCALAR)0.; }
704 
705  if (msg_p_max <= (GUM_SCALAR)0.) { msg_p_max = (GUM_SCALAR)0.; }
706 
707  _NodesP_min.set(node, msg_p_min);
708  std::vector< GUM_SCALAR > marg(2);
709  marg[1] = msg_p_min;
710  marg[0] = 1 - msg_p_min;
711 
712  __infE::_oldMarginalMin.set(node, marg);
713 
714  if (msg_p_min != msg_p_max) {
715  marg[1] = msg_p_max;
716  marg[0] = 1 - msg_p_max;
717  _NodesP_max.insert(node, msg_p_max);
718  }
719 
720  __infE::_oldMarginalMax.set(node, marg);
721 
722  _NodesL_min.set(node, (GUM_SCALAR)1.);
723  }
724 
725  for (auto arc : __bnet->arcs()) {
726  _ArcsP_min.set(arc, _NodesP_min[arc.tail()]);
727 
728  if (_NodesP_max.exists(arc.tail())) {
729  _ArcsP_max.set(arc, _NodesP_max[arc.tail()]);
730  }
731 
732  _ArcsL_min.set(arc, _NodesL_min[arc.tail()]);
733  }
734  }
735 
736  template < typename GUM_SCALAR >
738  const DAG& graphe = __bnet->dag();
739 
740  GUM_SCALAR eps;
741  // to validate TestSuite
742  __infE::continueApproximationScheme(1.);
743 
744  do {
745  for (auto node : active_nodes_set) {
746  for (auto chil : graphe.children(node)) {
747  if (__cn->currentNodeType(chil)
749  continue;
750  }
751 
752  _msgP(node, chil);
753  }
754 
755  for (auto par : graphe.parents(node)) {
756  if (__cn->currentNodeType(node)
758  continue;
759  }
760 
761  _msgL(node, par);
762  }
763  }
764 
765  eps = _calculateEpsilon();
766 
767  __infE::updateApproximationScheme();
768 
769  active_nodes_set.clear();
770  active_nodes_set = next_active_nodes_set;
771  next_active_nodes_set.clear();
772 
773  } while (__infE::continueApproximationScheme(eps)
774  && active_nodes_set.size() > 0);
775 
776  __infE::stopApproximationScheme(); // just to be sure of the
777  // approximationScheme has been notified of
778  // the end of looop
779  }
780 
781  template < typename GUM_SCALAR >
783  Size nbrArcs = __bnet->dag().sizeArcs();
784 
785  std::vector< cArcP > seq;
786  seq.reserve(nbrArcs);
787 
788  for (const auto& arc : __bnet->arcs()) {
789  seq.push_back(&arc);
790  }
791 
792  GUM_SCALAR eps;
793  // validate TestSuite
794  __infE::continueApproximationScheme(1.);
795 
796  do {
797  for (Size j = 0, theEnd = nbrArcs / 2; j < theEnd; j++) {
798  auto w1 = rand() % nbrArcs, w2 = rand() % nbrArcs;
799 
800  if (w1 == w2) { continue; }
801 
802  std::swap(seq[w1], seq[w2]);
803  }
804 
805  for (const auto it : seq) {
806  if (__cn->currentNodeType(it->tail())
808  || __cn->currentNodeType(it->head())
810  continue;
811  }
812 
813  _msgP(it->tail(), it->head());
814  _msgL(it->head(), it->tail());
815  }
816 
817  eps = _calculateEpsilon();
818 
819  __infE::updateApproximationScheme();
820 
821  } while (__infE::continueApproximationScheme(eps));
822  }
823 
824  // gives slightly worse results for some variable/modalities than other
825  // inference
826  // types (node D on 2U network loose 0.03 precision)
827  template < typename GUM_SCALAR >
829  Size nbrArcs = __bnet->dag().sizeArcs();
830 
831  std::vector< cArcP > seq;
832  seq.reserve(nbrArcs);
833 
834  for (const auto& arc : __bnet->arcs()) {
835  seq.push_back(&arc);
836  }
837 
838  GUM_SCALAR eps;
839  // validate TestSuite
840  __infE::continueApproximationScheme(1.);
841 
842  do {
843  for (const auto it : seq) {
844  if (__cn->currentNodeType(it->tail())
846  || __cn->currentNodeType(it->head())
848  continue;
849  }
850 
851  _msgP(it->tail(), it->head());
852  _msgL(it->head(), it->tail());
853  }
854 
855  eps = _calculateEpsilon();
856 
857  __infE::updateApproximationScheme();
858 
859  } while (__infE::continueApproximationScheme(eps));
860  }
861 
862  template < typename GUM_SCALAR >
864  NodeSet const& children = __bnet->children(Y);
865  NodeSet const& _parents = __bnet->parents(Y);
866 
867  const auto parents = &__bnet->cpt(Y).variablesSequence();
868 
869  if (((children.size() + parents->size() - 1) == 1)
870  && (!__infE::_evidence.exists(Y))) {
871  return;
872  }
873 
874  bool update_l = _update_l[Y];
875  bool update_p = _update_p[Y];
876 
877  if (!update_p && !update_l) { return; }
878 
879  _msg_l_sent[Y]->insert(X);
880 
881  // for future refresh LM/PI
882  if (_msg_l_sent[Y]->size() == _parents.size()) {
883  _msg_l_sent[Y]->clear();
884  _update_l[Y] = false;
885  }
886 
887  // refresh LM_part
888  if (update_l) {
889  if (!children.empty() && !__infE::_evidence.exists(Y)) {
890  GUM_SCALAR lmin = 1.;
891  GUM_SCALAR lmax = 1.;
892 
893  for (auto chil : children) {
894  lmin *= _ArcsL_min[Arc(Y, chil)];
895 
896  if (_ArcsL_max.exists(Arc(Y, chil))) {
897  lmax *= _ArcsL_max[Arc(Y, chil)];
898  } else {
899  lmax *= _ArcsL_min[Arc(Y, chil)];
900  }
901  }
902 
903  lmin = lmax;
904 
905  if (lmax != lmax && lmin == lmin) { lmax = lmin; }
906 
907  if (lmax != lmax && lmin != lmin) {
908  std::cout << "no likelihood defined [lmin, lmax] (incompatibles "
909  "evidence ?)"
910  << std::endl;
911  }
912 
913  if (lmin < 0.) { lmin = 0.; }
914 
915  if (lmax < 0.) { lmax = 0.; }
916 
917  // no need to update nodeL if evidence since nodeL will never be used
918 
919  _NodesL_min[Y] = lmin;
920 
921  if (lmin != lmax) {
922  _NodesL_max.set(Y, lmax);
923  } else if (_NodesL_max.exists(Y)) {
924  _NodesL_max.erase(Y);
925  }
926 
927  } // end of : node has children & no evidence
928 
929  } // end of : if update_l
930 
931  GUM_SCALAR lmin = _NodesL_min[Y];
932  GUM_SCALAR lmax;
933 
934  if (_NodesL_max.exists(Y)) {
935  lmax = _NodesL_max[Y];
936  } else {
937  lmax = lmin;
938  }
939 
944  if (lmin == lmax && lmin == 1.) {
945  _ArcsL_min[Arc(X, Y)] = lmin;
946 
947  if (_ArcsL_max.exists(Arc(X, Y))) { _ArcsL_max.erase(Arc(X, Y)); }
948 
949  return;
950  }
951 
952  // garder pour chaque noeud un table des parents maj, une fois tous maj,
953  // stop
954  // jusque notification msg L ou P
955 
956  if (update_p || update_l) {
957  std::vector< std::vector< std::vector< GUM_SCALAR > > > msgs_p;
958  std::vector< std::vector< GUM_SCALAR > > msg_p;
959  std::vector< GUM_SCALAR > distri(2);
960 
961  Idx pos;
962 
963  // +1 from start to avoid _counting itself
964  // use const iterators with cbegin when available
965  for (auto jt = ++parents->begin(), theEnd = parents->end(); jt != theEnd;
966  ++jt) {
967  if (__bnet->nodeId(**jt) == X) {
968  // retirer la variable courante de la taille
969  pos = parents->pos(*jt) - 1;
970  continue;
971  }
972 
973  // compute probability distribution to avoid doing it multiple times
974  // (at
975  // each combination of messages)
976  distri[1] = _ArcsP_min[Arc(__bnet->nodeId(**jt), Y)];
977  distri[0] = GUM_SCALAR(1.) - distri[1];
978  msg_p.push_back(distri);
979 
980  if (_ArcsP_max.exists(Arc(__bnet->nodeId(**jt), Y))) {
981  distri[1] = _ArcsP_max[Arc(__bnet->nodeId(**jt), Y)];
982  distri[0] = GUM_SCALAR(1.) - distri[1];
983  msg_p.push_back(distri);
984  }
985 
986  msgs_p.push_back(msg_p);
987  msg_p.clear();
988  }
989 
990  GUM_SCALAR min = -2.;
991  GUM_SCALAR max = -2.;
992 
993  std::vector< GUM_SCALAR > lx;
994  lx.push_back(lmin);
995 
996  if (lmin != lmax) { lx.push_back(lmax); }
997 
998  _enum_combi(msgs_p, Y, min, max, lx, pos);
999 
1000  if (min == -2. || max == -2.) {
1001  if (min != -2.) {
1002  max = min;
1003  } else if (max != -2.) {
1004  min = max;
1005  } else {
1006  std::cout << std::endl;
1007  std::cout << "!!!! pas de message L calculable !!!!" << std::endl;
1008  return;
1009  }
1010  }
1011 
1012  if (min < 0.) { min = 0.; }
1013 
1014  if (max < 0.) { max = 0.; }
1015 
1016  bool update = false;
1017 
1018  if (min != _ArcsL_min[Arc(X, Y)]) {
1019  _ArcsL_min[Arc(X, Y)] = min;
1020  update = true;
1021  }
1022 
1023  if (_ArcsL_max.exists(Arc(X, Y))) {
1024  if (max != _ArcsL_max[Arc(X, Y)]) {
1025  if (max != min) {
1026  _ArcsL_max[Arc(X, Y)] = max;
1027  } else { // if ( max == min )
1028  _ArcsL_max.erase(Arc(X, Y));
1029  }
1030 
1031  update = true;
1032  }
1033  } else {
1034  if (max != min) {
1035  _ArcsL_max.insert(Arc(X, Y), max);
1036  update = true;
1037  }
1038  }
1039 
1040  if (update) {
1041  _update_l.set(X, true);
1042  next_active_nodes_set.insert(X);
1043  }
1044 
1045  } // end of update_p || update_l
1046  }
1047 
1048  template < typename GUM_SCALAR >
1050  const NodeId demanding_child) {
1051  NodeSet const& children = __bnet->children(X);
1052 
1053  const auto parents = &__bnet->cpt(X).variablesSequence();
1054 
1055  if (((children.size() + parents->size() - 1) == 1)
1056  && (!__infE::_evidence.exists(X))) {
1057  return;
1058  }
1059 
1060  // LM_part ---- from all children but one --- the lonely one will get the
1061  // message
1062 
1063  if (__infE::_evidence.exists(X)) {
1064  _ArcsP_min[Arc(X, demanding_child)] = __infE::_evidence[X][1];
1065 
1066  if (_ArcsP_max.exists(Arc(X, demanding_child))) {
1067  _ArcsP_max.erase(Arc(X, demanding_child));
1068  }
1069 
1070  return;
1071  }
1072 
1073  bool update_l = _update_l[X];
1074  bool update_p = _update_p[X];
1075 
1076  if (!update_p && !update_l) { return; }
1077 
1078  GUM_SCALAR lmin = 1.;
1079  GUM_SCALAR lmax = 1.;
1080 
1081  // use cbegin if available
1082  for (auto chil : children) {
1083  if (chil == demanding_child) { continue; }
1084 
1085  lmin *= _ArcsL_min[Arc(X, chil)];
1086 
1087  if (_ArcsL_max.exists(Arc(X, chil))) {
1088  lmax *= _ArcsL_max[Arc(X, chil)];
1089  } else {
1090  lmax *= _ArcsL_min[Arc(X, chil)];
1091  }
1092  }
1093 
1094  if (lmin != lmin && lmax == lmax) { lmin = lmax; }
1095 
1096  if (lmax != lmax && lmin == lmin) { lmax = lmin; }
1097 
1098  if (lmax != lmax && lmin != lmin) {
1099  std::cout << "pas de vraisemblance definie [lmin, lmax] (observations "
1100  "incompatibles ?)"
1101  << std::endl;
1102  return;
1103  }
1104 
1105  if (lmin < 0.) { lmin = 0.; }
1106 
1107  if (lmax < 0.) { lmax = 0.; }
1108 
1109  // refresh PI_part
1110  GUM_SCALAR min = _INF;
1111  GUM_SCALAR max = 0.;
1112 
1113  if (update_p) {
1114  std::vector< std::vector< std::vector< GUM_SCALAR > > > msgs_p;
1115  std::vector< std::vector< GUM_SCALAR > > msg_p;
1116  std::vector< GUM_SCALAR > distri(2);
1117 
1118  // +1 from start to avoid _counting itself
1119  // use const_iterators if available
1120  for (auto jt = ++parents->begin(), theEnd = parents->end(); jt != theEnd;
1121  ++jt) {
1122  // compute probability distribution to avoid doing it multiple times
1123  // (at
1124  // each combination of messages)
1125  distri[1] = _ArcsP_min[Arc(__bnet->nodeId(**jt), X)];
1126  distri[0] = GUM_SCALAR(1.) - distri[1];
1127  msg_p.push_back(distri);
1128 
1129  if (_ArcsP_max.exists(Arc(__bnet->nodeId(**jt), X))) {
1130  distri[1] = _ArcsP_max[Arc(__bnet->nodeId(**jt), X)];
1131  distri[0] = GUM_SCALAR(1.) - distri[1];
1132  msg_p.push_back(distri);
1133  }
1134 
1135  msgs_p.push_back(msg_p);
1136  msg_p.clear();
1137  }
1138 
1139  _enum_combi(msgs_p, X, min, max);
1140 
1141  if (min < 0.) { min = 0.; }
1142 
1143  if (max < 0.) { max = 0.; }
1144 
1145  if (min == _INF || max == _INF) {
1146  std::cout << " ERREUR msg P min = max = INF " << std::endl;
1147  std::cout.flush();
1148  return;
1149  }
1150 
1151  _NodesP_min[X] = min;
1152 
1153  if (min != max) {
1154  _NodesP_max.set(X, max);
1155  } else if (_NodesP_max.exists(X)) {
1156  _NodesP_max.erase(X);
1157  }
1158 
1159  _update_p.set(X, false);
1160 
1161  } // end of update_p
1162  else {
1163  min = _NodesP_min[X];
1164 
1165  if (_NodesP_max.exists(X)) {
1166  max = _NodesP_max[X];
1167  } else {
1168  max = min;
1169  }
1170  }
1171 
1172  if (update_p || update_l) {
1173  GUM_SCALAR msg_p_min;
1174  GUM_SCALAR msg_p_max;
1175 
1176  // cas limites sur min
1177  if (min == _INF && lmin == 0.) {
1178  std::cout << "MESSAGE P ERR (negatif) : pi = inf, l = 0" << std::endl;
1179  }
1180 
1181  if (lmin == _INF) { // cas infini
1182  msg_p_min = GUM_SCALAR(1.);
1183  } else if (min == 0. || lmin == 0.) {
1184  msg_p_min = 0;
1185  } else {
1186  msg_p_min = GUM_SCALAR(1. / (1. + ((1. / min - 1.) * 1. / lmin)));
1187  }
1188 
1189  // cas limites sur max
1190  if (max == _INF && lmax == 0.) {
1191  std::cout << "MESSAGE P ERR (negatif) : pi = inf, l = 0" << std::endl;
1192  }
1193 
1194  if (lmax == _INF) { // cas infini
1195  msg_p_max = GUM_SCALAR(1.);
1196  } else if (max == 0. || lmax == 0.) {
1197  msg_p_max = 0;
1198  } else {
1199  msg_p_max = GUM_SCALAR(1. / (1. + ((1. / max - 1.) * 1. / lmax)));
1200  }
1201 
1202  if (msg_p_min != msg_p_min && msg_p_max == msg_p_max) {
1203  msg_p_min = msg_p_max;
1204  std::cout << std::endl;
1205  std::cout << "msg_p_min is NaN" << std::endl;
1206  }
1207 
1208  if (msg_p_max != msg_p_max && msg_p_min == msg_p_min) {
1209  msg_p_max = msg_p_min;
1210  std::cout << std::endl;
1211  std::cout << "msg_p_max is NaN" << std::endl;
1212  }
1213 
1214  if (msg_p_max != msg_p_max && msg_p_min != msg_p_min) {
1215  std::cout << std::endl;
1216  std::cout << "pas de message P calculable (verifier observations)"
1217  << std::endl;
1218  return;
1219  }
1220 
1221  if (msg_p_min < 0.) { msg_p_min = 0.; }
1222 
1223  if (msg_p_max < 0.) { msg_p_max = 0.; }
1224 
1225  bool update = false;
1226 
1227  if (msg_p_min != _ArcsP_min[Arc(X, demanding_child)]) {
1228  _ArcsP_min[Arc(X, demanding_child)] = msg_p_min;
1229  update = true;
1230  }
1231 
1232  if (_ArcsP_max.exists(Arc(X, demanding_child))) {
1233  if (msg_p_max != _ArcsP_max[Arc(X, demanding_child)]) {
1234  if (msg_p_max != msg_p_min) {
1235  _ArcsP_max[Arc(X, demanding_child)] = msg_p_max;
1236  } else { // if ( msg_p_max == msg_p_min )
1237  _ArcsP_max.erase(Arc(X, demanding_child));
1238  }
1239 
1240  update = true;
1241  }
1242  } else {
1243  if (msg_p_max != msg_p_min) {
1244  _ArcsP_max.insert(Arc(X, demanding_child), msg_p_max);
1245  update = true;
1246  }
1247  }
1248 
1249  if (update) {
1250  _update_p.set(demanding_child, true);
1251  next_active_nodes_set.insert(demanding_child);
1252  }
1253 
1254  } // end of : update_l || update_p
1255  }
1256 
1257  template < typename GUM_SCALAR >
// NOTE(review): this is a Doxygen source dump; original line 1258 (the function
// signature, rendered as a hyperlink) was lost in extraction. Per the member
// index it is _refreshLMsPIs(bool refreshIndic = false): "Get the last messages
// from one's parents and children" -- TODO confirm against the original header.
// Purpose (from visible code): for every node, refold the latest arc messages
// into per-node lambda bounds [_NodesL_min, _NodesL_max] and pi bounds
// [_NodesP_min, _NodesP_max].
1259  for (auto node : __bnet->nodes()) {
// Original line 1262 (right-hand side of the node-type comparison) is missing;
// presumably indicator nodes are skipped unless refreshIndic is set -- verify
// against CredalNet::currentNodeType.
1260  if ((!refreshIndic)
1261  && __cn->currentNodeType(node)
1263  continue;
1264  }
1265 
1266  NodeSet const& children = __bnet->children(node);
1267 
// variablesSequence() includes the node itself, hence the "size() - 1" and
// "++begin()" adjustments below when iterating over the actual parents.
1268  auto parents = &__bnet->cpt(node).variablesSequence();
1269 
// Lambda (likelihood) refresh: product of the lambda messages sent by each
// child; falls back to the min message when no distinct max message exists.
1270  if (_update_l[node]) {
1271  GUM_SCALAR lmin = 1.;
1272  GUM_SCALAR lmax = 1.;
1273 
1274  if (!children.empty() && !__infE::_evidence.exists(node)) {
1275  for (auto chil : children) {
1276  lmin *= _ArcsL_min[Arc(node, chil)];
1277 
1278  if (_ArcsL_max.exists(Arc(node, chil))) {
1279  lmax *= _ArcsL_max[Arc(node, chil)];
1280  } else {
1281  lmax *= _ArcsL_min[Arc(node, chil)];
1282  }
1283  }
1284 
// "x != x" is the classic IEEE-754 NaN test: repair lmin from lmax when only
// lmin turned NaN.
1285  if (lmin != lmin && lmax == lmax) { lmin = lmax; }
1286 
// NOTE(review): this UNCONDITIONALLY collapses the interval to [lmin, lmin],
// which makes the "lmin != lmax" store below unreachable. It looks like a
// dropped guard (e.g. "if (lmax != lmax && lmin == lmin)") -- the symmetric
// repair of lmax from lmin -- lost in extraction. TODO confirm upstream.
1287  lmax = lmin;
1288 
// Both bounds NaN: no likelihood can be defined (inconsistent observations).
1289  if (lmax != lmax && lmin != lmin) {
1290  std::cout
1291  << "pas de vraisemblance definie [lmin, lmax] (observations "
1292  "incompatibles ?)"
1293  << std::endl;
1294  return;
1295  }
1296 
// Clamp tiny negative values (numerical noise) to zero.
1297  if (lmin < 0.) { lmin = 0.; }
1298 
1299  if (lmax < 0.) { lmax = 0.; }
1300 
// Store lmax only when the interval is non-degenerate; a degenerate interval
// is represented by the absence of an entry in _NodesL_max.
1301  _NodesL_min[node] = lmin;
1302 
1303  if (lmin != lmax) {
1304  _NodesL_max.set(node, lmax);
1305  } else if (_NodesL_max.exists(node)) {
1306  _NodesL_max.erase(node);
1307  }
1308  }
1309 
1310  } // end of : update_l
1311 
// Pi refresh: enumerate every combination of the parents' pi messages (min and,
// when distinct, max per arc) via _enum_combi to obtain the pi bounds.
// NOTE(review): _update_p[node] is cleared at line 1357 but _update_l[node] is
// never cleared in the lambda branch above -- possibly intentional, possibly a
// dropped line; verify upstream.
1312  if (_update_p[node]) {
1313  if ((parents->size() - 1) > 0 && !__infE::_evidence.exists(node)) {
1314  std::vector< std::vector< std::vector< GUM_SCALAR > > > msgs_p;
1315  std::vector< std::vector< GUM_SCALAR > > msg_p;
1316  std::vector< GUM_SCALAR > distri(2);
1317 
1318  // +1 from start to avoid _counting itself
1319  // cbegin
1320  for (auto jt = ++parents->begin(), theEnd = parents->end();
1321  jt != theEnd;
1322  ++jt) {
1323  // compute probability distribution to avoid doing it multiple
1324  // times
1325  // (at each combination of messages)
// Binary variables: distri = {P(x=0), P(x=1)} built from the arc message.
1326  distri[1] = _ArcsP_min[Arc(__bnet->nodeId(**jt), node)];
1327  distri[0] = GUM_SCALAR(1.) - distri[1];
1328  msg_p.push_back(distri);
1329 
1330  if (_ArcsP_max.exists(Arc(__bnet->nodeId(**jt), node))) {
1331  distri[1] = _ArcsP_max[Arc(__bnet->nodeId(**jt), node)];
1332  distri[0] = GUM_SCALAR(1.) - distri[1];
1333  msg_p.push_back(distri);
1334  }
1335 
1336  msgs_p.push_back(msg_p);
1337  msg_p.clear();
1338  }
1339 
// min starts at +infinity, max at 0; _enum_combi tightens both in place.
1340  GUM_SCALAR min = _INF;
1341  GUM_SCALAR max = 0.;
1342 
1343  _enum_combi(msgs_p, node, min, max);
1344 
1345  if (min < 0.) { min = 0.; }
1346 
1347  if (max < 0.) { max = 0.; }
1348 
// Same degenerate-interval convention as for _NodesL_max above.
1349  _NodesP_min[node] = min;
1350 
1351  if (min != max) {
1352  _NodesP_max.set(node, max);
1353  } else if (_NodesP_max.exists(node)) {
1354  _NodesP_max.erase(node);
1355  }
1356 
1357  _update_p[node] = false;
1358  }
1359  } // end of update_p
1360 
1361  } // end of : for each node
1362  }
1363 
1364  template < typename GUM_SCALAR >
// NOTE(review): original line 1365 (the signature) was lost in this Doxygen
// dump; per the member index it is _updateMarginals(): "Compute marginals from
// up-to-date messages." -- TODO confirm against the original header.
// For each (binary) node, combine the pi bounds [_NodesP_min, _NodesP_max] and
// lambda bounds [_NodesL_min, _NodesL_max] into the posterior interval
// [msg_p_min, msg_p_max] for P(node = 1), then fill the four marginal tables.
1366  for (auto node : __bnet->nodes()) {
1367  GUM_SCALAR msg_p_min = 1.;
1368  GUM_SCALAR msg_p_max = 0.;
1369 
// Hard evidence: the marginal is the observed value itself (degenerate
// interval). NOTE(review): evidence values other than exactly 0. or 1. leave
// msg_p_min at its initial 1. -- presumably evidence is always 0/1 here;
// verify against InferenceEngine::_evidence.
1370  if (__infE::_evidence.exists(node)) {
1371  if (__infE::_evidence[node][1] == 0.) {
1372  msg_p_min = (GUM_SCALAR)0.;
1373  } else if (__infE::_evidence[node][1] == 1.) {
1374  msg_p_min = 1.;
1375  }
1376 
1377  msg_p_max = msg_p_min;
1378  } else {
// Missing map entry means a degenerate interval: max falls back to min.
1379  GUM_SCALAR min = _NodesP_min[node];
1380  GUM_SCALAR max;
1381 
1382  if (_NodesP_max.exists(node)) {
1383  max = _NodesP_max[node];
1384  } else {
1385  max = min;
1386  }
1387 
1388  GUM_SCALAR lmin = _NodesL_min[node];
1389  GUM_SCALAR lmax;
1390 
1391  if (_NodesL_max.exists(node)) {
1392  lmax = _NodesL_max[node];
1393  } else {
1394  lmax = lmin;
1395  }
1396 
// Abort on infinite pi bounds -- cannot normalize.
1397  if (min == _INF || max == _INF) {
1398  std::cout << " min ou max === _INF !!!!!!!!!!!!!!!!!!!!!!!!!! "
1399  << std::endl;
1400  return;
1401  }
1402 
// NOTE(review): dead check -- "min == _INF" can never hold here because the
// early return at line 1397 already fired for that case.
1403  if (min == _INF && lmin == 0.) {
1404  std::cout << "proba ERR (negatif) : pi = inf, l = 0" << std::endl;
1405  return;
1406  }
1407 
// Normalization: p = 1 / (1 + ((1/pi - 1) / lambda)), with the limit cases
// lambda = inf -> 1 and pi = 0 or lambda = 0 -> 0 handled explicitly.
1408  if (lmin == _INF) {
1409  msg_p_min = GUM_SCALAR(1.);
1410  } else if (min == 0. || lmin == 0.) {
1411  msg_p_min = GUM_SCALAR(0.);
1412  } else {
1413  msg_p_min = GUM_SCALAR(1. / (1. + ((1. / min - 1.) * 1. / lmin)));
1414  }
1415 
// NOTE(review): same dead "== _INF" check as above (line 1397 already
// returned); kept byte-identical here.
1416  if (max == _INF && lmax == 0.) {
1417  std::cout << "proba ERR (negatif) : pi = inf, l = 0" << std::endl;
1418  return;
1419  }
1420 
1421  if (lmax == _INF) {
1422  msg_p_max = GUM_SCALAR(1.);
1423  } else if (max == 0. || lmax == 0.) {
1424  msg_p_max = GUM_SCALAR(0.);
1425  } else {
1426  msg_p_max = GUM_SCALAR(1. / (1. + ((1. / max - 1.) * 1. / lmax)));
1427  }
1428  }
1429 
// "x != x" is the IEEE-754 NaN test: repair one bound from the other when
// exactly one of them turned NaN.
1430  if (msg_p_min != msg_p_min && msg_p_max == msg_p_max) {
1431  msg_p_min = msg_p_max;
1432  std::cout << std::endl;
1433  std::cout << "msg_p_min is NaN" << std::endl;
1434  }
1435 
1436  if (msg_p_max != msg_p_max && msg_p_min == msg_p_min) {
1437  msg_p_max = msg_p_min;
1438  std::cout << std::endl;
1439  std::cout << "msg_p_max is NaN" << std::endl;
1440  }
1441 
// Both bounds NaN: inconsistent observations, give up on this pass.
1442  if (msg_p_max != msg_p_max && msg_p_min != msg_p_min) {
1443  std::cout << std::endl;
1444  std::cout << "Please check the observations (no proba can be computed)"
1445  << std::endl;
1446  return;
1447  }
1448 
// Clamp numerical noise below zero.
1449  if (msg_p_min < 0.) { msg_p_min = 0.; }
1450 
1451  if (msg_p_max < 0.) { msg_p_max = 0.; }
1452 
// Binary node: P(0) bounds are the complements of the P(1) bounds, with the
// min/max roles swapped.
1453  __infE::_marginalMin[node][0] = 1 - msg_p_max;
1454  __infE::_marginalMax[node][0] = 1 - msg_p_min;
1455  __infE::_marginalMin[node][1] = msg_p_min;
1456  __infE::_marginalMax[node][1] = msg_p_max;
1457  }
1458  }
1459 
1460  template < typename GUM_SCALAR >
// NOTE(review): original line 1461 (the signature) was lost in this Doxygen
// dump; per the member index it is GUM_SCALAR _calculateEpsilon(): "Compute
// epsilon" -- TODO confirm against the original header.
// Refreshes lambda/pi folds and marginals, then delegates the convergence
// measure to the inference engine base class.
1462  _refreshLMsPIs();
1463  _updateMarginals();
1464 
1465  return __infE::_computeEpsilon();
1466  }
1467 
1468  template < typename GUM_SCALAR >
// NOTE(review): original line 1469 (the signature) was lost in this Doxygen
// dump; per the member index it is _updateIndicatrices(): "Only update
// indicatrices variables at the end of computations (calls _msgP)." -- TODO
// confirm against the original header.
1470  for (auto node : __bnet->nodes()) {
// Original line 1472 (right-hand side of the comparison) is missing;
// presumably nodes whose type is not Indic are skipped -- verify against
// CredalNet::currentNodeType.
1471  if (__cn->currentNodeType(node)
1473  continue;
1474  }
1475 
// Have every parent (re)send its pi message to this indicator node.
1476  for (auto pare : __bnet->parents(node)) {
1477  _msgP(pare, node);
1478  }
1479  }
1480 
// refreshIndic = true so the indicator nodes are included in the final fold.
1481  _refreshLMsPIs(true);
1482  _updateMarginals();
1483  }
1484 
1485  template < typename GUM_SCALAR >
// NOTE(review): original line 1486 (the signature) was lost in this Doxygen
// dump; per the member index it is _computeExpectations(): since the network
// is binary, expectations can be computed from the final marginals -- TODO
// confirm against the original header.
// No modalities registered -> nothing to compute.
1487  if (__infE::_modal.empty()) { return; }
1488 
// Two extreme vertices of each node's 2D credal set, each a distribution over
// the two states {0, 1}.
1489  std::vector< std::vector< GUM_SCALAR > > vertices(
1490  2, std::vector< GUM_SCALAR >(2));
1491 
1492  for (auto node : __bnet->nodes()) {
// Vertex 0 pairs the lower bound of state 0 with the upper bound of state 1;
// vertex 1 is the opposite pairing.
1493  vertices[0][0] = __infE::_marginalMin[node][0];
1494  vertices[0][1] = __infE::_marginalMax[node][1];
1495 
1496  vertices[1][0] = __infE::_marginalMax[node][0];
1497  vertices[1][1] = __infE::_marginalMin[node][1];
1498 
1499  for (auto vertex = 0, vend = 2; vertex != vend; vertex++) {
1500  __infE::_updateExpectations(node, vertices[vertex]);
1501  // test credal sets vertices elim
1502  // remove with L2U since variables are binary
1503  // but does the user know that ?
1504  __infE::_updateCredalSets(
1505  node,
1506  vertices[vertex]); // no redundancy elimination with 2 vertices
1507  }
1508  }
1509  }
1510 
1511  template < typename GUM_SCALAR >
// NOTE(review): original line 1512 (the constructor's qualified name, rendered
// as a hyperlink) was lost in this Doxygen dump; per the member index it is
// CNLoopyPropagation(const CredalNet< GUM_SCALAR >& cnet). The heads of the
// three GUM_ERROR calls (lines 1516, 1524, 1531) and line 1540 (presumably the
// default __inferenceType assignment) are missing for the same reason -- TODO
// confirm against the original header.
// Validates the credal net (separately specified, binary variables, CPT
// min/max precomputed), then caches the net and its underlying BN.
1513  const CredalNet< GUM_SCALAR >& cnet) :
1514  InferenceEngine< GUM_SCALAR >::InferenceEngine(cnet) {
1515  if (!cnet.isSeparatelySpecified()) {
1517  "CNLoopyPropagation is only available "
1518  "with separately specified nets");
1519  }
1520 
1521  // test for binary cn
1522  for (auto node : cnet.current_bn().nodes())
1523  if (cnet.current_bn().variable(node).domainSize() != 2) {
1525  "CNLoopyPropagation is only available "
1526  "with binary credal networks");
1527  }
1528 
1529  // test if compute CPTMinMax has been called
1530  if (!cnet.hasComputedCPTMinMax()) {
1532  "CNLoopyPropagation only works when "
1533  "\"computeCPTMinMax()\" has been called for "
1534  "this credal net");
1535  }
1536 
// Non-owning pointers; the caller keeps ownership of the credal net.
1537  __cn = &cnet;
1538  __bnet = &cnet.current_bn();
1539 
1541  _InferenceUpToDate = false;
1542 
1543  GUM_CONSTRUCTOR(CNLoopyPropagation);
1544  }
1545 
1546  template < typename GUM_SCALAR >
// NOTE(review): original line 1547 (the destructor's qualified name) was lost
// in this Doxygen dump -- TODO confirm against the original header.
// Releases the per-node NodeSet* bookkeeping allocated for _msg_l_sent.
1548  _InferenceUpToDate = false;
1549 
// _msg_l_sent maps each node to a heap-allocated NodeSet; delete them all.
// NOTE(review): assumes _msg_l_sent, when non-empty, has an entry for every
// node of __bnet -- verify against the initialization code.
1550  if (_msg_l_sent.size() > 0) {
1551  for (auto node : __bnet->nodes()) {
1552  delete _msg_l_sent[node];
1553  }
1554  }
1555 
1556  //_msg_l_sent.clear();
1557  //_update_l.clear();
1558  //_update_p.clear();
1559 
1560  GUM_DESTRUCTOR(CNLoopyPropagation);
1561  }
1562 
1563  template < typename GUM_SCALAR >
// NOTE(review): original line 1564 (the signature) was lost in this Doxygen
// dump; from the assignment below it takes an InferenceType parameter named
// "inft" -- setter for the algorithm's message-passing order.
1565  __inferenceType = inft;
1566  }
1567 
1568  template < typename GUM_SCALAR >
// NOTE(review): original lines 1569-1570 (the signature) were lost in this
// Doxygen dump; per the member index it is InferenceType inferenceType():
// "Get the inference type."
1571  return __inferenceType;
1572  }
1573  } // namespace credal
1574 } // end of namespace gum
unsigned long Size
In aGrUM, hashed values are unsigned long int.
Definition: types.h:50
unsigned int NodeId
Type for node ids.
Definition: graphElements.h:97
bool empty() const noexcept
Indicates whether the set is the empty set.
Definition: set_tpl.h:707
Set< NodeId > NodeSet
Some typedefs and defines for shortcuts ...
void _makeInferenceByOrderedArcs()
Starts the inference with this inference type.
#define _INF
void eraseAllEvidence()
Erase all inference related data to perform another one.
InferenceType __inferenceType
The choosen inference type.
Class implementing loopy-propagation with binary networks - L2U algorithm.
void _initialize()
Topological forward propagation to initialize old marginals & messages.
void swap(HashTable< LpCol, double > *&a, HashTable< LpCol, double > *&b)
Swap the addresses of two pointers to hashTables.
const CredalNet< GUM_SCALAR > * __cn
A pointer to the CredalNet to be used.
<agrum/CN/CNLoopyPropagation.h>
void erase(const Key &k)
Erases an element from the set.
Definition: set_tpl.h:656
gum is the global namespace for all aGrUM entities
Definition: agrum.h:25
NodeType currentNodeType(const NodeId &id) const
const BayesNet< GUM_SCALAR > & current_bn() const
void _computeExpectations()
Since the network is binary, expectations can be computed from the final marginals which give us the ...
void _makeInferenceByRandomOrder()
Starts the inference with this inference type.
const IBayesNet< GUM_SCALAR > * __bnet
A pointer to its IBayesNet used as a DAG.
void _updateMarginals()
Compute marginals from up-to-date messages.
Class template representing a Credal Network.
Definition: credalNet.h:87
InferenceType inferenceType()
Get the inference type.
const NodeSet & parents(const NodeId id) const
returns the set of nodes with arc ingoing to a given node
bool _InferenceUpToDate
TRUE if inference has already been performed, FALSE otherwise.
The base class for all directed edgesThis class is used as a basis for manipulating all directed edge...
CNLoopyPropagation(const CredalNet< GUM_SCALAR > &cnet)
Constructor.
Uses a node-set so we don't iterate on nodes that can't send a new message.
const bool isSeparatelySpecified() const
GUM_SCALAR _calculateEpsilon()
Compute epsilon.
void saveInference(const std::string &path)
void _msgL(const NodeId X, const NodeId demanding_parent)
Sends a message to one's parent, i.e.
void _enum_combi(std::vector< std::vector< std::vector< GUM_SCALAR > > > &msgs_p, const NodeId &id, GUM_SCALAR &msg_l_min, GUM_SCALAR &msg_l_max, std::vector< GUM_SCALAR > &lx, const Idx &pos)
Used by _msgL.
void makeInference()
Starts the inference.
NodeProperty< NodeSet * > _msg_l_sent
Used to keep track of one's messages sent to its parents.
Abstract class template representing a CredalNet inference engine.
void _refreshLMsPIs(bool refreshIndic=false)
Get the last messages from one's parents and children.
const NodeSet & children(const NodeId id) const
returns the set of nodes with arc outgoing from a given node
InferenceType
Inference type to be used by the algorithm.
void _compute_ext(GUM_SCALAR &msg_l_min, GUM_SCALAR &msg_l_max, std::vector< GUM_SCALAR > &lx, GUM_SCALAR &num_min, GUM_SCALAR &num_max, GUM_SCALAR &den_min, GUM_SCALAR &den_max)
Used by _msgL.
void _updateIndicatrices()
Only update indicatrices variables at the end of computations ( calls _msgP ).
void _msgP(const NodeId X, const NodeId demanding_child)
Sends a message to one's child, i.e.
unsigned long intPow(unsigned long base, unsigned long exponent)
Specialized pow function with integers (faster implementation).
Definition: pow_inl.h:33
Size size() const noexcept
Returns the number of elements in the set.
Definition: set_tpl.h:701
unsigned long Idx
Type for indexes.
Definition: types.h:43
Base class for dag.
Definition: DAG.h:98
void insert(const Key &k)
Inserts a new element into the set.
Definition: set_tpl.h:613
#define GUM_ERROR(type, msg)
Definition: exceptions.h:66
void _makeInferenceNodeToNeighbours()
Starts the inference with this inference type.
const bool hasComputedCPTMinMax() const