CNLoopyPropagation_tpl.h
1 /**
2  *
3  * Copyright 2005-2020 Pierre-Henri WUILLEMIN(@LIP6) & Christophe GONZALES(@AMU)
4  * info_at_agrum_dot_org
5  *
6  * This library is free software: you can redistribute it and/or modify
7  * it under the terms of the GNU Lesser General Public License as published by
8  * the Free Software Foundation, either version 3 of the License, or
9  * (at your option) any later version.
10  *
11  * This library is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14  * GNU Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public License
17  * along with this library. If not, see <http://www.gnu.org/licenses/>.
18  *
19  */
20 
21 
22 #include <agrum/CN/inference/CNLoopyPropagation.h>
23 
24 namespace gum {
25  namespace credal {
26 
27  template < typename GUM_SCALAR >
28  void CNLoopyPropagation< GUM_SCALAR >::saveInference(const std::string& path) {
29  std::string path_name = path.substr(0, path.size() - 4);
30  path_name = path_name + ".res";
31 
32  std::ofstream res(path_name.c_str(), std::ios::out | std::ios::trunc);
33 
34  if (!res.good()) {
35  GUM_ERROR(IOError,
36  "CNLoopyPropagation<GUM_SCALAR>::saveInference(std::"
37  "string & path) : could not open file : "
38  + path_name);
39  }
40 
41  std::string ext = path.substr(path.size() - 3, path.size());
42 
43  if (std::strcmp(ext.c_str(), "evi") == 0) {
44  std::ifstream evi(path.c_str(), std::ios::in);
45  std::string ligne;
46 
47  if (!evi.good()) {
49  "CNLoopyPropagation<GUM_SCALAR>::saveInference(std::"
50  "string & path) : could not open file : "
51  + ext);
52  }
53 
54  while (evi.good()) {
55  getline(evi, ligne);
56  res << ligne << "\n";
57  }
58 
59  evi.close();
60  }
61 
62  res << "[RESULTATS]"
63  << "\n";
64 
65  for (auto node: bnet__->nodes()) {
66  // compute the posterior distribution
67  GUM_SCALAR msg_p_min = 1.0;
68  GUM_SCALAR msg_p_max = 0.0;
69 
70  // evidence case: immediate computation
71  if (infE__::evidence_.exists(node)) {
72  if (infE__::evidence_[node][1] == 0.) {
73  msg_p_min = 0.;
74  } else if (infE__::evidence_[node][1] == 1.) {
75  msg_p_min = 1.;
76  }
77 
78  msg_p_max = msg_p_min;
79  }
80  // otherwise compute it from the stored node P (pi) and node L (lambda) values
81  else {
82  GUM_SCALAR min = NodesP_min_[node];
83  GUM_SCALAR max;
84 
85  if (NodesP_max_.exists(node)) {
86  max = NodesP_max_[node];
87  } else {
88  max = min;
89  }
90 
91  GUM_SCALAR lmin = NodesL_min_[node];
92  GUM_SCALAR lmax;
93 
94  if (NodesL_max_.exists(node)) {
95  lmax = NodesL_max_[node];
96  } else {
97  lmax = lmin;
98  }
99 
100  // limit cases on min
101  if (min == INF_ && lmin == 0.) {
102  std::cout << "proba ERR (negatif) : pi = inf, l = 0" << std::endl;
103  }
104 
105  if (lmin == INF_) { // infinite case
106  msg_p_min = GUM_SCALAR(1.);
107  } else if (min == 0. || lmin == 0.) {
108  msg_p_min = GUM_SCALAR(0.);
109  } else {
110  msg_p_min = GUM_SCALAR(1. / (1. + ((1. / min - 1.) * 1. / lmin)));
111  }
112 
113  // limit cases on max
114  if (max == INF_ && lmax == 0.) {
115  std::cout << "proba ERR (negatif) : pi = inf, l = 0" << std::endl;
116  }
117 
118  if (lmax == INF_) { // infinite case
119  msg_p_max = GUM_SCALAR(1.);
120  } else if (max == 0. || lmax == 0.) {
121  msg_p_max = GUM_SCALAR(0.);
122  } else {
123  msg_p_max = GUM_SCALAR(1. / (1. + ((1. / max - 1.) * 1. / lmax)));
124  }
125  }
126 
127  if (msg_p_min != msg_p_min && msg_p_max == msg_p_max) {
128  msg_p_min = msg_p_max;
129  }
130 
131  if (msg_p_max != msg_p_max && msg_p_min == msg_p_min) {
132  msg_p_max = msg_p_min;
133  }
134 
135  if (msg_p_max != msg_p_max && msg_p_min != msg_p_min) {
136  std::cout << std::endl;
137  std::cout << "pas de proba calculable (verifier observations)"
138  << std::endl;
139  }
140 
141  res << "P(" << bnet__->variable(node).name() << " | e) = ";
142 
143  if (infE__::evidence_.exists(node)) {
144  res << "(observe)" << std::endl;
145  } else {
146  res << std::endl;
147  }
148 
149  res << "\t\t" << bnet__->variable(node).label(0) << " [ "
150  << (GUM_SCALAR)1. - msg_p_max;
151 
152  if (msg_p_min != msg_p_max) {
153  res << ", " << (GUM_SCALAR)1. - msg_p_min << " ] | ";
154  } else {
155  res << " ] | ";
156  }
157 
158  res << bnet__->variable(node).label(1) << " [ " << msg_p_min;
159 
160  if (msg_p_min != msg_p_max) {
161  res << ", " << msg_p_max << " ]" << std::endl;
162  } else {
163  res << " ]" << std::endl;
164  }
165  } // end of : for each node
166 
167  res.close();
168  }
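  // Note on the expression used above: with pi in [min, max] the prior bound and
  // l in [lmin, lmax] the likelihood bound for state 1, the posterior bound is
  // 1 / (1 + ((1/pi - 1) / l)), i.e. the two-state form of Pearl's belief update
  // BEL(x) ~ pi(x) * lambda(x); INF_ and 0 are handled as the limit cases of
  // that formula, and the self-comparisons (x != x) detect NaN.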
169 
170  /**
171  * for the following functions, the GUM_SCALAR min/max must be
172  * initialized
173  * (min to 1 and max to 0) in order to compare with the intermediate results
174  */
175 
176  /**
177  * once the cpts are marginalized over X and Ui, compute the min/max
178  */
179  template < typename GUM_SCALAR >
180  void CNLoopyPropagation< GUM_SCALAR >::compute_ext_(
181  GUM_SCALAR& msg_l_min,
182  GUM_SCALAR& msg_l_max,
183  std::vector< GUM_SCALAR >& lx,
184  GUM_SCALAR& num_min,
185  GUM_SCALAR& num_max,
186  GUM_SCALAR& den_min,
187  GUM_SCALAR& den_max) {
188  GUM_SCALAR num_min_tmp = 1.;
189  GUM_SCALAR den_min_tmp = 1.;
190  GUM_SCALAR num_max_tmp = 1.;
191  GUM_SCALAR den_max_tmp = 1.;
192 
193  GUM_SCALAR res_min = 1.0, res_max = 0.0;
194 
195  auto lsize = lx.size();
196 
197  for (decltype(lsize) i = 0; i < lsize; i++) {
198  bool non_defini_min = false;
199  bool non_defini_max = false;
200 
201  if (lx[i] == INF_) {
206  } else if (lx[i] == (GUM_SCALAR)1.) {
207  num_min_tmp = GUM_SCALAR(1.);
208  den_min_tmp = GUM_SCALAR(1.);
209  num_max_tmp = GUM_SCALAR(1.);
210  den_max_tmp = GUM_SCALAR(1.);
211  } else if (lx[i] > (GUM_SCALAR)1.) {
212  GUM_SCALAR li = GUM_SCALAR(1.) / (lx[i] - GUM_SCALAR(1.));
213  num_min_tmp = num_min + li;
214  den_min_tmp = den_max + li;
215  num_max_tmp = num_max + li;
216  den_max_tmp = den_min + li;
217  } else if (lx[i] < (GUM_SCALAR)1.) {
218  GUM_SCALAR li = GUM_SCALAR(1.) / (lx[i] - GUM_SCALAR(1.));
219  num_min_tmp = num_max + li;
220  den_min_tmp = den_min + li;
221  num_max_tmp = num_min + li;
222  den_max_tmp = den_max + li;
223  }
224 
225  if (den_min_tmp == 0. && num_min_tmp == 0.) {
226  non_defini_min = true;
227  } else if (den_min_tmp == 0. && num_min_tmp != 0.) {
228  res_min = INF_;
229  } else if (den_min_tmp != INF_ || num_min_tmp != INF_) {
230  res_min = num_min_tmp / den_min_tmp;
231  }
232 
233  if (den_max_tmp == 0. && num_max_tmp == 0.) {
234  non_defini_max = true;
235  } else if (den_max_tmp == 0. && num_max_tmp != 0.) {
236  res_max = INF_;
237  } else if (den_max_tmp != INF_ || num_max_tmp != INF_) {
238  res_max = num_max_tmp / den_max_tmp;
239  }
240 
241  if (non_defini_min && non_defini_max) {
242  std::cout << "undefined msg" << std::endl;
243  continue;
244  } else if (non_defini_min && !non_defini_max) {
245  res_min = res_max;
246  } else if (non_defini_max && !non_defini_min) {
247  res_max = res_min;
248  }
249 
250  if (res_min < 0.) { res_min = 0.; }
251 
252  if (res_max < 0.) { res_max = 0.; }
253 
254  if (msg_l_min == msg_l_max && msg_l_min == -2.) {
255  msg_l_min = res_min;
256  msg_l_max = res_max;
257  }
258 
259  if (res_max > msg_l_max) { msg_l_max = res_max; }
260 
261  if (res_min < msg_l_min) { msg_l_min = res_min; }
262 
263  } // end of : for each lx
264  }
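  // The pairings above keep the bounds conservative: depending on the sign of
  // li = 1/(lx[i]-1), the numerator and denominator bounds are combined so that
  // res_min is the smallest achievable ratio and res_max the largest, with the
  // 0/0, x/0 and INF_ corner cases resolved just below.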
265 
266  /**
267  * extremes for one combination of the parents, message to a parent
268  */
269  template < typename GUM_SCALAR >
270  void CNLoopyPropagation< GUM_SCALAR >::compute_ext_(
271  std::vector< std::vector< GUM_SCALAR > >& combi_msg_p,
272  const NodeId& id,
273  GUM_SCALAR& msg_l_min,
274  GUM_SCALAR& msg_l_max,
275  std::vector< GUM_SCALAR >& lx,
276  const Idx& pos) {
277  GUM_SCALAR num_min = 0.;
278  GUM_SCALAR num_max = 0.;
279  GUM_SCALAR den_min = 0.;
280  GUM_SCALAR den_max = 0.;
281 
282  auto taille = combi_msg_p.size();
283 
284  std::vector< typename std::vector< GUM_SCALAR >::iterator > it(taille);
285 
286  for (decltype(taille) i = 0; i < taille; i++) {
287  it[i] = combi_msg_p[i].begin();
288  }
289 
290  Size pp = pos;
291 
292  Size combi_den = 0;
293  Size combi_num = pp;
294 
295  // marginalization
296  while (it[taille - 1] != combi_msg_p[taille - 1].end()) {
297  GUM_SCALAR prod = 1.;
298 
299  for (decltype(taille) k = 0; k < taille; k++) {
300  prod *= *it[k];
301  }
302 
303  num_min += (cn__->get_binaryCPT_min()[id][combi_num] * prod);
304  den_min += (cn__->get_binaryCPT_min()[id][combi_den] * prod);
305 
306  num_max += (cn__->get_binaryCPT_max()[id][combi_num] * prod);
307  den_max += (cn__->get_binaryCPT_max()[id][combi_den] * prod);
308 
309  combi_den++;
310  combi_num++;
311 
312  if (pp != 0) {
313  if (combi_den % pp == 0) {
314  combi_den += pp;
315  combi_num += pp;
316  }
317  }
318 
319  // increment the iterators
320  ++it[0];
321 
322  for (decltype(taille) i = 0;
323  (i < taille - 1) && (it[i] == combi_msg_p[i].end());
324  ++i) {
325  it[i] = combi_msg_p[i].begin();
326  ++it[i + 1];
327  }
328  } // end of : marginalisation
329 
330  compute_ext_(msg_l_min, msg_l_max, lx, num_min, num_max, den_min, den_max);
331  }
332 
333  /**
334  * extremes for one combination of the parents, message to a child
335  * cpts marginalization
336  */
337  template < typename GUM_SCALAR >
338  void CNLoopyPropagation< GUM_SCALAR >::compute_ext_(
339  std::vector< std::vector< GUM_SCALAR > >& combi_msg_p,
340  const NodeId& id,
341  GUM_SCALAR& msg_p_min,
342  GUM_SCALAR& msg_p_max) {
343  GUM_SCALAR min = 0.;
344  GUM_SCALAR max = 0.;
345 
346  auto taille = combi_msg_p.size();
347 
348  std::vector< typename std::vector< GUM_SCALAR >::iterator > it(taille);
349 
350  for (decltype(taille) i = 0; i < taille; i++) {
351  it[i] = combi_msg_p[i].begin();
352  }
353 
354  int combi = 0;
355  auto theEnd = combi_msg_p[taille - 1].end();
356 
357  while (it[taille - 1] != theEnd) {
358  GUM_SCALAR prod = 1.;
359 
360  for (decltype(taille) k = 0; k < taille; k++) {
361  prod *= *it[k];
362  }
363 
364  min += (cn__->get_binaryCPT_min()[id][combi] * prod);
365  max += (cn__->get_binaryCPT_max()[id][combi] * prod);
366 
367  combi++;
368 
369  // increment the iterators
370  ++it[0];
371 
372  for (decltype(taille) i = 0;
373  (i < taille - 1) && (it[i] == combi_msg_p[i].end());
374  ++i) {
375  it[i] = combi_msg_p[i].begin();
376  ++it[i + 1];
377  }
378  }
379 
380  if (min < msg_p_min) { msg_p_min = min; }
381 
382  if (max > msg_p_max) { msg_p_max = max; }
383  }
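  // min/max accumulate sum_u P(x | u) * prod_i pi_i(u_i) over all parent
  // configurations, using the lower (get_binaryCPT_min) and upper
  // (get_binaryCPT_max) binary CPTs; the iterator vector 'it' walks the parent
  // configurations in the same order as the CPT entries indexed by 'combi'.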
384 
385  /**
386  * enumerate the combinations of parent messages, for a message to a child
387  */
388  template < typename GUM_SCALAR >
389  void CNLoopyPropagation< GUM_SCALAR >::enum_combi_(
390  std::vector< std::vector< std::vector< GUM_SCALAR > > >& msgs_p,
391  const NodeId& id,
392  GUM_SCALAR& msg_p_min,
393  GUM_SCALAR& msg_p_max) {
394  auto taille = msgs_p.size();
395 
396  // source node
397  if (taille == 0) {
398  msg_p_min = cn__->get_binaryCPT_min()[id][0];
399  msg_p_max = cn__->get_binaryCPT_max()[id][0];
400  return;
401  }
402 
403  decltype(taille) msgPerm = 1;
404 #pragma omp parallel
405  {
406  GUM_SCALAR msg_pmin = msg_p_min;
407  GUM_SCALAR msg_pmax = msg_p_max;
408 
409  std::vector< std::vector< GUM_SCALAR > > combi_msg_p(taille);
410 
411  decltype(taille) confs = 1;
412 
413 #pragma omp for
414 
415  for (long i = 0; i < long(taille); i++) {
416  confs *= msgs_p[i].size();
417  }
418 
419 #pragma omp atomic
420  msgPerm *= confs;
421 #pragma omp barrier
422 #pragma omp \
423    flush // ( msgPerm ) let the compiler choose what to flush (due to MSVC)
424 
425 #pragma omp for
426 
427  for (int j = 0; j < int(msgPerm); j++) {
428  // get jth msg :
429  auto jvalue = j;
430 
431  for (decltype(taille) i = 0; i < taille; i++) {
432  if (msgs_p[i].size() == 2) {
433  combi_msg_p[i] = (jvalue & 1) ? msgs_p[i][1] : msgs_p[i][0];
434  jvalue /= 2;
435  } else {
436  combi_msg_p[i] = msgs_p[i][0];
437  }
438  }
439 
440  compute_ext_(combi_msg_p, id, msg_pmin, msg_pmax);
441  }
442 
443 // since min is INF_ and max is 0 at init, there is no issue having more threads
444 // here
445 // than during the for loop
446 #pragma omp critical(msgpminmax)
447  {
448 #pragma omp flush //( msg_p_min )
449  //#pragma omp flush ( msg_p_max ) let the compiler choose what to
450  // flush (due to MSVC)
451 
452  if (msg_p_min > msg_pmin) { msg_p_min = msg_pmin; }
453 
454  if (msg_p_max < msg_pmax) { msg_p_max = msg_pmax; }
455  }
456  }
457  return;
458  }
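  // Each parent contributes one (min == max) or two (min != max) candidate
  // messages, so the j-th combination is decoded bit by bit with (jvalue & 1)
  // and msgPerm is the product of the per-parent message counts; every OpenMP
  // thread explores its share of combinations and the critical section merges
  // the thread-local bounds into msg_p_min / msg_p_max.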
459 
460  /**
461  * same as above but for a message to a parent; the likelihood is taken
462  * into account
463  */
464  template < typename GUM_SCALAR >
465  void CNLoopyPropagation< GUM_SCALAR >::enum_combi_(
466  std::vector< std::vector< std::vector< GUM_SCALAR > > >& msgs_p,
467  const NodeId& id,
468  GUM_SCALAR& msg_l_min,
469  GUM_SCALAR& msg_l_max,
470  std::vector< GUM_SCALAR >& lx,
471  const Idx& pos) {
474 
475  auto taille = msgs_p.size();
476 
477  // one parent node, the one receiving the message
478  if (taille == 0) {
483 
485 
488  return;
489  }
490 
491  decltype(taille) msgPerm = 1;
492 #pragma omp parallel
493  {
494  GUM_SCALAR msg_lmin = msg_l_min;
495  GUM_SCALAR msg_lmax = msg_l_max;
496  std::vector< std::vector< GUM_SCALAR > > combi_msg_p(taille);
497 
498  decltype(taille) confs = 1;
499 #pragma omp for
500 
501  for (int i = 0; i < int(taille); i++) {
502  confs *= msgs_p[i].size();
503  }
504 
505 #pragma omp atomic
506  msgPerm *= confs;
507 #pragma omp barrier
508 #pragma omp flush(msgPerm)
509 
510 // direct binary representation of config, no need for iterators
511 #pragma omp for
512 
513  for (long j = 0; j < long(msgPerm); j++) {
514  // get jth msg :
515  auto jvalue = j;
516 
517  for (decltype(taille) i = 0; i < taille; i++) {
518  if (msgs_p[i].size() == 2) {
519  combi_msg_p[i] = (jvalue & 1) ? msgs_p[i][1] : msgs_p[i][0];
520  jvalue /= 2;
521  } else {
522  combi_msg_p[i] = msgs_p[i][0];
523  }
524  }
525 
526  compute_ext_(combi_msg_p, id, msg_lmin, msg_lmax, lx, pos);
527  }
528 
529 // there may be more threads here than in the for loop, therefore the positive test
530 // is NECESSARY (init is -2)
531 #pragma omp critical(msglminmax)
532  {
533 #pragma omp flush(msg_l_min)
534 #pragma omp flush(msg_l_max)
535 
536  if ((msg_l_min > msg_lmin || msg_l_min == -2) && msg_lmin > 0) {
537  msg_l_min = msg_lmin;
538  }
539 
540  if ((msg_l_max < msg_lmax || msg_l_max == -2) && msg_lmax > 0) {
541  msg_l_max = msg_lmax;
542  }
543  }
544  }
545 
548  }
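  // msg_l_min and msg_l_max use -2 as a "not yet set" sentinel (likelihoods are
  // never negative), which is why the merge above accepts a thread-local value
  // either when it improves the current bound or when the bound is still -2.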
549 
550  template < typename GUM_SCALAR >
551  void CNLoopyPropagation< GUM_SCALAR >::makeInference() {
552  if (InferenceUpToDate_) { return; }
553 
554  initialize_();
555 
557 
558  switch (inferenceType__) {
559  case InferenceType::nodeToNeighbours:
560  makeInferenceNodeToNeighbours_();
561  break;
562 
563  case InferenceType::ordered:
564  makeInferenceByOrderedArcs_();
565  break;
566 
567  case InferenceType::randomOrder:
568  makeInferenceByRandomOrder_();
569  break;
570  }
571 
572  //_updateMarginals();
573  updateIndicatrices_(); // will call updateMarginals_()
574 
575  computeExpectations_();
576 
577  InferenceUpToDate_ = true;
578  }
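  // Inference pipeline: initialize_() seeds the node/arc bounds, one of the
  // three message-passing schedules (selected by inferenceType__) runs until
  // the approximation scheme stops, and updateIndicatrices_() then refreshes
  // the indicator nodes (calling updateMarginals_()) before the engine is
  // flagged as up to date.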
579 
580  template < typename GUM_SCALAR >
581  void CNLoopyPropagation< GUM_SCALAR >::eraseAllEvidence() {
583 
584  ArcsL_min_.clear();
585  ArcsL_max_.clear();
586  ArcsP_min_.clear();
587  ArcsP_max_.clear();
588  NodesL_min_.clear();
589  NodesL_max_.clear();
590  NodesP_min_.clear();
591  NodesP_max_.clear();
592 
593  InferenceUpToDate_ = false;
594 
595  if (msg_l_sent_.size() > 0) {
596  for (auto node: bnet__->nodes()) {
597  delete msg_l_sent_[node];
598  }
599  }
600 
601  msg_l_sent_.clear();
602  update_l_.clear();
603  update_p_.clear();
604 
607  }
608 
609  template < typename GUM_SCALAR >
610  void CNLoopyPropagation< GUM_SCALAR >::initialize_() {
611  const DAG& graphe = bnet__->dag();
612 
613  // use const iterators with cbegin when available
614  for (auto node: bnet__->topologicalOrder()) {
615  update_p_.set(node, false);
616  update_l_.set(node, false);
617  NodeSet* parents_ = new NodeSet();
618  msg_l_sent_.set(node, parents_);
619 
620  // speed up the initialization for evidence nodes
621  if (infE__::evidence_.exists(node)) {
622  if (infE__::evidence_[node][1] != 0.
623  && infE__::evidence_[node][1] != 1.) {
625  "CNLoopyPropagation can only handle HARD evidences");
626  }
627 
629  update_l_.set(node, true);
630  update_p_.set(node, true);
631 
632  if (infE__::evidence_[node][1] == (GUM_SCALAR)1.) {
635  } else if (infE__::evidence_[node][1] == (GUM_SCALAR)0.) {
638  }
639 
640  std::vector< GUM_SCALAR > marg(2);
641  marg[1] = NodesP_min_[node];
642  marg[0] = 1 - marg[1];
643 
646 
647  continue;
648  }
649 
650  const NodeSet& par_ = graphe.parents(node);
651  const NodeSet& enf_ = graphe.children(node);
652 
653  if (par_.size() == 0) {
655  update_p_.set(node, true);
656  update_l_.set(node, true);
657  }
658 
659  if (enf_.size() == 0) {
661  update_p_.set(node, true);
662  update_l_.set(node, true);
663  }
664 
665  /**
666  * messages and so parents need to be read in order of appearance
667  * use potentials instead of dag
668  */
669  const auto parents = &bnet__->cpt(node).variablesSequence();
670 
671  std::vector< std::vector< std::vector< GUM_SCALAR > > > msgs_p;
672  std::vector< std::vector< GUM_SCALAR > > msg_p;
673  std::vector< GUM_SCALAR > distri(2);
674 
675  // +1 from start to avoid counting_ itself
676  // use const iterators when available with cbegin
677  for (auto jt = ++parents->begin(), theEnd = parents->end(); jt != theEnd;
678  ++jt) {
679  // compute probability distribution to avoid doing it multiple times
680  // (at
681  // each combination of messages)
682  distri[1] = NodesP_min_[bnet__->nodeId(**jt)];
683  distri[0] = (GUM_SCALAR)1. - distri[1];
684  msg_p.push_back(distri);
685 
686  if (NodesP_max_.exists(bnet__->nodeId(**jt))) {
687  distri[1] = NodesP_max_[bnet__->nodeId(**jt)];
688  distri[0] = (GUM_SCALAR)1. - distri[1];
689  msg_p.push_back(distri);
690  }
691 
692  msgs_p.push_back(msg_p);
693  msg_p.clear();
694  }
695 
696  GUM_SCALAR msg_p_min = 1.;
697  GUM_SCALAR msg_p_max = 0.;
698 
702  }
703 
704  if (msg_p_min <= (GUM_SCALAR)0.) { msg_p_min = (GUM_SCALAR)0.; }
705 
706  if (msg_p_max <= (GUM_SCALAR)0.) { msg_p_max = (GUM_SCALAR)0.; }
707 
709  std::vector< GUM_SCALAR > marg(2);
710  marg[1] = msg_p_min;
711  marg[0] = 1 - msg_p_min;
712 
714 
715  if (msg_p_min != msg_p_max) {
716  marg[1] = msg_p_max;
717  marg[0] = 1 - msg_p_max;
719  }
720 
722 
724  }
725 
726  for (auto arc: bnet__->arcs()) {
727  ArcsP_min_.insert(arc, NodesP_min_[arc.tail()]);
728 
729  if (NodesP_max_.exists(arc.tail())) {
730  ArcsP_max_.insert(arc, NodesP_max_[arc.tail()]);
731  }
732 
734  }
735  }
736 
737  template < typename GUM_SCALAR >
738  void CNLoopyPropagation< GUM_SCALAR >::makeInferenceNodeToNeighbours_() {
739  const DAG& graphe = bnet__->dag();
740 
741  GUM_SCALAR eps;
742  // to validate TestSuite
744 
745  do {
746  for (auto node: active_nodes_set) {
747  for (auto chil: graphe.children(node)) {
750  continue;
751  }
752 
753  msgP_(node, chil);
754  }
755 
756  for (auto par: graphe.parents(node)) {
759  continue;
760  }
761 
762  msgL_(node, par);
763  }
764  }
765 
766  eps = calculateEpsilon_();
767 
769 
773 
774  } while (infE__::continueApproximationScheme(eps)
775  && active_nodes_set.size() > 0);
776 
777  infE__::stopApproximationScheme(); // just to be sure that the
778  // approximation scheme has been notified of
779  // the end of the loop
780  }
781 
782  template < typename GUM_SCALAR >
783  void CNLoopyPropagation< GUM_SCALAR >::makeInferenceByRandomOrder_() {
784  Size nbrArcs = bnet__->dag().sizeArcs();
785 
786  std::vector< cArcP > seq;
787  seq.reserve(nbrArcs);
788 
789  for (const auto& arc: bnet__->arcs()) {
790  seq.push_back(&arc);
791  }
792 
793  GUM_SCALAR eps;
794  // validate TestSuite
796 
797  do {
798  for (Size j = 0, theEnd = nbrArcs / 2; j < theEnd; j++) {
799  auto w1 = rand() % nbrArcs, w2 = rand() % nbrArcs;
800 
801  if (w1 == w2) { continue; }
802 
803  std::swap(seq[w1], seq[w2]);
804  }
805 
806  for (const auto it: seq) {
807  if (cn__->currentNodeType(it->tail())
808  == CredalNet< GUM_SCALAR >::NodeType::Indic
809  || cn__->currentNodeType(it->head())
810  == CredalNet< GUM_SCALAR >::NodeType::Indic) {
811  continue;
812  }
813 
814  msgP_(it->tail(), it->head());
815  msgL_(it->head(), it->tail());
816  }
817 
818  eps = calculateEpsilon_();
819 
821 
822  } while (infE__::continueApproximationScheme(eps));
823  }
824 
825  // gives slightly worse results for some variables/modalities than the other
826  // inference
827  // types (node D on the 2U network loses 0.03 precision)
828  template < typename GUM_SCALAR >
829  void CNLoopyPropagation< GUM_SCALAR >::makeInferenceByOrderedArcs_() {
830  Size nbrArcs = bnet__->dag().sizeArcs();
831 
832  std::vector< cArcP > seq;
833  seq.reserve(nbrArcs);
834 
835  for (const auto& arc: bnet__->arcs()) {
836  seq.push_back(&arc);
837  }
838 
839  GUM_SCALAR eps;
840  // validate TestSuite
842 
843  do {
844  for (const auto it: seq) {
845  if (cn__->currentNodeType(it->tail())
846  == CredalNet< GUM_SCALAR >::NodeType::Indic
847  || cn__->currentNodeType(it->head())
848  == CredalNet< GUM_SCALAR >::NodeType::Indic) {
849  continue;
850  }
851 
852  msgP_(it->tail(), it->head());
853  msgL_(it->head(), it->tail());
854  }
855 
856  eps = calculateEpsilon_();
857 
859 
860  } while (infE__::continueApproximationScheme(eps));
861  }
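  // The three schedules above differ only in the order in which messages are
  // sent: the first sweeps the currently active nodes and their neighbours, the
  // second shuffles the arcs before each pass, and this last one follows the
  // arcs' storage order; all three run inside a do/while loop controlled by the
  // approximation scheme.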
862 
863  template < typename GUM_SCALAR >
864  void CNLoopyPropagation< GUM_SCALAR >::msgL_(const NodeId Y, const NodeId X) {
865  NodeSet const& children = bnet__->children(Y);
866  NodeSet const& parents_ = bnet__->parents(Y);
867 
868  const auto parents = &bnet__->cpt(Y).variablesSequence();
869 
870  if (((children.size() + parents->size() - 1) == 1)
871  && (!infE__::evidence_.exists(Y))) {
872  return;
873  }
874 
875  bool update_l = update_l_[Y];
876  bool update_p = update_p_[Y];
877 
878  if (!update_p && !update_l) { return; }
879 
880  msg_l_sent_[Y]->insert(X);
881 
882  // for future refresh LM/PI
883  if (msg_l_sent_[Y]->size() == parents_.size()) {
884  msg_l_sent_[Y]->clear();
885  update_l_[Y] = false;
886  }
887 
888  // refresh LM_part
889  if (update_l) {
890  if (!children.empty() && !infE__::evidence_.exists(Y)) {
891  GUM_SCALAR lmin = 1.;
892  GUM_SCALAR lmax = 1.;
893 
894  for (auto chil: children) {
895  lmin *= ArcsL_min_[Arc(Y, chil)];
896 
897  if (ArcsL_max_.exists(Arc(Y, chil))) {
898  lmax *= ArcsL_max_[Arc(Y, chil)];
899  } else {
900  lmax *= ArcsL_min_[Arc(Y, chil)];
901  }
902  }
903 
904  if (lmin != lmin && lmax == lmax) { lmin = lmax; }
905 
906  if (lmax != lmax && lmin == lmin) { lmax = lmin; }
907 
908  if (lmax != lmax && lmin != lmin) {
909  std::cout << "no likelihood defined [lmin, lmax] (incompatibles "
910  "evidence ?)"
911  << std::endl;
912  }
913 
914  if (lmin < 0.) { lmin = 0.; }
915 
916  if (lmax < 0.) { lmax = 0.; }
917 
918  // no need to update nodeL if evidence since nodeL will never be used
919 
920  NodesL_min_[Y] = lmin;
921 
922  if (lmin != lmax) {
923  NodesL_max_.set(Y, lmax);
924  } else if (NodesL_max_.exists(Y)) {
925  NodesL_max_.erase(Y);
926  }
927 
928  } // end of : node has children & no evidence
929 
930  } // end of : if update_l
931 
932  GUM_SCALAR lmin = NodesL_min_[Y];
933  GUM_SCALAR lmax;
934 
935  if (NodesL_max_.exists(Y)) {
936  lmax = NodesL_max_[Y];
937  } else {
938  lmax = lmin;
939  }
940 
941  /**
942  * lmin == lmax == 1 => sends 1 as message to parents
943  */
944 
945  if (lmin == lmax && lmin == 1.) {
946  ArcsL_min_[Arc(X, Y)] = lmin;
947 
948  if (ArcsL_max_.exists(Arc(X, Y))) { ArcsL_max_.erase(Arc(X, Y)); }
949 
950  return;
951  }
952 
953  // keep, for each node, a table of the parents already updated; once all of
954  // them are updated, stop
955  // until a new L or P message notification arrives
956 
957  if (update_p || update_l) {
958  std::vector< std::vector< std::vector< GUM_SCALAR > > > msgs_p;
959  std::vector< std::vector< GUM_SCALAR > > msg_p;
960  std::vector< GUM_SCALAR > distri(2);
961 
962  Idx pos;
963 
964  // +1 from start to avoid counting_ itself
965  // use const iterators with cbegin when available
966  for (auto jt = ++parents->begin(), theEnd = parents->end(); jt != theEnd;
967  ++jt) {
968  if (bnet__->nodeId(**jt) == X) {
969  // remove the current variable from the size
970  pos = parents->pos(*jt) - 1;
971  continue;
972  }
973 
974  // compute probability distribution to avoid doing it multiple times
975  // (at each combination of messages)
976  distri[1] = ArcsP_min_[Arc(bnet__->nodeId(**jt), Y)];
977  distri[0] = GUM_SCALAR(1.) - distri[1];
978  msg_p.push_back(distri);
979 
980  if (ArcsP_max_.exists(Arc(bnet__->nodeId(**jt), Y))) {
981  distri[1] = ArcsP_max_[Arc(bnet__->nodeId(**jt), Y)];
982  distri[0] = GUM_SCALAR(1.) - distri[1];
983  msg_p.push_back(distri);
984  }
985 
986  msgs_p.push_back(msg_p);
987  msg_p.clear();
988  }
989 
990  GUM_SCALAR min = -2.;
991  GUM_SCALAR max = -2.;
992 
993  std::vector< GUM_SCALAR > lx;
994  lx.push_back(lmin);
995 
996  if (lmin != lmax) { lx.push_back(lmax); }
997 
998  enum_combi_(msgs_p, Y, min, max, lx, pos);
999 
1000  if (min == -2. || max == -2.) {
1001  if (min != -2.) {
1002  max = min;
1003  } else if (max != -2.) {
1004  min = max;
1005  } else {
1006  std::cout << std::endl;
1007  std::cout << "!!!! pas de message L calculable !!!!" << std::endl;
1008  return;
1009  }
1010  }
1011 
1012  if (min < 0.) { min = 0.; }
1013 
1014  if (max < 0.) { max = 0.; }
1015 
1016  bool update = false;
1017 
1018  if (min != ArcsL_min_[Arc(X, Y)]) {
1019  ArcsL_min_[Arc(X, Y)] = min;
1020  update = true;
1021  }
1022 
1023  if (ArcsL_max_.exists(Arc(X, Y))) {
1024  if (max != ArcsL_max_[Arc(X, Y)]) {
1025  if (max != min) {
1026  ArcsL_max_[Arc(X, Y)] = max;
1027  } else { // if ( max == min )
1028  ArcsL_max_.erase(Arc(X, Y));
1029  }
1030 
1031  update = true;
1032  }
1033  } else {
1034  if (max != min) {
1035  ArcsL_max_.insert(Arc(X, Y), max);
1036  update = true;
1037  }
1038  }
1039 
1040  if (update) {
1041  update_l_.set(X, true);
1043  }
1044 
1045  } // end of update_p || update_l
1046  }
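  // Storage convention used throughout: the _min_ containers (NodesL_min_,
  // ArcsL_min_, NodesP_min_, ArcsP_min_) always hold the lower bound, while the
  // corresponding _max_ entry is kept only when it differs from the lower bound
  // and is erased otherwise, so "exists" tests double as "is the interval
  // non-degenerate" tests.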
1047 
1048  template < typename GUM_SCALAR >
1049  void CNLoopyPropagation< GUM_SCALAR >::msgP_(const NodeId X,
1050  const NodeId demanding_child) {
1051  NodeSet const& children = bnet__->children(X);
1052 
1053  const auto parents = &bnet__->cpt(X).variablesSequence();
1054 
1055  if (((children.size() + parents->size() - 1) == 1)
1056  && (!infE__::evidence_.exists(X))) {
1057  return;
1058  }
1059 
1060  // LM_part ---- from all children but one --- the lonely one will get the
1061  // message
1062 
1063  if (infE__::evidence_.exists(X)) {
1064  ArcsP_min_[Arc(X, demanding_child)] = infE__::evidence_[X][1];
1065 
1066  if (ArcsP_max_.exists(Arc(X, demanding_child))) {
1067  ArcsP_max_.erase(Arc(X, demanding_child));
1068  }
1069 
1070  return;
1071  }
1072 
1073  bool update_l = update_l_[X];
1074  bool update_p = update_p_[X];
1075 
1076  if (!update_p && !update_l) { return; }
1077 
1078  GUM_SCALAR lmin = 1.;
1079  GUM_SCALAR lmax = 1.;
1080 
1081  // use cbegin if available
1082  for (auto chil: children) {
1083  if (chil == demanding_child) { continue; }
1084 
1085  lmin *= ArcsL_min_[Arc(X, chil)];
1086 
1087  if (ArcsL_max_.exists(Arc(X, chil))) {
1088  lmax *= ArcsL_max_[Arc(X, chil)];
1089  } else {
1090  lmax *= ArcsL_min_[Arc(X, chil)];
1091  }
1092  }
1093 
1094  if (lmin != lmin && lmax == lmax) { lmin = lmax; }
1095 
1096  if (lmax != lmax && lmin == lmin) { lmax = lmin; }
1097 
1098  if (lmax != lmax && lmin != lmin) {
1099  std::cout << "pas de vraisemblance definie [lmin, lmax] (observations "
1100  "incompatibles ?)"
1101  << std::endl;
1102  return;
1103  }
1104 
1105  if (lmin < 0.) { lmin = 0.; }
1106 
1107  if (lmax < 0.) { lmax = 0.; }
1108 
1109  // refresh PI_part
1110  GUM_SCALAR min = INF_;
1111  GUM_SCALAR max = 0.;
1112 
1113  if (update_p) {
1114  std::vector< std::vector< std::vector< GUM_SCALAR > > > msgs_p;
1115  std::vector< std::vector< GUM_SCALAR > > msg_p;
1116  std::vector< GUM_SCALAR > distri(2);
1117 
1118  // +1 from start to avoid counting_ itself
1119  // use const_iterators if available
1120  for (auto jt = ++parents->begin(), theEnd = parents->end(); jt != theEnd;
1121  ++jt) {
1122  // compute probability distribution to avoid doing it multiple times
1123  // (at
1124  // each combination of messages)
1125  distri[1] = ArcsP_min_[Arc(bnet__->nodeId(**jt), X)];
1126  distri[0] = GUM_SCALAR(1.) - distri[1];
1127  msg_p.push_back(distri);
1128 
1129  if (ArcsP_max_.exists(Arc(bnet__->nodeId(**jt), X))) {
1130  distri[1] = ArcsP_max_[Arc(bnet__->nodeId(**jt), X)];
1131  distri[0] = GUM_SCALAR(1.) - distri[1];
1132  msg_p.push_back(distri);
1133  }
1134 
1135  msgs_p.push_back(msg_p);
1136  msg_p.clear();
1137  }
1138 
1139  enum_combi_(msgs_p, X, min, max);
1140 
1141  if (min < 0.) { min = 0.; }
1142 
1143  if (max < 0.) { max = 0.; }
1144 
1145  if (min == INF_ || max == INF_) {
1146  std::cout << " ERREUR msg P min = max = INF " << std::endl;
1147  std::cout.flush();
1148  return;
1149  }
1150 
1151  NodesP_min_[X] = min;
1152 
1153  if (min != max) {
1154  NodesP_max_.set(X, max);
1155  } else if (NodesP_max_.exists(X)) {
1156  NodesP_max_.erase(X);
1157  }
1158 
1159  update_p_.set(X, false);
1160 
1161  } // end of update_p
1162  else {
1163  min = NodesP_min_[X];
1164 
1165  if (NodesP_max_.exists(X)) {
1166  max = NodesP_max_[X];
1167  } else {
1168  max = min;
1169  }
1170  }
1171 
1172  if (update_p || update_l) {
1173  GUM_SCALAR msg_p_min = GUM_SCALAR(1.);
1174  GUM_SCALAR msg_p_max = GUM_SCALAR(0.);
1175 
1176  // limit cases on min
1177  if (min == INF_ && lmin == 0.) {
1178  std::cout << "MESSAGE P ERR (negatif) : pi = inf, l = 0" << std::endl;
1179  }
1180 
1181  if (lmin == INF_) { // infinite case
1182  msg_p_min = GUM_SCALAR(1.);
1183  } else if (min == 0. || lmin == 0.) {
1184  msg_p_min = 0;
1185  } else {
1186  msg_p_min = GUM_SCALAR(1. / (1. + ((1. / min - 1.) * 1. / lmin)));
1187  }
1188 
1189  // limit cases on max
1190  if (max == INF_ && lmax == 0.) {
1191  std::cout << "MESSAGE P ERR (negatif) : pi = inf, l = 0" << std::endl;
1192  }
1193 
1194  if (lmax == INF_) { // infinite case
1195  msg_p_max = GUM_SCALAR(1.);
1196  } else if (max == 0. || lmax == 0.) {
1197  msg_p_max = 0;
1198  } else {
1199  msg_p_max = GUM_SCALAR(1. / (1. + ((1. / max - 1.) * 1. / lmax)));
1200  }
1201 
1202  if (msg_p_min != msg_p_min && msg_p_max == msg_p_max) {
1203  msg_p_min = msg_p_max;
1204  std::cout << std::endl;
1205  std::cout << "msg_p_min is NaN" << std::endl;
1206  }
1207 
1208  if (msg_p_max != msg_p_max && msg_p_min == msg_p_min) {
1209  msg_p_max = msg_p_min;
1210  std::cout << std::endl;
1211  std::cout << "msg_p_max is NaN" << std::endl;
1212  }
1213 
1214  if (msg_p_max != msg_p_max && msg_p_min != msg_p_min) {
1215  std::cout << std::endl;
1216  std::cout << "pas de message P calculable (verifier observations)"
1217  << std::endl;
1218  return;
1219  }
1220 
1221  if (msg_p_min < 0.) { msg_p_min = 0.; }
1222 
1223  if (msg_p_max < 0.) { msg_p_max = 0.; }
1224 
1225  bool update = false;
1226 
1227  if (msg_p_min != ArcsP_min_[Arc(X, demanding_child)]) {
1228  ArcsP_min_[Arc(X, demanding_child)] = msg_p_min;
1229  update = true;
1230  }
1231 
1232  if (ArcsP_max_.exists(Arc(X, demanding_child))) {
1233  if (msg_p_max != ArcsP_max_[Arc(X, demanding_child)]) {
1234  if (msg_p_max != msg_p_min) {
1235  ArcsP_max_[Arc(X, demanding_child)] = msg_p_max;
1236  } else { // if ( msg_p_max == msg_p_min )
1237  ArcsP_max_.erase(Arc(X, demanding_child));
1238  }
1239 
1240  update = true;
1241  }
1242  } else {
1243  if (msg_p_max != msg_p_min) {
1244  ArcsP_max_.insert(Arc(X, demanding_child), msg_p_max);
1245  update = true;
1246  }
1247  }
1248 
1249  if (update) {
1250  update_p_.set(demanding_child, true);
1252  }
1253 
1254  } // end of : update_l || update_p
1255  }
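  // A message is stored only when it actually changes the arc's current bounds;
  // in that case the receiving node's update flags (update_p_ / update_l_) are
  // raised so it gets reprocessed in a later sweep, while nodes whose flags are
  // both false return immediately at the top of msgL_ / msgP_.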
1256 
1257  template < typename GUM_SCALAR >
1258  void CNLoopyPropagation< GUM_SCALAR >::refreshLMsPIs_(bool refreshIndic) {
1259  for (auto node: bnet__->nodes()) {
1260  if ((!refreshIndic)
1261  && cn__->currentNodeType(node)
1262  == CredalNet< GUM_SCALAR >::NodeType::Indic) {
1263  continue;
1264  }
1265 
1266  NodeSet const& children = bnet__->children(node);
1267 
1268  auto parents = &bnet__->cpt(node).variablesSequence();
1269 
1270  if (update_l_[node]) {
1271  GUM_SCALAR lmin = 1.;
1272  GUM_SCALAR lmax = 1.;
1273 
1274  if (!children.empty() && !infE__::evidence_.exists(node)) {
1275  for (auto chil: children) {
1276  lmin *= ArcsL_min_[Arc(node, chil)];
1277 
1278  if (ArcsL_max_.exists(Arc(node, chil))) {
1279  lmax *= ArcsL_max_[Arc(node, chil)];
1280  } else {
1281  lmax *= ArcsL_min_[Arc(node, chil)];
1282  }
1283  }
1284 
1285  if (lmin != lmin && lmax == lmax) { lmin = lmax; }
1286 
1287  if (lmax != lmax && lmin == lmin) { lmax = lmin; }
1288 
1289  if (lmax != lmax && lmin != lmin) {
1290  std::cout
1291  << "pas de vraisemblance definie [lmin, lmax] (observations "
1292  "incompatibles ?)"
1293  << std::endl;
1294  return;
1295  }
1296 
1297  if (lmin < 0.) { lmin = 0.; }
1298 
1299  if (lmax < 0.) { lmax = 0.; }
1300 
1301  NodesL_min_[node] = lmin;
1302 
1303  if (lmin != lmax) {
1304  NodesL_max_.set(node, lmax);
1305  } else if (NodesL_max_.exists(node)) {
1306  NodesL_max_.erase(node);
1307  }
1308  }
1309 
1310  } // end of : update_l
1311 
1312  if (update_p_[node]) {
1313  if ((parents->size() - 1) > 0 && !infE__::evidence_.exists(node)) {
1314  std::vector< std::vector< std::vector< GUM_SCALAR > > > msgs_p;
1315  std::vector< std::vector< GUM_SCALAR > > msg_p;
1316  std::vector< GUM_SCALAR > distri(2);
1317 
1318  // +1 from start to avoid counting_ itself
1319  // cbegin
1320  for (auto jt = ++parents->begin(), theEnd = parents->end();
1321  jt != theEnd;
1322  ++jt) {
1323  // compute probability distribution to avoid doing it multiple
1324  // times
1325  // (at each combination of messages)
1326  distri[1] = ArcsP_min_[Arc(bnet__->nodeId(**jt), node)];
1327  distri[0] = GUM_SCALAR(1.) - distri[1];
1328  msg_p.push_back(distri);
1329 
1330  if (ArcsP_max_.exists(Arc(bnet__->nodeId(**jt), node))) {
1331  distri[1] = ArcsP_max_[Arc(bnet__->nodeId(**jt), node)];
1332  distri[0] = GUM_SCALAR(1.) - distri[1];
1333  msg_p.push_back(distri);
1334  }
1335 
1336  msgs_p.push_back(msg_p);
1337  msg_p.clear();
1338  }
1339 
1340  GUM_SCALAR min = INF_;
1341  GUM_SCALAR max = 0.;
1342 
1343  enum_combi_(msgs_p, node, min, max);
1344 
1345  if (min < 0.) { min = 0.; }
1346 
1347  if (max < 0.) { max = 0.; }
1348 
1349  NodesP_min_[node] = min;
1350 
1351  if (min != max) {
1352  NodesP_max_.set(node, max);
1353  } else if (NodesP_max_.exists(node)) {
1354  NodesP_max_.erase(node);
1355  }
1356 
1357  update_p_[node] = false;
1358  }
1359  } // end of update_p
1360 
1361  } // end of : for each node
1362  }
1363 
1364  template < typename GUM_SCALAR >
1365  void CNLoopyPropagation< GUM_SCALAR >::updateMarginals_() {
1366  for (auto node: bnet__->nodes()) {
1367  GUM_SCALAR msg_p_min = 1.;
1368  GUM_SCALAR msg_p_max = 0.;
1369 
1370  if (infE__::evidence_.exists(node)) {
1371  if (infE__::evidence_[node][1] == 0.) {
1372  msg_p_min = (GUM_SCALAR)0.;
1373  } else if (infE__::evidence_[node][1] == 1.) {
1374  msg_p_min = 1.;
1375  }
1376 
1377  msg_p_max = msg_p_min;
1378  } else {
1379  GUM_SCALAR min = NodesP_min_[node];
1380  GUM_SCALAR max;
1381 
1382  if (NodesP_max_.exists(node)) {
1383  max = NodesP_max_[node];
1384  } else {
1385  max = min;
1386  }
1387 
1388  GUM_SCALAR lmin = NodesL_min_[node];
1389  GUM_SCALAR lmax;
1391  if (NodesL_max_.exists(node)) {
1392  lmax = NodesL_max_[node];
1393  } else {
1394  lmax = lmin;
1395  }
1396 
1397  if (min == INF_ || max == INF_) {
1398  std::cout << " min ou max === INF_ !!!!!!!!!!!!!!!!!!!!!!!!!! "
1399  << std::endl;
1400  return;
1401  }
1402 
1403  if (min == INF_ && lmin == 0.) {
1404  std::cout << "proba ERR (negatif) : pi = inf, l = 0" << std::endl;
1405  return;
1406  }
1407 
1408  if (lmin == INF_) {
1409  msg_p_min = GUM_SCALAR(1.);
1410  } else if (min == 0. || lmin == 0.) {
1411  msg_p_min = GUM_SCALAR(0.);
1412  } else {
1413  msg_p_min = GUM_SCALAR(1. / (1. + ((1. / min - 1.) * 1. / lmin)));
1414  }
1415 
1416  if (max == INF_ && lmax == 0.) {
1417  std::cout << "proba ERR (negatif) : pi = inf, l = 0" << std::endl;
1418  return;
1419  }
1420 
1421  if (lmax == INF_) {
1422  msg_p_max = GUM_SCALAR(1.);
1423  } else if (max == 0. || lmax == 0.) {
1424  msg_p_max = GUM_SCALAR(0.);
1425  } else {
1426  msg_p_max = GUM_SCALAR(1. / (1. + ((1. / max - 1.) * 1. / lmax)));
1427  }
1428  }
1429 
1430  if (msg_p_min != msg_p_min && msg_p_max == msg_p_max) {
1431  msg_p_min = msg_p_max;
1432  std::cout << std::endl;
1433  std::cout << "msg_p_min is NaN" << std::endl;
1434  }
1435 
1436  if (msg_p_max != msg_p_max && msg_p_min == msg_p_min) {
1437  msg_p_max = msg_p_min;
1438  std::cout << std::endl;
1439  std::cout << "msg_p_max is NaN" << std::endl;
1440  }
1441 
1442  if (msg_p_max != msg_p_max && msg_p_min != msg_p_min) {
1443  std::cout << std::endl;
1444  std::cout << "Please check the observations (no proba can be computed)"
1445  << std::endl;
1446  return;
1447  }
1448 
1449  if (msg_p_min < 0.) { msg_p_min = 0.; }
1450 
1451  if (msg_p_max < 0.) { msg_p_max = 0.; }
1452 
1453  infE__::marginalMin_[node][0] = 1 - msg_p_max;
1454  infE__::marginalMax_[node][0] = 1 - msg_p_min;
1455  infE__::marginalMin_[node][1] = msg_p_min;
1456  infE__::marginalMax_[node][1] = msg_p_max;
1457  GUM_TRACE_VAR(node << ":" << infE__::marginalMin_[node]);
1458  GUM_TRACE_VAR(node << ":" << infE__::marginalMax_[node]);
1459  }
1460  }
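  // For these binary variables only the state-1 bounds msg_p_min / msg_p_max
  // are propagated; the state-0 marginal bounds are their complements (lower of
  // state 0 = 1 - upper of state 1, and vice versa), as set just above.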
1461 
1462  template < typename GUM_SCALAR >
1463  GUM_SCALAR CNLoopyPropagation< GUM_SCALAR >::calculateEpsilon_() {
1464  refreshLMsPIs_();
1465  updateMarginals_();
1466 
1467  return infE__::computeEpsilon_();
1468  }
1469 
1470  template < typename GUM_SCALAR >
1471  void CNLoopyPropagation< GUM_SCALAR >::updateIndicatrices_() {
1472  for (auto node: bnet__->nodes()) {
1473  if (cn__->currentNodeType(node)
1474  != CredalNet< GUM_SCALAR >::NodeType::Indic) {
1475  continue;
1476  }
1477 
1478  for (auto pare: bnet__->parents(node)) {
1479  msgP_(pare, node);
1480  }
1481  }
1482 
1483  refreshLMsPIs_(true);
1484  updateMarginals_();
1485  }
1486 
1487  template < typename GUM_SCALAR >
1488  void CNLoopyPropagation< GUM_SCALAR >::computeExpectations_() {
1489  if (infE__::modal_.empty()) { return; }
1490 
1491  std::vector< std::vector< GUM_SCALAR > > vertices(
1492  2,
1493  std::vector< GUM_SCALAR >(2));
1494 
1495  for (auto node: bnet__->nodes()) {
1496  vertices[0][0] = infE__::marginalMin_[node][0];
1497  vertices[0][1] = infE__::marginalMax_[node][1];
1498 
1499  vertices[1][0] = infE__::marginalMax_[node][0];
1500  vertices[1][1] = infE__::marginalMin_[node][1];
1501 
1502  for (auto vertex = 0, vend = 2; vertex != vend; vertex++) {
1503  infE__::updateExpectations_(node, vertices[vertex]);
1504  // test credal sets vertices elim
1505  // remove with L2U since variables are binary
1506  // but does the user know that ?
1507  infE__::updateCredalSets_(
1508  node,
1509  vertices[vertex]); // no redundancy elimination with 2 vertices
1510  }
1511  }
1512  }
1513 
1514  template < typename GUM_SCALAR >
1515  CNLoopyPropagation< GUM_SCALAR >::CNLoopyPropagation(
1516  const CredalNet< GUM_SCALAR >& cnet) :
1517  InferenceEngine< GUM_SCALAR >(cnet) {
1518  if (!cnet.isSeparatelySpecified()) {
1519  GUM_ERROR(OperationNotAllowed,
1520  "CNLoopyPropagation is only available "
1521  "with separately specified nets");
1522  }
1523 
1524  // test for binary cn
1525  for (auto node: cnet.current_bn().nodes())
1526  if (cnet.current_bn().variable(node).domainSize() != 2) {
1528  "CNLoopyPropagation is only available "
1529  "with binary credal networks");
1530  }
1531 
1532  // test if compute CPTMinMax has been called
1535  "CNLoopyPropagation only works when "
1536  "\"computeBinaryCPTMinMax()\" has been called for "
1537  "this credal net");
1538  }
1539 
1540  cn__ = &cnet;
1541  bnet__ = &cnet.current_bn();
1542 
1544  InferenceUpToDate_ = false;
1545 
1546  GUM_CONSTRUCTOR(CNLoopyPropagation);
1547  }
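  // A possible usage sketch. Only the constraints checked above -- a separately
  // specified, binary credal net on which computeBinaryCPTMinMax() has been
  // called -- are confirmed by this file; the CredalNet construction itself is
  // an assumption:
  //
  //   gum::credal::CredalNet< double > cn(bn_min_file, bn_max_file);
  //   cn.computeBinaryCPTMinMax();
  //   gum::credal::CNLoopyPropagation< double > lp(cn);
  //   lp.makeInference();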
1548 
1549  template < typename GUM_SCALAR >
1550  CNLoopyPropagation< GUM_SCALAR >::~CNLoopyPropagation() {
1551  InferenceUpToDate_ = false;
1552 
1553  if (msg_l_sent_.size() > 0) {
1554  for (auto node: bnet__->nodes()) {
1555  delete msg_l_sent_[node];
1556  }
1557  }
1558 
1559  //_msg_l_sent.clear();
1560  //_update_l.clear();
1561  //_update_p.clear();
1562 
1563  GUM_DESTRUCTOR(CNLoopyPropagation);
1564  }
1565 
1566  template < typename GUM_SCALAR >
1567  void CNLoopyPropagation< GUM_SCALAR >::inferenceType(InferenceType inft) {
1568  inferenceType__ = inft;
1569  }
1570 
1571  template < typename GUM_SCALAR >
1572  typename CNLoopyPropagation< GUM_SCALAR >::InferenceType
1573  CNLoopyPropagation< GUM_SCALAR >::inferenceType() {
1574  return inferenceType__;
1575  }
1576  } // namespace credal
1577 } // end of namespace gum