template < typename GUM_SCALAR >
void CNLoopyPropagation< GUM_SCALAR >::saveInference(const std::string& path) {
  std::string path_name = path.substr(0, path.size() - 4);
  path_name = path_name + ".res";

  std::ofstream res(path_name.c_str(), std::ios::out | std::ios::trunc);

  if (!res.good()) {
    GUM_ERROR(IOError,
              "CNLoopyPropagation<GUM_SCALAR>::saveInference(std::"
              "string & path) : could not open file : "
                 + path_name);
  }

  std::string ext = path.substr(path.size() - 3, path.size());

  // if an evidence file is given, copy it at the beginning of the result file
  if (std::strcmp(ext.c_str(), "evi") == 0) {
    std::ifstream evi(path.c_str(), std::ios::in);

    if (!evi.good()) {
      GUM_ERROR(IOError,
                "CNLoopyPropagation<GUM_SCALAR>::saveInference(std::"
                "string & path) : could not open file : "
                   + ext);
    }

    std::string ligne;

    while (evi.good()) {
      getline(evi, ligne);
      res << ligne << "\n";
    }

    evi.close();
  }

  for (auto node : __bnet->nodes()) {
    // compute the posterior distribution of the node
    GUM_SCALAR msg_p_min = 1.0;
    GUM_SCALAR msg_p_max = 0.0;

    // observed node : the posterior is the (hard) evidence itself
    if (__infE::_evidence.exists(node)) {
      if (__infE::_evidence[node][1] == 0.) {
        msg_p_min = (GUM_SCALAR)0.;
      } else if (__infE::_evidence[node][1] == 1.) {
        msg_p_min = (GUM_SCALAR)1.;
      }

      msg_p_max = msg_p_min;
    } else {
      // otherwise combine the node's pi- and lambda-bounds
      GUM_SCALAR min = _NodesP_min[node];
      GUM_SCALAR max = min;

      if (_NodesP_max.exists(node)) { max = _NodesP_max[node]; }

      GUM_SCALAR lmin = _NodesL_min[node];
      GUM_SCALAR lmax = lmin;

      if (_NodesL_max.exists(node)) { lmax = _NodesL_max[node]; }

      // degenerate cases for the lower bound
      if (min == _INF && lmin == 0.) {
        std::cout << "proba ERR (negative) : pi = inf, l = 0" << std::endl;
      }

      if (lmin == _INF) {
        msg_p_min = GUM_SCALAR(1.);
      } else if (min == 0. || lmin == 0.) {
        msg_p_min = GUM_SCALAR(0.);
      } else {
        msg_p_min = GUM_SCALAR(1. / (1. + ((1. / min - 1.) * 1. / lmin)));
      }

      // degenerate cases for the upper bound
      if (max == _INF && lmax == 0.) {
        std::cout << "proba ERR (negative) : pi = inf, l = 0" << std::endl;
      }

      if (lmax == _INF) {
        msg_p_max = GUM_SCALAR(1.);
      } else if (max == 0. || lmax == 0.) {
        msg_p_max = GUM_SCALAR(0.);
      } else {
        msg_p_max = GUM_SCALAR(1. / (1. + ((1. / max - 1.) * 1. / lmax)));
      }
    }

    // NaN repair : if only one bound is defined, use it for both
    if (msg_p_min != msg_p_min && msg_p_max == msg_p_max) {
      msg_p_min = msg_p_max;
    }

    if (msg_p_max != msg_p_max && msg_p_min == msg_p_min) {
      msg_p_max = msg_p_min;
    }

    if (msg_p_max != msg_p_max && msg_p_min != msg_p_min) {
      std::cout << std::endl;
      std::cout << "no computable proba (check the observations)" << std::endl;
    }

    res << "P(" << __bnet->variable(node).name() << " | e) = ";

    if (__infE::_evidence.exists(node)) {
      res << "(observed)" << std::endl;
    } else {
      res << std::endl;
    }

    res << "\t\t" << __bnet->variable(node).label(0) << " [ "
        << (GUM_SCALAR)1. - msg_p_max;

    if (msg_p_min != msg_p_max) {
      res << ", " << (GUM_SCALAR)1. - msg_p_min << " ] | ";
    } else {
      res << " ] | ";
    }

    res << __bnet->variable(node).label(1) << " [ " << msg_p_min;

    if (msg_p_min != msg_p_max) {
      res << ", " << msg_p_max << " ]" << std::endl;
    } else {
      res << " ]" << std::endl;
    }
  }   // end of : for each node

  res.close();
}
template < typename GUM_SCALAR >
void CNLoopyPropagation< GUM_SCALAR >::_compute_ext(GUM_SCALAR& msg_l_min,
                                                    GUM_SCALAR& msg_l_max,
                                                    std::vector< GUM_SCALAR >& lx,
                                                    GUM_SCALAR& num_min,
                                                    GUM_SCALAR& num_max,
                                                    GUM_SCALAR& den_min,
                                                    GUM_SCALAR& den_max) {
  GUM_SCALAR num_min_tmp = 1.;
  GUM_SCALAR den_min_tmp = 1.;
  GUM_SCALAR num_max_tmp = 1.;
  GUM_SCALAR den_max_tmp = 1.;

  GUM_SCALAR res_min = 1.0, res_max = 0.0;

  auto lsize = lx.size();

  for (decltype(lsize) i = 0; i < lsize; i++) {
    bool non_defini_min = false;
    bool non_defini_max = false;

    if (lx[i] == _INF) {
      num_min_tmp = num_min;
      den_min_tmp = den_max;
      num_max_tmp = num_max;
      den_max_tmp = den_min;
    } else if (lx[i] == (GUM_SCALAR)1.) {
      num_min_tmp = GUM_SCALAR(1.);
      den_min_tmp = GUM_SCALAR(1.);
      num_max_tmp = GUM_SCALAR(1.);
      den_max_tmp = GUM_SCALAR(1.);
    } else if (lx[i] > (GUM_SCALAR)1.) {
      GUM_SCALAR li = GUM_SCALAR(1.) / (lx[i] - GUM_SCALAR(1.));
      num_min_tmp = num_min + li;
      den_min_tmp = den_max + li;
      num_max_tmp = num_max + li;
      den_max_tmp = den_min + li;
    } else if (lx[i] < (GUM_SCALAR)1.) {
      GUM_SCALAR li = GUM_SCALAR(1.) / (lx[i] - GUM_SCALAR(1.));
      num_min_tmp = num_max + li;
      den_min_tmp = den_min + li;
      num_max_tmp = num_min + li;
      den_max_tmp = den_max + li;
    }

    if (den_min_tmp == 0. && num_min_tmp == 0.) {
      non_defini_min = true;
    } else if (den_min_tmp == 0. && num_min_tmp != 0.) {
      res_min = _INF;
    } else if (den_min_tmp != _INF || num_min_tmp != _INF) {
      res_min = num_min_tmp / den_min_tmp;
    }

    if (den_max_tmp == 0. && num_max_tmp == 0.) {
      non_defini_max = true;
    } else if (den_max_tmp == 0. && num_max_tmp != 0.) {
      res_max = _INF;
    } else if (den_max_tmp != _INF || num_max_tmp != _INF) {
      res_max = num_max_tmp / den_max_tmp;
    }

    if (non_defini_max && non_defini_min) {
      std::cout << "undefined msg" << std::endl;
      continue;
    } else if (non_defini_min && !non_defini_max) {
      res_min = res_max;
    } else if (non_defini_max && !non_defini_min) {
      res_max = res_min;
    }

    if (res_min < 0.) { res_min = 0.; }

    if (res_max < 0.) { res_max = 0.; }

    if (msg_l_min == msg_l_max && msg_l_min == -2.) {
      msg_l_min = res_min;
      msg_l_max = res_max;
    }

    if (res_max > msg_l_max) { msg_l_max = res_max; }

    if (res_min < msg_l_min) { msg_l_min = res_min; }
  }   // end of : for each lambda value
}
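/* Two implementation details of the function above are worth noting:
 *  - "x != x" is the portable test for NaN (a NaN never compares equal to
 *    itself); std::isnan(x) is the more explicit equivalent.
 *  - a lambda value l different from 1 is folded into the numerator and the
 *    denominator through li = 1 / (l - 1); e.g. with l = 2 one gets li = 1 and
 *    the candidate extreme becomes (num + 1) / (den + 1).
 * A minimal sketch of the final bound update on plain doubles (updateBounds is
 * only an illustrative helper name; -2 is the "not yet set" sentinel used in
 * this file):
 *
 *   void updateBounds(double res_min, double res_max,
 *                     double& msg_l_min, double& msg_l_max) {
 *     if (msg_l_min == msg_l_max && msg_l_min == -2.) {   // still unset
 *       msg_l_min = res_min;
 *       msg_l_max = res_max;
 *     }
 *     if (res_max > msg_l_max) { msg_l_max = res_max; }
 *     if (res_min < msg_l_min) { msg_l_min = res_min; }
 *   }
 */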
template < typename GUM_SCALAR >
void CNLoopyPropagation< GUM_SCALAR >::_compute_ext(
   std::vector< std::vector< GUM_SCALAR > >& combi_msg_p,
   const NodeId& id,
   GUM_SCALAR& msg_l_min,
   GUM_SCALAR& msg_l_max,
   std::vector< GUM_SCALAR >& lx,
   const Idx& pos) {
  GUM_SCALAR num_min = 0.;
  GUM_SCALAR num_max = 0.;
  GUM_SCALAR den_min = 0.;
  GUM_SCALAR den_max = 0.;

  auto taille = combi_msg_p.size();

  std::vector< typename std::vector< GUM_SCALAR >::iterator > it(taille);

  for (decltype(taille) i = 0; i < taille; i++) {
    it[i] = combi_msg_p[i].begin();
  }

  // the conditioning parent (at position pos among the parents) has period
  // 2^pos in the CPT index since every variable is binary
  Size pp = Size(1) << pos;

  Size combi_den = 0;    // parent configurations with the parent at 0
  Size combi_num = pp;   // parent configurations with the parent at 1

  // marginalisation over the other parents' messages
  while (it[taille - 1] != combi_msg_p[taille - 1].end()) {
    GUM_SCALAR prod = 1.;

    for (decltype(taille) k = 0; k < taille; k++) {
      prod *= *it[k];
    }

    den_min += (__cn->get_CPT_min()[id][combi_den] * prod);
    den_max += (__cn->get_CPT_max()[id][combi_den] * prod);

    num_min += (__cn->get_CPT_min()[id][combi_num] * prod);
    num_max += (__cn->get_CPT_max()[id][combi_num] * prod);

    combi_den++;
    combi_num++;

    if (combi_den % pp == 0) {
      combi_den += pp;
      combi_num += pp;
    }

    // incrementation of the message iterators
    ++it[0];

    for (decltype(taille) i = 0;
         (i < taille - 1) && (it[i] == combi_msg_p[i].end());
         ++i) {
      it[i] = combi_msg_p[i].begin();
      ++it[i + 1];
    }
  }

  _compute_ext(msg_l_min, msg_l_max, lx, num_min, num_max, den_min, den_max);
}
template < typename GUM_SCALAR >
void CNLoopyPropagation< GUM_SCALAR >::_compute_ext(
   std::vector< std::vector< GUM_SCALAR > >& combi_msg_p,
   const NodeId& id,
   GUM_SCALAR& msg_p_min,
   GUM_SCALAR& msg_p_max) {
  GUM_SCALAR min = 0.;
  GUM_SCALAR max = 0.;

  auto taille = combi_msg_p.size();

  std::vector< typename std::vector< GUM_SCALAR >::iterator > it(taille);

  for (decltype(taille) i = 0; i < taille; i++) {
    it[i] = combi_msg_p[i].begin();
  }

  Size combi = 0;
  auto theEnd = combi_msg_p[taille - 1].end();

  while (it[taille - 1] != theEnd) {
    GUM_SCALAR prod = 1.;

    for (decltype(taille) k = 0; k < taille; k++) {
      prod *= *it[k];
    }

    min += (__cn->get_CPT_min()[id][combi] * prod);
    max += (__cn->get_CPT_max()[id][combi] * prod);

    combi++;

    // incrementation of the message iterators
    ++it[0];

    for (decltype(taille) i = 0;
         (i < taille - 1) && (it[i] == combi_msg_p[i].end());
         ++i) {
      it[i] = combi_msg_p[i].begin();
      ++it[i + 1];
    }
  }

  if (min < msg_p_min) { msg_p_min = min; }

  if (max > msg_p_max) { msg_p_max = max; }
}
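/* For one fixed choice of parent messages, the loop above is the
 * total-probability sum  pi(X = 1) = sum_c P(X = 1 | c) * P(c),  where c runs
 * over the parent configurations and P(c) is the product of the selected
 * parent pi-messages; using the CPT lower (resp. upper) bounds yields the
 * lower (resp. upper) candidate, and msg_p_min / msg_p_max keep the extremes
 * over all candidates. A minimal sketch on plain doubles (illustrative names
 * only):
 *
 *   double piOfOneCombination(const std::vector< double >& cptRow,
 *                             const std::vector< double >& configProb) {
 *     double pi = 0.;
 *     for (std::size_t c = 0; c < cptRow.size(); ++c)
 *       pi += cptRow[c] * configProb[c];   // sum_c P(X=1|c) * P(c)
 *     return pi;
 *   }
 */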
template < typename GUM_SCALAR >
void CNLoopyPropagation< GUM_SCALAR >::_enum_combi(
   std::vector< std::vector< std::vector< GUM_SCALAR > > >& msgs_p,
   const NodeId& id,
   GUM_SCALAR& msg_p_min,
   GUM_SCALAR& msg_p_max) {
  auto taille = msgs_p.size();

  // source node : the bounds come straight from the CPT
  if (taille == 0) {
    msg_p_min = __cn->get_CPT_min()[id][0];
    msg_p_max = __cn->get_CPT_max()[id][0];
    return;
  }

  decltype(taille) msgPerm = 1;
#pragma omp parallel
  {
    GUM_SCALAR msg_pmin = msg_p_min;
    GUM_SCALAR msg_pmax = msg_p_max;

    std::vector< std::vector< GUM_SCALAR > > combi_msg_p(taille);

    decltype(taille) confs = 1;

#pragma omp for

    for (long i = 0; i < long(taille); i++) {
      confs *= msgs_p[i].size();
    }

#pragma omp atomic
    msgPerm *= confs;
#pragma omp barrier
#pragma omp flush   // ( msgPerm ) let the compiler choose what to flush (due to MSVC)

#pragma omp for

    for (int j = 0; j < int(msgPerm); j++) {
      // get the jth combination of messages
      auto jvalue = j;

      for (decltype(taille) i = 0; i < taille; i++) {
        if (msgs_p[i].size() == 2) {
          combi_msg_p[i] = (jvalue & 1) ? msgs_p[i][1] : msgs_p[i][0];
          jvalue /= 2;
        } else {
          combi_msg_p[i] = msgs_p[i][0];
        }
      }

      _compute_ext(combi_msg_p, id, msg_pmin, msg_pmax);
    }

#pragma omp critical(msgpminmax)
    {
#pragma omp flush   //( msg_p_min )

      if (msg_p_min > msg_pmin) { msg_p_min = msg_pmin; }

      if (msg_p_max < msg_pmax) { msg_p_max = msg_pmax; }
    }
  }
}
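/* Each parent contributes either one pi-message (a point value) or two (the
 * lower and upper bound of an interval), so the loop above enumerates at most
 * 2^taille message combinations. The index j is decoded bit by bit:
 * (jvalue & 1) selects the lower or upper message of the current parent, then
 * jvalue is halved for the next one. Sketch of the decoding, assuming every
 * parent carries two messages (illustrative names only):
 *
 *   const int nParents = 3;                    // example size
 *   for (int j = 0; j < (1 << nParents); ++j) {
 *     int jvalue = j;
 *     for (int i = 0; i < nParents; ++i) {
 *       bool useUpper = (jvalue & 1) != 0;     // bit i picks lower/upper
 *       jvalue /= 2;
 *       // ... use msgs_p[i][useUpper ? 1 : 0] ...
 *     }
 *   }
 */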
template < typename GUM_SCALAR >
void CNLoopyPropagation< GUM_SCALAR >::_enum_combi(
   std::vector< std::vector< std::vector< GUM_SCALAR > > >& msgs_p,
   const NodeId& id,
   GUM_SCALAR& real_msg_l_min,
   GUM_SCALAR& real_msg_l_max,
   std::vector< GUM_SCALAR >& lx,
   const Idx& pos) {
  GUM_SCALAR msg_l_min = real_msg_l_min;
  GUM_SCALAR msg_l_max = real_msg_l_max;

  auto taille = msgs_p.size();

  // source node : only the conditioning parent remains
  if (taille == 0) {
    GUM_SCALAR num_min = __cn->get_CPT_min()[id][1];
    GUM_SCALAR num_max = __cn->get_CPT_max()[id][1];
    GUM_SCALAR den_min = __cn->get_CPT_min()[id][0];
    GUM_SCALAR den_max = __cn->get_CPT_max()[id][0];

    _compute_ext(msg_l_min, msg_l_max, lx, num_min, num_max, den_min, den_max);

    real_msg_l_min = msg_l_min;
    real_msg_l_max = msg_l_max;
    return;
  }

  decltype(taille) msgPerm = 1;
#pragma omp parallel
  {
    GUM_SCALAR msg_lmin = msg_l_min;
    GUM_SCALAR msg_lmax = msg_l_max;
    std::vector< std::vector< GUM_SCALAR > > combi_msg_p(taille);

    decltype(taille) confs = 1;

#pragma omp for

    for (int i = 0; i < int(taille); i++) {
      confs *= msgs_p[i].size();
    }

#pragma omp atomic
    msgPerm *= confs;
#pragma omp barrier
#pragma omp flush(msgPerm)

#pragma omp for

    for (long j = 0; j < long(msgPerm); j++) {
      // get the jth combination of messages
      auto jvalue = j;

      for (decltype(taille) i = 0; i < taille; i++) {
        if (msgs_p[i].size() == 2) {
          combi_msg_p[i] = (jvalue & 1) ? msgs_p[i][1] : msgs_p[i][0];
          jvalue /= 2;
        } else {
          combi_msg_p[i] = msgs_p[i][0];
        }
      }

      _compute_ext(combi_msg_p, id, msg_lmin, msg_lmax, lx, pos);
    }

#pragma omp critical(msglminmax)
    {
#pragma omp flush(msg_l_min)
#pragma omp flush(msg_l_max)

      if ((msg_l_min > msg_lmin || msg_l_min == -2) && msg_lmin > 0) {
        msg_l_min = msg_lmin;
      }

      if ((msg_l_max < msg_lmax || msg_l_max == -2) && msg_lmax > 0) {
        msg_l_max = msg_lmax;
      }
    }
  }

  real_msg_l_min = msg_l_min;
  real_msg_l_max = msg_l_max;
}
template < typename GUM_SCALAR >
void CNLoopyPropagation< GUM_SCALAR >::makeInference() {
  if (_InferenceUpToDate) { return; }

  _initialize();

  __infE::initApproximationScheme();

  switch (__inferenceType) {
    case InferenceType::nodeToNeighbours: _makeInferenceNodeToNeighbours(); break;

    case InferenceType::ordered: _makeInferenceByOrderedArcs(); break;

    case InferenceType::randomOrder: _makeInferenceByRandomOrder(); break;
  }

  _updateIndicatrices();   // also refreshes and updates the marginals

  _computeExpectations();

  _InferenceUpToDate = true;
}
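/* The three strategies differ only in the order in which pi/lambda messages
 * are sent; all of them iterate until the approximation scheme reports
 * convergence. A caller may switch strategy before running, e.g. (sketch,
 * assuming an already constructed CNLoopyPropagation< double > instance lp and
 * the inferenceType() setter defined at the end of this file):
 *
 *   lp.inferenceType(
 *      gum::credal::CNLoopyPropagation< double >::InferenceType::ordered);
 *   lp.makeInference();
 */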
template < typename GUM_SCALAR >
void CNLoopyPropagation< GUM_SCALAR >::eraseAllEvidence() {
  __infE::eraseAllEvidence();

  _ArcsL_min.clear();
  _ArcsL_max.clear();
  _ArcsP_min.clear();
  _ArcsP_max.clear();
  _NodesL_min.clear();
  _NodesL_max.clear();
  _NodesP_min.clear();
  _NodesP_max.clear();

  _InferenceUpToDate = false;

  if (_msg_l_sent.size() > 0) {
    for (auto node : __bnet->nodes()) {
      delete _msg_l_sent[node];
    }
  }

  _msg_l_sent.clear();
  _update_l.clear();
  _update_p.clear();

  active_nodes_set.clear();
  next_active_nodes_set.clear();
}
template < typename GUM_SCALAR >
void CNLoopyPropagation< GUM_SCALAR >::_initialize() {
  const DAG& graphe = __bnet->dag();

  // topological forward propagation to seed old marginals and messages
  for (auto node : __bnet->topologicalOrder()) {
    _update_p.set(node, false);
    _update_l.set(node, false);
    NodeSet* _parents = new NodeSet();
    _msg_l_sent.set(node, _parents);

    // observed nodes get degenerate values right away
    if (__infE::_evidence.exists(node)) {
      if (__infE::_evidence[node][1] != 0.
          && __infE::_evidence[node][1] != 1.) {
        GUM_ERROR(OperationNotAllowed,
                  "CNLoopyPropagation can only handle HARD evidences");
      }

      active_nodes_set.insert(node);
      _update_l.set(node, true);
      _update_p.set(node, true);

      if (__infE::_evidence[node][1] == (GUM_SCALAR)1.) {
        _NodesL_min.set(node, _INF);
        _NodesP_min.set(node, (GUM_SCALAR)1.);
      } else if (__infE::_evidence[node][1] == (GUM_SCALAR)0.) {
        _NodesL_min.set(node, (GUM_SCALAR)0.);
        _NodesP_min.set(node, (GUM_SCALAR)0.);
      }

      std::vector< GUM_SCALAR > marg(2);
      marg[1] = _NodesP_min[node];
      marg[0] = 1 - marg[1];

      __infE::_oldMarginalMin.set(node, marg);
      __infE::_oldMarginalMax.set(node, marg);

      continue;
    }

    NodeSet const& _par = graphe.parents(node);
    NodeSet const& _enf = graphe.children(node);

    if (_par.size() == 0) {
      active_nodes_set.insert(node);
      _update_p.set(node, true);
      _update_l.set(node, true);
    }

    if (_enf.size() == 0) {
      active_nodes_set.insert(node);
      _update_p.set(node, true);
      _update_l.set(node, true);
    }

    // messages (and so parents) are read in the order of the potential's
    // variable sequence, not in the DAG's order
    const auto parents = &__bnet->cpt(node).variablesSequence();

    std::vector< std::vector< std::vector< GUM_SCALAR > > > msgs_p;
    std::vector< std::vector< GUM_SCALAR > >                msg_p;
    std::vector< GUM_SCALAR >                               distri(2);

    // + 1 from start to avoid counting the node itself
    for (auto jt = ++parents->begin(), theEnd = parents->end(); jt != theEnd;
         ++jt) {
      // compute the probability distribution once for each parent
      distri[1] = _NodesP_min[__bnet->nodeId(**jt)];
      distri[0] = (GUM_SCALAR)1. - distri[1];
      msg_p.push_back(distri);

      if (_NodesP_max.exists(__bnet->nodeId(**jt))) {
        distri[1] = _NodesP_max[__bnet->nodeId(**jt)];
        distri[0] = (GUM_SCALAR)1. - distri[1];
        msg_p.push_back(distri);
      }

      msgs_p.push_back(msg_p);
      msg_p.clear();
    }

    GUM_SCALAR msg_p_min = 1.;
    GUM_SCALAR msg_p_max = 0.;

    if (__cn->currentNodeType(node)
        != CredalNet< GUM_SCALAR >::NodeType::Indic) {
      _enum_combi(msgs_p, node, msg_p_min, msg_p_max);
    }

    if (msg_p_min <= (GUM_SCALAR)0.) { msg_p_min = (GUM_SCALAR)0.; }

    if (msg_p_max <= (GUM_SCALAR)0.) { msg_p_max = (GUM_SCALAR)0.; }

    _NodesP_min.set(node, msg_p_min);
    std::vector< GUM_SCALAR > marg(2);
    marg[1] = msg_p_min;
    marg[0] = 1 - msg_p_min;

    __infE::_oldMarginalMin.set(node, marg);

    if (msg_p_min != msg_p_max) {
      marg[1] = msg_p_max;
      marg[0] = 1 - msg_p_max;
      _NodesP_max.insert(node, msg_p_max);
    }

    __infE::_oldMarginalMax.set(node, marg);

    _NodesL_min.set(node, (GUM_SCALAR)1.);
  }   // end of : for each node in topological order

  for (auto arc : __bnet->arcs()) {
    _ArcsP_min.set(arc, _NodesP_min[arc.tail()]);

    if (_NodesP_max.exists(arc.tail())) {
      _ArcsP_max.set(arc, _NodesP_max[arc.tail()]);
    }

    _ArcsL_min.set(arc, _NodesL_min[arc.tail()]);
  }
}
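/* _initialize performs a single topological forward pass: hard-evidence nodes
 * receive degenerate pi/lambda values (0, 1 or _INF), roots and leaves are
 * activated, and every other node gets prior pi-bounds computed by
 * _enum_combi over its parents' priors. The arc messages are then seeded from
 * these node values, so the first iteration of the propagation loop starts
 * from the priors rather than from arbitrary values.
 */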
template < typename GUM_SCALAR >
void CNLoopyPropagation< GUM_SCALAR >::_makeInferenceNodeToNeighbours() {
  const DAG& graphe = __bnet->dag();

  GUM_SCALAR eps;
  // to validate the TestSuite
  __infE::continueApproximationScheme(1.);

  do {
    for (auto node : active_nodes_set) {
      for (auto chil : graphe.children(node)) {
        if (__cn->currentNodeType(chil)
            == CredalNet< GUM_SCALAR >::NodeType::Indic) {
          continue;
        }

        _msgP(node, chil);
      }

      for (auto par : graphe.parents(node)) {
        if (__cn->currentNodeType(node)
            == CredalNet< GUM_SCALAR >::NodeType::Indic) {
          continue;
        }

        _msgL(node, par);
      }
    }

    eps = _calculateEpsilon();

    __infE::updateApproximationScheme();

    active_nodes_set.clear();
    active_nodes_set = next_active_nodes_set;
    next_active_nodes_set.clear();

  } while (__infE::continueApproximationScheme(eps)
           && active_nodes_set.size() > 0);

  __infE::stopApproximationScheme();   // make sure the approximation scheme
                                       // is notified of the end of the loop
}
template < typename GUM_SCALAR >
void CNLoopyPropagation< GUM_SCALAR >::_makeInferenceByRandomOrder() {
  Size nbrArcs = __bnet->dag().sizeArcs();

  std::vector< cArcP > seq;
  seq.reserve(nbrArcs);

  for (const auto& arc : __bnet->arcs()) {
    seq.push_back(&arc);
  }

  GUM_SCALAR eps;
  // to validate the TestSuite
  __infE::continueApproximationScheme(1.);

  do {
    // shuffle the arc sequence
    for (Size j = 0, theEnd = nbrArcs / 2; j < theEnd; j++) {
      auto w1 = rand() % nbrArcs, w2 = rand() % nbrArcs;

      if (w1 == w2) { continue; }

      std::swap(seq[w1], seq[w2]);
    }

    for (const auto it : seq) {
      if (__cn->currentNodeType(it->tail())
             == CredalNet< GUM_SCALAR >::NodeType::Indic
          || __cn->currentNodeType(it->head())
                == CredalNet< GUM_SCALAR >::NodeType::Indic) {
        continue;
      }

      _msgP(it->tail(), it->head());
      _msgL(it->head(), it->tail());
    }

    eps = _calculateEpsilon();

    __infE::updateApproximationScheme();
  } while (__infE::continueApproximationScheme(eps));
}
template < typename GUM_SCALAR >
void CNLoopyPropagation< GUM_SCALAR >::_makeInferenceByOrderedArcs() {
  Size nbrArcs = __bnet->dag().sizeArcs();

  std::vector< cArcP > seq;
  seq.reserve(nbrArcs);

  for (const auto& arc : __bnet->arcs()) {
    seq.push_back(&arc);
  }

  GUM_SCALAR eps;
  // to validate the TestSuite
  __infE::continueApproximationScheme(1.);

  do {
    for (const auto it : seq) {
      if (__cn->currentNodeType(it->tail())
             == CredalNet< GUM_SCALAR >::NodeType::Indic
          || __cn->currentNodeType(it->head())
                == CredalNet< GUM_SCALAR >::NodeType::Indic) {
        continue;
      }

      _msgP(it->tail(), it->head());
      _msgL(it->head(), it->tail());
    }

    eps = _calculateEpsilon();

    __infE::updateApproximationScheme();
  } while (__infE::continueApproximationScheme(eps));
}
template < typename GUM_SCALAR >
void CNLoopyPropagation< GUM_SCALAR >::_msgL(const NodeId Y, const NodeId X) {
  NodeSet const& children = __bnet->children(Y);
  NodeSet const& _parents = __bnet->parents(Y);

  const auto parents = &__bnet->cpt(Y).variablesSequence();

  if (((children.size() + parents->size() - 1) == 1)
      && (!__infE::_evidence.exists(Y))) {
    return;
  }

  bool update_l = _update_l[Y];
  bool update_p = _update_p[Y];

  if (!update_p && !update_l) { return; }

  _msg_l_sent[Y]->insert(X);

  // for a future refresh of LM/PI
  if (_msg_l_sent[Y]->size() == _parents.size()) {
    _msg_l_sent[Y]->clear();
    _update_l[Y] = false;
  }

  // refresh the LM part
  if (update_l) {
    if (!children.empty() && !__infE::_evidence.exists(Y)) {
      GUM_SCALAR lmin = 1.;
      GUM_SCALAR lmax = 1.;

      for (auto chil : children) {
        lmin *= _ArcsL_min[Arc(Y, chil)];

        if (_ArcsL_max.exists(Arc(Y, chil))) {
          lmax *= _ArcsL_max[Arc(Y, chil)];
        } else {
          lmax *= _ArcsL_min[Arc(Y, chil)];
        }
      }

      if (lmin != lmin && lmax == lmax) { lmin = lmax; }

      if (lmax != lmax && lmin == lmin) { lmax = lmin; }

      if (lmax != lmax && lmin != lmin) {
        std::cout << "no likelihood defined [lmin, lmax] (incompatible "
                     "evidence?)"
                  << std::endl;
      }

      if (lmin < 0.) { lmin = 0.; }

      if (lmax < 0.) { lmax = 0.; }

      _NodesL_min[Y] = lmin;

      if (lmin != lmax) {
        _NodesL_max.set(Y, lmax);
      } else if (_NodesL_max.exists(Y)) {
        _NodesL_max.erase(Y);
      }
    }   // end of : node has children and no evidence
  }   // end of : refresh LM part

  GUM_SCALAR lmin = _NodesL_min[Y];
  GUM_SCALAR lmax = lmin;

  if (_NodesL_max.exists(Y)) { lmax = _NodesL_max[Y]; }

  // lmin == lmax == 1  =>  send 1 as message to the parent
  if (lmin == lmax && lmin == 1.) {
    _ArcsL_min[Arc(X, Y)] = lmin;

    if (_ArcsL_max.exists(Arc(X, Y))) { _ArcsL_max.erase(Arc(X, Y)); }

    return;
  }

  if (update_p || update_l) {
    std::vector< std::vector< std::vector< GUM_SCALAR > > > msgs_p;
    std::vector< std::vector< GUM_SCALAR > >                msg_p;
    std::vector< GUM_SCALAR >                               distri(2);

    Idx pos = 0;

    // + 1 from start to avoid counting the node itself
    for (auto jt = ++parents->begin(), theEnd = parents->end(); jt != theEnd;
         ++jt) {
      if (__bnet->nodeId(**jt) == X) {
        // remember the position of the destination parent X and skip it
        pos = parents->pos(*jt) - 1;
        continue;
      }

      // compute the probability distribution once for each parent
      distri[1] = _ArcsP_min[Arc(__bnet->nodeId(**jt), Y)];
      distri[0] = GUM_SCALAR(1.) - distri[1];
      msg_p.push_back(distri);

      if (_ArcsP_max.exists(Arc(__bnet->nodeId(**jt), Y))) {
        distri[1] = _ArcsP_max[Arc(__bnet->nodeId(**jt), Y)];
        distri[0] = GUM_SCALAR(1.) - distri[1];
        msg_p.push_back(distri);
      }

      msgs_p.push_back(msg_p);
      msg_p.clear();
    }

    GUM_SCALAR min = -2.;
    GUM_SCALAR max = -2.;

    std::vector< GUM_SCALAR > lx;
    lx.push_back(lmin);

    if (lmin != lmax) { lx.push_back(lmax); }

    _enum_combi(msgs_p, Y, min, max, lx, pos);

    if (min == -2. || max == -2.) {
      if (min != -2.) {
        max = min;
      } else if (max != -2.) {
        min = max;
      } else {
        std::cout << std::endl;
        std::cout << "!!!! no computable L message !!!!" << std::endl;
        return;
      }
    }

    if (min < 0.) { min = 0.; }

    if (max < 0.) { max = 0.; }

    bool update = false;

    if (min != _ArcsL_min[Arc(X, Y)]) {
      _ArcsL_min[Arc(X, Y)] = min;
      update = true;
    }

    if (_ArcsL_max.exists(Arc(X, Y))) {
      if (max != _ArcsL_max[Arc(X, Y)]) {
        if (max != min) {
          _ArcsL_max[Arc(X, Y)] = max;
        } else {
          _ArcsL_max.erase(Arc(X, Y));
        }

        update = true;
      }
    } else {
      if (max != min) {
        _ArcsL_max.insert(Arc(X, Y), max);
        update = true;
      }
    }

    if (update) {
      _update_l.set(X, true);
      next_active_nodes_set.insert(X);
    }
  }   // end of : update_p || update_l
}
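/* Summary of the lambda-message above: the message sent from Y to its parent
 * X compares how well the evidence below Y is explained when X = 1 versus
 * X = 0. Its bounds are obtained by enumerating the other parents' pi-message
 * intervals (_enum_combi) together with Y's own lambda interval, and the
 * sentinel value -2 marks a bound that has not been computed yet. X is
 * re-activated (inserted into next_active_nodes_set) only when one of the two
 * bounds actually changed, which is what lets the loopy scheme settle down on
 * stable arcs.
 */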
template < typename GUM_SCALAR >
void CNLoopyPropagation< GUM_SCALAR >::_msgP(const NodeId X,
                                             const NodeId demanding_child) {
  NodeSet const& children = __bnet->children(X);

  const auto parents = &__bnet->cpt(X).variablesSequence();

  if (((children.size() + parents->size() - 1) == 1)
      && (!__infE::_evidence.exists(X))) {
    return;
  }

  // an observed node sends its evidence as the message
  if (__infE::_evidence.exists(X)) {
    _ArcsP_min[Arc(X, demanding_child)] = __infE::_evidence[X][1];

    if (_ArcsP_max.exists(Arc(X, demanding_child))) {
      _ArcsP_max.erase(Arc(X, demanding_child));
    }

    return;
  }

  bool update_l = _update_l[X];
  bool update_p = _update_p[X];

  if (!update_p && !update_l) { return; }

  // LM part : from all children but the demanding one
  GUM_SCALAR lmin = 1.;
  GUM_SCALAR lmax = 1.;

  for (auto chil : children) {
    if (chil == demanding_child) { continue; }

    lmin *= _ArcsL_min[Arc(X, chil)];

    if (_ArcsL_max.exists(Arc(X, chil))) {
      lmax *= _ArcsL_max[Arc(X, chil)];
    } else {
      lmax *= _ArcsL_min[Arc(X, chil)];
    }
  }

  if (lmin != lmin && lmax == lmax) { lmin = lmax; }

  if (lmax != lmax && lmin == lmin) { lmax = lmin; }

  if (lmax != lmax && lmin != lmin) {
    std::cout << "no likelihood defined [lmin, lmax] (incompatible "
                 "observations?)"
              << std::endl;
  }

  if (lmin < 0.) { lmin = 0.; }

  if (lmax < 0.) { lmax = 0.; }

  // refresh the PI part
  GUM_SCALAR min = _INF;
  GUM_SCALAR max = 0.;

  if (update_p) {
    std::vector< std::vector< std::vector< GUM_SCALAR > > > msgs_p;
    std::vector< std::vector< GUM_SCALAR > >                msg_p;
    std::vector< GUM_SCALAR >                               distri(2);

    // + 1 from start to avoid counting the node itself
    for (auto jt = ++parents->begin(), theEnd = parents->end(); jt != theEnd;
         ++jt) {
      // compute the probability distribution once for each parent
      distri[1] = _ArcsP_min[Arc(__bnet->nodeId(**jt), X)];
      distri[0] = GUM_SCALAR(1.) - distri[1];
      msg_p.push_back(distri);

      if (_ArcsP_max.exists(Arc(__bnet->nodeId(**jt), X))) {
        distri[1] = _ArcsP_max[Arc(__bnet->nodeId(**jt), X)];
        distri[0] = GUM_SCALAR(1.) - distri[1];
        msg_p.push_back(distri);
      }

      msgs_p.push_back(msg_p);
      msg_p.clear();
    }

    _enum_combi(msgs_p, X, min, max);

    if (min < 0.) { min = 0.; }

    if (max < 0.) { max = 0.; }

    if (min == _INF || max == _INF) {
      std::cout << " ERROR msg P min = max = INF " << std::endl;
      return;
    }

    _NodesP_min[X] = min;

    if (min != max) {
      _NodesP_max.set(X, max);
    } else if (_NodesP_max.exists(X)) {
      _NodesP_max.erase(X);
    }

    _update_p.set(X, false);
  }   // end of : refresh PI part
  else {
    min = _NodesP_min[X];

    if (_NodesP_max.exists(X)) {
      max = _NodesP_max[X];
    } else {
      max = min;
    }
  }

  if (update_p || update_l) {
    GUM_SCALAR msg_p_min;
    GUM_SCALAR msg_p_max;

    // degenerate cases for the lower bound
    if (min == _INF && lmin == 0.) {
      std::cout << "MESSAGE P ERR (negative) : pi = inf, l = 0" << std::endl;
    }

    if (lmin == _INF) {
      msg_p_min = GUM_SCALAR(1.);
    } else if (min == 0. || lmin == 0.) {
      msg_p_min = GUM_SCALAR(0.);
    } else {
      msg_p_min = GUM_SCALAR(1. / (1. + ((1. / min - 1.) * 1. / lmin)));
    }

    // degenerate cases for the upper bound
    if (max == _INF && lmax == 0.) {
      std::cout << "MESSAGE P ERR (negative) : pi = inf, l = 0" << std::endl;
    }

    if (lmax == _INF) {
      msg_p_max = GUM_SCALAR(1.);
    } else if (max == 0. || lmax == 0.) {
      msg_p_max = GUM_SCALAR(0.);
    } else {
      msg_p_max = GUM_SCALAR(1. / (1. + ((1. / max - 1.) * 1. / lmax)));
    }

    if (msg_p_min != msg_p_min && msg_p_max == msg_p_max) {
      msg_p_min = msg_p_max;
      std::cout << std::endl;
      std::cout << "msg_p_min is NaN" << std::endl;
    }

    if (msg_p_max != msg_p_max && msg_p_min == msg_p_min) {
      msg_p_max = msg_p_min;
      std::cout << std::endl;
      std::cout << "msg_p_max is NaN" << std::endl;
    }

    if (msg_p_max != msg_p_max && msg_p_min != msg_p_min) {
      std::cout << std::endl;
      std::cout << "no computable P message (check the observations)"
                << std::endl;
      return;
    }

    if (msg_p_min < 0.) { msg_p_min = 0.; }

    if (msg_p_max < 0.) { msg_p_max = 0.; }

    bool update = false;

    if (msg_p_min != _ArcsP_min[Arc(X, demanding_child)]) {
      _ArcsP_min[Arc(X, demanding_child)] = msg_p_min;
      update = true;
    }

    if (_ArcsP_max.exists(Arc(X, demanding_child))) {
      if (msg_p_max != _ArcsP_max[Arc(X, demanding_child)]) {
        if (msg_p_max != msg_p_min) {
          _ArcsP_max[Arc(X, demanding_child)] = msg_p_max;
        } else {
          _ArcsP_max.erase(Arc(X, demanding_child));
        }

        update = true;
      }
    } else {
      if (msg_p_max != msg_p_min) {
        _ArcsP_max.insert(Arc(X, demanding_child), msg_p_max);
        update = true;
      }
    }

    if (update) {
      _update_p.set(demanding_child, true);
      next_active_nodes_set.insert(demanding_child);
    }
  }   // end of : update_p || update_l
}
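/* Summary of the pi-message above: the bounds coming from X's parents
 * (min, max) are combined with the lambda-bounds of X's other children
 * (lmin, lmax) through Pearl's rule  p = 1 / (1 + (1/pi - 1) / lambda),
 * applied once per bound; an observed node simply sends its observed value.
 * A minimal sketch of the combination on plain doubles (combinePiLambda is
 * only an illustrative helper name):
 *
 *   #include <limits>
 *
 *   double combinePiLambda(double pi, double lambda) {
 *     if (lambda == std::numeric_limits< double >::infinity()) return 1.;
 *     if (pi == 0. || lambda == 0.) return 0.;   // degenerate cases
 *     return 1. / (1. + ((1. / pi - 1.) * 1. / lambda));
 *   }
 *
 * As in _msgL, demanding_child is re-activated only if one of the two bounds
 * actually changed, so the set of active nodes shrinks as arcs converge.
 */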
template < typename GUM_SCALAR >
void CNLoopyPropagation< GUM_SCALAR >::_refreshLMsPIs(bool refreshIndic) {
  for (auto node : __bnet->nodes()) {
    if ((!refreshIndic)
        && __cn->currentNodeType(node)
              == CredalNet< GUM_SCALAR >::NodeType::Indic) {
      continue;
    }

    NodeSet const& children = __bnet->children(node);

    auto parents = &__bnet->cpt(node).variablesSequence();

    if (_update_l[node]) {
      GUM_SCALAR lmin = 1.;
      GUM_SCALAR lmax = 1.;

      if (!children.empty() && !__infE::_evidence.exists(node)) {
        for (auto chil : children) {
          lmin *= _ArcsL_min[Arc(node, chil)];

          if (_ArcsL_max.exists(Arc(node, chil))) {
            lmax *= _ArcsL_max[Arc(node, chil)];
          } else {
            lmax *= _ArcsL_min[Arc(node, chil)];
          }
        }

        if (lmin != lmin && lmax == lmax) { lmin = lmax; }

        if (lmax != lmax && lmin == lmin) { lmax = lmin; }

        if (lmax != lmax && lmin != lmin) {
          std::cout
             << "no likelihood defined [lmin, lmax] (incompatible "
                "observations?)"
             << std::endl;
        }

        if (lmin < 0.) { lmin = 0.; }

        if (lmax < 0.) { lmax = 0.; }

        _NodesL_min[node] = lmin;

        if (lmin != lmax) {
          _NodesL_max.set(node, lmax);
        } else if (_NodesL_max.exists(node)) {
          _NodesL_max.erase(node);
        }
      }
    }   // end of : refresh LM part

    if (_update_p[node]) {
      if ((parents->size() - 1) > 0 && !__infE::_evidence.exists(node)) {
        std::vector< std::vector< std::vector< GUM_SCALAR > > > msgs_p;
        std::vector< std::vector< GUM_SCALAR > >                msg_p;
        std::vector< GUM_SCALAR >                               distri(2);

        // + 1 from start to avoid counting the node itself
        for (auto jt = ++parents->begin(), theEnd = parents->end();
             jt != theEnd;
             ++jt) {
          // compute the probability distribution once for each parent
          distri[1] = _ArcsP_min[Arc(__bnet->nodeId(**jt), node)];
          distri[0] = GUM_SCALAR(1.) - distri[1];
          msg_p.push_back(distri);

          if (_ArcsP_max.exists(Arc(__bnet->nodeId(**jt), node))) {
            distri[1] = _ArcsP_max[Arc(__bnet->nodeId(**jt), node)];
            distri[0] = GUM_SCALAR(1.) - distri[1];
            msg_p.push_back(distri);
          }

          msgs_p.push_back(msg_p);
          msg_p.clear();
        }

        GUM_SCALAR min = _INF;
        GUM_SCALAR max = 0.;

        _enum_combi(msgs_p, node, min, max);

        if (min < 0.) { min = 0.; }

        if (max < 0.) { max = 0.; }

        _NodesP_min[node] = min;

        if (min != max) {
          _NodesP_max.set(node, max);
        } else if (_NodesP_max.exists(node)) {
          _NodesP_max.erase(node);
        }

        _update_p[node] = false;
      }
    }   // end of : refresh PI part
  }   // end of : for each node
}
template < typename GUM_SCALAR >
void CNLoopyPropagation< GUM_SCALAR >::_updateMarginals() {
  for (auto node : __bnet->nodes()) {
    GUM_SCALAR msg_p_min = 1.;
    GUM_SCALAR msg_p_max = 0.;

    if (__infE::_evidence.exists(node)) {
      if (__infE::_evidence[node][1] == 0.) {
        msg_p_min = (GUM_SCALAR)0.;
      } else if (__infE::_evidence[node][1] == 1.) {
        msg_p_min = (GUM_SCALAR)1.;
      }

      msg_p_max = msg_p_min;
    } else {
      GUM_SCALAR min = _NodesP_min[node];
      GUM_SCALAR max = min;

      if (_NodesP_max.exists(node)) { max = _NodesP_max[node]; }

      GUM_SCALAR lmin = _NodesL_min[node];
      GUM_SCALAR lmax = lmin;

      if (_NodesL_max.exists(node)) { lmax = _NodesL_max[node]; }

      if (min == _INF || max == _INF) {
        std::cout << " min or max == _INF !!!!!!!!!!!!!!!!!!!!!!!!!! "
                  << std::endl;
        return;
      }

      if (min == _INF && lmin == 0.) {
        std::cout << "proba ERR (negative) : pi = inf, l = 0" << std::endl;
        return;
      }

      if (lmin == _INF) {
        msg_p_min = GUM_SCALAR(1.);
      } else if (min == 0. || lmin == 0.) {
        msg_p_min = GUM_SCALAR(0.);
      } else {
        msg_p_min = GUM_SCALAR(1. / (1. + ((1. / min - 1.) * 1. / lmin)));
      }

      if (max == _INF && lmax == 0.) {
        std::cout << "proba ERR (negative) : pi = inf, l = 0" << std::endl;
        return;
      }

      if (lmax == _INF) {
        msg_p_max = GUM_SCALAR(1.);
      } else if (max == 0. || lmax == 0.) {
        msg_p_max = GUM_SCALAR(0.);
      } else {
        msg_p_max = GUM_SCALAR(1. / (1. + ((1. / max - 1.) * 1. / lmax)));
      }
    }

    if (msg_p_min != msg_p_min && msg_p_max == msg_p_max) {
      msg_p_min = msg_p_max;
      std::cout << std::endl;
      std::cout << "msg_p_min is NaN" << std::endl;
    }

    if (msg_p_max != msg_p_max && msg_p_min == msg_p_min) {
      msg_p_max = msg_p_min;
      std::cout << std::endl;
      std::cout << "msg_p_max is NaN" << std::endl;
    }

    if (msg_p_max != msg_p_max && msg_p_min != msg_p_min) {
      std::cout << std::endl;
      std::cout << "Please check the observations (no proba can be computed)"
                << std::endl;
      return;
    }

    if (msg_p_min < 0.) { msg_p_min = 0.; }

    if (msg_p_max < 0.) { msg_p_max = 0.; }

    __infE::_marginalMin[node][0] = 1 - msg_p_max;
    __infE::_marginalMax[node][0] = 1 - msg_p_min;
    __infE::_marginalMin[node][1] = msg_p_min;
    __infE::_marginalMax[node][1] = msg_p_max;
  }   // end of : for each node
}
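/* Since every variable is binary, the bounds of state 0 are the complements of
 * the bounds of state 1: P_min(X = 0) = 1 - P_max(X = 1) and
 * P_max(X = 0) = 1 - P_min(X = 1), which is exactly what the four assignments
 * above encode. Sketch on plain doubles (illustrative names only):
 *
 *   // given bounds p1min <= P(X = 1) <= p1max
 *   double p0min = 1. - p1max;   // lower bound of P(X = 0)
 *   double p0max = 1. - p1min;   // upper bound of P(X = 0)
 */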
template < typename GUM_SCALAR >
GUM_SCALAR CNLoopyPropagation< GUM_SCALAR >::_calculateEpsilon() {
  _refreshLMsPIs();
  _updateMarginals();

  return __infE::_computeEpsilon();
}
template < typename GUM_SCALAR >
void CNLoopyPropagation< GUM_SCALAR >::_updateIndicatrices() {
  for (auto node : __bnet->nodes()) {
    if (__cn->currentNodeType(node)
        != CredalNet< GUM_SCALAR >::NodeType::Indic) {
      continue;
    }

    for (auto pare : __bnet->parents(node)) {
      _msgP(pare, node);
    }
  }

  _refreshLMsPIs(true);

  _updateMarginals();
}
template < typename GUM_SCALAR >
void CNLoopyPropagation< GUM_SCALAR >::_computeExpectations() {
  if (__infE::_modal.empty()) { return; }

  std::vector< std::vector< GUM_SCALAR > > vertices(
     2, std::vector< GUM_SCALAR >(2));

  for (auto node : __bnet->nodes()) {
    vertices[0][0] = __infE::_marginalMin[node][0];
    vertices[0][1] = __infE::_marginalMax[node][1];

    vertices[1][0] = __infE::_marginalMax[node][0];
    vertices[1][1] = __infE::_marginalMin[node][1];

    for (auto vertex = 0, vend = 2; vertex != vend; vertex++) {
      __infE::_updateExpectations(node, vertices[vertex]);
      __infE::_updateCredalSets(node, vertices[vertex]);
    }
  }
}
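/* For a binary variable the marginal credal set is an interval, so it has at
 * most two vertices: (marginalMin[0], marginalMax[1]) and
 * (marginalMax[0], marginalMin[1]). Expectation bounds over any modality can
 * therefore be obtained by evaluating these two vertices only, which is what
 * the loop above does through __infE::_updateExpectations and
 * __infE::_updateCredalSets.
 */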
template < typename GUM_SCALAR >
CNLoopyPropagation< GUM_SCALAR >::CNLoopyPropagation(
   const CredalNet< GUM_SCALAR >& cnet) :
    InferenceEngine< GUM_SCALAR >::InferenceEngine(cnet) {
  if (!cnet.isSeparatelySpecified()) {
    GUM_ERROR(OperationNotAllowed,
              "CNLoopyPropagation is only available "
              "with separately specified nets");
  }

  // test for a binary credal network
  for (auto node : cnet.current_bn().nodes()) {
    if (cnet.current_bn().variable(node).domainSize() != 2) {
      GUM_ERROR(OperationNotAllowed,
                "CNLoopyPropagation is only available "
                "with binary credal networks");
    }
  }

  // test whether computeCPTMinMax() has already been called
  if (!cnet.hasComputedCPTMinMax()) {
    GUM_ERROR(OperationNotAllowed,
              "CNLoopyPropagation only works when "
              "\"computeCPTMinMax()\" has been called for "
              "this credal net");
  }

  __cn = &cnet;
  __bnet = &cnet.current_bn();

  __inferenceType = InferenceType::nodeToNeighbours;
  _InferenceUpToDate = false;

  GUM_CONSTRUCTOR(CNLoopyPropagation);
}

template < typename GUM_SCALAR >
CNLoopyPropagation< GUM_SCALAR >::~CNLoopyPropagation() {
  _InferenceUpToDate = false;

  if (_msg_l_sent.size() > 0) {
    for (auto node : __bnet->nodes()) {
      delete _msg_l_sent[node];
    }
  }

  GUM_DESTRUCTOR(CNLoopyPropagation);
}

template < typename GUM_SCALAR >
void CNLoopyPropagation< GUM_SCALAR >::inferenceType(InferenceType inft) {
  __inferenceType = inft;
}

template < typename GUM_SCALAR >
typename CNLoopyPropagation< GUM_SCALAR >::InferenceType
   CNLoopyPropagation< GUM_SCALAR >::inferenceType() {
  return __inferenceType;
}
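/* Typical use of this engine, as a sketch only: the file names are
 * placeholders, and intervalToCredal(), computeCPTMinMax(), marginalMin() and
 * marginalMax() are the CredalNet / InferenceEngine calls this file relies on.
 *
 *   gum::credal::CredalNet< double > cn("bn_min.bif", "bn_max.bif");
 *   cn.intervalToCredal();      // build the credal sets from the two BNs
 *   cn.computeCPTMinMax();      // mandatory before using CNLoopyPropagation
 *
 *   gum::credal::CNLoopyPropagation< double > lp(cn);
 *   lp.makeInference();
 *   const auto& low  = lp.marginalMin(gum::NodeId(0));   // lower marginal of node 0
 *   const auto& high = lp.marginalMax(gum::NodeId(0));   // upper marginal of node 0
 */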