28 #ifndef DOXYGEN_SHOULD_SKIP_THIS 42 template <
typename GUM_SCALAR >
44 const IBayesNet< GUM_SCALAR >* BN,
47 bool use_binary_join_tree) :
48 JointTargetedInference< GUM_SCALAR >(BN),
49 EvidenceInference< GUM_SCALAR >(BN),
50 __use_binary_join_tree(use_binary_join_tree) {
64 template <
typename GUM_SCALAR >
68 for (
const auto pot : pots.second)
93 template <
typename GUM_SCALAR >
95 const Triangulation& new_triangulation) {
104 template <
typename GUM_SCALAR >
112 template <
typename GUM_SCALAR >
121 template <
typename GUM_SCALAR >
148 "setRelevantPotentialsFinderType for type " 149 << (
unsigned int)type <<
" is not implemented yet");
162 template <
typename GUM_SCALAR >
164 Potential< GUM_SCALAR >* (*proj)(
const Potential< GUM_SCALAR >&,
165 const Set< const DiscreteVariable* >&)) {
171 template <
typename GUM_SCALAR >
173 Potential< GUM_SCALAR >* (*comb)(
const Potential< GUM_SCALAR >&,
174 const Potential< GUM_SCALAR >&)) {
180 template <
typename GUM_SCALAR >
184 potset.second.clear();
186 mess_computed.second =
false;
189 for (
const auto& potset : __created_potentials)
190 for (
const auto pot : potset.second)
194 for (
const auto& pot : __target_posteriors)
196 for (
const auto& pot : __joint_target_posteriors)
206 template <
typename GUM_SCALAR >
219 "setFindBarrenNodesType for type " 220 << (
unsigned int)type <<
" is not implemented yet");
232 template <
typename GUM_SCALAR >
235 bool isHardEvidence) {
244 }
catch (DuplicateElement&) {
256 template <
typename GUM_SCALAR >
259 bool isHardEvidence) {
267 }
catch (DuplicateElement&) {
283 template <
typename GUM_SCALAR >
292 }
catch (DuplicateElement&) {
309 template <
typename GUM_SCALAR >
312 bool hasChangedSoftHard) {
313 if (hasChangedSoftHard)
318 }
catch (DuplicateElement&) {
328 template <
typename GUM_SCALAR >
330 const IBayesNet< GUM_SCALAR >* bn) {}
334 template <
typename GUM_SCALAR >
340 template <
typename GUM_SCALAR >
346 template <
typename GUM_SCALAR >
352 template <
typename GUM_SCALAR >
358 template <
typename GUM_SCALAR >
363 template <
typename GUM_SCALAR >
368 template <
typename GUM_SCALAR >
373 template <
typename GUM_SCALAR >
378 template <
typename GUM_SCALAR >
391 for (
const auto node : this->
targets()) {
392 if (!
__graph.
exists(node) && !hard_ev_nodes.exists(node))
return true;
396 bool containing_clique_found =
false;
397 for (
const auto node : nodes) {
400 for (
const auto xnode : nodes) {
401 if (!clique.contains(xnode) && !hard_ev_nodes.exists(xnode)) {
407 containing_clique_found =
true;
412 if (!containing_clique_found)
return true;
418 if ((change.second == EvidenceChangeType::EVIDENCE_ADDED)
429 template <
typename GUM_SCALAR >
445 const auto& bn = this->
BN();
447 for (
const auto node : bn.dag())
460 target_nodes += nodeset;
465 if (target_nodes.size() != bn.size()) {
466 BarrenNodesFinder finder(&(bn.dag()));
467 finder.setTargets(&target_nodes);
470 for (
const auto& pair : this->
evidence()) {
471 evidence_nodes.
insert(pair.first);
473 finder.setEvidence(&evidence_nodes);
475 NodeSet barren_nodes = finder.barrenNodes();
478 for (
const auto node : barren_nodes) {
485 for (
const auto node :
__graph) {
486 const NodeSet& parents = bn.parents(node);
487 for (
auto iter1 = parents.cbegin(); iter1 != parents.cend(); ++iter1) {
488 __graph.addEdge(*iter1, node);
490 for (++iter2; iter2 != parents.cend(); ++iter2) {
491 __graph.addEdge(*iter1, *iter2);
500 for (
auto iter1 = nodeset.cbegin(); iter1 != nodeset.cend(); ++iter1) {
502 for (++iter2; iter2 != nodeset.cend(); ++iter2) {
503 __graph.addEdge(*iter1, *iter2);
511 __graph.eraseNode(node);
524 BinaryJoinTreeConverterDefault bjt_converter;
526 __JT =
new CliqueGraph(
527 bjt_converter.convert(triang_jt, this->domainSizes(), emptyset));
529 __JT =
new CliqueGraph(triang_jt);
537 const std::vector< NodeId >& JT_elim_order =
539 NodeProperty< int > elim_order(
Size(JT_elim_order.size()));
540 for (std::size_t i = std::size_t(0), size = JT_elim_order.size(); i < size;
542 elim_order.insert(JT_elim_order[i], (
int)i);
543 const DAG& dag = bn.dag();
544 for (
const auto node : __graph) {
546 NodeId first_eliminated_node = node;
547 int elim_number = elim_order[first_eliminated_node];
549 for (
const auto parent : dag.parents(node)) {
550 if (__graph.existsNode(parent) && (elim_order[parent] < elim_number)) {
551 elim_number = elim_order[parent];
552 first_eliminated_node = parent;
567 for (
const auto node : __hard_ev_nodes) {
569 NodeSet pars(dag.parents(node).size());
570 for (
const auto par : dag.parents(node))
571 if (__graph.exists(par)) pars.
insert(par);
574 NodeId first_eliminated_node = *(pars.begin());
575 int elim_number = elim_order[first_eliminated_node];
577 for (
const auto parent : pars) {
578 if (elim_order[parent] < elim_number) {
579 elim_number = elim_order[parent];
580 first_eliminated_node = parent;
600 for (
const auto node : __hard_ev_nodes)
601 if (nodeset.contains(node)) nodeset.
erase(node);
603 if (!nodeset.empty()) {
606 NodeId first_eliminated_node = *(nodeset.begin());
607 int elim_number = elim_order[first_eliminated_node];
608 for (
const auto node : nodeset) {
609 if (elim_order[node] < elim_number) {
610 elim_number = elim_order[node];
611 first_eliminated_node = node;
629 for (
const auto node : *
__JT) {
634 for (
const auto& potlist : __created_potentials)
635 for (
const auto pot : potlist.second)
637 __created_potentials.clear();
641 for (
const auto pot_pair : __hard_ev_projected_CPTs)
642 delete pot_pair.second;
643 __hard_ev_projected_CPTs.clear();
651 __separator_potentials.clear();
652 __messages_computed.clear();
653 for (
const auto& edge : __JT->edges()) {
654 const Arc arc1(edge.first(), edge.second());
655 __separator_potentials.insert(arc1, empty_set);
656 __messages_computed.insert(arc1,
false);
657 const Arc arc2(Arc(edge.second(), edge.first()));
658 __separator_potentials.insert(arc2, empty_set);
659 __messages_computed.insert(arc2,
false);
663 for (
const auto& pot : __target_posteriors)
665 __target_posteriors.clear();
666 for (
const auto& pot : __joint_target_posteriors)
668 __joint_target_posteriors.clear();
677 for (
const auto node : dag) {
678 if (__graph.exists(node) || __hard_ev_nodes.contains(node)) {
679 const Potential< GUM_SCALAR >& cpt = bn.cpt(node);
683 const auto& variables = cpt.variablesSequence();
684 for (
const auto var : variables) {
685 NodeId xnode = bn.nodeId(*var);
686 if (__hard_ev_nodes.contains(xnode)) hard_nodes.insert(xnode);
692 if (hard_nodes.empty()) {
698 if (hard_nodes.size() == variables.size()) {
700 const auto& vars = cpt.variablesSequence();
701 for (
const auto var : vars)
703 for (
Size i = 0; i < hard_nodes.size(); ++i) {
704 inst.chgVal(variables[i], hard_evidence[bn.nodeId(*(variables[i]))]);
709 Set< const DiscreteVariable* > hard_variables;
711 for (
const auto xnode : hard_nodes) {
712 marg_cpt_set.insert(
evidence[xnode]);
713 hard_variables.insert(&(bn.variable(xnode)));
717 MultiDimCombineAndProjectDefault< GUM_SCALAR, Potential >
720 combine_and_project.combineAndProject(marg_cpt_set, hard_variables);
723 if (new_cpt_list.size() != 1) {
725 for (
const auto pot : new_cpt_list) {
726 if (!marg_cpt_set.contains(pot))
delete pot;
729 "the projection of a potential containing " 730 <<
"hard evidence is empty!");
732 const Potential< GUM_SCALAR >* projected_cpt = *(new_cpt_list.begin());
734 __hard_ev_projected_CPTs.insert(node, projected_cpt);
747 __evidence_changes.clear();
753 template <
typename GUM_SCALAR >
768 template <
typename GUM_SCALAR >
772 invalidated_cliques.insert(to_id);
775 const Arc arc(from_id, to_id);
776 bool& message_computed = __messages_computed[arc];
777 if (message_computed) {
778 message_computed =
false;
779 __separator_potentials[arc].clear();
780 if (__created_potentials.exists(arc)) {
781 auto& arc_created_potentials = __created_potentials[arc];
782 for (
const auto pot : arc_created_potentials)
784 arc_created_potentials.clear();
788 for (
const auto node_id : __JT->neighbours(to_id)) {
789 if (node_id != from_id)
798 template <
typename GUM_SCALAR >
807 NodeSet hard_nodes_changed(__hard_ev_nodes.size());
808 for (
const auto node : __hard_ev_nodes)
809 if (__evidence_changes.exists(node)) hard_nodes_changed.
insert(node);
811 NodeSet nodes_with_projected_CPTs_changed;
812 const auto& bn = this->
BN();
813 for (
auto pot_iter = __hard_ev_projected_CPTs.beginSafe();
814 pot_iter != __hard_ev_projected_CPTs.endSafe();
816 for (
const auto var : bn.cpt(pot_iter.key()).variablesSequence()) {
817 if (hard_nodes_changed.contains(bn.nodeId(*var))) {
818 nodes_with_projected_CPTs_changed.insert(pot_iter.key());
819 delete pot_iter.val();
822 __hard_ev_projected_CPTs.erase(pot_iter);
836 NodeSet invalidated_cliques(__JT->size());
837 for (
const auto& pair : __evidence_changes) {
840 invalidated_cliques.
insert(clique);
841 for (
const auto neighbor : __JT->neighbours(clique)) {
849 for (
const auto node : nodes_with_projected_CPTs_changed) {
851 invalidated_cliques.
insert(clique);
852 for (
const auto neighbor : __JT->neighbours(clique)) {
862 for (
auto iter = __target_posteriors.beginSafe();
863 iter != __target_posteriors.endSafe();
865 if (__graph.exists(iter.key())
868 __target_posteriors.erase(iter);
873 for (
auto iter = __target_posteriors.beginSafe();
874 iter != __target_posteriors.endSafe();
876 if (hard_nodes_changed.contains(iter.key())) {
878 __target_posteriors.erase(iter);
883 for (
auto iter = __joint_target_posteriors.beginSafe();
884 iter != __joint_target_posteriors.endSafe();
888 __joint_target_posteriors.erase(iter);
898 __node_to_soft_evidence.clear();
902 __node_to_soft_evidence.insert(node,
evidence[node]);
913 for (
const auto node : nodes_with_projected_CPTs_changed) {
915 const Potential< GUM_SCALAR >& cpt = bn.cpt(node);
916 const auto& variables = cpt.variablesSequence();
917 Set< const DiscreteVariable* > hard_variables;
919 for (
const auto var : variables) {
920 NodeId xnode = bn.nodeId(*var);
921 if (__hard_ev_nodes.exists(xnode)) {
922 marg_cpt_set.insert(
evidence[xnode]);
923 hard_variables.insert(var);
928 MultiDimCombineAndProjectDefault< GUM_SCALAR, Potential >
931 combine_and_project.combineAndProject(marg_cpt_set, hard_variables);
934 if (new_cpt_list.size() != 1) {
936 for (
const auto pot : new_cpt_list) {
937 if (!marg_cpt_set.contains(pot))
delete pot;
940 "the projection of a potential containing " 941 <<
"hard evidence is empty!");
943 const Potential< GUM_SCALAR >* projected_cpt = *(new_cpt_list.begin());
945 __hard_ev_projected_CPTs.insert(node, projected_cpt);
951 const Potential< GUM_SCALAR >& cpt = bn.cpt(node_cst.first);
952 const auto& variables = cpt.variablesSequence();
954 for (
const auto var : variables)
956 for (
const auto var : variables) {
957 inst.chgVal(var, hard_evidence[bn.nodeId(*var)]);
959 node_cst.second = cpt.get(inst);
963 __evidence_changes.clear();
968 template <
typename GUM_SCALAR >
972 for (
const auto node : this->
targets()) {
975 }
catch (Exception&) {}
980 }
catch (Exception&) {}
984 std::vector< std::pair< NodeId, Size > > possible_roots(clique_targets.size());
985 const auto& bn = this->
BN();
987 for (
const auto clique_id : clique_targets) {
988 const auto& clique = __JT->clique(clique_id);
990 for (
const auto node : clique) {
991 dom_size *= bn.variable(node).domainSize();
993 possible_roots[i] = std::pair< NodeId, Size >(clique_id, dom_size);
998 std::sort(possible_roots.begin(),
999 possible_roots.end(),
1000 [](
const std::pair< NodeId, Size >& a,
1001 const std::pair< NodeId, Size >& b) ->
bool {
1002 return a.second < b.second;
1006 NodeProperty< bool > marked = __JT->nodesProperty(
false);
1007 std::function< void(NodeId, NodeId) > diffuse_marks =
1008 [&marked, &diffuse_marks,
this](
NodeId node,
NodeId from) {
1009 if (!marked[node]) {
1010 marked[node] =
true;
1011 for (
const auto neigh : __JT->neighbours(node))
1012 if ((neigh != from) && !marked[neigh]) diffuse_marks(neigh, node);
1016 for (
const auto xclique : possible_roots) {
1017 NodeId clique = xclique.first;
1018 if (!marked[clique]) {
1020 diffuse_marks(clique, clique);
1027 template <
typename GUM_SCALAR >
1030 for (
const auto other : __JT->neighbours(
id)) {
1031 if ((other != from) && !__messages_computed[Arc(other,
id)])
1035 if ((
id != from) && !__messages_computed[Arc(
id, from)]) {
1042 template <
typename GUM_SCALAR >
1044 Set<
const Potential< GUM_SCALAR >* >& pot_list,
1045 Set< const DiscreteVariable* >& kept_vars) {}
1049 template <
typename GUM_SCALAR >
1051 Set<
const Potential< GUM_SCALAR >* >& pot_list,
1052 Set< const DiscreteVariable* >& kept_vars) {
1055 const auto& bn = this->
BN();
1056 for (
const auto var : kept_vars) {
1057 kept_ids.insert(bn.nodeId(*var));
1067 for (
auto iter = pot_list.beginSafe(); iter != pot_list.endSafe(); ++iter) {
1068 const Sequence< const DiscreteVariable* >& vars =
1069 (**iter).variablesSequence();
1071 for (
const auto var : vars) {
1072 if (requisite_nodes.exists(bn.nodeId(*var))) {
1078 if (!found) { pot_list.erase(iter); }
1084 template <
typename GUM_SCALAR >
1086 Set<
const Potential< GUM_SCALAR >* >& pot_list,
1087 Set< const DiscreteVariable* >& kept_vars) {
1090 const auto& bn = this->
BN();
1091 for (
const auto var : kept_vars) {
1092 kept_ids.insert(bn.nodeId(*var));
1105 template <
typename GUM_SCALAR >
1107 Set<
const Potential< GUM_SCALAR >* >& pot_list,
1108 Set< const DiscreteVariable* >& kept_vars) {
1111 const auto& bn = this->
BN();
1112 for (
const auto var : kept_vars) {
1113 kept_ids.insert(bn.nodeId(*var));
1118 dsep.relevantPotentials(bn,
1127 template <
typename GUM_SCALAR >
1129 Set<
const Potential< GUM_SCALAR >* >& pot_list,
1130 Set< const DiscreteVariable* >& kept_vars) {
1148 default:
GUM_ERROR(FatalError,
"not implemented yet");
1154 template <
typename GUM_SCALAR >
1155 Set< const Potential< GUM_SCALAR >* >
1157 __PotentialSet& pot_list, Set< const DiscreteVariable* >& del_vars) {
1160 Set< const DiscreteVariable* > the_del_vars = del_vars;
1161 for (
auto iter = the_del_vars.beginSafe(); iter != the_del_vars.endSafe();
1163 NodeId id = this->
BN().nodeId(**iter);
1166 the_del_vars.erase(iter);
1171 HashTable< const DiscreteVariable*, __PotentialSet > var2pots;
1173 for (
const auto pot : pot_list) {
1174 const Sequence< const DiscreteVariable* >& vars = pot->variablesSequence();
1175 for (
const auto var : vars) {
1176 if (the_del_vars.exists(var)) {
1177 if (!var2pots.exists(var)) { var2pots.insert(var, empty_pot_set); }
1178 var2pots[var].insert(pot);
1185 HashTable< const Potential< GUM_SCALAR >*, Set< const DiscreteVariable* > >
1187 Set< const DiscreteVariable* > empty_var_set;
1188 for (
const auto elt : var2pots) {
1189 if (elt.second.size() == 1) {
1190 const Potential< GUM_SCALAR >* pot = *(elt.second.begin());
1191 if (!pot2barren_var.exists(pot)) {
1192 pot2barren_var.insert(pot, empty_var_set);
1194 pot2barren_var[pot].insert(elt.first);
1203 for (
const auto elt : pot2barren_var) {
1206 const Potential< GUM_SCALAR >* pot = elt.first;
1207 pot_list.erase(pot);
1211 if (pot->variablesSequence().size() != elt.second.size()) {
1212 auto new_pot = projector.project(*pot, elt.second);
1213 pot_list.insert(new_pot);
1214 projected_pots.insert(new_pot);
1218 return projected_pots;
1223 template <
typename GUM_SCALAR >
1224 Set< const Potential< GUM_SCALAR >* >
1226 Set<
const Potential< GUM_SCALAR >* > pot_list,
1227 Set< const DiscreteVariable* >& del_vars,
1228 Set< const DiscreteVariable* >& kept_vars) {
1241 MultiDimCombineAndProjectDefault< GUM_SCALAR, Potential > combine_and_project(
1244 combine_and_project.combineAndProject(pot_list, del_vars);
1249 for (
auto iter = barren_projected_potentials.beginSafe();
1250 iter != barren_projected_potentials.endSafe();
1252 if (!new_pot_list.exists(*iter))
delete *iter;
1256 for (
auto iter_pot = new_pot_list.beginSafe();
1257 iter_pot != new_pot_list.endSafe();
1259 if ((*iter_pot)->variablesSequence().size() == 0) {
1266 new_pot_list.erase(iter_pot);
1270 return new_pot_list;
1275 template <
typename GUM_SCALAR >
1282 for (
const auto other_id : __JT->neighbours(from_id))
1283 if (other_id != to_id)
1284 pot_list += __separator_potentials[Arc(other_id, from_id)];
1287 const NodeSet& from_clique = __JT->clique(from_id);
1288 const NodeSet& separator = __JT->separator(from_id, to_id);
1289 Set< const DiscreteVariable* > del_vars(from_clique.size());
1290 Set< const DiscreteVariable* > kept_vars(separator.size());
1291 const auto& bn = this->
BN();
1293 for (
const auto node : from_clique) {
1294 if (!separator.contains(node)) {
1295 del_vars.insert(&(bn.variable(node)));
1297 kept_vars.insert(&(bn.variable(node)));
1308 const Arc arc(from_id, to_id);
1309 for (
auto iter = new_pot_list.beginSafe(); iter != new_pot_list.endSafe();
1311 const auto pot = *iter;
1312 if (pot->variablesSequence().size() == 1) {
1313 bool is_all_ones =
true;
1314 for (Instantiation inst(*pot); !inst.end(); ++inst) {
1316 is_all_ones =
false;
1321 if (!pot_list.exists(pot))
delete pot;
1322 new_pot_list.erase(iter);
1327 if (!pot_list.exists(pot)) {
1328 if (!__created_potentials.exists(arc))
1330 __created_potentials[arc].insert(pot);
1334 __separator_potentials[arc] = std::move(new_pot_list);
1335 __messages_computed[arc] =
true;
1340 template <
typename GUM_SCALAR >
1343 for (
const auto node : this->
targets()) {
1347 if (__graph.exists(node)) {
1362 template <
typename GUM_SCALAR >
1363 Potential< GUM_SCALAR >*
1365 const auto& bn = this->
BN();
1370 return new Potential< GUM_SCALAR >(*(this->
evidence()[id]));
1384 for (
const auto other : __JT->neighbours(clique_of_id))
1385 pot_list += __separator_potentials[Arc(other, clique_of_id)];
1388 const NodeSet& nodes = __JT->clique(clique_of_id);
1389 Set< const DiscreteVariable* > kept_vars{&(bn.variable(
id))};
1390 Set< const DiscreteVariable* > del_vars(nodes.size());
1391 for (
const auto node : nodes) {
1392 if (node !=
id) del_vars.
insert(&(bn.variable(node)));
1398 Potential< GUM_SCALAR >* joint =
nullptr;
1400 if (new_pot_list.size() == 1) {
1401 joint =
const_cast< Potential< GUM_SCALAR >*
>(*(new_pot_list.begin()));
1404 if (pot_list.exists(joint)) {
1405 joint =
new Potential< GUM_SCALAR >(*joint);
1409 new_pot_list.clear();
1412 MultiDimCombinationDefault< GUM_SCALAR, Potential > fast_combination(
1414 joint = fast_combination.combine(new_pot_list);
1418 for (
const auto pot : new_pot_list)
1419 if (!pot_list.exists(pot))
delete pot;
1424 bool nonzero_found =
false;
1425 for (Instantiation inst(*joint); !inst.end(); ++inst) {
1426 if (joint->get(inst)) {
1427 nonzero_found =
true;
1431 if (!nonzero_found) {
1435 "some evidence entered into the Bayes " 1436 "net are incompatible (their joint proba = 0)");
1443 template <
typename GUM_SCALAR >
1444 const Potential< GUM_SCALAR >&
1447 if (__target_posteriors.exists(
id)) {
return *(__target_posteriors[id]); }
1452 __target_posteriors.insert(
id, joint);
1459 template <
typename GUM_SCALAR >
1460 Potential< GUM_SCALAR >*
1467 if (targets.contains(node)) {
1468 targets.
erase(node);
1469 hard_ev_nodes.insert(node);
1476 if (targets.empty()) {
1478 for (
const auto node :
set) {
1481 if (pot_list.size() == 1) {
1482 auto pot =
new Potential< GUM_SCALAR >(**(pot_list.begin()));
1485 MultiDimCombinationDefault< GUM_SCALAR, Potential > fast_combination(
1487 return fast_combination.combine(pot_list);
1498 }
catch (NotFound&) {
1504 for (
const auto node : targets) {
1505 if (!__graph.exists(node)) {
1506 GUM_ERROR(UndefinedElement, node <<
" is not a target node");
1512 const std::vector< NodeId >& JT_elim_order =
1515 NodeProperty< int > elim_order(
Size(JT_elim_order.size()));
1516 for (std::size_t i = std::size_t(0), size = JT_elim_order.size(); i < size;
1518 elim_order.insert(JT_elim_order[i], (
int)i);
1519 NodeId first_eliminated_node = *(targets.begin());
1520 int elim_number = elim_order[first_eliminated_node];
1521 for (
const auto node : targets) {
1522 if (elim_order[node] < elim_number) {
1523 elim_number = elim_order[node];
1524 first_eliminated_node = node;
1533 const NodeSet& clique_nodes = __JT->clique(clique_of_set);
1534 for (
const auto node : targets) {
1535 if (!clique_nodes.contains(node)) {
1536 GUM_ERROR(UndefinedElement,
set <<
" is not a joint target");
1553 for (
const auto other : __JT->neighbours(clique_of_set))
1554 pot_list += __separator_potentials[Arc(other, clique_of_set)];
1557 const NodeSet& nodes = __JT->clique(clique_of_set);
1558 Set< const DiscreteVariable* > del_vars(nodes.size());
1559 Set< const DiscreteVariable* > kept_vars(targets.size());
1560 const auto& bn = this->
BN();
1561 for (
const auto node : nodes) {
1562 if (!targets.contains(node)) {
1563 del_vars.insert(&(bn.variable(node)));
1565 kept_vars.insert(&(bn.variable(node)));
1572 Potential< GUM_SCALAR >* joint =
nullptr;
1574 if ((new_pot_list.size() == 1) && hard_ev_nodes.empty()) {
1575 joint =
const_cast< Potential< GUM_SCALAR >*
>(*(new_pot_list.begin()));
1579 if (pot_list.exists(joint)) {
1580 joint =
new Potential< GUM_SCALAR >(*joint);
1584 new_pot_list.clear();
1590 for (
const auto node : hard_ev_nodes) {
1591 new_new_pot_list.insert(
evidence[node]);
1593 MultiDimCombinationDefault< GUM_SCALAR, Potential > fast_combination(
1595 joint = fast_combination.combine(new_new_pot_list);
1599 for (
const auto pot : new_pot_list)
1600 if (!pot_list.exists(pot))
delete pot;
1604 bool nonzero_found =
false;
1605 for (Instantiation inst(*joint); !inst.end(); ++inst) {
1606 if ((*joint)[inst]) {
1607 nonzero_found =
true;
1611 if (!nonzero_found) {
1615 "some evidence entered into the Bayes " 1616 "net are incompatible (their joint proba = 0)");
1624 template <
typename GUM_SCALAR >
1625 const Potential< GUM_SCALAR >&
1628 if (__joint_target_posteriors.exists(
set)) {
1629 return *(__joint_target_posteriors[
set]);
1635 __joint_target_posteriors.insert(
set, joint);
1642 template <
typename GUM_SCALAR >
1646 if (__joint_target_posteriors.exists(wanted_target))
1647 return *(__joint_target_posteriors[wanted_target]);
1653 if (!__joint_target_posteriors.exists(declared_target)) {
1658 const auto& bn = this->
BN();
1659 Set< const DiscreteVariable* > del_vars;
1660 for (
const auto node : declared_target)
1661 if (!wanted_target.contains(node)) del_vars.insert(&(bn.variable(node)));
1662 Potential< GUM_SCALAR >* pot =
new Potential< GUM_SCALAR >(
1663 __joint_target_posteriors[declared_target]->margSumOut(del_vars));
1666 __joint_target_posteriors.insert(wanted_target, pot);
1672 template <
typename GUM_SCALAR >
1697 GUM_SCALAR prob_ev = 1;
1698 for (
const auto root :
__roots) {
1700 NodeId node = *(__JT->clique(root).begin());
1703 for (Instantiation iter(*tmp); !iter.end(); ++iter)
1704 sum += tmp->get(iter);
1709 for (
const auto& projected_cpt : __constants)
1710 prob_ev *= projected_cpt.second;
1720 #endif // DOXYGEN_SHOULD_SKIP_THIS ~LazyPropagation() final
destructor
NodeProperty< const Potential< GUM_SCALAR > *> __node_to_soft_evidence
the soft evidence stored in the cliques per their assigned node in the BN
HashTable< NodeSet, const Potential< GUM_SCALAR > *> __joint_target_posteriors
the set of set target posteriors computed during the last inference
ArcProperty< bool > __messages_computed
indicates whether a message (from one clique to another) has been computed
void setFindBarrenNodesType(FindBarrenNodesType type)
sets how we determine barren nodes
NodeProperty< const Potential< GUM_SCALAR > *> __hard_ev_projected_CPTs
the CPTs that were projected due to hard evidence nodes
Set< const Potential< GUM_SCALAR > *> __PotentialSet
void _updateOutdatedBNStructure() final
prepares inference when the latter is in OutdatedBNStructure state
virtual void clear()
removes all the nodes and edges from the graph
virtual void addNodeWithId(const NodeId id)
try to insert a node with the given id
const NodeProperty< const Potential< GUM_SCALAR > *> & evidence() const
returns the set of evidence
bool __isNewJTNeeded() const
check whether a new join tree is really needed for the next inference
void setRelevantPotentialsFinderType(RelevantPotentialsFinderType type)
sets how we determine the relevant potentials to combine
JunctionTree * __junctionTree
the junction tree to answer the last inference query
Triangulation * __triangulation
the triangulation class creating the junction tree used for inference
void _onAllEvidenceErased(bool has_hard_evidence) final
fired before all the evidence are erased
void __findRelevantPotentialsWithdSeparation3(__PotentialSet &pot_list, Set< const DiscreteVariable * > &kept_vars)
update a set of potentials: the remaining are those to be combined to produce a message on a separato...
bool __is_new_jt_needed
indicates whether a new join tree is needed for the next inference
Potential< GUM_SCALAR > * _unnormalizedJointPosterior(NodeId id) final
returns a fresh potential equal to P(argument,evidence)
ArcProperty< __PotentialSet > __created_potentials
the set of potentials created for the last inference messages
Set< NodeId > NodeSet
Some typedefs and defines used as shortcuts ...
void setTriangulation(const Triangulation &new_triangulation)
use a new triangulation algorithm
void __findRelevantPotentialsXX(__PotentialSet &pot_list, Set< const DiscreteVariable * > &kept_vars)
update a set of potentials: the remaining are those to be combined to produce a message on a separato...
d-separation analysis (as described in Koller & Friedman 2009)
static void relevantPotentials(const IBayesNet< GUM_SCALAR > &bn, const NodeSet &query, const NodeSet &hardEvidence, const NodeSet &softEvidence, Set< const TABLE< GUM_SCALAR > * > &potentials)
update a set of potentials, keeping only those d-connected with query variables given evidence ...
void __findRelevantPotentialsWithdSeparation2(__PotentialSet &pot_list, Set< const DiscreteVariable * > &kept_vars)
update a set of potentials: the remaining are those to be combined to produce a message on a separato...
The BayesBall algorithm (as described by Schachter).
bool exists(const Key &key) const
Checks whether there exists an element with a given key in the hashtable.
NodeSet __hard_ev_nodes
the hard evidence nodes which were projected in CPTs
void erase(const Key &k)
Erases an element from the set.
bool exists(const NodeId id) const
alias for existsNode
gum is the global namespace for all aGrUM entities
GUM_SCALAR evidenceProbability() final
returns the probability of evidence
void _onJointTargetAdded(const NodeSet &set) final
fired after a new joint target is inserted
void _onAllJointTargetsErased() final
fired before a all the joint targets are removed
const Potential< GUM_SCALAR > & _jointPosterior(const NodeSet &set) final
returns the posterior of a declared target set
RelevantPotentialsFinderType
type of algorithm for determining the relevant potentials for combinations using some d-separation an...
virtual void eraseNode(const NodeId id)
remove a node and its adjacent edges from the graph
FindBarrenNodesType __barren_nodes_type
the type of barren nodes computation we wish
NodeProperty< const Potential< GUM_SCALAR > *> __target_posteriors
the set of single posteriors computed during the last inference
virtual void makeInference() final
perform the heavy computations needed to compute the targets' posteriors
const GUM_SCALAR __1_minus_epsilon
for comparisons with 1 - epsilon
void _onAllTargetsErased() final
fired before a all single and joint_targets are removed
NodeSet __roots
a clique node used as a root in each connected component of __JT
void _onMarginalTargetErased(const NodeId id) final
fired before a single target is removed
virtual void _onBayesNetChanged(const IBayesNet< GUM_SCALAR > *bn) final
fired after a new Bayes net has been assigned to the engine
void _onEvidenceChanged(const NodeId id, bool hasChangedSoftHard) final
fired after an evidence is changed, in particular when its status (soft/hard) changes ...
FindBarrenNodesType
type of algorithm to determine barren nodes
const Potential< GUM_SCALAR > & _posterior(NodeId id) final
returns the posterior of a given variable
const NodeSet & softEvidenceNodes() const
returns the set of nodes with soft evidence
void __collectMessage(NodeId id, NodeId from)
actually perform the collect phase
ArcProperty< __PotentialSet > __separator_potentials
the list of all potentials stored in the separators after inferences
virtual const NodeProperty< Size > & domainSizes() const final
get the domain sizes of the random variables of the BN
virtual NodeId createdJunctionTreeClique(const NodeId id)=0
returns the Id of the clique created by the elimination of a given node during the triangulation proc...
const NodeProperty< Idx > & hardEvidence() const
indicate for each node with hard evidence which value it took
virtual bool isDone() const noexcept final
returns whether the inference object is in a done state
const JoinTree * joinTree()
returns the current join tree used
NodeProperty< GUM_SCALAR > __constants
the constants resulting from the projections of CPTs defined over only hard evidence nodes remove th...
JoinTree * __JT
the join (or junction) tree used to answer the last inference query
CliqueGraph JoinTree
a join tree is a clique graph satisfying the running intersection property (but some cliques may be i...
void _updateOutdatedBNPotentials() final
prepares inference when the latter is in OutdatedBNPotentials state
void _onEvidenceAdded(const NodeId id, bool isHardEvidence) final
fired after a new evidence is inserted
virtual const Set< NodeSet > & jointTargets() const noexcept final
returns the list of joint targets
void _makeInference() final
called when the inference has to be performed effectively
void __diffuseMessageInvalidations(NodeId from_id, NodeId to_id, NodeSet &invalidated_cliques)
invalidate all the messages sent from a given clique
Header files of gum::Instantiation.
void _onAllMarginalTargetsAdded() final
fired after all the nodes of the BN are added as single targets
void __setProjectionFunction(Potential< GUM_SCALAR > *(*proj)(const Potential< GUM_SCALAR > &, const Set< const DiscreteVariable * > &))
sets the operator for performing the projections
void _setOutdatedBNStructureState()
put the inference into an outdated BN structure state
const NodeSet & clique(const NodeId idClique) const
returns the set of nodes included into a given clique
const NodeSet & hardEvidenceNodes() const
returns the set of nodes with hard evidence
NodeProperty< __PotentialSet > __clique_potentials
the list of all potentials stored in the cliques
void __computeJoinTreeRoots()
compute a root for each connected component of __JT
HashTable< NodeSet, NodeId > __joint_target_to_clique
for each set target, assign a clique in the JT that contains it
__PotentialSet __removeBarrenVariables(__PotentialSet &pot_list, Set< const DiscreteVariable * > &del_vars)
CliqueGraph JunctionTree
a junction tree is a clique graph satisfying the running intersection property and such that no cliqu...
void __createNewJT()
create a new junction tree as well as its related data structures
Detect barren nodes for inference in Bayesian networks.
static INLINE Potential< GUM_SCALAR > * LPNewprojPotential(const Potential< GUM_SCALAR > &t1, const Set< const DiscreteVariable * > &del_vars)
bool __use_binary_join_tree
indicates whether we should transform junction trees into binary join trees
void __invalidateAllMessages()
invalidate all messages, posteriors and created potentials
Potential< GUM_SCALAR > *(* __projection_op)(const Potential< GUM_SCALAR > &, const Set< const DiscreteVariable * > &)
the operator for performing the projections
__PotentialSet __marginalizeOut(__PotentialSet pot_list, Set< const DiscreteVariable * > &del_vars, Set< const DiscreteVariable * > &kept_vars)
removes variables del_vars from a list of potentials and returns the resulting list ...
void clear()
Removes all the elements in the hash table.
LazyPropagation(const IBayesNet< GUM_SCALAR > *BN, RelevantPotentialsFinderType=RelevantPotentialsFinderType::DSEP_BAYESBALL_POTENTIALS, FindBarrenNodesType=FindBarrenNodesType::FIND_BARREN_NODES, bool use_binary_join_tree=true)
default constructor
const JunctionTree * junctionTree()
returns the current junction tree
void _onJointTargetErased(const NodeSet &set) final
fired before a joint target is removed
An algorithm for converting a join tree into a binary join tree.
virtual bool isInferenceReady() const noexcept final
returns whether the inference object is in a ready state
virtual Triangulation * newFactory() const =0
returns a fresh triangulation of the same type as the current object but with an empty graph ...
void _onAllMarginalTargetsErased() final
fired before a all the single targets are removed
virtual const NodeSet & targets() const noexcept final
returns the list of marginal targets
HashTable< NodeId, NodeId > __node_to_clique
for each node of __graph (~ in the Bayes net), associate an ID in the JT
void clear()
Removes all the elements, if any, from the set.
void __produceMessage(NodeId from_id, NodeId to_id)
creates the message sent by clique from_id to clique to_id
std::size_t Size
In aGrUM, hashed values are unsigned long int.
RelevantPotentialsFinderType __find_relevant_potential_type
the type of relevant potential finding algorithm to be used
void(LazyPropagation< GUM_SCALAR >::* __findRelevantPotentials)(Set< const Potential< GUM_SCALAR > * > &pot_list, Set< const DiscreteVariable * > &kept_vars)
update a set of potentials: the remaining are those to be combined to produce a message on a separato...
virtual const CliqueGraph & junctionTree()=0
returns a compatible junction tree
value_type & insert(const Key &key, const Val &val)
Adds a new element (actually a copy of this element) into the hash table.
void __findRelevantPotentialsGetAll(__PotentialSet &pot_list, Set< const DiscreteVariable * > &kept_vars)
update a set of potentials: the remaining are those to be combined to produce a message on a separato...
virtual const std::vector< NodeId > & eliminationOrder()=0
returns an elimination ordering compatible with the triangulated graph
void _onMarginalTargetAdded(const NodeId id) final
fired after a new single target is inserted
virtual const IBayesNet< GUM_SCALAR > & BN() const final
Returns a constant reference over the IBayesNet referenced by this class.
Size NodeId
Type for node ids.
void insert(const Key &k)
Inserts a new element into the set.
UndiGraph __graph
the undigraph extracted from the BN and used to construct the join tree
void __setCombinationFunction(Potential< GUM_SCALAR > *(*comb)(const Potential< GUM_SCALAR > &, const Potential< GUM_SCALAR > &))
sets the operator for performing the combinations
void _onEvidenceErased(const NodeId id, bool isHardEvidence) final
fired before an evidence is removed
#define GUM_ERROR(type, msg)
virtual void setGraph(const UndiGraph *graph, const NodeProperty< Size > *domsizes)=0
initialize the triangulation data structures for a new graph
static void requisiteNodes(const DAG &dag, const NodeSet &query, const NodeSet &hardEvidence, const NodeSet &softEvidence, NodeSet &requisite)
Fill the 'requisite' nodeset with the requisite nodes in dag given a query and evidence.
Potential< GUM_SCALAR > *(* __combination_op)(const Potential< GUM_SCALAR > &, const Potential< GUM_SCALAR > &)
the operator for performing the combinations
void _setOutdatedBNPotentialsState()
puts the inference into an OutdatedBNPotentials state if it is not already in an OutdatedBNStructure ...
NodeProperty< EvidenceChangeType > __evidence_changes
indicates which nodes of the BN have evidence that changed since the last inference ...
void __findRelevantPotentialsWithdSeparation(__PotentialSet &pot_list, Set< const DiscreteVariable * > &kept_vars)
update a set of potentials: the remaining are those to be combined to produce a message on a separato...