aGrUM  0.20.3
a C++ library for (probabilistic) graphical models
inferenceEngine_tpl.h
Go to the documentation of this file.
1 /**
2  *
3  * Copyright (c) 2005-2021 by Pierre-Henri WUILLEMIN(@LIP6) & Christophe GONZALES(@AMU)
4  * info_at_agrum_dot_org
5  *
6  * This library is free software: you can redistribute it and/or modify
7  * it under the terms of the GNU Lesser General Public License as published by
8  * the Free Software Foundation, either version 3 of the License, or
9  * (at your option) any later version.
10  *
11  * This library is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14  * GNU Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public License
17  * along with this library. If not, see <http://www.gnu.org/licenses/>.
18  *
19  */
20 
21 
22 /** @file
23  * @brief implementation of the abstract InferenceEngine class for inference in credal networks
24  *
25  * @author Christophe GONZALES(@AMU) and Pierre-Henri WUILLEMIN(@LIP6)
26  */
27 #include <agrum/CN/inference/inferenceEngine.h>
28 #include <agrum/agrum.h>
29 
30 namespace gum {
31  namespace credal {
32 
33  /*template< typename GUM_SCALAR >
34  InferenceEngine< GUM_SCALAR >::InferenceEngine () : ApproximationScheme() {
35  std::cout << "InferenceEngine construct ()" << std::endl;
36  GUM_CONSTRUCTOR ( InferenceEngine )
37  }*/
38 
39  template < typename GUM_SCALAR >
40  InferenceEngine< GUM_SCALAR >::InferenceEngine(const CredalNet< GUM_SCALAR >& credalNet) :
41  ApproximationScheme() {
42  credalNet_ = &credalNet;
43 
44  dbnOpt_.setCNet(credalNet);
45 
46  initMarginals_();
47 
48  GUM_CONSTRUCTOR(InferenceEngine);
49  }
50 
// NOTE(review): this listing is a doxygen scrape with elided source lines
// (gaps in the embedded numbering). Code below is kept verbatim; confirm the
// missing lines against the real inferenceEngine_tpl.h.

// Fragment: original lines 52-53 are missing -- presumably the destructor
// signature and its GUM_DESTRUCTOR call; TODO confirm.
51  template < typename GUM_SCALAR >
54  }
55 
// Fragment: accessor returning the credal net this engine was built on
// (signature on original line 57 is missing from the listing).
56  template < typename GUM_SCALAR >
58  return *credalNet_;
59  }
60 
// Fragment: clears all hard evidence and queries. The signature (original
// line 62) is missing. The commented-out clears were deliberately disabled
// upstream -- marginals/expectations survive an evidence reset.
61  template < typename GUM_SCALAR >
63  evidence_.clear();
64  query_.clear();
65  /*
66  marginalMin_.clear();
67  marginalMax_.clear();
68  oldMarginalMin_.clear();
69  oldMarginalMax_.clear();
70  */
72  /*
73  expectationMin_.clear();
74  expectationMax_.clear();
75  */
77 
78  // marginalSets_.clear();
80 
83 
84  //_modal.clear();
85 
86  //_t0.clear();
87  //_t1.clear();
88  }
89 
90  /*
91  template< typename GUM_SCALAR >
92  void InferenceEngine< GUM_SCALAR >::setIterStop ( const int &iter_stop ) {
93  iterStop_ = iter_stop;
94  }*/
95 
// NOTE(review): scrape gaps ahead -- several one-line bodies (the actual
// member assignments / signatures) are missing from this listing.

// Enables/disables storage of the IBayesNet which optimizes the inferred
// bounds (body on original line 98, presumably `storeBNOpt_ = value;`, is
// missing -- TODO confirm).
96  template < typename GUM_SCALAR >
97  void InferenceEngine< GUM_SCALAR >::storeBNOpt(const bool value) {
99  }
100 
// storeVertices(value) fragment: when enabled, the per-node credal-set
// vertex containers are (re)initialized.
101  template < typename GUM_SCALAR >
104 
105  if (value) initMarginalSets_();
106  }
107 
// setRepetitiveInd(value) fragment: remembers the old flag so clusters are
// only recomputed when the flag actually changes.
108  template < typename GUM_SCALAR >
110  bool oldValue = repetitiveInd_;
112 
113  // do not compute clusters more than once
115  }
116 
// Getter: whether repetitive (dynamic-network) independence is used.
117  template < typename GUM_SCALAR >
119  return repetitiveInd_;
120  }
121  /*
122  template< typename GUM_SCALAR >
123  int InferenceEngine< GUM_SCALAR >::iterStop () const {
124  return iterStop_;
125  }*/
126 
// Getter: whether credal-set vertices are stored during inference.
127  template < typename GUM_SCALAR >
129  return storeVertices_;
130  }
131 
// Getter: whether the bound-optimal IBayesNet is stored.
132  template < typename GUM_SCALAR >
134  return storeBNOpt_;
135  }
136 
// Getter: pointer to the internal optimal-BN map (dbnOpt_).
137  template < typename GUM_SCALAR >
139  return &dbnOpt_;
140  }
141 
// Loads modalities (per-variable numeric values used for expectations) from a
// text file: one line per variable, "name v0 v1 ...". NOTE(review): the
// scrape dropped the signature, the ifstream open, the getline calls and the
// values.push_back lines (original lines 143-144, 147, 160, 170, 174, 186).
142  template < typename GUM_SCALAR >
145 
146  if (!mod_stream.good()) {
148  "void InferenceEngine< GUM_SCALAR "
149  ">::insertModals(const std::string & path) : "
150  "could not open input file : "
151  << path);
152  }
153 
154  if (!modal_.empty()) modal_.clear();
155 
156  std::string line, tmp;
157  char * cstr, *p;
158 
159  while (mod_stream.good()) {
161 
162  if (line.size() == 0) continue;
163 
// manual strtok-based tokenizing over a heap copy of the line; a
// std::istringstream would avoid the raw new[]/strcpy, but kept verbatim
164  cstr = new char[line.size() + 1];
165  strcpy(cstr, line.c_str());
166 
167  p = strtok(cstr, " ");
168  tmp = p;
170 
171  p = strtok(nullptr, " ");
172 
173  while (p != nullptr) {
175  p = strtok(nullptr, " ");
176  } // end of : line
177 
178  modal_.insert(tmp, values); //[tmp] = values;
179 
// NOTE(review): p is nullptr when the loop above exits, so `delete[] p` is a
// no-op -- and p never owned memory anyway (strtok returns pointers into
// cstr). Confusing; only cstr needs deleting.
180  delete[] p;
181  delete[] cstr;
182  } // end of : file
183 
184  mod_stream.close();
185 
187  }
188 
// Inserts modalities from an in-memory map name -> values. Unknown variable
// names and cardinality mismatches are silently skipped (the hard error was
// deliberately commented out upstream). NOTE(review): scrape gaps at original
// lines 190, 198, 200, 205, 222 (signature, idFromName lookup, dSize init,
// initExpectations_ call presumably).
189  template < typename GUM_SCALAR >
191  const std::map< std::string, std::vector< GUM_SCALAR > >& modals) {
192  if (!modal_.empty()) modal_.clear();
193 
194  for (auto it = modals.cbegin(), theEnd = modals.cend(); it != theEnd; ++it) {
195  NodeId id;
196 
197  try {
199  } catch (NotFound& err) {
201  continue;
202  }
203 
204  // check that modals are net compatible
206 
207  if (dSize != it->second.size()) continue;
208 
209  // GUM_ERROR(OperationNotAllowed, "void InferenceEngine< GUM_SCALAR
210  // >::insertModals( const std::map< std::string, std::vector< GUM_SCALAR
211  // > >
212  // &modals) : modalities does not respect variable cardinality : " <<
213  // credalNet_->current_bn().variable( id ).name() << " : " << dSize << "
214  // != "
215  // << it->second.size());
216 
217  modal_.insert(it->first, it->second); //[ it->first ] = it->second;
218  }
219 
220  //_modal = modals;
221 
223  }
224 
// Inserts hard evidence from a map name -> distribution; unknown variable
// names are skipped. NOTE(review): scrape gaps (signature, idFromName lookup,
// evidence_.insert lines -- original 226, 234, 236, 240).
225  template < typename GUM_SCALAR >
227  const std::map< std::string, std::vector< GUM_SCALAR > >& eviMap) {
228  if (!evidence_.empty()) evidence_.clear();
229 
230  for (auto it = eviMap.cbegin(), theEnd = eviMap.cend(); it != theEnd; ++it) {
231  NodeId id;
232 
233  try {
235  } catch (NotFound& err) {
237  continue;
238  }
239 
241  }
242  }
243 
244  // check that observed variables DO exists in the network (otherwise Lazy
245  // report
246  // an error and app crash)
// Overload taking a NodeProperty keyed by NodeId; node existence is verified
// (presumably via the BN variable lookup on the missing original line 255)
// before the evidence is stored.
247  template < typename GUM_SCALAR >
249  const NodeProperty< std::vector< GUM_SCALAR > >& evidence) {
250  if (!evidence_.empty()) evidence_.clear();
251 
252  // use cbegin() to get const_iterator when available in aGrUM hashtables
253  for (const auto& elt: evidence) {
254  try {
256  } catch (NotFound& err) {
258  continue;
259  }
260 
262  }
263  }
264 
// Loads hard evidence from a text file: skips to a "[EVIDENCE]" section, then
// reads "name v0 v1 ..." lines until "[QUERY]" or EOF. NOTE(review): scrape
// gaps at original lines 266-267, 270, 283, 287, 303, 305, 309, 313, 317
// (signature, ifstream open, getline calls, idFromName, values collection,
// evidence_.insert).
265  template < typename GUM_SCALAR >
268 
269  if (!evi_stream.good()) {
271  "void InferenceEngine< GUM_SCALAR "
272  ">::insertEvidence(const std::string & path) : could not "
273  "open input file : "
274  << path);
275  }
276 
277  if (!evidence_.empty()) evidence_.clear();
278 
279  std::string line, tmp;
280  char * cstr, *p;
281 
// skip everything before the [EVIDENCE] header
282  while (evi_stream.good() && std::strcmp(line.c_str(), "[EVIDENCE]") != 0) {
284  }
285 
286  while (evi_stream.good()) {
288 
289  if (std::strcmp(line.c_str(), "[QUERY]") == 0) break;
290 
291  if (line.size() == 0) continue;
292 
293  cstr = new char[line.size() + 1];
294  strcpy(cstr, line.c_str());
295 
296  p = strtok(cstr, " ");
297  tmp = p;
298 
299  // if user input is wrong
// NOTE(review): NodeId is unsigned in aGrUM, so -1 wraps to the max value --
// works as an "invalid" sentinel but is implicit; TODO confirm intent.
300  NodeId node = -1;
301 
302  try {
304  } catch (NotFound& err) {
306  continue;
307  }
308 
310  p = strtok(nullptr, " ");
311 
312  while (p != nullptr) {
314  p = strtok(nullptr, " ");
315  } // end of : line
316 
318 
// NOTE(review): p is nullptr here (loop exit condition), so this delete[] is
// a no-op; strtok returns pointers into cstr, only cstr is owned.
319  delete[] p;
320  delete[] cstr;
321  } // end of : file
322 
323  evi_stream.close();
324  }
325 
// Registers query flags (one bool per modality) per node; nodes not found in
// the current BN are skipped. NOTE(review): scrape gaps at original lines
// 327, 333, 335, 339 (signature, variable lookup, query_.insert).
326  template < typename GUM_SCALAR >
328  const NodeProperty< std::vector< bool > >& query) {
329  if (!query_.empty()) query_.clear();
330 
331  for (const auto& elt: query) {
332  try {
334  } catch (NotFound& err) {
336  continue;
337  }
338 
340  }
341  }
342 
// Loads queries from a text file: skips to "[QUERY]", then reads lines of the
// form "name [mod ...]" until "[EVIDENCE]" or EOF. A bare name queries all
// modalities; otherwise only the listed ones. NOTE(review): scrape gaps at
// original lines 344-345, 348, 360, 364, 380, 382, 386, 397, 407 (signature,
// ifstream open, getline calls, idFromName, dSize init, GUM_ERROR head,
// query_.insert).
343  template < typename GUM_SCALAR >
346 
347  if (!evi_stream.good()) {
349  "void InferenceEngine< GUM_SCALAR >::insertQuery(const "
350  "std::string & path) : could not open input file : "
351  << path);
352  }
353 
354  if (!query_.empty()) query_.clear();
355 
356  std::string line, tmp;
357  char * cstr, *p;
358 
// skip everything before the [QUERY] header
359  while (evi_stream.good() && std::strcmp(line.c_str(), "[QUERY]") != 0) {
361  }
362 
363  while (evi_stream.good()) {
365 
366  if (std::strcmp(line.c_str(), "[EVIDENCE]") == 0) break;
367 
368  if (line.size() == 0) continue;
369 
370  cstr = new char[line.size() + 1];
371  strcpy(cstr, line.c_str());
372 
373  p = strtok(cstr, " ");
374  tmp = p;
375 
376  // if user input is wrong
// NOTE(review): NodeId is unsigned; -1 wraps to max as an implicit sentinel.
377  NodeId node = -1;
378 
379  try {
381  } catch (NotFound& err) {
383  continue;
384  }
385 
387 
388  p = strtok(nullptr, " ");
389 
// no modality listed => query every modality of the variable
390  if (p == nullptr) {
391  query_.insert(node, std::vector< bool >(dSize, true));
392  } else {
393  std::vector< bool > values(dSize, false);
394 
395  while (p != nullptr) {
396  if ((Size)atoi(p) >= dSize)
398  "void InferenceEngine< GUM_SCALAR "
399  ">::insertQuery(const std::string & path) : "
400  "query modality is higher or equal to "
401  "cardinality");
402 
403  values[atoi(p)] = true;
404  p = strtok(nullptr, " ");
405  } // end of : line
406 
408  }
409 
// NOTE(review): p is nullptr here, so delete[] p is a no-op (see above).
410  delete[] p;
411  delete[] cstr;
412  } // end of : file
413 
414  evi_stream.close();
415  }
416 
// Accessors for inferred bounds. NOTE(review): heavy scrape gaps -- most
// signatures and several return statements are missing (original lines
// 418-420, 424-426, 430, 432-434, 440, 442-444, 451, 453, 459, 461, 466,
// 473). Presumably marginalMin/marginalMax by id and by name, then the
// per-modality scalar variants, then expectation bounds; TODO confirm.

// marginalMin-by-? fragment (body missing entirely).
417  template < typename GUM_SCALAR >
421  }
422 
// marginalMax-by-? fragment (body missing entirely).
423  template < typename GUM_SCALAR >
427  }
428 
// Fragment: returns a locally-built result after a lookup that may throw
// NotFound; the catch-and-rethrow is redundant but preserved verbatim.
429  template < typename GUM_SCALAR >
431  try {
435  return res;
436  } catch (NotFound& err) { throw(err); }
437  }
438 
439  template < typename GUM_SCALAR >
441  try {
445  return res;
446  } catch (NotFound& err) { throw(err); }
447  }
448 
// Scalar bound accessor fragments (return statement lines missing).
449  template < typename GUM_SCALAR >
450  const GUM_SCALAR&
452  try {
454  } catch (NotFound& err) { throw(err); }
455  }
456 
457  template < typename GUM_SCALAR >
458  const GUM_SCALAR&
460  try {
462  } catch (NotFound& err) { throw(err); }
463  }
464 
// Lower expectation of node `id` (signature line missing).
465  template < typename GUM_SCALAR >
467  try {
468  return expectationMin_[id];
469  } catch (NotFound& err) { throw(err); }
470  }
471 
// Upper expectation of node `id` (signature line missing).
472  template < typename GUM_SCALAR >
474  try {
475  return expectationMax_[id];
476  } catch (NotFound& err) { throw(err); }
477  }
478 
// Dynamic (per-time-slice) expectation bounds, keyed by base variable name.
// Requires dynamicExpectations() to have been run. NOTE(review): signature
// lines (original 481, 497, 513) are missing from the listing.
479  template < typename GUM_SCALAR >
480  const std::vector< GUM_SCALAR >&
482  std::string errTxt = "const std::vector< GUM_SCALAR > & InferenceEngine< "
483  "GUM_SCALAR >::dynamicExpMin ( const std::string & "
484  "varName ) const : ";
485 
486  if (dynamicExpMin_.empty())
487  GUM_ERROR(OperationNotAllowed, errTxt + "_dynamicExpectations() needs to be called before")
488 
489  if (!dynamicExpMin_.exists(varName) /*dynamicExpMin_.find(varName) == dynamicExpMin_.end()*/)
490  GUM_ERROR(NotFound, errTxt + "variable name not found : " << varName)
491 
492  return dynamicExpMin_[varName];
493  }
494 
495  template < typename GUM_SCALAR >
496  const std::vector< GUM_SCALAR >&
498  std::string errTxt = "const std::vector< GUM_SCALAR > & InferenceEngine< "
499  "GUM_SCALAR >::dynamicExpMax ( const std::string & "
500  "varName ) const : ";
501 
502  if (dynamicExpMax_.empty())
503  GUM_ERROR(OperationNotAllowed, errTxt + "_dynamicExpectations() needs to be called before")
504 
505  if (!dynamicExpMax_.exists(varName) /*dynamicExpMin_.find(varName) == dynamicExpMin_.end()*/)
506  GUM_ERROR(NotFound, errTxt + "variable name not found : " << varName)
507 
508  return dynamicExpMax_[varName];
509  }
510 
// Credal-set vertices stored for node `id` (only meaningful when
// storeVertices(true) was set before inference).
511  template < typename GUM_SCALAR >
512  const std::vector< std::vector< GUM_SCALAR > >&
514  return marginalSets_[id];
515  }
516 
// Writes "name modality min max" lines, one per variable modality, to the
// given file. NOTE(review): the signature and ofstream open (original lines
// 518-519, 522 GUM_ERROR head) are missing from the listing.
517  template < typename GUM_SCALAR >
520 
521  if (!m_stream.good()) {
523  "void InferenceEngine< GUM_SCALAR >::saveMarginals(const "
524  "std::string & path) const : could not open output file "
525  ": "
526  << path);
527  }
528 
// iterate min bounds; max bounds are looked up in lockstep by node/modality
529  for (const auto& elt: marginalMin_) {
530  Size esize = Size(elt.second.size());
531 
532  for (Size mod = 0; mod < esize; mod++) {
533  m_stream << credalNet_->current_bn().variable(elt.first).name() << " " << mod << " "
534  << (elt.second)[mod] << " " << marginalMax_[elt.first][mod] << std::endl;
535  }
536  }
537 
538  m_stream.close();
539  }
540 
// Writes dynamic expectation bounds to a file: one line per variable, first
// all min rows then all max rows. No-op if expectations were never computed.
// NOTE(review): signature and ofstream open (original lines 542, 550, 553
// GUM_ERROR head) are missing from the listing.
541  template < typename GUM_SCALAR >
543  if (dynamicExpMin_.empty()) //_modal.empty())
544  return;
545 
546  // else not here, to keep the const (natural with a saving process)
547  // else if(dynamicExpMin_.empty() || dynamicExpMax_.empty())
548  //_dynamicExpectations(); // works with or without a dynamic network
549 
551 
552  if (!m_stream.good()) {
554  "void InferenceEngine< GUM_SCALAR "
555  ">::saveExpectations(const std::string & path) : could "
556  "not open output file : "
557  << path);
558  }
559 
560  for (const auto& elt: dynamicExpMin_) {
561  m_stream << elt.first; // it->first;
562 
563  // iterates over a vector
564  for (const auto& elt2: elt.second) {
565  m_stream << " " << elt2;
566  }
567 
568  m_stream << std::endl;
569  }
570 
571  for (const auto& elt: dynamicExpMax_) {
572  m_stream << elt.first;
573 
574  // iterates over a vector
575  for (const auto& elt2: elt.second) {
576  m_stream << " " << elt2;
577  }
578 
579  m_stream << std::endl;
580  }
581 
582  m_stream.close();
583  }
584 
// Renders the inferred intervals "P(name=mod|e) = [ min, max ]" (one line per
// modality, " QUERY" appended on queried modalities) into a string.
// NOTE(review): the signature and the ostringstream declaration (original
// lines 586-587) are missing from the listing.
585  template < typename GUM_SCALAR >
588  output << std::endl;
589 
590  // use cbegin() when available
591  for (const auto& elt: marginalMin_) {
592  Size esize = Size(elt.second.size());
593 
594  for (Size mod = 0; mod < esize; mod++) {
595  output << "P(" << credalNet_->current_bn().variable(elt.first).name() << "=" << mod
596  << "|e) = [ ";
597  output << marginalMin_[elt.first][mod] << ", " << marginalMax_[elt.first][mod] << " ]";
598 
599  if (!query_.empty())
600  if (query_.exists(elt.first) && query_[elt.first][mod]) output << " QUERY";
601 
602  output << std::endl;
603  }
604 
605  output << std::endl;
606  }
607 
608  return output.str();
609  }
610 
// Writes the stored credal-set vertices per node as bracketed comma-separated
// rows. NOTE(review): signature/ofstream open and the per-node header line
// (original 612-613, 616 GUM_ERROR head, 623) are missing from the listing.
// Also: "outpul" typo lives in the runtime error string below (left verbatim;
// fix upstream, not here).
611  template < typename GUM_SCALAR >
614 
615  if (!m_stream.good()) {
617  "void InferenceEngine< GUM_SCALAR >::saveVertices(const "
618  "std::string & path) : could not open outpul file : "
619  << path);
620  }
621 
622  for (const auto& elt: marginalSets_) {
624 
625  for (const auto& elt2: elt.second) {
626  m_stream << "[";
627  bool first = true;
628 
629  for (const auto& elt3: elt2) {
// BUG(review): `first = false;` is inside the `if (!first)` branch, which is
// unreachable while first is true -- so the flag never flips and the ","
// separator is never emitted. The assignment should be in an else branch (or
// unconditionally after the if). Left verbatim; fix upstream.
630  if (!first) {
631  m_stream << ",";
632  first = false;
633  }
634 
635  m_stream << elt3;
636  }
637 
638  m_stream << "]\n";
639  }
640  }
641 
642  m_stream.close();
643  }
644 
// initMarginals_ fragment: (re)creates the min/max marginal containers for
// every node of the current BN. NOTE(review): heavy scrape gaps -- the
// signature and all the per-node insert/fill statements (original 646-650,
// 653-655, 657-658) are missing; TODO confirm against the real source.
645  template < typename GUM_SCALAR >
651 
652  for (auto node: credalNet_->current_bn().nodes()) {
656 
659  }
660  }
661 
// initMarginalSets_ fragment: prepares the per-node vertex containers, only
// when vertex storage was requested (signature and clear/insert lines --
// original 663-664, 669 -- are missing).
662  template < typename GUM_SCALAR >
665 
666  if (!storeVertices_) return;
667 
668  for (auto node: credalNet_->current_bn().nodes())
670  }
671 
672  // since only monitored variables in modal_ will be able to compute
673  // expectations, it is useless to initialize those for all variables;
674  // modal_ variables will always be checked further, so it is not necessary
675  // to check it here, but doing so will use less memory
// initExpectations_ fragment: seeds min/max expectations only for variables
// present in modal_, keyed by the name stripped of its "_t" time suffix
// (signature, clears, name extraction and inserts -- original 678-680, 685,
// 687, 689, 693-694 -- are missing).
677  template < typename GUM_SCALAR >
681 
682  if (modal_.empty()) return;
683 
684  for (auto node: credalNet_->current_bn().nodes()) {
686 
688  auto delim = var_name.find_first_of("_");
690 
691  if (!modal_.exists(var_name)) continue;
695  }
696  }
697 
// Public dynamicExpectations() fragment: presumably forwards to
// dynamicExpectations_() (body on original lines 699-700 missing -- TODO
// confirm).
698  template < typename GUM_SCALAR >
701  }
702 
// dynamicExpectations_: folds per-time-slice expectation bounds into
// per-variable vectors (dynamicExpMin_/dynamicExpMax_), indexed by the base
// variable name (time suffix stripped). NOTE(review): scrape gaps at original
// 704, 723, 726, 728, 730-731, 738-743, 750, 752, 759, 762 -- signature,
// outerMap declarations, time-step parsing, and the inner map fills.
703  template < typename GUM_SCALAR >
705  // no modals, no expectations computed during inference
706  if (expectationMin_.empty() || modal_.empty()) return;
707 
708  // already called by the algorithm or the user
709  if (dynamicExpMax_.size() > 0 && dynamicExpMin_.size() > 0) return;
710 
711  // typedef typename std::map< int, GUM_SCALAR > innerMap;
712  using innerMap = typename gum::HashTable< int, GUM_SCALAR >;
713 
714  // typedef typename std::map< std::string, innerMap > outerMap;
715  using outerMap = typename gum::HashTable< std::string, innerMap >;
716 
717  // typedef typename std::map< std::string, std::vector< GUM_SCALAR > >
718  // mod;
719 
720  // if the network is not dynamic, we could save expectationMin_/Max_
721  // directly
722  // (same result, but faster)
724 
725  for (const auto& elt: expectationMin_) {
727 
729  auto delim = var_name.find_first_of("_");
732 
733  // to be sure (don't store not monitored variables' expectations)
734  // although it
735  // should be taken care of before this point
736  if (!modal_.exists(var_name)) continue;
737 
740  = elt.second; // we iterate with min iterators
744  }
745 
746  for (const auto& elt: expectationsMin) {
747  typename std::vector< GUM_SCALAR > dynExp(elt.second.size());
748 
749  for (const auto& elt2: elt.second)
751 
753  }
754 
755  for (const auto& elt: expectationsMax) {
756  typename std::vector< GUM_SCALAR > dynExp(elt.second.size());
757 
758  for (const auto& elt2: elt.second) {
760  }
761 
763  }
764  }
765 
// repetitiveInit_: partitions the nodes of a dynamic (2TBN-style) network by
// time step parsed from the "_t" suffix of variable names. t=0 nodes become
// t0_ keys, t=1 nodes become t0_ members or t1_ keys, and t>1 nodes are
// attached to the matching t0_/t1_ entry (domain sizes must agree).
// NOTE(review): many lines elided by the scrape (signature, name/time_step
// extraction, cpt lookups, cluster push_backs -- original 767, 774, 778, 784,
// 791, 793-796, 802-804, 807-811, 827, 829-832, 836, 838, 842-844, 847-848,
// 851, 860, 862, 865-866, 869); kept verbatim below.
766  template < typename GUM_SCALAR >
768  timeSteps_ = 0;
769  t0_.clear();
770  t1_.clear();
771 
772  // t = 0 vars belongs to t0_ as keys
773  for (auto node: credalNet_->current_bn().dag().nodes()) {
775  auto delim = var_name.find_first_of("_");
776 
// no "_" in the name => this is not a dynamic network: hard error
777  if (delim > var_name.size()) {
779  "void InferenceEngine< GUM_SCALAR "
780  ">::repetitiveInit_() : the network does not "
781  "appear to be dynamic");
782  }
783 
785 
786  if (time_step.compare("0") == 0) t0_.insert(node, std::vector< NodeId >());
787  }
788 
789  // t = 1 vars belongs to either t0_ as member value or t1_ as keys
790  for (const auto& node: credalNet_->current_bn().dag().nodes()) {
792  auto delim = var_name.find_first_of("_");
797 
798  if (time_step.compare("1") == 0) {
799  bool found = false;
800 
801  for (const auto& elt: t0_) {
805 
806  if (var_name.compare(var_0_name) == 0) {
809 
812  else
813  t1_.insert(node, std::vector< NodeId >());
814 
815  found = true;
816  break;
817  }
818  }
819 
820  if (!found) { t1_.insert(node, std::vector< NodeId >()); }
821  }
822  }
823 
824  // t > 1 vars belongs to either t0_ or t1_ as member value
825  // remember timeSteps_
826  for (auto node: credalNet_->current_bn().dag().nodes()) {
828  auto delim = var_name.find_first_of("_");
833 
834  if (time_step.compare("0") != 0 && time_step.compare("1") != 0) {
835  // keep max time_step
837 
839  bool found = false;
840 
841  for (const auto& elt: t0_) {
845 
846  if (var_name.compare(var_0_name) == 0) {
849 
// only cluster nodes whose CPT domain size matches the template node's
850  if (potential->domainSize() == potential2->domainSize()) {
852  found = true;
853  break;
854  }
855  }
856  }
857 
858  if (!found) {
859  for (const auto& elt: t1_) {
861  auto delim = var_0_name.find_first_of("_");
863 
864  if (var_name.compare(var_0_name) == 0) {
867 
868  if (potential->domainSize() == potential2->domainSize()) {
870  break;
871  }
872  }
873  }
874  }
875  }
876  }
877  }
878 
// updateExpectations_: given a credal-set vertex for a node, updates the
// stored min/max expectations of the corresponding (time-stripped) variable
// as the dot product vertex . modal_ values. NOTE(review): scrape gaps at
// original 881, 883, 886, 895, 897 (signature head, var_name extraction, and
// the expectationMin_/Max_ min/max updates).
879  template < typename GUM_SCALAR >
880  void
882  const std::vector< GUM_SCALAR >& vertex) {
884  auto delim = var_name.find_first_of("_");
885 
887 
888  if (modal_.exists(var_name) /*modal_.find(var_name) != modal_.end()*/) {
889  GUM_SCALAR exp = 0;
890  auto vsize = vertex.size();
891 
// expectation of this vertex w.r.t. the variable's modality values
892  for (Size mod = 0; mod < vsize; mod++)
893  exp += vertex[mod] * modal_[var_name][mod];
894 
896 
898  }
899  }
900 
// updateCredalSets_: adds `vertex` to node `id`'s credal set unless an
// (almost-)equal vertex is already present; optionally eliminates redundant
// vertices with LRS. NOTE(review): scrape gaps at original 902, 924, 936,
// 941-944, 954, 963, 967, 969, 971 (signature head, push_back, the
// remove_if call head, the marginal iterator declarations, the erase, the
// LRS wrapper declaration/fills and the final set replacement).
901  template < typename GUM_SCALAR >
903  const std::vector< GUM_SCALAR >& vertex,
904  const bool& elimRedund) {
905  auto& nodeCredalSet = marginalSets_[id];
906  auto dsize = vertex.size();
907 
908  bool eq = true;
909 
// scan for an existing vertex equal to `vertex` within 1e-6 per coordinate
910  for (auto it = nodeCredalSet.cbegin(), itEnd = nodeCredalSet.cend(); it != itEnd; ++it) {
911  eq = true;
912 
913  for (Size i = 0; i < dsize; i++) {
914  if (std::fabs(vertex[i] - (*it)[i]) > 1e-6) {
915  eq = false;
916  break;
917  }
918  }
919 
920  if (eq) break;
921  }
922 
// NOTE(review): both branches return -- the push_back on the elided original
// line 924 sits inside the if; the unreachable-looking shape is a scrape
// artifact, the duplicate-vertex case simply returns without inserting.
923  if (!eq || nodeCredalSet.size() == 0) {
925  return;
926  } else
927  return;
928 
929  // because of next lambda return condition
930  if (nodeCredalSet.size() == 1) return;
931 
932  // check that the point and all previously added ones are not inside the
933  // actual
934  // polytope
935  auto itEnd
937  nodeCredalSet.end(),
938  [&](const std::vector< GUM_SCALAR >& v) -> bool {
// a vertex is redundant if every coordinate sits on the current min or max
// bound only where those bounds coincide; see the elided iterator setup
939  for (auto jt = v.cbegin(),
940  jtEnd = v.cend(),
945  jt != jtEnd && minIt != minItEnd && maxIt != maxItEnd;
946  ++jt, ++minIt, ++maxIt) {
947  if ((std::fabs(*jt - *minIt) < 1e-6 || std::fabs(*jt - *maxIt) < 1e-6)
948  && std::fabs(*minIt - *maxIt) > 1e-6)
949  return false;
950  }
951  return true;
952  });
953 
955 
956  // we need at least 2 points to make a convex combination
957  if (!elimRedund || nodeCredalSet.size() <= 2) return;
958 
959  // there may be points not inside the polytope but on one of its facets,
960  // meaning each is still a convex combination of vertices of that facet;
961  // here
962  // we need lrs.
964  lrsWrapper.setUpV((unsigned int)dsize, (unsigned int)(nodeCredalSet.size()));
965 
966  for (const auto& vtx: nodeCredalSet)
968 
970 
972  }
973 
// Accessors for the dynamic-network clusters built by repetitiveInit_
// (signatures on original lines 976 and 982 are missing from the listing).
974  template < typename GUM_SCALAR >
975  const NodeProperty< std::vector< NodeId > >&
977  return t0_;
978  }
979 
980  template < typename GUM_SCALAR >
981  const NodeProperty< std::vector< NodeId > >&
983  return t1_;
984  }
985 
// computeEpsilon_: returns the largest absolute change between current and
// previous marginal bounds across all nodes/modalities -- the approximation-
// scheme convergence criterion. OpenMP: each thread reduces into tEps, then
// the global max is taken in a critical section. NOTE(review): scrape gaps at
// original 987, 992, 1004, 1009, 1013-1014 (signature, local delta
// declaration, the delta computations against oldMarginal*, and the
// oldMarginal* refresh); kept verbatim.
986  template < typename GUM_SCALAR >
988  GUM_SCALAR eps = 0;
989 #pragma omp parallel
990  {
991  GUM_SCALAR tEps = 0;
993 
994  /// int tId = getThreadNumber();
995  int nsize = int(marginalMin_.size());
996 
997 #pragma omp for
998 
999  for (int i = 0; i < nsize; i++) {
1000  auto dSize = marginalMin_[i].size();
1001 
1002  for (Size j = 0; j < dSize; j++) {
1003  // on min
1005  delta = (delta < 0) ? (-delta) : delta;
1006  tEps = (tEps < delta) ? delta : tEps;
1007 
1008  // on max
1010  delta = (delta < 0) ? (-delta) : delta;
1011  tEps = (tEps < delta) ? delta : tEps;
1012 
1015  }
1016  } // end of : all variables
1017 
1018 #pragma omp critical(epsilon_max)
1019  {
1020 #pragma omp flush(eps)
1021  eps = (eps < tEps) ? tEps : eps;
1022  }
1023  }
1024 
1025  return eps;
1026  }
1027  } // namespace credal
1028 } // namespace gum
INLINE void emplace(Args &&... args)
Definition: set_tpl.h:643
namespace for all credal networks entities
Definition: LpInterface.cpp:37