aGrUM  0.16.0
inferenceEngine_tpl.h
Go to the documentation of this file.
1 
29 #include <agrum/agrum.h>
30 
31 namespace gum {
32  namespace credal {
33 
34  /*template< typename GUM_SCALAR >
35  InferenceEngine< GUM_SCALAR >::InferenceEngine () : ApproximationScheme() {
36  std::cout << "InferenceEngine construct ()" << std::endl;
37  GUM_CONSTRUCTOR ( InferenceEngine );
38  }*/
39 
// Constructor: binds this engine to the given credal network and hands the
// net to the optimal-BN store (_dbnOpt).
// NOTE(review): this Doxygen listing dropped lines 41/43-44/48 (the
// "InferenceEngine(...)" declarator and its member-initializer list), so the
// block below is an incomplete rendering of the real source, not a defect.
40  template < typename GUM_SCALAR >
42  const CredalNet< GUM_SCALAR >& credalNet) :
45 
46  _dbnOpt.setCNet(credalNet);
47 
49 
50  GUM_CONSTRUCTOR(InferenceEngine);
51  }
52 
// Destructor: only logs destruction via aGrUM's tracking macro; all members
// are RAII-managed. (Declarator line 54 dropped by the listing.)
53  template < typename GUM_SCALAR >
55  GUM_DESTRUCTOR(InferenceEngine);
56  }
57 
// Accessor: returns the credal network this engine works on (dereferences
// the stored pointer _credalNet). (Declarator line 59 dropped by the listing.)
58  template < typename GUM_SCALAR >
60  return *_credalNet;
61  }
62 
// Clears inference-related state so another inference can be run: evidence,
// query, and dynamic expectations. Marginals/expectations/credal sets are
// deliberately left alone (the commented-out clears), presumably because the
// _init* methods rebuild them -- TODO confirm against the callers.
// (Declarator line 64 dropped by the listing.)
63  template < typename GUM_SCALAR >
65  _evidence.clear();
66  _query.clear();
67  /*
68  _marginalMin.clear();
69  _marginalMax.clear();
70  _oldMarginalMin.clear();
71  _oldMarginalMax.clear();
72  */
74  /*
75  _expectationMin.clear();
76  _expectationMax.clear();
77  */
79 
80  // _marginalSets.clear();
82 
83  _dynamicExpMin.clear();
84  _dynamicExpMax.clear();
85 
86  //_modal.clear();
87 
88  //_t0.clear();
89  //_t1.clear();
90  }
91 
// Dead code kept by the original authors: iteration-stop setter, disabled.
92  /*
93  template< typename GUM_SCALAR >
94  void InferenceEngine< GUM_SCALAR >::setIterStop ( const int &iter_stop ) {
95  _iterStop = iter_stop;
96  }*/
97 
// Setter for _storeBNOpt: enable/disable storing optimal IBayesNets during
// inference. (Declarator line 99 dropped by the listing.)
98  template < typename GUM_SCALAR >
100  _storeBNOpt = value;
101  }
102 
// Setter for _storeVertices: enabling it eagerly allocates the (empty)
// per-node credal-set containers. (Declarator line 104 dropped.)
103  template < typename GUM_SCALAR >
105  _storeVertices = value;
106 
107  if (value) _initMarginalSets();
108  }
109 
// Setter for repetitive independence (dynamic networks): clusters _t0/_t1
// are computed only on a false->true transition. (Declarator line 111 dropped.)
110  template < typename GUM_SCALAR >
112  bool oldValue = _repetitiveInd;
113  _repetitiveInd = repetitive;
114 
115  // do not compute clusters more than once
116  if (_repetitiveInd && !oldValue) _repetitiveInit();
117  }
118 
// Getter for the repetitive-independence flag. (Declarator line 120 dropped.)
119  template < typename GUM_SCALAR >
121  return _repetitiveInd;
122  }
// Dead code kept by the original authors: iteration-stop getter, disabled.
123  /*
124  template< typename GUM_SCALAR >
125  int InferenceEngine< GUM_SCALAR >::iterStop () const {
126  return _iterStop;
127  }*/
128 
// Getter for the store-vertices flag. (Declarator line 130 dropped.)
129  template < typename GUM_SCALAR >
131  return _storeVertices;
132  }
133 
// Getter for the store-optimal-BN flag. (Declarator line 135 dropped.)
134  template < typename GUM_SCALAR >
136  return _storeBNOpt;
137  }
138 
// Returns a non-owning pointer to the optimal-BN store _dbnOpt.
// (Declarator lines 140-141 dropped.)
139  template < typename GUM_SCALAR >
142  return &_dbnOpt;
143  }
144 
// Loads variable modalities from a whitespace-separated text file
// ("name v0 v1 ..." per line) into _modal, replacing any previous content.
// NOTE(review): the GUM_ERROR(...) opening (line 150) and the trailing call
// (line 189, presumably _initExpectations()) were dropped by this listing.
145  template < typename GUM_SCALAR >
146  void InferenceEngine< GUM_SCALAR >::insertModalsFile(const std::string& path) {
147  std::ifstream mod_stream(path.c_str(), std::ios::in);
148 
149  if (!mod_stream.good()) {
151  "void InferenceEngine< GUM_SCALAR "
152  ">::insertModals(const std::string & path) : "
153  "could not open input file : "
154  << path);
155  }
156 
157  if (!_modal.empty()) _modal.clear();
158 
159  std::string line, tmp;
160  char * cstr, *p;
161 
162  while (mod_stream.good()) {
163  getline(mod_stream, line);
164 
165  if (line.size() == 0) continue;
166 
// Manual tokenization: strtok keeps static state (not reentrant/thread-safe),
// and the line is copied into a raw new[] buffer first.
167  cstr = new char[line.size() + 1];
168  strcpy(cstr, line.c_str());
169 
170  p = strtok(cstr, " ");
171  tmp = p;
172 
173  std::vector< GUM_SCALAR > values;
174  p = strtok(nullptr, " ");
175 
176  while (p != nullptr) {
177  values.push_back(GUM_SCALAR(atof(p)));
178  p = strtok(nullptr, " ");
179  } // end of : line
180 
181  _modal.insert(tmp, values); //[tmp] = values;
182 
// NOTE(review): `p` is nullptr here (loop exit condition), so this delete[]
// is a no-op -- and p was never an owning pointer (it pointed into cstr).
// Misleading but harmless; only cstr actually needs freeing.
183  delete[] p;
184  delete[] cstr;
185  } // end of : file
186 
187  mod_stream.close();
188 
190  }
191 
// Loads variable modalities from a std::map, skipping names absent from the
// current BN (error shown, not thrown) and entries whose size does not match
// the variable's cardinality. (Declarator line 193 and trailing call line 225
// were dropped by this listing.)
192  template < typename GUM_SCALAR >
194  const std::map< std::string, std::vector< GUM_SCALAR > >& modals) {
195  if (!_modal.empty()) _modal.clear();
196 
197  for (auto it = modals.cbegin(), theEnd = modals.cend(); it != theEnd; ++it) {
198  NodeId id;
199 
200  try {
201  id = _credalNet->current_bn().idFromName(it->first);
202  } catch (NotFound& err) {
// Unknown variable name: report and skip rather than abort the whole load.
203  GUM_SHOWERROR(err);
204  continue;
205  }
206 
207  // check that modals are net compatible
208  auto dSize = _credalNet->current_bn().variable(id).domainSize();
209 
210  if (dSize != it->second.size()) continue;
211 
// Historical strict-check alternative, deliberately disabled in favor of skip:
212  // GUM_ERROR(OperationNotAllowed, "void InferenceEngine< GUM_SCALAR
213  // >::insertModals( const std::map< std::string, std::vector< GUM_SCALAR
214  // > >
215  // &modals) : modalities does not respect variable cardinality : " <<
216  // _credalNet->current_bn().variable( id ).name() << " : " << dSize << "
217  // != "
218  // << it->second.size());
219 
220  _modal.insert(it->first, it->second); //[ it->first ] = it->second;
221  }
222 
223  //_modal = modals;
224 
226  }
227 
// Inserts evidence from a name->values map, translating names to NodeIds and
// skipping (with an error report) names absent from the current BN.
// (Declarator line 229 dropped by this listing.)
228  template < typename GUM_SCALAR >
230  const std::map< std::string, std::vector< GUM_SCALAR > >& eviMap) {
231  if (!_evidence.empty()) _evidence.clear();
232 
233  for (auto it = eviMap.cbegin(), theEnd = eviMap.cend(); it != theEnd; ++it) {
234  NodeId id;
235 
236  try {
237  id = _credalNet->current_bn().idFromName(it->first);
238  } catch (NotFound& err) {
239  GUM_SHOWERROR(err);
240  continue;
241  }
242 
243  _evidence.insert(id, it->second);
244  }
245  }
246 
247  // check that observed variables DO exists in the network (otherwise Lazy
248  // report
249  // an error and app crash)
// Inserts evidence from a NodeId->values property; each id is validated
// against the current BN (variable() throws NotFound for unknown ids, which
// is reported and the entry skipped). (Declarator line 251 dropped.)
250  template < typename GUM_SCALAR >
252  const NodeProperty< std::vector< GUM_SCALAR > >& evidence) {
253  if (!_evidence.empty()) _evidence.clear();
254 
255  // use cbegin() to get const_iterator when available in aGrUM hashtables
256  for (const auto& elt : evidence) {
257  try {
258  _credalNet->current_bn().variable(elt.first);
259  } catch (NotFound& err) {
260  GUM_SHOWERROR(err);
261  continue;
262  }
263 
264  _evidence.insert(elt.first, elt.second);
265  }
266  }
267 
// Loads evidence from a text file: skips to a "[EVIDENCE]" header, then reads
// "name v0 v1 ..." lines until EOF or a "[QUERY]" header.
// (Declarator line 270 and the GUM_ERROR opening line 274 were dropped by
// this listing.)
268  template < typename GUM_SCALAR >
269  void
271  std::ifstream evi_stream(path.c_str(), std::ios::in);
272 
273  if (!evi_stream.good()) {
275  "void InferenceEngine< GUM_SCALAR "
276  ">::insertEvidence(const std::string & path) : could not "
277  "open input file : "
278  << path);
279  }
280 
281  if (!_evidence.empty()) _evidence.clear();
282 
283  std::string line, tmp;
284  char * cstr, *p;
285 
// Seek the [EVIDENCE] section header.
286  while (evi_stream.good() && std::strcmp(line.c_str(), "[EVIDENCE]") != 0) {
287  getline(evi_stream, line);
288  }
289 
290  while (evi_stream.good()) {
291  getline(evi_stream, line);
292 
293  if (std::strcmp(line.c_str(), "[QUERY]") == 0) break;
294 
295  if (line.size() == 0) continue;
296 
297  cstr = new char[line.size() + 1];
298  strcpy(cstr, line.c_str());
299 
300  p = strtok(cstr, " ");
301  tmp = p;
302 
303  // if user input is wrong
// NOTE(review): NodeId is unsigned, so -1 wraps to the maximum value; it is
// only a sentinel and is either overwritten or skipped via `continue`.
304  NodeId node = -1;
305 
306  try {
307  node = _credalNet->current_bn().idFromName(tmp);
308  } catch (NotFound& err) {
309  GUM_SHOWERROR(err);
310  continue;
311  }
312 
313  std::vector< GUM_SCALAR > values;
314  p = strtok(nullptr, " ");
315 
316  while (p != nullptr) {
317  values.push_back(GUM_SCALAR(atof(p)));
318  p = strtok(nullptr, " ");
319  } // end of : line
320 
321  _evidence.insert(node, values);
322 
// `p` is nullptr here, so this delete[] is a harmless no-op (see
// insertModalsFile); only cstr owns memory.
323  delete[] p;
324  delete[] cstr;
325  } // end of : file
326 
327  evi_stream.close();
328  }
329 
// Inserts query states from a NodeId->bool-vector property; ids unknown to
// the current BN are reported and skipped. (Declarator line 331 dropped.)
330  template < typename GUM_SCALAR >
332  const NodeProperty< std::vector< bool > >& query) {
333  if (!_query.empty()) _query.clear();
334 
335  for (const auto& elt : query) {
336  try {
337  _credalNet->current_bn().variable(elt.first);
338  } catch (NotFound& err) {
339  GUM_SHOWERROR(err);
340  continue;
341  }
342 
343  _query.insert(elt.first, elt.second);
344  }
345  }
346 
// Loads query variables from a text file: seeks a "[QUERY]" header, then reads
// "name [mod...]" lines until EOF or "[EVIDENCE]". A name alone queries all
// modalities; listed modality indices select specific ones.
// NOTE(review): the GUM_ERROR openings (lines 352 and 401) were dropped by
// this listing.
347  template < typename GUM_SCALAR >
348  void InferenceEngine< GUM_SCALAR >::insertQueryFile(const std::string& path) {
349  std::ifstream evi_stream(path.c_str(), std::ios::in);
350 
351  if (!evi_stream.good()) {
353  "void InferenceEngine< GUM_SCALAR >::insertQuery(const "
354  "std::string & path) : could not open input file : "
355  << path);
356  }
357 
358  if (!_query.empty()) _query.clear();
359 
360  std::string line, tmp;
361  char * cstr, *p;
362 
// Seek the [QUERY] section header.
363  while (evi_stream.good() && std::strcmp(line.c_str(), "[QUERY]") != 0) {
364  getline(evi_stream, line);
365  }
366 
367  while (evi_stream.good()) {
368  getline(evi_stream, line);
369 
370  if (std::strcmp(line.c_str(), "[EVIDENCE]") == 0) break;
371 
372  if (line.size() == 0) continue;
373 
374  cstr = new char[line.size() + 1];
375  strcpy(cstr, line.c_str());
376 
377  p = strtok(cstr, " ");
378  tmp = p;
379 
380  // if user input is wrong
// Unsigned NodeId sentinel; overwritten or skipped before use (see
// insertEvidenceFile).
381  NodeId node = -1;
382 
383  try {
384  node = _credalNet->current_bn().idFromName(tmp);
385  } catch (NotFound& err) {
386  GUM_SHOWERROR(err);
387  continue;
388  }
389 
390  auto dSize = _credalNet->current_bn().variable(node).domainSize();
391 
392  p = strtok(nullptr, " ");
393 
394  if (p == nullptr) {
// Bare variable name: query every modality.
395  _query.insert(node, std::vector< bool >(dSize, true));
396  } else {
397  std::vector< bool > values(dSize, false);
398 
399  while (p != nullptr) {
// Out-of-range modality index aborts the load (error raised below).
400  if ((Size)atoi(p) >= dSize)
402  "void InferenceEngine< GUM_SCALAR "
403  ">::insertQuery(const std::string & path) : "
404  "query modality is higher or equal to "
405  "cardinality");
406 
407  values[atoi(p)] = true;
408  p = strtok(nullptr, " ");
409  } // end of : line
410 
411  _query.insert(node, values);
412  }
413 
// `p` is nullptr here; delete[] is a no-op, only cstr owns memory.
414  delete[] p;
415  delete[] cstr;
416  } // end of : file
417 
418  evi_stream.close();
419  }
420 
421  template < typename GUM_SCALAR >
422  const std::vector< GUM_SCALAR >& InferenceEngine< GUM_SCALAR >::marginalMin(
423  const std::string& varName) const {
424  try {
425  return _marginalMin[_credalNet->current_bn().idFromName(varName)];
426  } catch (NotFound& err) { throw(err); }
427  }
428 
429  template < typename GUM_SCALAR >
430  const std::vector< GUM_SCALAR >& InferenceEngine< GUM_SCALAR >::marginalMax(
431  const std::string& varName) const {
432  try {
433  return _marginalMax[_credalNet->current_bn().idFromName(varName)];
434  } catch (NotFound& err) { throw(err); }
435  }
436 
// Id-based accessors for lower/upper marginals and expectations. All use the
// pattern `catch (NotFound& err) { throw(err); }` -- a pointless rethrow that
// copies the exception; `throw;` (or no handler) would be preferable.
// (The declarator lines 439/447/454/462/471/479 were dropped by this listing.)
// Lower marginals of node `id`; throws NotFound for unknown ids.
437  template < typename GUM_SCALAR >
438  const std::vector< GUM_SCALAR >&
440  try {
441  return _marginalMin[id];
442  } catch (NotFound& err) { throw(err); }
443  }
444 
// Upper marginals of node `id`; throws NotFound for unknown ids.
445  template < typename GUM_SCALAR >
446  const std::vector< GUM_SCALAR >&
448  try {
449  return _marginalMax[id];
450  } catch (NotFound& err) { throw(err); }
451  }
452 
// Lower expectation of the variable named `varName`; throws NotFound if the
// name is unknown or no expectation was computed for it.
453  template < typename GUM_SCALAR >
455  const std::string& varName) const {
456  try {
457  return _expectationMin[_credalNet->current_bn().idFromName(varName)];
458  } catch (NotFound& err) { throw(err); }
459  }
460 
// Upper expectation of the variable named `varName`.
461  template < typename GUM_SCALAR >
463  const std::string& varName) const {
464  try {
465  return _expectationMax[_credalNet->current_bn().idFromName(varName)];
466  } catch (NotFound& err) { throw(err); }
467  }
468 
// Lower expectation of node `id`.
469  template < typename GUM_SCALAR >
470  const GUM_SCALAR&
472  try {
473  return _expectationMin[id];
474  } catch (NotFound& err) { throw(err); }
475  }
476 
// Upper expectation of node `id`.
477  template < typename GUM_SCALAR >
478  const GUM_SCALAR&
480  try {
481  return _expectationMax[id];
482  } catch (NotFound& err) { throw(err); }
483  }
484 
// Lower dynamic expectation of a variable prefix (time step excluded).
// Requires _dynamicExpectations() to have been run first.
// (GUM_ERROR opening line 493 dropped by this listing.)
485  template < typename GUM_SCALAR >
486  const std::vector< GUM_SCALAR >& InferenceEngine< GUM_SCALAR >::dynamicExpMin(
487  const std::string& varName) const {
488  std::string errTxt = "const std::vector< GUM_SCALAR > & InferenceEngine< "
489  "GUM_SCALAR >::dynamicExpMin ( const std::string & "
490  "varName ) const : ";
491 
492  if (_dynamicExpMin.empty())
494  errTxt + "_dynamicExpectations() needs to be called before");
495 
496  if (!_dynamicExpMin.exists(
497  varName) /*_dynamicExpMin.find(varName) == _dynamicExpMin.end()*/)
498  GUM_ERROR(NotFound, errTxt + "variable name not found : " << varName);
499 
500  return _dynamicExpMin[varName];
501  }
502 
// Upper dynamic expectation of a variable prefix (time step excluded).
// Requires _dynamicExpectations() to have been run first.
// (GUM_ERROR opening line 511 dropped by this listing.)
503  template < typename GUM_SCALAR >
504  const std::vector< GUM_SCALAR >& InferenceEngine< GUM_SCALAR >::dynamicExpMax(
505  const std::string& varName) const {
506  std::string errTxt = "const std::vector< GUM_SCALAR > & InferenceEngine< "
507  "GUM_SCALAR >::dynamicExpMax ( const std::string & "
508  "varName ) const : ";
509 
510  if (_dynamicExpMax.empty())
512  errTxt + "_dynamicExpectations() needs to be called before");
513 
514  if (!_dynamicExpMax.exists(
515  varName) /*_dynamicExpMax.find(varName) == _dynamicExpMax.end()*/)
516  GUM_ERROR(NotFound, errTxt + "variable name not found : " << varName);
517 
518  return _dynamicExpMax[varName];
519  }
520 
// Returns the credal-set vertices stored for node `id` (only meaningful when
// _storeVertices is enabled). (Declarator line 523 dropped by this listing.)
521  template < typename GUM_SCALAR >
522  const std::vector< std::vector< GUM_SCALAR > >&
524  return _marginalSets[id];
525  }
526 
// Writes "varName modality lowerP upperP" lines for every node/modality to
// the given file, truncating any previous content.
// (Declarator line 528 and GUM_ERROR opening line 533 dropped by this listing.)
527  template < typename GUM_SCALAR >
529  const std::string& path) const {
530  std::ofstream m_stream(path.c_str(), std::ios::out | std::ios::trunc);
531 
532  if (!m_stream.good()) {
534  "void InferenceEngine< GUM_SCALAR >::saveMarginals(const "
535  "std::string & path) const : could not open output file "
536  ": "
537  << path);
538  }
539 
540  for (const auto& elt : _marginalMin) {
541  Size esize = Size(elt.second.size());
542 
543  for (Size mod = 0; mod < esize; mod++) {
544  m_stream << _credalNet->current_bn().variable(elt.first).name() << " "
545  << mod << " " << (elt.second)[mod] << " "
546  << _marginalMax[elt.first][mod] << std::endl;
547  }
548  }
549 
550  m_stream.close();
551  }
552 
// Writes dynamic expectations (min block then max block, one "name v v v..."
// line per variable prefix) to the given file. Silently does nothing when
// _dynamicExpectations() has not produced any data, to keep the method const.
// (Declarator line 554 and GUM_ERROR opening line 566 dropped by this listing.)
553  template < typename GUM_SCALAR >
555  const std::string& path) const {
556  if (_dynamicExpMin.empty()) //_modal.empty())
557  return;
558 
559  // else not here, to keep the const (natural with a saving process)
560  // else if(_dynamicExpMin.empty() || _dynamicExpMax.empty())
561  //_dynamicExpectations(); // works with or without a dynamic network
562 
563  std::ofstream m_stream(path.c_str(), std::ios::out | std::ios::trunc);
564 
565  if (!m_stream.good()) {
567  "void InferenceEngine< GUM_SCALAR "
568  ">::saveExpectations(const std::string & path) : could "
569  "not open output file : "
570  << path);
571  }
572 
573  for (const auto& elt : _dynamicExpMin) {
574  m_stream << elt.first; // it->first;
575 
576  // iterates over a vector
577  for (const auto& elt2 : elt.second) {
578  m_stream << " " << elt2;
579  }
580 
581  m_stream << std::endl;
582  }
583 
584  for (const auto& elt : _dynamicExpMax) {
585  m_stream << elt.first;
586 
587  // iterates over a vector
588  for (const auto& elt2 : elt.second) {
589  m_stream << " " << elt2;
590  }
591 
592  m_stream << std::endl;
593  }
594 
595  m_stream.close();
596  }
597 
// Renders all node marginal intervals as "P(var=mod|e) = [ lo, hi ]" lines,
// tagging queried modalities with " QUERY". (Declarator line 599 dropped by
// this listing.)
598  template < typename GUM_SCALAR >
600  std::stringstream output;
601  output << std::endl;
602 
603  // use cbegin() when available
604  for (const auto& elt : _marginalMin) {
605  Size esize = Size(elt.second.size());
606 
607  for (Size mod = 0; mod < esize; mod++) {
608  output << "P(" << _credalNet->current_bn().variable(elt.first).name()
609  << "=" << mod << "|e) = [ ";
610  output << _marginalMin[elt.first][mod] << ", "
611  << _marginalMax[elt.first][mod] << " ]";
612 
613  if (!_query.empty())
614  if (_query.exists(elt.first) && _query[elt.first][mod])
615  output << " QUERY";
616 
617  output << std::endl;
618  }
619 
620  output << std::endl;
621  }
622 
623  return output.str();
624  }
625 
// Writes stored credal-set vertices per variable to the given file, one
// "[v0,v1,...]" bracketed row per vertex.
// NOTE(review): the error message contains the typo "outpul" (runtime string,
// untouched here); GUM_ERROR opening line 632 was dropped by this listing.
626  template < typename GUM_SCALAR >
627  void
628  InferenceEngine< GUM_SCALAR >::saveVertices(const std::string& path) const {
629  std::ofstream m_stream(path.c_str(), std::ios::out | std::ios::trunc);
630 
631  if (!m_stream.good()) {
633  "void InferenceEngine< GUM_SCALAR >::saveVertices(const "
634  "std::string & path) : could not open outpul file : "
635  << path);
636  }
637 
638  for (const auto& elt : _marginalSets) {
639  m_stream << _credalNet->current_bn().variable(elt.first).name()
640  << std::endl;
641 
642  for (const auto& elt2 : elt.second) {
643  m_stream << "[";
644  bool first = true;
645 
646  for (const auto& elt3 : elt2) {
// NOTE(review): BUG -- `first = false` only executes inside `if (!first)`,
// so `first` stays true forever and the "," separator is never written.
// The assignment belongs outside the conditional (after the first element).
647  if (!first) {
648  m_stream << ",";
649  first = false;
650  }
651 
652  m_stream << elt3;
653  }
654 
655  m_stream << "]\n";
656  }
657  }
658 
659  m_stream.close();
660  }
661 
// Initializes (old) lower marginals to 1 and (old) upper marginals to 0 for
// every node, so inference can only tighten them inward.
// (Declarator and clear() lines 663-667 dropped by this listing.)
662  template < typename GUM_SCALAR >
668 
669  for (auto node : _credalNet->current_bn().nodes()) {
670  auto dSize = _credalNet->current_bn().variable(node).domainSize();
671  _marginalMin.insert(node, std::vector< GUM_SCALAR >(dSize, 1));
672  _oldMarginalMin.insert(node, std::vector< GUM_SCALAR >(dSize, 1));
673 
674  _marginalMax.insert(node, std::vector< GUM_SCALAR >(dSize, 0));
675  _oldMarginalMax.insert(node, std::vector< GUM_SCALAR >(dSize, 0));
676  }
677  }
678 
// Allocates an empty credal-set container per node, but only when vertex
// storage is enabled. (Declarator lines 680-681 dropped by this listing.)
679  template < typename GUM_SCALAR >
682 
683  if (!_storeVertices) return;
684 
685  for (auto node : _credalNet->current_bn().nodes())
686  _marginalSets.insert(node, std::vector< std::vector< GUM_SCALAR > >());
687  }
688 
689  // since only monitored variables in _modal will be alble to compute
690  // expectations, it is useless to initialize those for all variables
691  // _modal variables will always be checked further, so it is not necessary
692  // to
693  // check it here, but doing so will use less memory
// Seeds per-node expectation bounds from _modal: min starts at the largest
// modality value (back()) and max at the smallest (front()), assuming _modal
// vectors are sorted ascending -- TODO confirm that invariant upstream.
// Node names are stripped of their "_<timestep>" suffix before lookup.
// (Declarator lines 695-697 dropped by this listing.)
694  template < typename GUM_SCALAR >
698 
699  if (_modal.empty()) return;
700 
701  for (auto node : _credalNet->current_bn().nodes()) {
702  std::string var_name, time_step;
703 
704  var_name = _credalNet->current_bn().variable(node).name();
705  auto delim = var_name.find_first_of("_");
706  var_name = var_name.substr(0, delim);
707 
708  if (!_modal.exists(var_name)) continue;
709 
710  _expectationMin.insert(node, _modal[var_name].back());
711  _expectationMax.insert(node, _modal[var_name].front());
712  }
713  }
714 
// Public wrapper for _dynamicExpectations(). NOTE(review): this listing
// dropped lines 716-717 (the declarator and the delegating call), leaving
// only the template header and closing brace visible here.
715  template < typename GUM_SCALAR >
718  }
719 
// Regroups per-node expectation bounds into per-variable-prefix time series
// (_dynamicExpMin/_dynamicExpMax), keyed by the name before "_<timestep>".
// No-op when no modalities/expectations exist or when already computed.
// (Declarator line 721 dropped by this listing.)
720  template < typename GUM_SCALAR >
722  // no modals, no expectations computed during inference
723  if (_expectationMin.empty() || _modal.empty()) return;
724 
725  // already called by the algorithm or the user
726  if (_dynamicExpMax.size() > 0 && _dynamicExpMin.size() > 0) return;
727 
728  // typedef typename std::map< int, GUM_SCALAR > innerMap;
729  using innerMap = typename gum::HashTable< int, GUM_SCALAR >;
730 
731  // typedef typename std::map< std::string, innerMap > outerMap;
732  using outerMap = typename gum::HashTable< std::string, innerMap >;
733 
734  // typedef typename std::map< std::string, std::vector< GUM_SCALAR > >
735  // mod;
736 
737  // if not dynamic, we could save _expectationMin/Max directly (same result,
738  // just faster) -- kept generic here.
739 
740  outerMap expectationsMin, expectationsMax;
741 
742  for (const auto& elt : _expectationMin) {
743  std::string var_name, time_step;
744 
// Split "name_t" into prefix and time step.
745  var_name = _credalNet->current_bn().variable(elt.first).name();
746  auto delim = var_name.find_first_of("_");
747  time_step = var_name.substr(delim + 1, var_name.size());
748  var_name = var_name.substr(0, delim);
749 
750  // to be sure (don't store not monitored variables' expectations)
751  // although it
752  // should be taken care of before this point
753  if (!_modal.exists(var_name)) continue;
754 
// NOTE(review): relies on getWithDefault returning a mutable reference so
// the chained assignment lands in the stored inner map -- verify against the
// gum::HashTable API; if it returned by value this would write to a temporary.
755  expectationsMin.getWithDefault(var_name, innerMap())
756  .getWithDefault(atoi(time_step.c_str()), 0) =
757  elt.second; // we iterate with min iterators
758  expectationsMax.getWithDefault(var_name, innerMap())
759  .getWithDefault(atoi(time_step.c_str()), 0) =
760  _expectationMax[elt.first];
761  }
762 
// Flatten the per-time-step maps into index-addressed vectors.
763  for (const auto& elt : expectationsMin) {
764  typename std::vector< GUM_SCALAR > dynExp(elt.second.size());
765 
766  for (const auto& elt2 : elt.second)
767  dynExp[elt2.first] = elt2.second;
768 
769  _dynamicExpMin.insert(elt.first, dynExp);
770  }
771 
772  for (const auto& elt : expectationsMax) {
773  typename std::vector< GUM_SCALAR > dynExp(elt.second.size());
774 
775  for (const auto& elt2 : elt.second) {
776  dynExp[elt2.first] = elt2.second;
777  }
778 
779  _dynamicExpMax.insert(elt.first, dynExp);
780  }
781  }
782 
// Builds the repetition clusters for a dynamic network: _t0 maps each t=0
// node to the later-time nodes sharing its variable prefix and CPT domain
// size; _t1 does the same for t=1 nodes with no t=0 counterpart. Also records
// the maximum time step in _timeSteps.
// (Declarator line 784 and GUM_ERROR opening line 795 dropped by this listing.)
783  template < typename GUM_SCALAR >
785  _timeSteps = 0;
786  _t0.clear();
787  _t1.clear();
788 
789  // t = 0 vars belongs to _t0 as keys
790  for (auto node : _credalNet->current_bn().dag().nodes()) {
791  std::string var_name = _credalNet->current_bn().variable(node).name();
792  auto delim = var_name.find_first_of("_");
793 
// find_first_of returns npos (> size()) when no "_" exists: non-dynamic net.
794  if (delim > var_name.size()) {
796  "void InferenceEngine< GUM_SCALAR "
797  ">::_repetitiveInit() : the network does not "
798  "appear to be dynamic");
799  }
800 
801  std::string time_step = var_name.substr(delim + 1, 1);
802 
803  if (time_step.compare("0") == 0) _t0.insert(node, std::vector< NodeId >());
804  }
805 
806  // t = 1 vars belongs to either _t0 as member value or _t1 as keys
807  for (const auto& node : _credalNet->current_bn().dag().nodes()) {
808  std::string var_name = _credalNet->current_bn().variable(node).name();
809  auto delim = var_name.find_first_of("_");
810  std::string time_step = var_name.substr(delim + 1, var_name.size());
811  var_name = var_name.substr(0, delim);
812  delim = time_step.find_first_of("_");
813  time_step = time_step.substr(0, delim);
814 
815  if (time_step.compare("1") == 0) {
816  bool found = false;
817 
818  for (const auto& elt : _t0) {
819  std::string var_0_name =
820  _credalNet->current_bn().variable(elt.first).name();
821  delim = var_0_name.find_first_of("_");
822  var_0_name = var_0_name.substr(0, delim);
823 
824  if (var_name.compare(var_0_name) == 0) {
825  const Potential< GUM_SCALAR >* potential(
826  &_credalNet->current_bn().cpt(node));
827  const Potential< GUM_SCALAR >* potential2(
828  &_credalNet->current_bn().cpt(elt.first));
829 
// Same CPT domain size => same parent structure assumed -> cluster with t=0.
830  if (potential->domainSize() == potential2->domainSize())
831  _t0[elt.first].push_back(node);
832  else
833  _t1.insert(node, std::vector< NodeId >());
834 
835  found = true;
836  break;
837  }
838  }
839 
840  if (!found) { _t1.insert(node, std::vector< NodeId >()); }
841  }
842  }
843 
844  // t > 1 vars belongs to either _t0 or _t1 as member value
845  // remember _timeSteps
846  for (auto node : _credalNet->current_bn().dag().nodes()) {
847  std::string var_name = _credalNet->current_bn().variable(node).name();
848  auto delim = var_name.find_first_of("_");
849  std::string time_step = var_name.substr(delim + 1, var_name.size());
850  var_name = var_name.substr(0, delim);
851  delim = time_step.find_first_of("_");
852  time_step = time_step.substr(0, delim);
853 
854  if (time_step.compare("0") != 0 && time_step.compare("1") != 0) {
855  // keep max time_step
856  if (atoi(time_step.c_str()) > _timeSteps)
857  _timeSteps = atoi(time_step.c_str());
858 
// NOTE(review): this outer var_0_name is immediately shadowed by the loop
// declarations below and is never used.
859  std::string var_0_name;
860  bool found = false;
861 
862  for (const auto& elt : _t0) {
863  std::string var_0_name =
864  _credalNet->current_bn().variable(elt.first).name();
865  delim = var_0_name.find_first_of("_");
866  var_0_name = var_0_name.substr(0, delim);
867 
868  if (var_name.compare(var_0_name) == 0) {
869  const Potential< GUM_SCALAR >* potential(
870  &_credalNet->current_bn().cpt(node));
871  const Potential< GUM_SCALAR >* potential2(
872  &_credalNet->current_bn().cpt(elt.first));
873 
874  if (potential->domainSize() == potential2->domainSize()) {
875  _t0[elt.first].push_back(node);
876  found = true;
877  break;
878  }
879  }
880  }
881 
882  if (!found) {
883  for (const auto& elt : _t1) {
884  std::string var_0_name =
885  _credalNet->current_bn().variable(elt.first).name();
886  auto delim = var_0_name.find_first_of("_");
887  var_0_name = var_0_name.substr(0, delim);
888 
889  if (var_name.compare(var_0_name) == 0) {
890  const Potential< GUM_SCALAR >* potential(
891  &_credalNet->current_bn().cpt(node));
892  const Potential< GUM_SCALAR >* potential2(
893  &_credalNet->current_bn().cpt(elt.first));
894 
895  if (potential->domainSize() == potential2->domainSize()) {
896  _t1[elt.first].push_back(node);
897  break;
898  }
899  }
900  }
901  }
902  }
903  }
904  }
905 
// Updates node `id`'s expectation bounds from one credal-set vertex: the
// vertex's expected value (dot product with the variable's modalities) widens
// [_expectationMin, _expectationMax] as needed.
// (Declarator lines 907-908 dropped by this listing.)
906  template < typename GUM_SCALAR >
908  const NodeId& id, const std::vector< GUM_SCALAR >& vertex) {
// Strip the "_<timestep>" suffix to find the variable's modalities.
909  std::string var_name = _credalNet->current_bn().variable(id).name();
910  auto delim = var_name.find_first_of("_");
911 
912  var_name = var_name.substr(0, delim);
913 
914  if (_modal.exists(var_name) /*_modal.find(var_name) != _modal.end()*/) {
915  GUM_SCALAR exp = 0;
916  auto vsize = vertex.size();
917 
918  for (Size mod = 0; mod < vsize; mod++)
919  exp += vertex[mod] * _modal[var_name][mod];
920 
921  if (exp > _expectationMax[id]) _expectationMax[id] = exp;
922 
923  if (exp < _expectationMin[id]) _expectationMin[id] = exp;
924  }
925  }
926 
// Adds `vertex` to node `id`'s credal set unless an (epsilon-)equal vertex is
// already stored; optionally eliminates redundant vertices via lrs.
// (Declarator lines 928-929 dropped by this listing.)
927  template < typename GUM_SCALAR >
929  const NodeId& id,
930  const std::vector< GUM_SCALAR >& vertex,
931  const bool& elimRedund) {
932  auto& nodeCredalSet = _marginalSets[id];
933  auto dsize = vertex.size();
934 
935  bool eq = true;
936 
// Duplicate check: eq stays true iff some stored vertex matches within 1e-6.
937  for (auto it = nodeCredalSet.cbegin(), itEnd = nodeCredalSet.cend();
938  it != itEnd;
939  ++it) {
940  eq = true;
941 
942  for (Size i = 0; i < dsize; i++) {
943  if (std::fabs(vertex[i] - (*it)[i]) > 1e-6) {
944  eq = false;
945  break;
946  }
947  }
948 
949  if (eq) break;
950  }
951 
952  if (!eq || nodeCredalSet.size() == 0) {
953  nodeCredalSet.push_back(vertex);
954  return;
955  } else
956  return;
957 
// NOTE(review): both branches above return unconditionally, so everything
// from here down (inside-polytope pruning via remove_if and the lrs
// redundancy elimination) is UNREACHABLE dead code as written.
958  // because of next lambda return condition
959  if (nodeCredalSet.size() == 1) return;
960 
961  // check that the point and all previously added ones are not inside the
962  // actual
963  // polytope
964  auto itEnd = std::remove_if(
965  nodeCredalSet.begin(),
966  nodeCredalSet.end(),
967  [&](const std::vector< GUM_SCALAR >& v) -> bool {
968  for (auto jt = v.cbegin(),
969  jtEnd = v.cend(),
970  minIt = _marginalMin[id].cbegin(),
971  minItEnd = _marginalMin[id].cend(),
972  maxIt = _marginalMax[id].cbegin(),
973  maxItEnd = _marginalMax[id].cend();
974  jt != jtEnd && minIt != minItEnd && maxIt != maxItEnd;
975  ++jt, ++minIt, ++maxIt) {
// Keep a vertex if any coordinate touches a non-degenerate marginal bound.
976  if ((std::fabs(*jt - *minIt) < 1e-6 || std::fabs(*jt - *maxIt) < 1e-6)
977  && std::fabs(*minIt - *maxIt) > 1e-6)
978  return false;
979  }
980  return true;
981  });
982 
983  nodeCredalSet.erase(itEnd, nodeCredalSet.end());
984 
985  // we need at least 2 points to make a convex combination
986  if (!elimRedund || nodeCredalSet.size() <= 2) return;
987 
988  // there may be points not inside the polytope but on one of it's facet,
989  // meaning it's still a convex combination of vertices of this facet. Here
990  // we
991  // need lrs.
992  LRSWrapper< GUM_SCALAR > lrsWrapper;
993  lrsWrapper.setUpV((unsigned int)dsize, (unsigned int)(nodeCredalSet.size()));
994 
995  for (const auto& vtx : nodeCredalSet)
996  lrsWrapper.fillV(vtx);
997 
998  lrsWrapper.elimRedundVrep();
999 
1000  _marginalSets[id] = lrsWrapper.getOutput();
1001  }
1002 
// Accessor for the t=0 repetition clusters built by _repetitiveInit().
// (Declarator lines 1004-1005 dropped by this listing.)
1003  template < typename GUM_SCALAR >
1006  return _t0;
1007  }
1008 
// Accessor for the t=1 repetition clusters built by _repetitiveInit().
// (Declarator lines 1010-1011 dropped by this listing.)
1009  template < typename GUM_SCALAR >
1012  return _t1;
1013  }
1014 
// Computes the approximation-scheme epsilon: the largest absolute change of
// any lower/upper marginal since the previous iteration, updating the "old"
// marginals as a side effect. Parallelized with OpenMP; each thread reduces
// a local tEps, merged under a critical section.
// (Declarator line 1016 and line 1023 dropped by this listing.)
1015  template < typename GUM_SCALAR >
1017  GUM_SCALAR eps = 0;
1018 #pragma omp parallel
1019  {
1020  GUM_SCALAR tEps = 0;
1021  GUM_SCALAR delta;
1022 
1024  int nsize = int(_marginalMin.size());
1025 
1026 #pragma omp for
1027 
1028  for (int i = 0; i < nsize; i++) {
1029  auto dSize = _marginalMin[i].size();
1030 
1031  for (Size j = 0; j < dSize; j++) {
1032  // on min
1033  delta = _marginalMin[i][j] - _oldMarginalMin[i][j];
1034  delta = (delta < 0) ? (-delta) : delta;
1035  tEps = (tEps < delta) ? delta : tEps;
1036 
1037  // on max
1038  delta = _marginalMax[i][j] - _oldMarginalMax[i][j];
1039  delta = (delta < 0) ? (-delta) : delta;
1040  tEps = (tEps < delta) ? delta : tEps;
1041 
// Snapshot current marginals for the next iteration's delta.
1042  _oldMarginalMin[i][j] = _marginalMin[i][j];
1043  _oldMarginalMax[i][j] = _marginalMax[i][j];
1044  }
1045  } // end of : all variables
1046 
1047 #pragma omp critical(epsilon_max)
1048  {
1049 #pragma omp flush(eps)
1050  eps = (eps < tEps) ? tEps : eps;
1051  }
1052  }
1053 
1054  return eps;
1055  }
1056  } // namespace credal
1057 } // namespace gum
virtual ~InferenceEngine()
Destructor.
const NodeProperty< std::vector< NodeId > > & getT1Cluster() const
Get the _t1 cluster.
aGrUM's Potential is a multi-dimensional array with tensor operators.
Definition: potential.h:60
const std::vector< GUM_SCALAR > & marginalMax(const NodeId id) const
Get the upper marginals of a given node id.
const GUM_SCALAR & expectationMax(const NodeId id) const
Get the upper expectation of a given node id.
margi _oldMarginalMin
Old lower marginals used to compute epsilon.
void insertModals(const std::map< std::string, std::vector< GUM_SCALAR > > &modals)
Insert variables modalities from map to compute expectations.
Approximation Scheme.
Size size() const noexcept
Returns the number of elements stored into the hashtable.
void insertModalsFile(const std::string &path)
Insert variables modalities from file to compute expectations.
bool _storeBNOpt
True if optimal IBayesNets are stored during inference, False otherwise.
#define GUM_SHOWERROR(e)
Definition: exceptions.h:61
void _dynamicExpectations()
Rearrange lower and upper expectations to suit dynamic networks.
dynExpe _dynamicExpMin
Lower dynamic expectations.
bool repetitiveInd() const
Get the current independence status.
credalSet _marginalSets
Credal sets vertices, if enabled.
virtual void insertEvidenceFile(const std::string &path)
Insert evidence from file.
const std::vector< GUM_SCALAR > & marginalMin(const NodeId id) const
Get the lower marginals of a given node id.
void insertQueryFile(const std::string &path)
Insert query variables states from file.
margi _marginalMin
Lower marginals.
bool storeVertices() const
Get the current value of the vertices-storing flag.
int _timeSteps
The number of time steps of this network (only usefull for dynamic networks).
bool exists(const Key &key) const
Checks whether there exists an element with a given key in the hashtable.
const std::vector< std::vector< GUM_SCALAR > > & vertices(const NodeId id) const
Get the vertices of a given node id.
margi _oldMarginalMax
Old upper marginals used to compute epsilon.
VarMod2BNsMap< GUM_SCALAR > * getVarMod2BNsMap()
Get optimum IBayesNet.
const std::vector< GUM_SCALAR > & dynamicExpMax(const std::string &varName) const
Get the upper dynamic expectation of a given variable prefix (without the time step included...
void dynamicExpectations()
Compute dynamic expectations.
const GUM_SCALAR _computeEpsilon()
Compute approximation scheme epsilon using the old marginals and the new ones.
Copyright 2005-2019 Pierre-Henri WUILLEMIN et Christophe GONZALES (LIP6) {prenom.nom}_at_lip6.fr.
Definition: agrum.h:25
void saveVertices(const std::string &path) const
Saves vertices to file.
void saveMarginals(const std::string &path) const
Saves marginals to file.
Copyright 2005-2019 Pierre-Henri WUILLEMIN et Christophe GONZALES (LIP6) {prenom.nom}_at_lip6.fr.
void _repetitiveInit()
Initialize _t0 and _t1 clusters.
expe _expectationMax
Upper expectations, if some variables modalities were inserted.
InferenceEngine(const CredalNet< GUM_SCALAR > &credalNet)
Constructor.
The class for generic Hash Tables.
Definition: hashTable.h:679
cluster _t0
Clusters of nodes used with dynamic networks.
std::string toString() const
Print all nodes marginals to standard output.
void fillV(const std::vector< GUM_SCALAR > &vertex)
Creates the V-representation of a polytope by adding a vertex to the problem input __input...
Class template representing a Credal Network.
Definition: credalNet.h:89
Class template acting as a wrapper for Lexicographic Reverse Search by David Avis.
Definition: LrsWrapper.h:107
VarMod2BNsMap< GUM_SCALAR > _dbnOpt
Object used to efficiently store optimal bayes net during inference, for some algorithms.
const CredalNet< GUM_SCALAR > * _credalNet
A pointer to the Credal Net used.
const const_iterator & cend() const noexcept
Returns the unsafe const_iterator pointing to the end of the hashtable.
dynExpe _dynamicExpMax
Upper dynamic expectations.
void _initMarginals()
Initialize lower and upper old marginals and marginals before inference, with the lower marginal bein...
void insertQuery(const NodeProperty< std::vector< bool > > &query)
Insert query variables and states from Property.
query _query
Holds the query nodes states.
bool _repetitiveInd
True if using repetitive independence ( dynamic network only ), False otherwise.
void setUpV(const Size &card, const Size &vertices)
Sets up a V-representation.
const_iterator cbegin() const
Returns an unsafe const_iterator pointing to the beginning of the hashtable.
dynExpe _modal
Variables modalities used to compute expectations.
void _updateCredalSets(const NodeId &id, const std::vector< GUM_SCALAR > &vertex, const bool &elimRedund=false)
Given a node id and one of its possible vertices, update its credal set.
Abstract class template representing a CredalNet inference engine.
void _initMarginalSets()
Initialize credal set vertices with empty sets.
const CredalNet< GUM_SCALAR > & credalNet()
Get this creadal network.
margi _evidence
Holds observed variables states.
cluster _t1
Clusters of nodes used with dynamic networks.
void clear()
Removes all the elements in the hash table.
expe _expectationMin
Lower expectations, if some variables modalities were inserted.
void elimRedundVrep()
V-Redundancy elimination.
const NodeProperty< std::vector< NodeId > > & getT0Cluster() const
Get the _t0 cluster.
const matrix & getOutput() const
Get the output matrix solution of the problem.
bool _storeVertices
True if credal sets vertices are stored, False otherwise.
void setRepetitiveInd(const bool repetitive)
std::size_t Size
In aGrUM, hashed values are unsigned long int.
Definition: types.h:48
const GUM_SCALAR & expectationMin(const NodeId id) const
Get the lower expectation of a given node id.
value_type & insert(const Key &key, const Val &val)
Adds a new element (actually a copy of this element) into the hash table.
Class used to store optimum IBayesNet during some inference algorithms.
Definition: varMod2BNsMap.h:56
void _initExpectations()
Initialize lower and upper expectations before inference, with the lower expectation being initialize...
void saveExpectations(const std::string &path) const
Saves expectations to file.
bool empty() const noexcept
Indicates whether the hash table is empty.
void insertEvidence(const std::map< std::string, std::vector< GUM_SCALAR > > &eviMap)
Insert evidence from map.
Size NodeId
Type for node ids.
Definition: graphElements.h:98
void _updateExpectations(const NodeId &id, const std::vector< GUM_SCALAR > &vertex)
Given a node id and one of it&#39;s possible vertex obtained during inference, update this node lower and...
margi _marginalMax
Upper marginals.
#define GUM_ERROR(type, msg)
Definition: exceptions.h:55
virtual void eraseAllEvidence()
Erase all inference related data to perform another one.
const std::vector< GUM_SCALAR > & dynamicExpMin(const std::string &varName) const
Get the lower dynamic expectation of a given variable prefix (without the time step included...